diff --git a/builder/builder-next/adapters/snapshot/snapshot.go b/builder/builder-next/adapters/snapshot/snapshot.go index 4433984bbc..14ff5fd289 100644 --- a/builder/builder-next/adapters/snapshot/snapshot.go +++ b/builder/builder-next/adapters/snapshot/snapshot.go @@ -395,7 +395,7 @@ func (s *snapshotter) View(ctx context.Context, key, parent string, opts ...snap } func (s *snapshotter) Walk(context.Context, snapshots.WalkFunc, ...string) error { - return errors.Errorf("not-implemented") + return nil } func (s *snapshotter) Update(ctx context.Context, info snapshots.Info, fieldpaths ...string) (snapshots.Info, error) { diff --git a/builder/builder-next/controller.go b/builder/builder-next/controller.go index 99c9635519..5decff89f6 100644 --- a/builder/builder-next/controller.go +++ b/builder/builder-next/controller.go @@ -86,7 +86,11 @@ func newController(rt http.RoundTripper, opt Opt) (*control.Controller, error) { return nil, err } - md, err := metadata.NewStore(filepath.Join(root, "metadata.db")) + if err := cache.MigrateV2(context.Background(), filepath.Join(root, "metadata.db"), filepath.Join(root, "metadata_v2.db"), store, snapshotter, lm); err != nil { + return nil, err + } + + md, err := metadata.NewStore(filepath.Join(root, "metadata_v2.db")) if err != nil { return nil, err } diff --git a/builder/dockerfile/builder.go b/builder/dockerfile/builder.go index 7e463844c2..c7784e2a9f 100644 --- a/builder/dockerfile/builder.go +++ b/builder/dockerfile/builder.go @@ -194,7 +194,8 @@ func (b *Builder) build(source builder.Source, dockerfile *parser.Result) (*buil stages, metaArgs, err := instructions.Parse(dockerfile.AST) if err != nil { - if instructions.IsUnknownInstruction(err) { + var uiErr *instructions.UnknownInstruction + if errors.As(err, &uiErr) { buildsFailed.WithValues(metricsUnknownInstructionError).Inc() } return nil, errdefs.InvalidParameter(err) diff --git a/builder/dockerfile/dispatchers.go b/builder/dockerfile/dispatchers.go index 4fd59a7831..bf28a66a49 100644 --- a/builder/dockerfile/dispatchers.go +++ b/builder/dockerfile/dispatchers.go @@ -205,7 +205,8 @@ func dispatchTriggeredOnBuild(d dispatchRequest, triggers []string) error { } cmd, err := instructions.ParseCommand(ast.AST.Children[0]) if err != nil { - if instructions.IsUnknownInstruction(err) { + var uiErr *instructions.UnknownInstruction + if errors.As(err, &uiErr) { buildsFailed.WithValues(metricsUnknownInstructionError).Inc() } return err diff --git a/integration-cli/docker_cli_build_test.go b/integration-cli/docker_cli_build_test.go index f41b669c23..977271baee 100644 --- a/integration-cli/docker_cli_build_test.go +++ b/integration-cli/docker_cli_build_test.go @@ -6103,7 +6103,7 @@ func (s *DockerSuite) TestBuildLineErrorOnBuild(c *testing.T) { ONBUILD `)).Assert(c, icmd.Expected{ ExitCode: 1, - Err: "Dockerfile parse error line 2: ONBUILD requires at least one argument", + Err: "parse error line 2: ONBUILD requires at least one argument", }) } @@ -6117,7 +6117,7 @@ func (s *DockerSuite) TestBuildLineErrorUnknownInstruction(c *testing.T) { ERROR `)).Assert(c, icmd.Expected{ ExitCode: 1, - Err: "Dockerfile parse error line 3: unknown instruction: NOINSTRUCTION", + Err: "parse error line 3: unknown instruction: NOINSTRUCTION", }) } @@ -6134,7 +6134,7 @@ func (s *DockerSuite) TestBuildLineErrorWithEmptyLines(c *testing.T) { CMD ["/bin/init"] `)).Assert(c, icmd.Expected{ ExitCode: 1, - Err: "Dockerfile parse error line 6: unknown instruction: NOINSTRUCTION", + Err: "parse error line 6: unknown instruction: 
NOINSTRUCTION", }) } @@ -6148,7 +6148,7 @@ func (s *DockerSuite) TestBuildLineErrorWithComments(c *testing.T) { NOINSTRUCTION echo ba `)).Assert(c, icmd.Expected{ ExitCode: 1, - Err: "Dockerfile parse error line 5: unknown instruction: NOINSTRUCTION", + Err: "parse error line 5: unknown instruction: NOINSTRUCTION", }) } diff --git a/vendor.conf b/vendor.conf index 95be2d4701..2ff2688e45 100644 --- a/vendor.conf +++ b/vendor.conf @@ -28,7 +28,7 @@ github.com/imdario/mergo 1afb36080aec31e0d1528973ebe6 golang.org/x/sync cd5d95a43a6e21273425c7ae415d3df9ea832eeb # buildkit -github.com/moby/buildkit ae7ff7174f73bcb4df89b97e1623b3fb0bfb0a0c +github.com/moby/buildkit df35e9818d1f9066e616e03f4b8d727c97562e5b github.com/tonistiigi/fsutil c2c7d7b0e1441705cd802e5699c0a10b1dfe39fd github.com/grpc-ecosystem/grpc-opentracing 8e809c8a86450a29b90dcc9efbf062d0fe6d9746 github.com/opentracing/opentracing-go 1361b9cd60be79c4c3a7fa9841b3c132e40066a7 @@ -36,6 +36,7 @@ github.com/google/shlex e7afc7fbc51079733e9468cdfd1e github.com/opentracing-contrib/go-stdlib b1a47cfbdd7543e70e9ef3e73d0802ad306cc1cc github.com/mitchellh/hashstructure 2bca23e0e452137f789efbc8610126fd8b94f73b github.com/gofrs/flock 392e7fae8f1b0bdbd67dad7237d23f618feb6dbb # v0.7.1 +github.com/grpc-ecosystem/go-grpc-middleware 3c51f7f332123e8be5a157c0802a228ac85bf9db # v1.2.0 # libnetwork diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-middleware/LICENSE b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/LICENSE new file mode 100644 index 0000000000..b2b065037f --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-middleware/README.md b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/README.md new file mode 100644 index 0000000000..3a4cc2175e --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/README.md @@ -0,0 +1,85 @@ +# Go gRPC Middleware + +[![Travis Build](https://travis-ci.org/grpc-ecosystem/go-grpc-middleware.svg?branch=master)](https://travis-ci.org/grpc-ecosystem/go-grpc-middleware) +[![Go Report Card](https://goreportcard.com/badge/github.com/grpc-ecosystem/go-grpc-middleware)](https://goreportcard.com/report/github.com/grpc-ecosystem/go-grpc-middleware) +[![GoDoc](http://img.shields.io/badge/GoDoc-Reference-blue.svg)](https://godoc.org/github.com/grpc-ecosystem/go-grpc-middleware) +[![SourceGraph](https://sourcegraph.com/github.com/grpc-ecosystem/go-grpc-middleware/-/badge.svg)](https://sourcegraph.com/github.com/grpc-ecosystem/go-grpc-middleware/?badge) +[![codecov](https://codecov.io/gh/grpc-ecosystem/go-grpc-middleware/branch/master/graph/badge.svg)](https://codecov.io/gh/grpc-ecosystem/go-grpc-middleware) +[![Apache 2.0 License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE) +[![quality: production](https://img.shields.io/badge/quality-production-orange.svg)](#status) +[![Slack](https://img.shields.io/badge/slack-%23grpc--middleware-brightgreen)](https://slack.com/share/IRUQCFC23/9Tm7hxRFVKKNoajQfMOcUiIk/enQtODc4ODI4NTIyMDcxLWM5NDA0ZTE4Njg5YjRjYWZkMTI5MzQwNDY3YzBjMzE1YzdjOGM5ZjI1NDNiM2JmNzI2YjM5ODE5OTRiNTEyOWE) + +[gRPC Go](https://github.com/grpc/grpc-go) Middleware: interceptors, helpers, utilities. + +## Middleware + +[gRPC Go](https://github.com/grpc/grpc-go) recently acquired support for +Interceptors, i.e. [middleware](https://medium.com/@matryer/writing-middleware-in-golang-and-how-go-makes-it-so-much-fun-4375c1246e81#.gv7tdlghs) +that is executed either on the gRPC Server before the request is passed onto the user's application logic, or on the gRPC client either around the user call. It is a perfect way to implement +common patterns: auth, logging, message, validation, retries or monitoring. + +These are generic building blocks that make it easy to build multiple microservices easily. +The purpose of this repository is to act as a go-to point for such reusable functionality. It contains +some of them itself, but also will link to useful external repos. 
+ +`grpc_middleware` itself provides support for chaining interceptors, here's an example: + +```go +import "github.com/grpc-ecosystem/go-grpc-middleware" + +myServer := grpc.NewServer( + grpc.StreamInterceptor(grpc_middleware.ChainStreamServer( + grpc_ctxtags.StreamServerInterceptor(), + grpc_opentracing.StreamServerInterceptor(), + grpc_prometheus.StreamServerInterceptor, + grpc_zap.StreamServerInterceptor(zapLogger), + grpc_auth.StreamServerInterceptor(myAuthFunction), + grpc_recovery.StreamServerInterceptor(), + )), + grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer( + grpc_ctxtags.UnaryServerInterceptor(), + grpc_opentracing.UnaryServerInterceptor(), + grpc_prometheus.UnaryServerInterceptor, + grpc_zap.UnaryServerInterceptor(zapLogger), + grpc_auth.UnaryServerInterceptor(myAuthFunction), + grpc_recovery.UnaryServerInterceptor(), + )), +) +``` + +## Interceptors + +*Please send a PR to add new interceptors or middleware to this list* + +#### Auth + * [`grpc_auth`](auth) - a customizable (via `AuthFunc`) piece of auth middleware + +#### Logging + * [`grpc_ctxtags`](tags/) - a library that adds a `Tag` map to context, with data populated from request body + * [`grpc_zap`](logging/zap/) - integration of [zap](https://github.com/uber-go/zap) logging library into gRPC handlers. + * [`grpc_logrus`](logging/logrus/) - integration of [logrus](https://github.com/sirupsen/logrus) logging library into gRPC handlers. + * [`grpc_kit`](logging/kit/) - integration of [go-kit](https://github.com/go-kit/kit/tree/master/log) logging library into gRPC handlers. + +#### Monitoring + * [`grpc_prometheus`⚡](https://github.com/grpc-ecosystem/go-grpc-prometheus) - Prometheus client-side and server-side monitoring middleware + * [`otgrpc`⚡](https://github.com/grpc-ecosystem/grpc-opentracing/tree/master/go/otgrpc) - [OpenTracing](http://opentracing.io/) client-side and server-side interceptors + * [`grpc_opentracing`](tracing/opentracing) - [OpenTracing](http://opentracing.io/) client-side and server-side interceptors with support for streaming and handler-returned tags + +#### Client + * [`grpc_retry`](retry/) - a generic gRPC response code retry mechanism, client-side middleware + +#### Server + * [`grpc_validator`](validator/) - codegen inbound message validation from `.proto` options + * [`grpc_recovery`](recovery/) - turn panics into gRPC errors + * [`ratelimit`](ratelimit/) - grpc rate limiting by your own limiter + + +## Status + +This code has been running in *production* since May 2016 as the basis of the gRPC micro services stack at [Improbable](https://improbable.io). + +Additional tooling will be added, and contributions are welcome. + +## License + +`go-grpc-middleware` is released under the Apache 2.0 license. See the [LICENSE](LICENSE) file for details. diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-middleware/chain.go b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/chain.go new file mode 100644 index 0000000000..ea3738b896 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/chain.go @@ -0,0 +1,120 @@ +// Copyright 2016 Michal Witkowski. All Rights Reserved. +// See LICENSE for licensing terms. + +// gRPC Server Interceptor chaining middleware. + +package grpc_middleware + +import ( + "context" + + "google.golang.org/grpc" +) + +// ChainUnaryServer creates a single interceptor out of a chain of many interceptors. +// +// Execution is done in left-to-right order, including passing of context. 
+// For example ChainUnaryServer(one, two, three) will execute one before two before three, and three +// will see context changes of one and two. +func ChainUnaryServer(interceptors ...grpc.UnaryServerInterceptor) grpc.UnaryServerInterceptor { + n := len(interceptors) + + return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { + chainer := func(currentInter grpc.UnaryServerInterceptor, currentHandler grpc.UnaryHandler) grpc.UnaryHandler { + return func(currentCtx context.Context, currentReq interface{}) (interface{}, error) { + return currentInter(currentCtx, currentReq, info, currentHandler) + } + } + + chainedHandler := handler + for i := n - 1; i >= 0; i-- { + chainedHandler = chainer(interceptors[i], chainedHandler) + } + + return chainedHandler(ctx, req) + } +} + +// ChainStreamServer creates a single interceptor out of a chain of many interceptors. +// +// Execution is done in left-to-right order, including passing of context. +// For example ChainUnaryServer(one, two, three) will execute one before two before three. +// If you want to pass context between interceptors, use WrapServerStream. +func ChainStreamServer(interceptors ...grpc.StreamServerInterceptor) grpc.StreamServerInterceptor { + n := len(interceptors) + + return func(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error { + chainer := func(currentInter grpc.StreamServerInterceptor, currentHandler grpc.StreamHandler) grpc.StreamHandler { + return func(currentSrv interface{}, currentStream grpc.ServerStream) error { + return currentInter(currentSrv, currentStream, info, currentHandler) + } + } + + chainedHandler := handler + for i := n - 1; i >= 0; i-- { + chainedHandler = chainer(interceptors[i], chainedHandler) + } + + return chainedHandler(srv, ss) + } +} + +// ChainUnaryClient creates a single interceptor out of a chain of many interceptors. +// +// Execution is done in left-to-right order, including passing of context. +// For example ChainUnaryClient(one, two, three) will execute one before two before three. +func ChainUnaryClient(interceptors ...grpc.UnaryClientInterceptor) grpc.UnaryClientInterceptor { + n := len(interceptors) + + return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error { + chainer := func(currentInter grpc.UnaryClientInterceptor, currentInvoker grpc.UnaryInvoker) grpc.UnaryInvoker { + return func(currentCtx context.Context, currentMethod string, currentReq, currentRepl interface{}, currentConn *grpc.ClientConn, currentOpts ...grpc.CallOption) error { + return currentInter(currentCtx, currentMethod, currentReq, currentRepl, currentConn, currentInvoker, currentOpts...) + } + } + + chainedInvoker := invoker + for i := n - 1; i >= 0; i-- { + chainedInvoker = chainer(interceptors[i], chainedInvoker) + } + + return chainedInvoker(ctx, method, req, reply, cc, opts...) + } +} + +// ChainStreamClient creates a single interceptor out of a chain of many interceptors. +// +// Execution is done in left-to-right order, including passing of context. +// For example ChainStreamClient(one, two, three) will execute one before two before three. 
+func ChainStreamClient(interceptors ...grpc.StreamClientInterceptor) grpc.StreamClientInterceptor { + n := len(interceptors) + + return func(ctx context.Context, desc *grpc.StreamDesc, cc *grpc.ClientConn, method string, streamer grpc.Streamer, opts ...grpc.CallOption) (grpc.ClientStream, error) { + chainer := func(currentInter grpc.StreamClientInterceptor, currentStreamer grpc.Streamer) grpc.Streamer { + return func(currentCtx context.Context, currentDesc *grpc.StreamDesc, currentConn *grpc.ClientConn, currentMethod string, currentOpts ...grpc.CallOption) (grpc.ClientStream, error) { + return currentInter(currentCtx, currentDesc, currentConn, currentMethod, currentStreamer, currentOpts...) + } + } + + chainedStreamer := streamer + for i := n - 1; i >= 0; i-- { + chainedStreamer = chainer(interceptors[i], chainedStreamer) + } + + return chainedStreamer(ctx, desc, cc, method, opts...) + } +} + +// Chain creates a single interceptor out of a chain of many interceptors. +// +// WithUnaryServerChain is a grpc.Server config option that accepts multiple unary interceptors. +// Basically syntactic sugar. +func WithUnaryServerChain(interceptors ...grpc.UnaryServerInterceptor) grpc.ServerOption { + return grpc.UnaryInterceptor(ChainUnaryServer(interceptors...)) +} + +// WithStreamServerChain is a grpc.Server config option that accepts multiple stream interceptors. +// Basically syntactic sugar. +func WithStreamServerChain(interceptors ...grpc.StreamServerInterceptor) grpc.ServerOption { + return grpc.StreamInterceptor(ChainStreamServer(interceptors...)) +} diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-middleware/doc.go b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/doc.go new file mode 100644 index 0000000000..7168950364 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/doc.go @@ -0,0 +1,69 @@ +// Copyright 2016 Michal Witkowski. All Rights Reserved. +// See LICENSE for licensing terms. + +/* +`grpc_middleware` is a collection of gRPC middleware packages: interceptors, helpers and tools. + +Middleware + +gRPC is a fantastic RPC middleware, which sees a lot of adoption in the Golang world. However, the +upstream gRPC codebase is relatively bare bones. + +This package, and most of its child packages provides commonly needed middleware for gRPC: +client-side interceptors for retires, server-side interceptors for input validation and auth, +functions for chaining said interceptors, metadata convenience methods and more. + +Chaining + +By default, gRPC doesn't allow one to have more than one interceptor either on the client nor on +the server side. `grpc_middleware` provides convenient chaining methods + +Simple way of turning a multiple interceptors into a single interceptor. Here's an example for +server chaining: + + myServer := grpc.NewServer( + grpc.StreamInterceptor(grpc_middleware.ChainStreamServer(loggingStream, monitoringStream, authStream)), + grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(loggingUnary, monitoringUnary, authUnary), + ) + +These interceptors will be executed from left to right: logging, monitoring and auth. 
+ +Here's an example for client side chaining: + + clientConn, err = grpc.Dial( + address, + grpc.WithUnaryInterceptor(grpc_middleware.ChainUnaryClient(monitoringClientUnary, retryUnary)), + grpc.WithStreamInterceptor(grpc_middleware.ChainStreamClient(monitoringClientStream, retryStream)), + ) + client = pb_testproto.NewTestServiceClient(clientConn) + resp, err := client.PingEmpty(s.ctx, &myservice.Request{Msg: "hello"}) + +These interceptors will be executed from left to right: monitoring and then retry logic. + +The retry interceptor will call every interceptor that follows it whenever when a retry happens. + +Writing Your Own + +Implementing your own interceptor is pretty trivial: there are interfaces for that. But the interesting +bit exposing common data to handlers (and other middleware), similarly to HTTP Middleware design. +For example, you may want to pass the identity of the caller from the auth interceptor all the way +to the handling function. + +For example, a client side interceptor example for auth looks like: + + func FakeAuthUnaryInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { + newCtx := context.WithValue(ctx, "user_id", "john@example.com") + return handler(newCtx, req) + } + +Unfortunately, it's not as easy for streaming RPCs. These have the `context.Context` embedded within +the `grpc.ServerStream` object. To pass values through context, a wrapper (`WrappedServerStream`) is +needed. For example: + + func FakeAuthStreamingInterceptor(srv interface{}, stream grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error { + newStream := grpc_middleware.WrapServerStream(stream) + newStream.WrappedContext = context.WithValue(ctx, "user_id", "john@example.com") + return handler(srv, stream) + } +*/ +package grpc_middleware diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-middleware/go.mod b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/go.mod new file mode 100644 index 0000000000..6f8eeac43d --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/go.mod @@ -0,0 +1,22 @@ +module github.com/grpc-ecosystem/go-grpc-middleware + +require ( + github.com/go-kit/kit v0.9.0 + github.com/go-logfmt/logfmt v0.4.0 // indirect + github.com/go-stack/stack v1.8.0 // indirect + github.com/gogo/protobuf v1.2.1 + github.com/golang/protobuf v1.3.2 + github.com/opentracing/opentracing-go v1.1.0 + github.com/pkg/errors v0.8.1 // indirect + github.com/sirupsen/logrus v1.4.2 + github.com/stretchr/testify v1.4.0 + go.uber.org/atomic v1.4.0 // indirect + go.uber.org/multierr v1.1.0 // indirect + go.uber.org/zap v1.10.0 + golang.org/x/net v0.0.0-20190311183353-d8887717615a + golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be + google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55 // indirect + google.golang.org/grpc v1.19.0 +) + +go 1.13 diff --git a/vendor/github.com/grpc-ecosystem/go-grpc-middleware/wrappers.go b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/wrappers.go new file mode 100644 index 0000000000..05ccfb3f24 --- /dev/null +++ b/vendor/github.com/grpc-ecosystem/go-grpc-middleware/wrappers.go @@ -0,0 +1,30 @@ +// Copyright 2016 Michal Witkowski. All Rights Reserved. +// See LICENSE for licensing terms. + +package grpc_middleware + +import ( + "context" + + "google.golang.org/grpc" +) + +// WrappedServerStream is a thin wrapper around grpc.ServerStream that allows modifying context. 
+type WrappedServerStream struct { + grpc.ServerStream + // WrappedContext is the wrapper's own Context. You can assign it. + WrappedContext context.Context +} + +// Context returns the wrapper's WrappedContext, overwriting the nested grpc.ServerStream.Context() +func (w *WrappedServerStream) Context() context.Context { + return w.WrappedContext +} + +// WrapServerStream returns a ServerStream that has the ability to overwrite context. +func WrapServerStream(stream grpc.ServerStream) *WrappedServerStream { + if existing, ok := stream.(*WrappedServerStream); ok { + return existing + } + return &WrappedServerStream{ServerStream: stream, WrappedContext: stream.Context()} +} diff --git a/vendor/github.com/moby/buildkit/README.md b/vendor/github.com/moby/buildkit/README.md index b5f6370175..19d73253ae 100644 --- a/vendor/github.com/moby/buildkit/README.md +++ b/vendor/github.com/moby/buildkit/README.md @@ -86,6 +86,7 @@ BuildKit is used by the following projects: - [PouchContainer](https://github.com/alibaba/pouch) - [Docker buildx](https://github.com/docker/buildx) - [Okteto Cloud](https://okteto.com/) +- [Earthly earthfiles](https://github.com/vladaionescu/earthly) ## Quick start @@ -148,6 +149,7 @@ Currently, the following high-level languages has been implemented for LLB: - [Buildpacks](https://github.com/tonistiigi/buildkit-pack) - [Mockerfile](https://matt-rickard.com/building-a-new-dockerfile-frontend/) - [Gockerfile](https://github.com/po3rin/gockerfile) +- [bldr (Pkgfile)](https://github.com/talos-systems/bldr/) - (open a PR to add your own language) ### Exploring Dockerfiles diff --git a/vendor/github.com/moby/buildkit/cache/contenthash/path.go b/vendor/github.com/moby/buildkit/cache/contenthash/path.go index 1084da084a..42b7fd8349 100644 --- a/vendor/github.com/moby/buildkit/cache/contenthash/path.go +++ b/vendor/github.com/moby/buildkit/cache/contenthash/path.go @@ -1,9 +1,10 @@ package contenthash import ( - "errors" "os" "path/filepath" + + "github.com/pkg/errors" ) var ( @@ -52,7 +53,7 @@ func walkLink(root, path string, linksWalked *int, cb onSymlinkFunc) (newpath st fi, err := os.Lstat(realPath) if err != nil { // If path does not yet exist, treat as non-symlink - if os.IsNotExist(err) { + if errors.Is(err, os.ErrNotExist) { return path, false, nil } return "", false, err diff --git a/vendor/github.com/moby/buildkit/cache/contenthash/tarsum.go b/vendor/github.com/moby/buildkit/cache/contenthash/tarsum.go index 3327ab2c20..de72d6cdd0 100644 --- a/vendor/github.com/moby/buildkit/cache/contenthash/tarsum.go +++ b/vendor/github.com/moby/buildkit/cache/contenthash/tarsum.go @@ -39,7 +39,7 @@ func v1TarHeaderSelect(h *tar.Header) (orderedHeaders [][2]string) { // Get extended attributes. 
xAttrKeys := make([]string, len(h.Xattrs)) for k := range h.Xattrs { - if !strings.HasPrefix(k, "security.") && !strings.HasPrefix(k, "system.") { + if k == "security.capability" || !strings.HasPrefix(k, "security.") && !strings.HasPrefix(k, "system.") { xAttrKeys = append(xAttrKeys, k) } } diff --git a/vendor/github.com/moby/buildkit/cache/manager.go b/vendor/github.com/moby/buildkit/cache/manager.go index c5ff035b83..3d1e884360 100644 --- a/vendor/github.com/moby/buildkit/cache/manager.go +++ b/vendor/github.com/moby/buildkit/cache/manager.go @@ -16,7 +16,7 @@ import ( "github.com/moby/buildkit/client" "github.com/moby/buildkit/identity" "github.com/moby/buildkit/snapshot" - "github.com/opencontainers/go-digest" + digest "github.com/opencontainers/go-digest" imagespecidentity "github.com/opencontainers/image-spec/identity" ocispec "github.com/opencontainers/image-spec/specs-go/v1" "github.com/pkg/errors" @@ -143,7 +143,7 @@ func (cm *cacheManager) GetByBlob(ctx context.Context, desc ocispec.Descriptor, for _, si := range sis { ref, err := cm.get(ctx, si.ID(), opts...) - if err != nil && errors.Cause(err) != errNotFound { + if err != nil && !IsNotFound(err) { return nil, errors.Wrapf(err, "failed to get record %s by blobchainid", si.ID()) } if p != nil { @@ -160,7 +160,7 @@ func (cm *cacheManager) GetByBlob(ctx context.Context, desc ocispec.Descriptor, var link ImmutableRef for _, si := range sis { ref, err := cm.get(ctx, si.ID(), opts...) - if err != nil && errors.Cause(err) != errNotFound { + if err != nil && !IsNotFound(err) { return nil, errors.Wrapf(err, "failed to get record %s by chainid", si.ID()) } link = ref @@ -338,7 +338,7 @@ func (cm *cacheManager) getRecord(ctx context.Context, id string, opts ...RefOpt mutable, err := cm.getRecord(ctx, mutableID) if err != nil { // check loading mutable deleted record from disk - if errors.Cause(err) == errNotFound { + if IsNotFound(err) { cm.md.Clear(id) } return nil, err @@ -906,12 +906,8 @@ func (cm *cacheManager) DiskUsage(ctx context.Context, opt client.DiskUsageInfo) return du, nil } -func IsLocked(err error) bool { - return errors.Cause(err) == ErrLocked -} - func IsNotFound(err error) bool { - return errors.Cause(err) == errNotFound + return errors.Is(err, errNotFound) } type RefOption interface{} diff --git a/vendor/github.com/moby/buildkit/cache/migrate_v2.go b/vendor/github.com/moby/buildkit/cache/migrate_v2.go index 9671c28258..d94ef1eef0 100644 --- a/vendor/github.com/moby/buildkit/cache/migrate_v2.go +++ b/vendor/github.com/moby/buildkit/cache/migrate_v2.go @@ -13,7 +13,7 @@ import ( "github.com/containerd/containerd/snapshots" "github.com/moby/buildkit/cache/metadata" "github.com/moby/buildkit/snapshot" - "github.com/opencontainers/go-digest" + digest "github.com/opencontainers/go-digest" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -56,7 +56,7 @@ func migrateChainID(si *metadata.StorageItem, all map[string]*metadata.StorageIt func MigrateV2(ctx context.Context, from, to string, cs content.Store, s snapshot.Snapshotter, lm leases.Manager) error { _, err := os.Stat(to) if err != nil { - if !os.IsNotExist(errors.Cause(err)) { + if !errors.Is(err, os.ErrNotExist) { return errors.WithStack(err) } } else { @@ -65,7 +65,7 @@ func MigrateV2(ctx context.Context, from, to string, cs content.Store, s snapsho _, err = os.Stat(from) if err != nil { - if !os.IsNotExist(errors.Cause(err)) { + if !errors.Is(err, os.ErrNotExist) { return errors.WithStack(err) } return nil @@ -180,7 +180,7 @@ func MigrateV2(ctx context.Context, 
from, to string, cs content.Store, s snapsho }) if err != nil { // if we are running the migration twice - if errdefs.IsAlreadyExists(err) { + if errors.Is(err, errdefs.ErrAlreadyExists) { continue } return errors.Wrap(err, "failed to create lease") @@ -205,19 +205,22 @@ func MigrateV2(ctx context.Context, from, to string, cs content.Store, s snapsho // remove old root labels for _, item := range byID { - if _, err := s.Update(ctx, snapshots.Info{ - Name: getSnapshotID(item), - }, "labels.containerd.io/gc.root"); err != nil { - if !errdefs.IsNotFound(errors.Cause(err)) { - return err - } - } - - if blob := getBlob(item); blob != "" { - if _, err := cs.Update(ctx, content.Info{ - Digest: digest.Digest(blob), + em := getEqualMutable(item) + if em == "" { + if _, err := s.Update(ctx, snapshots.Info{ + Name: getSnapshotID(item), }, "labels.containerd.io/gc.root"); err != nil { - return err + if !errors.Is(err, errdefs.ErrNotFound) { + return err + } + } + + if blob := getBlob(item); blob != "" { + if _, err := cs.Update(ctx, content.Info{ + Digest: digest.Digest(blob), + }, "labels.containerd.io/gc.root"); err != nil { + return err + } } } } @@ -228,7 +231,7 @@ func MigrateV2(ctx context.Context, from, to string, cs content.Store, s snapsho if _, err := s.Update(ctx, snapshots.Info{ Name: info.Name, }, "labels.containerd.io/gc.root"); err != nil { - if !errdefs.IsNotFound(errors.Cause(err)) { + if !errors.Is(err, errdefs.ErrNotFound) { return err } } diff --git a/vendor/github.com/moby/buildkit/cache/refs.go b/vendor/github.com/moby/buildkit/cache/refs.go index cfedd6ae8b..d932f4e96c 100644 --- a/vendor/github.com/moby/buildkit/cache/refs.go +++ b/vendor/github.com/moby/buildkit/cache/refs.go @@ -17,7 +17,7 @@ import ( "github.com/moby/buildkit/snapshot" "github.com/moby/buildkit/util/flightcontrol" "github.com/moby/buildkit/util/leaseutil" - "github.com/opencontainers/go-digest" + digest "github.com/opencontainers/go-digest" imagespecidentity "github.com/opencontainers/image-spec/identity" ocispec "github.com/opencontainers/image-spec/specs-go/v1" "github.com/pkg/errors" @@ -160,7 +160,7 @@ func (cr *cacheRecord) Size(ctx context.Context) (int64, error) { if isDead { return int64(0), nil } - if !errdefs.IsNotFound(err) { + if !errors.Is(err, errdefs.ErrNotFound) { return s, errors.Wrapf(err, "failed to get usage for %s", cr.ID()) } } @@ -180,7 +180,10 @@ func (cr *cacheRecord) Size(ctx context.Context) (int64, error) { cr.mu.Unlock() return usage.Size, nil }) - return s.(int64), err + if err != nil { + return 0, err + } + return s.(int64), nil } func (cr *cacheRecord) Parent() ImmutableRef { @@ -349,7 +352,7 @@ func (sr *immutableRef) Extract(ctx context.Context) error { return nil, err } if err := sr.cm.Snapshotter.Commit(ctx, getSnapshotID(sr.md), key); err != nil { - if !errdefs.IsAlreadyExists(err) { + if !errors.Is(err, errdefs.ErrAlreadyExists) { return nil, err } } @@ -506,7 +509,7 @@ func (cr *cacheRecord) finalize(ctx context.Context, commit bool) error { return nil }) if err != nil { - if !errdefs.IsAlreadyExists(err) { // migrator adds leases for everything + if !errors.Is(err, errdefs.ErrAlreadyExists) { // migrator adds leases for everything return errors.Wrap(err, "failed to create lease") } } diff --git a/vendor/github.com/moby/buildkit/cache/remotecache/registry/registry.go b/vendor/github.com/moby/buildkit/cache/remotecache/registry/registry.go index a172917a50..7fe04e2df2 100644 --- a/vendor/github.com/moby/buildkit/cache/remotecache/registry/registry.go +++ 
b/vendor/github.com/moby/buildkit/cache/remotecache/registry/registry.go @@ -10,7 +10,7 @@ import ( "github.com/moby/buildkit/session" "github.com/moby/buildkit/util/contentutil" "github.com/moby/buildkit/util/resolver" - "github.com/opencontainers/go-digest" + digest "github.com/opencontainers/go-digest" ocispec "github.com/opencontainers/image-spec/specs-go/v1" specs "github.com/opencontainers/image-spec/specs-go/v1" "github.com/pkg/errors" diff --git a/vendor/github.com/moby/buildkit/client/client.go b/vendor/github.com/moby/buildkit/client/client.go index d256ec37e4..0546f4653c 100644 --- a/vendor/github.com/moby/buildkit/client/client.go +++ b/vendor/github.com/moby/buildkit/client/client.go @@ -8,12 +8,14 @@ import ( "net" "time" + grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" "github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc" controlapi "github.com/moby/buildkit/api/services/control" "github.com/moby/buildkit/client/connhelper" "github.com/moby/buildkit/session" "github.com/moby/buildkit/session/grpchijack" "github.com/moby/buildkit/util/appdefaults" + "github.com/moby/buildkit/util/grpcerrors" opentracing "github.com/opentracing/opentracing-go" "github.com/pkg/errors" "google.golang.org/grpc" @@ -31,6 +33,10 @@ func New(ctx context.Context, address string, opts ...ClientOpt) (*Client, error gopts := []grpc.DialOption{} needDialer := true needWithInsecure := true + + var unary []grpc.UnaryClientInterceptor + var stream []grpc.StreamClientInterceptor + for _, o := range opts { if _, ok := o.(*withFailFast); ok { gopts = append(gopts, grpc.FailOnNonTempDialError(true)) @@ -44,9 +50,8 @@ func New(ctx context.Context, address string, opts ...ClientOpt) (*Client, error needWithInsecure = false } if wt, ok := o.(*withTracer); ok { - gopts = append(gopts, - grpc.WithUnaryInterceptor(otgrpc.OpenTracingClientInterceptor(wt.tracer, otgrpc.LogPayloads())), - grpc.WithStreamInterceptor(otgrpc.OpenTracingStreamClientInterceptor(wt.tracer))) + unary = append(unary, otgrpc.OpenTracingClientInterceptor(wt.tracer, otgrpc.LogPayloads())) + stream = append(stream, otgrpc.OpenTracingStreamClientInterceptor(wt.tracer)) } if wd, ok := o.(*withDialer); ok { gopts = append(gopts, grpc.WithDialer(wd.dialer)) @@ -68,6 +73,22 @@ func New(ctx context.Context, address string, opts ...ClientOpt) (*Client, error if address == "" { address = appdefaults.Address } + + unary = append(unary, grpcerrors.UnaryClientInterceptor) + stream = append(stream, grpcerrors.StreamClientInterceptor) + + if len(unary) == 1 { + gopts = append(gopts, grpc.WithUnaryInterceptor(unary[0])) + } else if len(unary) > 1 { + gopts = append(gopts, grpc.WithUnaryInterceptor(grpc_middleware.ChainUnaryClient(unary...))) + } + + if len(stream) == 1 { + gopts = append(gopts, grpc.WithStreamInterceptor(stream[0])) + } else if len(stream) > 1 { + gopts = append(gopts, grpc.WithStreamInterceptor(grpc_middleware.ChainStreamClient(stream...))) + } + conn, err := grpc.DialContext(ctx, address, gopts...) if err != nil { return nil, errors.Wrapf(err, "failed to dial %q . 
make sure buildkitd is running", address) diff --git a/vendor/github.com/moby/buildkit/client/connhelper/connhelper.go b/vendor/github.com/moby/buildkit/client/connhelper/connhelper.go index e634bb5cdc..211df28e97 100644 --- a/vendor/github.com/moby/buildkit/client/connhelper/connhelper.go +++ b/vendor/github.com/moby/buildkit/client/connhelper/connhelper.go @@ -1,4 +1,5 @@ -// Package connhelper provides helpers for connecting to a remote daemon host with custom logic. +// Package connhelper provides helpers for connecting to a remote daemon host +// with custom logic. package connhelper import ( diff --git a/vendor/github.com/moby/buildkit/client/llb/async.go b/vendor/github.com/moby/buildkit/client/llb/async.go index bf28a957c7..48216e9895 100644 --- a/vendor/github.com/moby/buildkit/client/llb/async.go +++ b/vendor/github.com/moby/buildkit/client/llb/async.go @@ -61,7 +61,7 @@ func (as *asyncState) Do(ctx context.Context) error { if err != nil { select { case <-ctx.Done(): - if errors.Cause(err) == ctx.Err() { + if errors.Is(err, ctx.Err()) { return res, err } default: @@ -85,8 +85,8 @@ type errVertex struct { func (v *errVertex) Validate(context.Context) error { return v.err } -func (v *errVertex) Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) { - return "", nil, nil, v.err +func (v *errVertex) Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) { + return "", nil, nil, nil, v.err } func (v *errVertex) Output() Output { return nil diff --git a/vendor/github.com/moby/buildkit/client/llb/definition.go b/vendor/github.com/moby/buildkit/client/llb/definition.go index 9be2b299ae..fe9f7c17f8 100644 --- a/vendor/github.com/moby/buildkit/client/llb/definition.go +++ b/vendor/github.com/moby/buildkit/client/llb/definition.go @@ -20,6 +20,7 @@ type DefinitionOp struct { ops map[digest.Digest]*pb.Op defs map[digest.Digest][]byte metas map[digest.Digest]pb.OpMetadata + sources map[digest.Digest][]*SourceLocation platforms map[digest.Digest]*specs.Platform dgst digest.Digest index pb.OutputIndex @@ -49,6 +50,38 @@ func NewDefinitionOp(def *pb.Definition) (*DefinitionOp, error) { platforms[dgst] = platform } + srcs := map[digest.Digest][]*SourceLocation{} + + if def.Source != nil { + sourceMaps := make([]*SourceMap, len(def.Source.Infos)) + for i, info := range def.Source.Infos { + var st *State + sdef := info.Definition + if sdef != nil { + op, err := NewDefinitionOp(sdef) + if err != nil { + return nil, err + } + state := NewState(op) + st = &state + } + sourceMaps[i] = NewSourceMap(st, info.Filename, info.Data) + } + + for dgst, locs := range def.Source.Locations { + for _, loc := range locs.Locations { + if loc.SourceIndex < 0 || int(loc.SourceIndex) >= len(sourceMaps) { + return nil, errors.Errorf("failed to find source map with index %d", loc.SourceIndex) + } + + srcs[digest.Digest(dgst)] = append(srcs[digest.Digest(dgst)], &SourceLocation{ + SourceMap: sourceMaps[int(loc.SourceIndex)], + Ranges: loc.Ranges, + }) + } + } + } + var index pb.OutputIndex if dgst != "" { index = ops[dgst].Inputs[0].Index @@ -59,6 +92,7 @@ func NewDefinitionOp(def *pb.Definition) (*DefinitionOp, error) { ops: ops, defs: defs, metas: def.Metadata, + sources: srcs, platforms: platforms, dgst: dgst, index: index, @@ -110,20 +144,20 @@ func (d *DefinitionOp) Validate(context.Context) error { return nil } -func (d *DefinitionOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) { +func 
(d *DefinitionOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) { if d.dgst == "" { - return "", nil, nil, errors.Errorf("cannot marshal empty definition op") + return "", nil, nil, nil, errors.Errorf("cannot marshal empty definition op") } if err := d.Validate(ctx); err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } d.mu.Lock() defer d.mu.Unlock() meta := d.metas[d.dgst] - return d.dgst, d.defs[d.dgst], &meta, nil + return d.dgst, d.defs[d.dgst], &meta, d.sources[d.dgst], nil } diff --git a/vendor/github.com/moby/buildkit/client/llb/exec.go b/vendor/github.com/moby/buildkit/client/llb/exec.go index 75617d94dc..9d27bbcd1c 100644 --- a/vendor/github.com/moby/buildkit/client/llb/exec.go +++ b/vendor/github.com/moby/buildkit/client/llb/exec.go @@ -81,7 +81,7 @@ func (e *ExecOp) AddMount(target string, source Output, opt ...MountOption) Outp } m.output = o } - e.Store(nil, nil, nil) + e.Store(nil, nil, nil, nil) e.isValidated = false return m.output } @@ -124,12 +124,12 @@ func (e *ExecOp) Validate(ctx context.Context) error { return nil } -func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) { +func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) { if e.Cached(c) { return e.Load() } if err := e.Validate(ctx); err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } // make sure mounts are sorted sort.Slice(e.mounts, func(i, j int) bool { @@ -138,7 +138,7 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, [] env, err := getEnv(e.base)(ctx) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } if len(e.ssh) > 0 { @@ -161,17 +161,17 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, [] args, err := getArgs(e.base)(ctx) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } cwd, err := getDir(e.base)(ctx) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } user, err := getUser(e.base)(ctx) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } meta := &pb.Meta{ @@ -182,7 +182,7 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, [] } extraHosts, err := getExtraHosts(e.base)(ctx) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } if len(extraHosts) > 0 { hosts := make([]*pb.HostIP, len(extraHosts)) @@ -194,12 +194,12 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, [] network, err := getNetwork(e.base)(ctx) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } security, err := getSecurity(e.base)(ctx) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } peo := &pb.ExecOp{ @@ -252,7 +252,7 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, [] if e.constraints.Platform == nil { p, err := getPlatform(e.base)(ctx) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } e.constraints.Platform = p } @@ -267,11 +267,11 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, [] inputIndex := pb.InputIndex(len(pop.Inputs)) if m.source != nil { if m.tmpfs { - return "", nil, nil, errors.Errorf("tmpfs mounts must use scratch") + return "", nil, nil, nil, errors.Errorf("tmpfs mounts must use scratch") } inp, err := 
m.source.ToInput(ctx, c) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } newInput := true @@ -356,9 +356,9 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, [] dt, err := pop.Marshal() if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } - e.Store(dt, md, c) + e.Store(dt, md, e.constraints.SourceLocations, c) return e.Load() } @@ -388,7 +388,7 @@ func (e *ExecOp) getMountIndexFn(m *mount) func() (pb.OutputIndex, error) { i := 0 for _, m2 := range e.mounts { - if m2.noOutput || m2.readonly || m2.cacheID != "" { + if m2.noOutput || m2.readonly || m2.tmpfs || m2.cacheID != "" { continue } if m == m2 { diff --git a/vendor/github.com/moby/buildkit/client/llb/fileop.go b/vendor/github.com/moby/buildkit/client/llb/fileop.go index de1512348c..db5ed00dd2 100644 --- a/vendor/github.com/moby/buildkit/client/llb/fileop.go +++ b/vendor/github.com/moby/buildkit/client/llb/fileop.go @@ -649,12 +649,12 @@ func (ms *marshalState) add(fa *FileAction, c *Constraints) (*fileActionState, e return st, nil } -func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) { +func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) { if f.Cached(c) { return f.Load() } if err := f.Validate(ctx); err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } addCap(&f.constraints, pb.CapFileBase) @@ -669,7 +669,7 @@ func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, [] state := newMarshalState(ctx) _, err := state.add(f.action, c) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } pop.Inputs = state.inputs @@ -683,13 +683,13 @@ func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, [] if st.fa.state != nil { parent, err = st.fa.state.GetDir(ctx) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } } action, err := st.action.toProtoAction(ctx, parent, st.base) if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } pfo.Actions = append(pfo.Actions, &pb.FileAction{ @@ -702,9 +702,9 @@ func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, [] dt, err := pop.Marshal() if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } - f.Store(dt, md, c) + f.Store(dt, md, f.constraints.SourceLocations, c) return f.Load() } diff --git a/vendor/github.com/moby/buildkit/client/llb/marshal.go b/vendor/github.com/moby/buildkit/client/llb/marshal.go index 65a352fae8..282b592b7b 100644 --- a/vendor/github.com/moby/buildkit/client/llb/marshal.go +++ b/vendor/github.com/moby/buildkit/client/llb/marshal.go @@ -14,21 +14,24 @@ import ( type Definition struct { Def [][]byte Metadata map[digest.Digest]pb.OpMetadata + Source *pb.Source } func (def *Definition) ToPB() *pb.Definition { - md := make(map[digest.Digest]pb.OpMetadata) + md := make(map[digest.Digest]pb.OpMetadata, len(def.Metadata)) for k, v := range def.Metadata { md[k] = v } return &pb.Definition{ Def: def.Def, + Source: def.Source, Metadata: md, } } func (def *Definition) FromPB(x *pb.Definition) { def.Def = x.Def + def.Source = x.Source def.Metadata = make(map[digest.Digest]pb.OpMetadata) for k, v := range x.Metadata { def.Metadata[k] = v @@ -95,18 +98,20 @@ type MarshalCache struct { digest digest.Digest dt []byte md *pb.OpMetadata + srcs []*SourceLocation constraints *Constraints } func (mc *MarshalCache) 
Cached(c *Constraints) bool { return mc.dt != nil && mc.constraints == c } -func (mc *MarshalCache) Load() (digest.Digest, []byte, *pb.OpMetadata, error) { - return mc.digest, mc.dt, mc.md, nil +func (mc *MarshalCache) Load() (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) { + return mc.digest, mc.dt, mc.md, mc.srcs, nil } -func (mc *MarshalCache) Store(dt []byte, md *pb.OpMetadata, c *Constraints) { +func (mc *MarshalCache) Store(dt []byte, md *pb.OpMetadata, srcs []*SourceLocation, c *Constraints) { mc.digest = digest.FromBytes(dt) mc.dt = dt mc.md = md mc.constraints = c + mc.srcs = srcs } diff --git a/vendor/github.com/moby/buildkit/client/llb/source.go b/vendor/github.com/moby/buildkit/client/llb/source.go index b64cfbcc5e..fd2f44f269 100644 --- a/vendor/github.com/moby/buildkit/client/llb/source.go +++ b/vendor/github.com/moby/buildkit/client/llb/source.go @@ -44,12 +44,12 @@ func (s *SourceOp) Validate(ctx context.Context) error { return nil } -func (s *SourceOp) Marshal(ctx context.Context, constraints *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) { +func (s *SourceOp) Marshal(ctx context.Context, constraints *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) { if s.Cached(constraints) { return s.Load() } if err := s.Validate(ctx); err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } if strings.HasPrefix(s.id, "local://") { @@ -74,10 +74,10 @@ func (s *SourceOp) Marshal(ctx context.Context, constraints *Constraints) (diges dt, err := proto.Marshal() if err != nil { - return "", nil, nil, err + return "", nil, nil, nil, err } - s.Store(dt, md, constraints) + s.Store(dt, md, s.constraints.SourceLocations, constraints) return s.Load() } diff --git a/vendor/github.com/moby/buildkit/client/llb/sourcemap.go b/vendor/github.com/moby/buildkit/client/llb/sourcemap.go new file mode 100644 index 0000000000..87afde9954 --- /dev/null +++ b/vendor/github.com/moby/buildkit/client/llb/sourcemap.go @@ -0,0 +1,111 @@ +package llb + +import ( + "context" + + "github.com/moby/buildkit/solver/pb" + "github.com/opencontainers/go-digest" +) + +type SourceMap struct { + State *State + Definition *Definition + Filename string + Data []byte +} + +func NewSourceMap(st *State, filename string, dt []byte) *SourceMap { + return &SourceMap{ + State: st, + Filename: filename, + Data: dt, + } +} + +func (s *SourceMap) Location(r []*pb.Range) ConstraintsOpt { + return constraintsOptFunc(func(c *Constraints) { + if s == nil { + return + } + c.SourceLocations = append(c.SourceLocations, &SourceLocation{ + SourceMap: s, + Ranges: r, + }) + }) +} + +type SourceLocation struct { + SourceMap *SourceMap + Ranges []*pb.Range +} + +type sourceMapCollector struct { + maps []*SourceMap + index map[*SourceMap]int + locations map[digest.Digest][]*SourceLocation +} + +func newSourceMapCollector() *sourceMapCollector { + return &sourceMapCollector{ + index: map[*SourceMap]int{}, + locations: map[digest.Digest][]*SourceLocation{}, + } +} + +func (smc *sourceMapCollector) Add(dgst digest.Digest, ls []*SourceLocation) { + for _, l := range ls { + idx, ok := smc.index[l.SourceMap] + if !ok { + idx = len(smc.maps) + smc.maps = append(smc.maps, l.SourceMap) + } + smc.index[l.SourceMap] = idx + } + smc.locations[dgst] = ls +} + +func (smc *sourceMapCollector) Marshal(ctx context.Context, co ...ConstraintsOpt) (*pb.Source, error) { + s := &pb.Source{ + Locations: make(map[string]*pb.Locations), + } + for _, m := range smc.maps { + def := m.Definition + if 
def == nil && m.State != nil { + var err error + def, err = m.State.Marshal(ctx, co...) + if err != nil { + return nil, err + } + m.Definition = def + } + + info := &pb.SourceInfo{ + Data: m.Data, + Filename: m.Filename, + } + + if def != nil { + info.Definition = def.ToPB() + } + + s.Infos = append(s.Infos, info) + } + + for dgst, locs := range smc.locations { + pbLocs, ok := s.Locations[dgst.String()] + if !ok { + pbLocs = &pb.Locations{} + } + + for _, loc := range locs { + pbLocs.Locations = append(pbLocs.Locations, &pb.Location{ + SourceIndex: int32(smc.index[loc.SourceMap]), + Ranges: loc.Ranges, + }) + } + + s.Locations[dgst.String()] = pbLocs + } + + return s, nil +} diff --git a/vendor/github.com/moby/buildkit/client/llb/state.go b/vendor/github.com/moby/buildkit/client/llb/state.go index 195cee3e3d..dac9b8713e 100644 --- a/vendor/github.com/moby/buildkit/client/llb/state.go +++ b/vendor/github.com/moby/buildkit/client/llb/state.go @@ -24,7 +24,7 @@ type Output interface { type Vertex interface { Validate(context.Context) error - Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) + Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) Output() Output Inputs() []Output } @@ -124,7 +124,9 @@ func (s State) Marshal(ctx context.Context, co ...ConstraintsOpt) (*Definition, o.SetConstraintsOption(c) } - def, err := marshal(ctx, s.Output().Vertex(ctx), def, map[digest.Digest]struct{}{}, map[Vertex]struct{}{}, c) + smc := newSourceMapCollector() + + def, err := marshal(ctx, s.Output().Vertex(ctx), def, smc, map[digest.Digest]struct{}{}, map[Vertex]struct{}{}, c) if err != nil { return def, err } @@ -159,23 +161,28 @@ func (s State) Marshal(ctx context.Context, co ...ConstraintsOpt) (*Definition, } def.Metadata[dgst] = md + sm, err := smc.Marshal(ctx, co...) 
+ if err != nil { + return nil, err + } + def.Source = sm return def, nil } -func marshal(ctx context.Context, v Vertex, def *Definition, cache map[digest.Digest]struct{}, vertexCache map[Vertex]struct{}, c *Constraints) (*Definition, error) { +func marshal(ctx context.Context, v Vertex, def *Definition, s *sourceMapCollector, cache map[digest.Digest]struct{}, vertexCache map[Vertex]struct{}, c *Constraints) (*Definition, error) { if _, ok := vertexCache[v]; ok { return def, nil } for _, inp := range v.Inputs() { var err error - def, err = marshal(ctx, inp.Vertex(ctx), def, cache, vertexCache, c) + def, err = marshal(ctx, inp.Vertex(ctx), def, s, cache, vertexCache, c) if err != nil { return def, err } } - dgst, dt, opMeta, err := v.Marshal(ctx, c) + dgst, dt, opMeta, sls, err := v.Marshal(ctx, c) if err != nil { return def, err } @@ -186,6 +193,7 @@ func marshal(ctx context.Context, v Vertex, def *Definition, cache map[digest.Di if _, ok := cache[dgst]; ok { return def, nil } + s.Add(dgst, sls) def.Def = append(def.Def, dt) cache[dgst] = struct{}{} return def, nil @@ -367,7 +375,7 @@ func (o *output) ToInput(ctx context.Context, c *Constraints) (*pb.Input, error) return nil, err } } - dgst, _, _, err := o.vertex.Marshal(ctx, c) + dgst, _, _, _, err := o.vertex.Marshal(ctx, c) if err != nil { return nil, err } @@ -514,6 +522,7 @@ type Constraints struct { Metadata pb.OpMetadata LocalUniqueID string Caps *apicaps.CapSet + SourceLocations []*SourceLocation } func Platform(p specs.Platform) ConstraintsOpt { diff --git a/vendor/github.com/moby/buildkit/executor/oci/hosts.go b/vendor/github.com/moby/buildkit/executor/oci/hosts.go index 3b3f86db79..552b5851e0 100644 --- a/vendor/github.com/moby/buildkit/executor/oci/hosts.go +++ b/vendor/github.com/moby/buildkit/executor/oci/hosts.go @@ -11,6 +11,7 @@ import ( "github.com/docker/docker/pkg/idtools" "github.com/moby/buildkit/executor" "github.com/moby/buildkit/identity" + "github.com/pkg/errors" ) const hostsContent = ` @@ -41,7 +42,7 @@ func makeHostsFile(stateDir string, extraHosts []executor.HostIP, idmap *idtools if err == nil { return "", func() {}, nil } - if !os.IsNotExist(err) { + if !errors.Is(err, os.ErrNotExist) { return "", nil, err } diff --git a/vendor/github.com/moby/buildkit/executor/oci/mounts.go b/vendor/github.com/moby/buildkit/executor/oci/mounts.go index 8d32a95f87..62dbd388d5 100644 --- a/vendor/github.com/moby/buildkit/executor/oci/mounts.go +++ b/vendor/github.com/moby/buildkit/executor/oci/mounts.go @@ -68,7 +68,7 @@ func withROBind(src, dest string) func(m []specs.Mount) ([]specs.Mount, error) { Destination: dest, Type: "bind", Source: src, - Options: []string{"rbind", "ro"}, + Options: []string{"nosuid", "noexec", "nodev", "rbind", "ro"}, }) return m, nil } diff --git a/vendor/github.com/moby/buildkit/executor/oci/resolvconf.go b/vendor/github.com/moby/buildkit/executor/oci/resolvconf.go index 61fd36da3c..5768e8fd65 100644 --- a/vendor/github.com/moby/buildkit/executor/oci/resolvconf.go +++ b/vendor/github.com/moby/buildkit/executor/oci/resolvconf.go @@ -10,6 +10,7 @@ import ( "github.com/docker/libnetwork/resolvconf" "github.com/docker/libnetwork/types" "github.com/moby/buildkit/util/flightcontrol" + "github.com/pkg/errors" ) var g flightcontrol.Group @@ -34,7 +35,7 @@ func GetResolvConf(ctx context.Context, stateDir string, idmap *idtools.Identity if !generate { fi, err := os.Stat(p) if err != nil { - if !os.IsNotExist(err) { + if !errors.Is(err, os.ErrNotExist) { return "", err } generate = true @@ -42,7 +43,7 @@ 
func GetResolvConf(ctx context.Context, stateDir string, idmap *idtools.Identity if !generate { fiMain, err := os.Stat(resolvconf.Path()) if err != nil { - if !os.IsNotExist(err) { + if !errors.Is(err, os.ErrNotExist) { return nil, err } if lastNotEmpty { @@ -64,7 +65,7 @@ func GetResolvConf(ctx context.Context, stateDir string, idmap *idtools.Identity var dt []byte f, err := resolvconfGet() if err != nil { - if !os.IsNotExist(err) { + if !errors.Is(err, os.ErrNotExist) { return "", err } } else { diff --git a/vendor/github.com/moby/buildkit/executor/oci/user.go b/vendor/github.com/moby/buildkit/executor/oci/user.go index af64231fe4..4f7cb107ef 100644 --- a/vendor/github.com/moby/buildkit/executor/oci/user.go +++ b/vendor/github.com/moby/buildkit/executor/oci/user.go @@ -2,7 +2,6 @@ package oci import ( "context" - "errors" "os" "strconv" "strings" @@ -11,7 +10,8 @@ import ( containerdoci "github.com/containerd/containerd/oci" "github.com/containerd/continuity/fs" "github.com/opencontainers/runc/libcontainer/user" - "github.com/opencontainers/runtime-spec/specs-go" + specs "github.com/opencontainers/runtime-spec/specs-go" + "github.com/pkg/errors" ) func GetUser(ctx context.Context, root, username string) (uint32, uint32, []uint32, error) { diff --git a/vendor/github.com/moby/buildkit/executor/runcexecutor/executor.go b/vendor/github.com/moby/buildkit/executor/runcexecutor/executor.go index 26e432e61f..dc16c8f0a1 100644 --- a/vendor/github.com/moby/buildkit/executor/runcexecutor/executor.go +++ b/vendor/github.com/moby/buildkit/executor/runcexecutor/executor.go @@ -23,6 +23,7 @@ import ( "github.com/moby/buildkit/solver/pb" "github.com/moby/buildkit/util/network" rootlessspecconv "github.com/moby/buildkit/util/rootless/specconv" + "github.com/moby/buildkit/util/stack" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -302,7 +303,7 @@ func (w *runcExecutor) Exec(ctx context.Context, meta executor.Meta, root cache. 
case <-ctx.Done(): return errors.Wrapf(ctx.Err(), err.Error()) default: - return err + return stack.Enable(err) } } diff --git a/vendor/github.com/moby/buildkit/frontend/dockerfile/builder/build.go b/vendor/github.com/moby/buildkit/frontend/dockerfile/builder/build.go index efcf7d851e..a91b46dd9d 100644 --- a/vendor/github.com/moby/buildkit/frontend/dockerfile/builder/build.go +++ b/vendor/github.com/moby/buildkit/frontend/dockerfile/builder/build.go @@ -19,8 +19,10 @@ import ( "github.com/moby/buildkit/client/llb" "github.com/moby/buildkit/exporter/containerimage/exptypes" "github.com/moby/buildkit/frontend/dockerfile/dockerfile2llb" + "github.com/moby/buildkit/frontend/dockerfile/parser" "github.com/moby/buildkit/frontend/gateway/client" gwpb "github.com/moby/buildkit/frontend/gateway/pb" + "github.com/moby/buildkit/solver/errdefs" "github.com/moby/buildkit/solver/pb" "github.com/moby/buildkit/util/apicaps" specs "github.com/opencontainers/image-spec/specs-go/v1" @@ -226,6 +228,8 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) { return nil, errors.Wrapf(err, "failed to marshal local source") } + var sourceMap *llb.SourceMap + eg, ctx2 := errgroup.WithContext(ctx) var dtDockerfile []byte var dtDockerignore []byte @@ -250,6 +254,9 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) { return errors.Wrapf(err, "failed to read dockerfile") } + sourceMap = llb.NewSourceMap(&src, filename, dtDockerfile) + sourceMap.Definition = def + dt, err := ref.ReadFile(ctx2, client.ReadRequest{ Filename: filename + ".dockerignore", }) @@ -310,9 +317,13 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) { } if _, ok := opts["cmdline"]; !ok { - ref, cmdline, ok := dockerfile2llb.DetectSyntax(bytes.NewBuffer(dtDockerfile)) + ref, cmdline, loc, ok := dockerfile2llb.DetectSyntax(bytes.NewBuffer(dtDockerfile)) if ok { - return forwardGateway(ctx, c, ref, cmdline) + res, err := forwardGateway(ctx, c, ref, cmdline) + if err != nil && len(errdefs.Sources(err)) == 0 { + return nil, wrapSource(err, sourceMap, loc) + } + return res, err } } @@ -338,7 +349,13 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) { for i, tp := range targetPlatforms { func(i int, tp *specs.Platform) { - eg.Go(func() error { + eg.Go(func() (err error) { + defer func() { + var el *parser.ErrorLocation + if errors.As(err, &el) { + err = wrapSource(err, sourceMap, el.Location) + } + }() st, img, err := dockerfile2llb.Dockerfile2LLB(ctx, dtDockerfile, dockerfile2llb.ConvertOpt{ Target: opts[keyTarget], MetaResolver: c, @@ -357,6 +374,7 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) { ForceNetMode: defaultNetMode, OverrideCopyImage: opts[keyOverrideCopyImage], LLBCaps: &caps, + SourceMap: sourceMap, }) if err != nil { @@ -639,3 +657,30 @@ func scopeToSubDir(c *llb.State, fileop bool, dir string) *llb.State { bc := unpack.AddMount("/out", llb.Scratch()) return &bc } + +func wrapSource(err error, sm *llb.SourceMap, ranges []parser.Range) error { + if sm == nil { + return err + } + s := errdefs.Source{ + Info: &pb.SourceInfo{ + Data: sm.Data, + Filename: sm.Filename, + Definition: sm.Definition.ToPB(), + }, + Ranges: make([]*pb.Range, 0, len(ranges)), + } + for _, r := range ranges { + s.Ranges = append(s.Ranges, &pb.Range{ + Start: pb.Position{ + Line: int32(r.Start.Line), + Character: int32(r.Start.Character), + }, + End: pb.Position{ + Line: int32(r.End.Line), + Character: int32(r.End.Character), + }, + }) + } + 
return errdefs.WithSource(err, s) +} diff --git a/vendor/github.com/moby/buildkit/frontend/dockerfile/dockerfile2llb/convert.go b/vendor/github.com/moby/buildkit/frontend/dockerfile/dockerfile2llb/convert.go index 441075a40e..28c7efcaec 100644 --- a/vendor/github.com/moby/buildkit/frontend/dockerfile/dockerfile2llb/convert.go +++ b/vendor/github.com/moby/buildkit/frontend/dockerfile/dockerfile2llb/convert.go @@ -7,6 +7,7 @@ import ( "fmt" "math" "net/url" + "os" "path" "path/filepath" "sort" @@ -60,6 +61,7 @@ type ConvertOpt struct { OverrideCopyImage string LLBCaps *apicaps.CapSet ContextLocalName string + SourceMap *llb.SourceMap } func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State, *Image, error) { @@ -110,10 +112,10 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State, for i, st := range stages { name, err := shlex.ProcessWordWithMap(st.BaseName, metaArgsToMap(optMetaArgs)) if err != nil { - return nil, nil, err + return nil, nil, parser.WithLocation(err, st.Location) } if name == "" { - return nil, nil, errors.Errorf("base name (%s) should not be blank", st.BaseName) + return nil, nil, parser.WithLocation(errors.Errorf("base name (%s) should not be blank", st.BaseName), st.Location) } st.BaseName = name @@ -132,12 +134,12 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State, if v := st.Platform; v != "" { v, err := shlex.ProcessWordWithMap(v, metaArgsToMap(optMetaArgs)) if err != nil { - return nil, nil, errors.Wrapf(err, "failed to process arguments for platform %s", v) + return nil, nil, parser.WithLocation(errors.Wrapf(err, "failed to process arguments for platform %s", v), st.Location) } p, err := platforms.Parse(v) if err != nil { - return nil, nil, errors.Wrapf(err, "failed to parse platform %s", v) + return nil, nil, parser.WithLocation(errors.Wrapf(err, "failed to parse platform %s", v), st.Location) } ds.platform = &p } @@ -204,7 +206,7 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State, } if has, state := hasCircularDependency(allDispatchStates.states); has { - return nil, nil, fmt.Errorf("circular dependency detected on stage: %s", state.stageName) + return nil, nil, errors.Errorf("circular dependency detected on stage: %s", state.stageName) } if len(allDispatchStates.states) == 1 { @@ -225,7 +227,7 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State, eg.Go(func() error { ref, err := reference.ParseNormalizedNamed(d.stage.BaseName) if err != nil { - return errors.Wrapf(err, "failed to parse stage name %q", d.stage.BaseName) + return parser.WithLocation(errors.Wrapf(err, "failed to parse stage name %q", d.stage.BaseName), d.stage.Location) } platform := d.platform if platform == nil { @@ -278,7 +280,13 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State, if isScratch { d.state = llb.Scratch() } else { - d.state = llb.Image(d.stage.BaseName, dfCmd(d.stage.SourceCode), llb.Platform(*platform), opt.ImageResolveMode, llb.WithCustomName(prefixCommand(d, "FROM "+d.stage.BaseName, opt.PrefixPlatform, platform))) + d.state = llb.Image(d.stage.BaseName, + dfCmd(d.stage.SourceCode), + llb.Platform(*platform), + opt.ImageResolveMode, + llb.WithCustomName(prefixCommand(d, "FROM "+d.stage.BaseName, opt.PrefixPlatform, platform)), + location(opt.SourceMap, d.stage.Location), + ) } d.platform = platform return nil @@ -316,12 +324,12 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) 
(*llb.State, } if d.image.Config.WorkingDir != "" { if err = dispatchWorkdir(d, &instructions.WorkdirCommand{Path: d.image.Config.WorkingDir}, false, nil); err != nil { - return nil, nil, err + return nil, nil, parser.WithLocation(err, d.stage.Location) } } if d.image.Config.User != "" { if err = dispatchUser(d, &instructions.UserCommand{User: d.image.Config.User}, false); err != nil { - return nil, nil, err + return nil, nil, parser.WithLocation(err, d.stage.Location) } } d.state = d.state.Network(opt.ForceNetMode) @@ -340,19 +348,20 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State, extraHosts: opt.ExtraHosts, copyImage: opt.OverrideCopyImage, llbCaps: opt.LLBCaps, + sourceMap: opt.SourceMap, } if opt.copyImage == "" { opt.copyImage = DefaultCopyImage } if err = dispatchOnBuildTriggers(d, d.image.Config.OnBuild, opt); err != nil { - return nil, nil, err + return nil, nil, parser.WithLocation(err, d.stage.Location) } d.image.Config.OnBuild = nil for _, cmd := range d.commands { if err := dispatch(d, cmd, opt); err != nil { - return nil, nil, err + return nil, nil, parser.WithLocation(err, cmd.Location()) } } @@ -421,7 +430,7 @@ func toCommand(ic instructions.Command, allDispatchStates *dispatchStates) (comm stn, ok = allDispatchStates.findStateByName(c.From) if !ok { stn = &dispatchState{ - stage: instructions.Stage{BaseName: c.From}, + stage: instructions.Stage{BaseName: c.From, Location: ic.Location()}, deps: make(map[*dispatchState]struct{}), unregistered: true, } @@ -457,6 +466,7 @@ type dispatchOpt struct { extraHosts []llb.HostIP copyImage string llbCaps *apicaps.CapSet + sourceMap *llb.SourceMap } func dispatch(d *dispatchState, cmd command, opt dispatchOpt) error { @@ -484,7 +494,7 @@ func dispatch(d *dispatchState, cmd command, opt dispatchOpt) error { case *instructions.WorkdirCommand: err = dispatchWorkdir(d, c, true, &opt) case *instructions.AddCommand: - err = dispatchCopy(d, c.SourcesAndDest, opt.buildContext, true, c, c.Chown, opt) + err = dispatchCopy(d, c.SourcesAndDest, opt.buildContext, true, c, c.Chown, c.Chmod, c.Location(), opt) if err == nil { for _, src := range c.Sources() { if !strings.HasPrefix(src, "http://") && !strings.HasPrefix(src, "https://") { @@ -519,7 +529,7 @@ func dispatch(d *dispatchState, cmd command, opt dispatchOpt) error { if len(cmd.sources) != 0 { l = cmd.sources[0].state } - err = dispatchCopy(d, c.SourcesAndDest, l, false, c, c.Chown, opt) + err = dispatchCopy(d, c.SourcesAndDest, l, false, c, c.Chown, c.Chmod, c.Location(), opt) if err == nil && len(cmd.sources) == 0 { for _, src := range c.Sources() { d.ctxPaths[path.Join("/", filepath.ToSlash(src))] = struct{}{} @@ -634,7 +644,7 @@ func dispatchRun(d *dispatchState, c *instructions.RunCommand, proxy *llb.ProxyE if err != nil { return err } - opt := []llb.RunOption{llb.Args(args), dfCmd(c)} + opt := []llb.RunOption{llb.Args(args), dfCmd(c), location(dopt.sourceMap, c.Location())} if d.ignoreCache { opt = append(opt, llb.IgnoreCache) } @@ -702,7 +712,10 @@ func dispatchWorkdir(d *dispatchState, c *instructions.WorkdirCommand, commit bo if err != nil { return err } - d.state = d.state.File(llb.Mkdir(wd, 0755, mkdirOpt...), llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, c.String(), env)), d.prefixPlatform, &platform))) + d.state = d.state.File(llb.Mkdir(wd, 0755, mkdirOpt...), + llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, c.String(), env)), d.prefixPlatform, &platform)), + location(opt.sourceMap, 
c.Location()), + ) withLayer = true } return commitToHistory(&d.image, "WORKDIR "+wd, withLayer, nil) @@ -710,7 +723,7 @@ func dispatchWorkdir(d *dispatchState, c *instructions.WorkdirCommand, commit bo return nil } -func dispatchCopyFileOp(d *dispatchState, c instructions.SourcesAndDest, sourceState llb.State, isAddCommand bool, cmdToPrint fmt.Stringer, chown string, opt dispatchOpt) error { +func dispatchCopyFileOp(d *dispatchState, c instructions.SourcesAndDest, sourceState llb.State, isAddCommand bool, cmdToPrint fmt.Stringer, chown string, chmod string, loc []parser.Range, opt dispatchOpt) error { pp, err := pathRelativeToWorkingDir(d.state, c.Dest()) if err != nil { return err @@ -726,6 +739,15 @@ func dispatchCopyFileOp(d *dispatchState, c instructions.SourcesAndDest, sourceS copyOpt = append(copyOpt, llb.WithUser(chown)) } + var mode *os.FileMode + if chmod != "" { + p, err := strconv.ParseUint(chmod, 8, 32) + if err == nil { + perm := os.FileMode(p) + mode = &perm + } + } + commitMessage := bytes.NewBufferString("") if isAddCommand { commitMessage.WriteString("ADD") @@ -768,6 +790,7 @@ func dispatchCopyFileOp(d *dispatchState, c instructions.SourcesAndDest, sourceS } } else { opts := append([]llb.CopyOption{&llb.CopyInfo{ + Mode: mode, FollowSymlinks: true, CopyDirContentsOnly: true, AttemptUnpack: isAddCommand, @@ -796,7 +819,10 @@ func dispatchCopyFileOp(d *dispatchState, c instructions.SourcesAndDest, sourceS return err } - fileOpt := []llb.ConstraintsOpt{llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, cmdToPrint.String(), env)), d.prefixPlatform, &platform))} + fileOpt := []llb.ConstraintsOpt{ + llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, cmdToPrint.String(), env)), d.prefixPlatform, &platform)), + location(opt.sourceMap, loc), + } if d.ignoreCache { fileOpt = append(fileOpt, llb.IgnoreCache) } @@ -805,9 +831,16 @@ func dispatchCopyFileOp(d *dispatchState, c instructions.SourcesAndDest, sourceS return commitToHistory(&d.image, commitMessage.String(), true, &d.state) } -func dispatchCopy(d *dispatchState, c instructions.SourcesAndDest, sourceState llb.State, isAddCommand bool, cmdToPrint fmt.Stringer, chown string, opt dispatchOpt) error { +func dispatchCopy(d *dispatchState, c instructions.SourcesAndDest, sourceState llb.State, isAddCommand bool, cmdToPrint fmt.Stringer, chown string, chmod string, loc []parser.Range, opt dispatchOpt) error { if useFileOp(opt.buildArgValues, opt.llbCaps) { - return dispatchCopyFileOp(d, c, sourceState, isAddCommand, cmdToPrint, chown, opt) + return dispatchCopyFileOp(d, c, sourceState, isAddCommand, cmdToPrint, chown, chmod, loc, opt) + } + + if chmod != "" { + if opt.llbCaps != nil && opt.llbCaps.Supports(pb.CapFileBase) != nil { + return errors.Wrap(opt.llbCaps.Supports(pb.CapFileBase), "chmod is not supported") + } + return errors.New("chmod is not supported") } img := llb.Image(opt.copyImage, llb.MarkImageInternal, llb.Platform(opt.buildPlatforms[0]), WithInternalName("helper image for file operations")) @@ -893,7 +926,14 @@ func dispatchCopy(d *dispatchState, c instructions.SourcesAndDest, sourceState l return err } - runOpt := []llb.RunOption{llb.Args(args), llb.Dir("/dest"), llb.ReadonlyRootFS(), dfCmd(cmdToPrint), llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, cmdToPrint.String(), env)), d.prefixPlatform, &platform))} + runOpt := []llb.RunOption{ + llb.Args(args), + llb.Dir("/dest"), + llb.ReadonlyRootFS(), + dfCmd(cmdToPrint), + 
llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, cmdToPrint.String(), env)), d.prefixPlatform, &platform)), + location(opt.sourceMap, loc), + } if d.ignoreCache { runOpt = append(runOpt, llb.IgnoreCache) } @@ -1361,3 +1401,20 @@ func useFileOp(args map[string]string, caps *apicaps.CapSet) bool { } return enabled && caps != nil && caps.Supports(pb.CapFileBase) == nil } + +func location(sm *llb.SourceMap, locations []parser.Range) llb.ConstraintsOpt { + loc := make([]*pb.Range, 0, len(locations)) + for _, l := range locations { + loc = append(loc, &pb.Range{ + Start: pb.Position{ + Line: int32(l.Start.Line), + Character: int32(l.Start.Character), + }, + End: pb.Position{ + Line: int32(l.End.Line), + Character: int32(l.End.Character), + }, + }) + } + return sm.Location(loc) +} diff --git a/vendor/github.com/moby/buildkit/frontend/dockerfile/dockerfile2llb/directives.go b/vendor/github.com/moby/buildkit/frontend/dockerfile/dockerfile2llb/directives.go index cf06b5ad85..3cf982b9a9 100644 --- a/vendor/github.com/moby/buildkit/frontend/dockerfile/dockerfile2llb/directives.go +++ b/vendor/github.com/moby/buildkit/frontend/dockerfile/dockerfile2llb/directives.go @@ -5,34 +5,51 @@ import ( "io" "regexp" "strings" + + "github.com/moby/buildkit/frontend/dockerfile/parser" ) const keySyntax = "syntax" var reDirective = regexp.MustCompile(`^#\s*([a-zA-Z][a-zA-Z0-9]*)\s*=\s*(.+?)\s*$`) -func DetectSyntax(r io.Reader) (string, string, bool) { +type Directive struct { + Name string + Value string + Location []parser.Range +} + +func DetectSyntax(r io.Reader) (string, string, []parser.Range, bool) { directives := ParseDirectives(r) if len(directives) == 0 { - return "", "", false + return "", "", nil, false } v, ok := directives[keySyntax] if !ok { - return "", "", false + return "", "", nil, false } - p := strings.SplitN(v, " ", 2) - return p[0], v, true + p := strings.SplitN(v.Value, " ", 2) + return p[0], v.Value, v.Location, true } -func ParseDirectives(r io.Reader) map[string]string { - m := map[string]string{} +func ParseDirectives(r io.Reader) map[string]Directive { + m := map[string]Directive{} s := bufio.NewScanner(r) + var l int for s.Scan() { + l++ match := reDirective.FindStringSubmatch(s.Text()) if len(match) == 0 { return m } - m[strings.ToLower(match[1])] = match[2] + m[strings.ToLower(match[1])] = Directive{ + Name: match[1], + Value: match[2], + Location: []parser.Range{{ + Start: parser.Position{Line: l}, + End: parser.Position{Line: l}, + }}, + } } return m } diff --git a/vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/commands.go b/vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/commands.go index ed96d7e075..4a4146a433 100644 --- a/vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/commands.go +++ b/vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/commands.go @@ -1,11 +1,12 @@ package instructions import ( - "errors" "strings" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/strslice" + "github.com/moby/buildkit/frontend/dockerfile/parser" + "github.com/pkg/errors" ) // KeyValuePair represent an arbitrary named value (useful in slice instead of map[string] string to preserve ordering) @@ -35,6 +36,7 @@ func (kvpo *KeyValuePairOptional) ValueString() string { // Command is implemented by every command present in a dockerfile type Command interface { Name() string + Location() []parser.Range } // KeyValuePairs is a slice of KeyValuePair @@ -42,8 +44,9 @@ type 
KeyValuePairs []KeyValuePair // withNameAndCode is the base of every command in a Dockerfile (String() returns its source code) type withNameAndCode struct { - code string - name string + code string + name string + location []parser.Range } func (c *withNameAndCode) String() string { @@ -55,8 +58,13 @@ func (c *withNameAndCode) Name() string { return c.name } +// Location of the command in source +func (c *withNameAndCode) Location() []parser.Range { + return c.location +} + func newWithNameAndCode(req parseRequest) withNameAndCode { - return withNameAndCode{code: strings.TrimSpace(req.original), name: req.command} + return withNameAndCode{code: strings.TrimSpace(req.original), name: req.command, location: req.location} } // SingleWordExpander is a provider for variable expansion where 1 word => 1 output @@ -180,10 +188,16 @@ type AddCommand struct { withNameAndCode SourcesAndDest Chown string + Chmod string } // Expand variables func (c *AddCommand) Expand(expander SingleWordExpander) error { + expandedChown, err := expander(c.Chown) + if err != nil { + return err + } + c.Chown = expandedChown return expandSliceInPlace(c.SourcesAndDest, expander) } @@ -196,6 +210,7 @@ type CopyCommand struct { SourcesAndDest From string Chown string + Chmod string } // Expand variables @@ -400,6 +415,7 @@ type Stage struct { BaseName string SourceCode string Platform string + Location []parser.Range } // AddCommand to the stage @@ -419,7 +435,7 @@ func IsCurrentStage(s []Stage, name string) bool { // CurrentStage return the last stage in a slice func CurrentStage(s []Stage) (*Stage, error) { if len(s) == 0 { - return nil, errors.New("No build stage in current context") + return nil, errors.New("no build stage in current context") } return &s[len(s)-1], nil } diff --git a/vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/parse.go b/vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/parse.go index 5dee06f190..1be9d7b299 100644 --- a/vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/parse.go +++ b/vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/parse.go @@ -21,6 +21,7 @@ type parseRequest struct { attributes map[string]bool flags *BFlags original string + location []parser.Range } var parseRunPreHooks []func(*RunCommand, parseRequest) error @@ -48,11 +49,15 @@ func newParseRequestFromNode(node *parser.Node) parseRequest { attributes: node.Attributes, original: node.Original, flags: NewBFlagsWithArgs(node.Flags), + location: node.Location(), } } // ParseInstruction converts an AST to a typed instruction (either a command or a build stage beginning when encountering a `FROM` statement) -func ParseInstruction(node *parser.Node) (interface{}, error) { +func ParseInstruction(node *parser.Node) (v interface{}, err error) { + defer func() { + err = parser.WithLocation(err, node.Location()) + }() req := newParseRequestFromNode(node) switch node.Value { case command.Env: @@ -105,7 +110,7 @@ func ParseCommand(node *parser.Node) (Command, error) { if c, ok := s.(Command); ok { return c, nil } - return nil, errors.Errorf("%T is not a command type", s) + return nil, parser.WithLocation(errors.Errorf("%T is not a command type", s), node.Location()) } // UnknownInstruction represents an error occurring when a command is unresolvable @@ -118,25 +123,17 @@ func (e *UnknownInstruction) Error() string { return fmt.Sprintf("unknown instruction: %s", strings.ToUpper(e.Instruction)) } -// IsUnknownInstruction checks if the error is an UnknownInstruction or a parseError 
containing an UnknownInstruction
-func IsUnknownInstruction(err error) bool {
-	_, ok := err.(*UnknownInstruction)
-	if !ok {
-		var pe *parseError
-		if pe, ok = err.(*parseError); ok {
-			_, ok = pe.inner.(*UnknownInstruction)
-		}
-	}
-	return ok
-}
-
 type parseError struct {
 	inner error
 	node  *parser.Node
 }
 
 func (e *parseError) Error() string {
-	return fmt.Sprintf("Dockerfile parse error line %d: %v", e.node.StartLine, e.inner.Error())
+	return fmt.Sprintf("dockerfile parse error line %d: %v", e.node.StartLine, e.inner.Error())
+}
+
+func (e *parseError) Unwrap() error {
+	return e.inner
 }
 
 // Parse a Dockerfile into a collection of buildable stages.
@@ -160,11 +157,11 @@ func Parse(ast *parser.Node) (stages []Stage, metaArgs []ArgCommand, err error)
 		case Command:
 			stage, err := CurrentStage(stages)
 			if err != nil {
-				return nil, nil, err
+				return nil, nil, parser.WithLocation(err, n.Location())
 			}
 			stage.AddCommand(c)
 		default:
-			return nil, nil, errors.Errorf("%T is not a command type", cmd)
+			return nil, nil, parser.WithLocation(errors.Errorf("%T is not a command type", cmd), n.Location())
 		}
 	}
 
@@ -242,6 +239,7 @@ func parseAdd(req parseRequest) (*AddCommand, error) {
 		return nil, errNoDestinationArgument("ADD")
 	}
 	flChown := req.flags.AddString("chown", "")
+	flChmod := req.flags.AddString("chmod", "")
 	if err := req.flags.Parse(); err != nil {
 		return nil, err
 	}
@@ -249,6 +247,7 @@ func parseAdd(req parseRequest) (*AddCommand, error) {
 		SourcesAndDest: SourcesAndDest(req.args),
 		withNameAndCode: newWithNameAndCode(req),
 		Chown: flChown.Value,
+		Chmod: flChmod.Value,
 	}, nil
 }
 
@@ -258,6 +257,7 @@ func parseCopy(req parseRequest) (*CopyCommand, error) {
 	}
 	flChown := req.flags.AddString("chown", "")
 	flFrom := req.flags.AddString("from", "")
+	flChmod := req.flags.AddString("chmod", "")
 	if err := req.flags.Parse(); err != nil {
 		return nil, err
 	}
@@ -266,6 +266,7 @@ func parseCopy(req parseRequest) (*CopyCommand, error) {
 		From: flFrom.Value,
 		withNameAndCode: newWithNameAndCode(req),
 		Chown: flChown.Value,
+		Chmod: flChmod.Value,
 	}, nil
 }
 
@@ -287,6 +288,7 @@ func parseFrom(req parseRequest) (*Stage, error) {
 		SourceCode: code,
 		Commands: []Command{},
 		Platform: flPlatform.Value,
+		Location: req.location,
 	}, nil
 }
diff --git a/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/errors.go b/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/errors.go
new file mode 100644
index 0000000000..9f28a5a2e1
--- /dev/null
+++ b/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/errors.go
@@ -0,0 +1,58 @@
+package parser
+
+import (
+	"github.com/moby/buildkit/util/stack"
+	"github.com/pkg/errors"
+)
+
+// ErrorLocation gives a location in source code that caused the error
+type ErrorLocation struct {
+	Location []Range
+	error
+}
+
+// Unwrap unwraps to the next error
+func (e *ErrorLocation) Unwrap() error {
+	return e.error
+}
+
+// Range is a code section between two positions
+type Range struct {
+	Start Position
+	End Position
+}
+
+// Position is a point in source code
+type Position struct {
+	Line int
+	Character int
+}
+
+func withLocation(err error, start, end int) error {
+	return WithLocation(err, toRanges(start, end))
+}
+
+// WithLocation extends an error with a source code location
+func WithLocation(err error, location []Range) error {
+	if err == nil {
+		return nil
+	}
+	var el *ErrorLocation
+	if errors.As(err, &el) {
+		return err
+	}
+	return stack.Enable(&ErrorLocation{
+		error: err,
+		Location: location,
+	})
+}
+
+func toRanges(start, end int) (r []Range) {
+	if end <= start
{ + end = start + } + for i := start; i <= end; i++ { + r = append(r, Range{Start: Position{Line: i}, End: Position{Line: i}}) + } + return +} diff --git a/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/line_parsers.go b/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/line_parsers.go index 15f00ce792..441824c8b5 100644 --- a/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/line_parsers.go +++ b/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/line_parsers.go @@ -8,11 +8,12 @@ package parser import ( "encoding/json" - "errors" "fmt" "strings" "unicode" "unicode/utf8" + + "github.com/pkg/errors" ) var ( @@ -25,7 +26,7 @@ const ( // ignore the current argument. This will still leave a command parsed, but // will not incorporate the arguments into the ast. -func parseIgnore(rest string, d *Directive) (*Node, map[string]bool, error) { +func parseIgnore(rest string, d *directives) (*Node, map[string]bool, error) { return &Node{}, nil, nil } @@ -34,7 +35,7 @@ func parseIgnore(rest string, d *Directive) (*Node, map[string]bool, error) { // // ONBUILD RUN foo bar -> (onbuild (run foo bar)) // -func parseSubCommand(rest string, d *Directive) (*Node, map[string]bool, error) { +func parseSubCommand(rest string, d *directives) (*Node, map[string]bool, error) { if rest == "" { return nil, nil, nil } @@ -50,7 +51,7 @@ func parseSubCommand(rest string, d *Directive) (*Node, map[string]bool, error) // helper to parse words (i.e space delimited or quoted strings) in a statement. // The quotes are preserved as part of this function and they are stripped later // as part of processWords(). -func parseWords(rest string, d *Directive) []string { +func parseWords(rest string, d *directives) []string { const ( inSpaces = iota // looking for start of a word inWord @@ -137,7 +138,7 @@ func parseWords(rest string, d *Directive) []string { // parse environment like statements. Note that this does *not* handle // variable interpolation, which will be handled in the evaluator. -func parseNameVal(rest string, key string, d *Directive) (*Node, error) { +func parseNameVal(rest string, key string, d *directives) (*Node, error) { // This is kind of tricky because we need to support the old // variant: KEY name value // as well as the new one: KEY name=value ... @@ -151,7 +152,7 @@ func parseNameVal(rest string, key string, d *Directive) (*Node, error) { // Old format (KEY name value) if !strings.Contains(words[0], "=") { - parts := tokenWhitespace.Split(rest, 2) + parts := reWhitespace.Split(rest, 2) if len(parts) < 2 { return nil, fmt.Errorf(key + " must have two arguments") } @@ -192,12 +193,12 @@ func appendKeyValueNode(node, rootNode, prevNode *Node) (*Node, *Node) { return rootNode, prevNode } -func parseEnv(rest string, d *Directive) (*Node, map[string]bool, error) { +func parseEnv(rest string, d *directives) (*Node, map[string]bool, error) { node, err := parseNameVal(rest, "ENV", d) return node, nil, err } -func parseLabel(rest string, d *Directive) (*Node, map[string]bool, error) { +func parseLabel(rest string, d *directives) (*Node, map[string]bool, error) { node, err := parseNameVal(rest, commandLabel, d) return node, nil, err } @@ -210,7 +211,7 @@ func parseLabel(rest string, d *Directive) (*Node, map[string]bool, error) { // In addition, a keyword definition alone is of the form `keyword` like `name1` // above. And the assignments `name2=` and `name3=""` are equivalent and // assign an empty value to the respective keywords. 
-func parseNameOrNameVal(rest string, d *Directive) (*Node, map[string]bool, error) { +func parseNameOrNameVal(rest string, d *directives) (*Node, map[string]bool, error) { words := parseWords(rest, d) if len(words) == 0 { return nil, nil, nil @@ -236,7 +237,7 @@ func parseNameOrNameVal(rest string, d *Directive) (*Node, map[string]bool, erro // parses a whitespace-delimited set of arguments. The result is effectively a // linked list of string arguments. -func parseStringsWhitespaceDelimited(rest string, d *Directive) (*Node, map[string]bool, error) { +func parseStringsWhitespaceDelimited(rest string, d *directives) (*Node, map[string]bool, error) { if rest == "" { return nil, nil, nil } @@ -244,7 +245,7 @@ func parseStringsWhitespaceDelimited(rest string, d *Directive) (*Node, map[stri node := &Node{} rootnode := node prevnode := node - for _, str := range tokenWhitespace.Split(rest, -1) { // use regexp + for _, str := range reWhitespace.Split(rest, -1) { // use regexp prevnode = node node.Value = str node.Next = &Node{} @@ -260,7 +261,7 @@ func parseStringsWhitespaceDelimited(rest string, d *Directive) (*Node, map[stri } // parseString just wraps the string in quotes and returns a working node. -func parseString(rest string, d *Directive) (*Node, map[string]bool, error) { +func parseString(rest string, d *directives) (*Node, map[string]bool, error) { if rest == "" { return nil, nil, nil } @@ -270,7 +271,7 @@ func parseString(rest string, d *Directive) (*Node, map[string]bool, error) { } // parseJSON converts JSON arrays to an AST. -func parseJSON(rest string, d *Directive) (*Node, map[string]bool, error) { +func parseJSON(rest string, d *directives) (*Node, map[string]bool, error) { rest = strings.TrimLeftFunc(rest, unicode.IsSpace) if !strings.HasPrefix(rest, "[") { return nil, nil, fmt.Errorf(`Error parsing "%s" as a JSON array`, rest) @@ -303,7 +304,7 @@ func parseJSON(rest string, d *Directive) (*Node, map[string]bool, error) { // parseMaybeJSON determines if the argument appears to be a JSON array. If // so, passes to parseJSON; if not, quotes the result and returns a single // node. -func parseMaybeJSON(rest string, d *Directive) (*Node, map[string]bool, error) { +func parseMaybeJSON(rest string, d *directives) (*Node, map[string]bool, error) { if rest == "" { return nil, nil, nil } @@ -325,7 +326,7 @@ func parseMaybeJSON(rest string, d *Directive) (*Node, map[string]bool, error) { // parseMaybeJSONToList determines if the argument appears to be a JSON array. If // so, passes to parseJSON; if not, attempts to parse it as a whitespace // delimited string. -func parseMaybeJSONToList(rest string, d *Directive) (*Node, map[string]bool, error) { +func parseMaybeJSONToList(rest string, d *directives) (*Node, map[string]bool, error) { node, attrs, err := parseJSON(rest, d) if err == nil { @@ -339,7 +340,7 @@ func parseMaybeJSONToList(rest string, d *Directive) (*Node, map[string]bool, er } // The HEALTHCHECK command is like parseMaybeJSON, but has an extra type argument. 
-func parseHealthConfig(rest string, d *Directive) (*Node, map[string]bool, error) { +func parseHealthConfig(rest string, d *directives) (*Node, map[string]bool, error) { // Find end of first argument var sep int for ; sep < len(rest); sep++ { diff --git a/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/parser.go b/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/parser.go index e9268abb81..dc6d17848a 100644 --- a/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/parser.go +++ b/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/parser.go @@ -38,6 +38,11 @@ type Node struct { EndLine int // the line in the original dockerfile where the node ends } +// Location return the location of node in source code +func (node *Node) Location() []Range { + return toRanges(node.StartLine, node.EndLine) +} + // Dump dumps the AST defined by `node` as a list of sexps. // Returns a string suitable for printing. func (node *Node) Dump() string { @@ -79,28 +84,33 @@ func (node *Node) AddChild(child *Node, startLine, endLine int) { } var ( - dispatch map[string]func(string, *Directive) (*Node, map[string]bool, error) - tokenWhitespace = regexp.MustCompile(`[\t\v\f\r ]+`) - tokenEscapeCommand = regexp.MustCompile(`^#[ \t]*escape[ \t]*=[ \t]*(?P.).*$`) - tokenComment = regexp.MustCompile(`^#.*$`) + dispatch map[string]func(string, *directives) (*Node, map[string]bool, error) + reWhitespace = regexp.MustCompile(`[\t\v\f\r ]+`) + reDirectives = regexp.MustCompile(`^#\s*([a-zA-Z][a-zA-Z0-9]*)\s*=\s*(.+?)\s*$`) + reComment = regexp.MustCompile(`^#.*$`) ) // DefaultEscapeToken is the default escape token const DefaultEscapeToken = '\\' -// Directive is the structure used during a build run to hold the state of +var validDirectives = map[string]struct{}{ + "escape": {}, + "syntax": {}, +} + +// directive is the structure used during a build run to hold the state of // parsing directives. -type Directive struct { - escapeToken rune // Current escape token - lineContinuationRegex *regexp.Regexp // Current line continuation regex - processingComplete bool // Whether we are done looking for directives - escapeSeen bool // Whether the escape directive has been seen +type directives struct { + escapeToken rune // Current escape token + lineContinuationRegex *regexp.Regexp // Current line continuation regex + done bool // Whether we are done looking for directives + seen map[string]struct{} // Whether the escape directive has been seen } // setEscapeToken sets the default token for escaping characters in a Dockerfile. -func (d *Directive) setEscapeToken(s string) error { +func (d *directives) setEscapeToken(s string) error { if s != "`" && s != "\\" { - return fmt.Errorf("invalid ESCAPE '%s'. Must be ` or \\", s) + return errors.Errorf("invalid escape token '%s' does not match ` or \\", s) } d.escapeToken = rune(s[0]) d.lineContinuationRegex = regexp.MustCompile(`\` + s + `[ \t]*$`) @@ -110,33 +120,43 @@ func (d *Directive) setEscapeToken(s string) error { // possibleParserDirective looks for parser directives, eg '# escapeToken='. // Parser directives must precede any builder instruction or other comments, // and cannot be repeated. 
-func (d *Directive) possibleParserDirective(line string) error { - if d.processingComplete { +func (d *directives) possibleParserDirective(line string) error { + if d.done { return nil } - tecMatch := tokenEscapeCommand.FindStringSubmatch(strings.ToLower(line)) - if len(tecMatch) != 0 { - for i, n := range tokenEscapeCommand.SubexpNames() { - if n == "escapechar" { - if d.escapeSeen { - return errors.New("only one escape parser directive can be used") - } - d.escapeSeen = true - return d.setEscapeToken(tecMatch[i]) - } - } + match := reDirectives.FindStringSubmatch(line) + if len(match) == 0 { + d.done = true + return nil + } + + k := strings.ToLower(match[1]) + _, ok := validDirectives[k] + if !ok { + d.done = true + return nil + } + + if _, ok := d.seen[k]; ok { + return errors.Errorf("only one %s parser directive can be used", k) + } + d.seen[k] = struct{}{} + + if k == "escape" { + return d.setEscapeToken(match[2]) } - d.processingComplete = true return nil } -// NewDefaultDirective returns a new Directive with the default escapeToken token -func NewDefaultDirective() *Directive { - directive := Directive{} - directive.setEscapeToken(string(DefaultEscapeToken)) - return &directive +// newDefaultDirectives returns a new directives structure with the default escapeToken token +func newDefaultDirectives() *directives { + d := &directives{ + seen: map[string]struct{}{}, + } + d.setEscapeToken(string(DefaultEscapeToken)) + return d } func init() { @@ -146,7 +166,7 @@ func init() { // reformulating the arguments according to the rules in the parser // functions. Errors are propagated up by Parse() and the resulting AST can // be incorporated directly into the existing AST as a next. - dispatch = map[string]func(string, *Directive) (*Node, map[string]bool, error){ + dispatch = map[string]func(string, *directives) (*Node, map[string]bool, error){ command.Add: parseMaybeJSONToList, command.Arg: parseNameOrNameVal, command.Cmd: parseMaybeJSON, @@ -171,7 +191,7 @@ func init() { // newNodeFromLine splits the line into parts, and dispatches to a function // based on the command and command arguments. A Node is created from the // result of the dispatch. 
-func newNodeFromLine(line string, directive *Directive) (*Node, error) { +func newNodeFromLine(line string, d *directives) (*Node, error) { cmd, flags, args, err := splitCommand(line) if err != nil { return nil, err @@ -182,7 +202,7 @@ func newNodeFromLine(line string, directive *Directive) (*Node, error) { if fn == nil { fn = parseIgnore } - next, attrs, err := fn(args, directive) + next, attrs, err := fn(args, d) if err != nil { return nil, err } @@ -214,7 +234,7 @@ func (r *Result) PrintWarnings(out io.Writer) { // Parse reads lines from a Reader, parses the lines into an AST and returns // the AST and escape token func Parse(rwc io.Reader) (*Result, error) { - d := NewDefaultDirective() + d := newDefaultDirectives() currentLine := 0 root := &Node{StartLine: -1} scanner := bufio.NewScanner(rwc) @@ -229,7 +249,7 @@ func Parse(rwc io.Reader) (*Result, error) { } bytesRead, err = processLine(d, bytesRead, true) if err != nil { - return nil, err + return nil, withLocation(err, currentLine, 0) } currentLine++ @@ -243,7 +263,7 @@ func Parse(rwc io.Reader) (*Result, error) { for !isEndOfLine && scanner.Scan() { bytesRead, err := processLine(d, scanner.Bytes(), false) if err != nil { - return nil, err + return nil, withLocation(err, currentLine, 0) } currentLine++ @@ -267,7 +287,7 @@ func Parse(rwc io.Reader) (*Result, error) { child, err := newNodeFromLine(line, d) if err != nil { - return nil, err + return nil, withLocation(err, startLine, currentLine) } root.AddChild(child, startLine, currentLine) } @@ -277,18 +297,18 @@ func Parse(rwc io.Reader) (*Result, error) { } if root.StartLine < 0 { - return nil, errors.New("file with no instructions.") + return nil, withLocation(errors.New("file with no instructions"), currentLine, 0) } return &Result{ AST: root, Warnings: warnings, EscapeToken: d.escapeToken, - }, handleScannerError(scanner.Err()) + }, withLocation(handleScannerError(scanner.Err()), currentLine, 0) } func trimComments(src []byte) []byte { - return tokenComment.ReplaceAll(src, []byte{}) + return reComment.ReplaceAll(src, []byte{}) } func trimWhitespace(src []byte) []byte { @@ -296,7 +316,7 @@ func trimWhitespace(src []byte) []byte { } func isComment(line []byte) bool { - return tokenComment.Match(trimWhitespace(line)) + return reComment.Match(trimWhitespace(line)) } func isEmptyContinuationLine(line []byte) bool { @@ -305,7 +325,7 @@ func isEmptyContinuationLine(line []byte) bool { var utf8bom = []byte{0xEF, 0xBB, 0xBF} -func trimContinuationCharacter(line string, d *Directive) (string, bool) { +func trimContinuationCharacter(line string, d *directives) (string, bool) { if d.lineContinuationRegex.MatchString(line) { line = d.lineContinuationRegex.ReplaceAllString(line, "") return line, false @@ -315,7 +335,7 @@ func trimContinuationCharacter(line string, d *Directive) (string, bool) { // TODO: remove stripLeftWhitespace after deprecation period. It seems silly // to preserve whitespace on continuation lines. Why is that done? 
-func processLine(d *Directive, token []byte, stripLeftWhitespace bool) ([]byte, error) { +func processLine(d *directives, token []byte, stripLeftWhitespace bool) ([]byte, error) { if stripLeftWhitespace { token = trimWhitespace(token) } diff --git a/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/split_command.go b/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/split_command.go index 171f454f6d..3378167181 100644 --- a/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/split_command.go +++ b/vendor/github.com/moby/buildkit/frontend/dockerfile/parser/split_command.go @@ -12,7 +12,7 @@ func splitCommand(line string) (string, []string, string, error) { var flags []string // Make sure we get the same results irrespective of leading/trailing spaces - cmdline := tokenWhitespace.Split(strings.TrimSpace(line), 2) + cmdline := reWhitespace.Split(strings.TrimSpace(line), 2) cmd := strings.ToLower(cmdline[0]) if len(cmdline) == 2 { diff --git a/vendor/github.com/moby/buildkit/frontend/gateway/gateway.go b/vendor/github.com/moby/buildkit/frontend/gateway/gateway.go index a6c8b6be2f..f70fdbaa1b 100644 --- a/vendor/github.com/moby/buildkit/frontend/gateway/gateway.go +++ b/vendor/github.com/moby/buildkit/frontend/gateway/gateway.go @@ -13,6 +13,8 @@ import ( "time" "github.com/docker/distribution/reference" + gogotypes "github.com/gogo/protobuf/types" + "github.com/golang/protobuf/ptypes/any" apitypes "github.com/moby/buildkit/api/types" "github.com/moby/buildkit/cache" cacheutil "github.com/moby/buildkit/cache/util" @@ -27,6 +29,7 @@ import ( "github.com/moby/buildkit/solver" opspb "github.com/moby/buildkit/solver/pb" "github.com/moby/buildkit/util/apicaps" + "github.com/moby/buildkit/util/grpcerrors" "github.com/moby/buildkit/util/tracing" "github.com/moby/buildkit/worker" specs "github.com/opencontainers/image-spec/specs-go/v1" @@ -218,7 +221,7 @@ func (gf *gatewayFrontend) Solve(ctx context.Context, llbBridge frontend.Fronten err = llbBridge.Exec(ctx, meta, rootFS, lbf.Stdin, lbf.Stdout, os.Stderr) if err != nil { - if errors.Cause(err) == context.Canceled && lbf.isErrServerClosed { + if errors.Is(err, context.Canceled) && lbf.isErrServerClosed { err = errors.Errorf("frontend grpc server closed unexpectedly") } // An existing error (set via Return rpc) takes @@ -309,7 +312,7 @@ func NewBridgeForwarder(ctx context.Context, llbBridge frontend.FrontendLLBBridg func newLLBBridgeForwarder(ctx context.Context, llbBridge frontend.FrontendLLBBridge, workers frontend.WorkerInfos, inputs map[string]*opspb.Definition) (*llbBridgeForwarder, context.Context, error) { ctx, cancel := context.WithCancel(ctx) lbf := NewBridgeForwarder(ctx, llbBridge, workers, inputs) - server := grpc.NewServer() + server := grpc.NewServer(grpc.UnaryInterceptor(grpcerrors.UnaryServerInterceptor), grpc.StreamInterceptor(grpcerrors.StreamServerInterceptor)) grpc_health_v1.RegisterHealthServer(server, health.NewServer()) pb.RegisterLLBBridgeServer(server, lbf) @@ -472,7 +475,9 @@ func (lbf *llbBridgeForwarder) Solve(ctx context.Context, req *pb.SolveRequest) return nil, errors.Errorf("solve did not return default result") } - pbRes := &pb.Result{} + pbRes := &pb.Result{ + Metadata: res.Metadata, + } var defaultID string lbf.mu.Lock() @@ -668,11 +673,11 @@ func (lbf *llbBridgeForwarder) Ping(context.Context, *pb.PingRequest) (*pb.PongR func (lbf *llbBridgeForwarder) Return(ctx context.Context, in *pb.ReturnRequest) (*pb.ReturnResponse, error) { if in.Error != nil { - return lbf.setResult(nil, 
status.ErrorProto(&spb.Status{ + return lbf.setResult(nil, grpcerrors.FromGRPC(status.ErrorProto(&spb.Status{ Code: in.Error.Code, Message: in.Error.Message, - // Details: in.Error.Details, - })) + Details: convertGogoAny(in.Error.Details), + }))) } else { r := &frontend.Result{ Metadata: in.Result.Metadata, @@ -752,3 +757,11 @@ type markTypeFrontend struct{} func (*markTypeFrontend) SetImageOption(ii *llb.ImageInfo) { ii.RecordType = string(client.UsageRecordTypeFrontend) } + +func convertGogoAny(in []*gogotypes.Any) []*any.Any { + out := make([]*any.Any, len(in)) + for i := range in { + out[i] = &any.Any{TypeUrl: in[i].TypeUrl, Value: in[i].Value} + } + return out +} diff --git a/vendor/github.com/moby/buildkit/frontend/gateway/grpcclient/client.go b/vendor/github.com/moby/buildkit/frontend/gateway/grpcclient/client.go index 427758e66c..1ae9ef09c7 100644 --- a/vendor/github.com/moby/buildkit/frontend/gateway/grpcclient/client.go +++ b/vendor/github.com/moby/buildkit/frontend/gateway/grpcclient/client.go @@ -10,11 +10,14 @@ import ( "time" "github.com/gogo/googleapis/google/rpc" + gogotypes "github.com/gogo/protobuf/types" + "github.com/golang/protobuf/ptypes/any" "github.com/moby/buildkit/client/llb" "github.com/moby/buildkit/frontend/gateway/client" pb "github.com/moby/buildkit/frontend/gateway/pb" opspb "github.com/moby/buildkit/solver/pb" "github.com/moby/buildkit/util/apicaps" + "github.com/moby/buildkit/util/grpcerrors" digest "github.com/opencontainers/go-digest" "github.com/pkg/errors" fstypes "github.com/tonistiigi/fsutil/types" @@ -29,7 +32,7 @@ type GrpcClient interface { } func New(ctx context.Context, opts map[string]string, session, product string, c pb.LLBBridgeClient, w []client.WorkerInfo) (GrpcClient, error) { - ctx, cancel := context.WithTimeout(ctx, 5*time.Second) + ctx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() resp, err := c.Ping(ctx, &pb.PingRequest{}) if err != nil { @@ -150,12 +153,12 @@ func (c *grpcClient) Run(ctx context.Context, f client.BuildFunc) (retError erro } } if retError != nil { - st, _ := status.FromError(errors.Cause(retError)) + st, _ := status.FromError(grpcerrors.ToGRPC(retError)) stp := st.Proto() req.Error = &rpc.Status{ Code: stp.Code, Message: stp.Message, - // Details: stp.Details, + Details: convertToGogoAny(stp.Details), } } if _, err := c.client.Return(ctx, req); err != nil && retError == nil { @@ -503,7 +506,7 @@ func grpcClientConn(ctx context.Context) (context.Context, *grpc.ClientConn, err return stdioConn(), nil }) - cc, err := grpc.DialContext(ctx, "", dialOpt, grpc.WithInsecure()) + cc, err := grpc.DialContext(ctx, "", dialOpt, grpc.WithInsecure(), grpc.WithUnaryInterceptor(grpcerrors.UnaryClientInterceptor), grpc.WithStreamInterceptor(grpcerrors.StreamClientInterceptor)) if err != nil { return nil, nil, errors.Wrap(err, "failed to create grpc client") } @@ -589,3 +592,11 @@ func workers() []client.WorkerInfo { func product() string { return os.Getenv("BUILDKIT_EXPORTEDPRODUCT") } + +func convertToGogoAny(in []*any.Any) []*gogotypes.Any { + out := make([]*gogotypes.Any, len(in)) + for i := range in { + out[i] = &gogotypes.Any{TypeUrl: in[i].TypeUrl, Value: in[i].Value} + } + return out +} diff --git a/vendor/github.com/moby/buildkit/frontend/gateway/pb/caps.go b/vendor/github.com/moby/buildkit/frontend/gateway/pb/caps.go index 6e811d3d99..afb51cf243 100644 --- a/vendor/github.com/moby/buildkit/frontend/gateway/pb/caps.go +++ b/vendor/github.com/moby/buildkit/frontend/gateway/pb/caps.go @@ -32,6 +32,9 @@ 
const ( // CapFrontendInputs is a capability to request frontend inputs from the // LLBBridge GRPC server. CapFrontendInputs apicaps.CapID = "frontend.inputs" + + // CapGatewaySolveMetadata can be used to check if solve calls from gateway reliably return metadata + CapGatewaySolveMetadata apicaps.CapID = "gateway.solve.metadata" ) func init() { @@ -126,4 +129,11 @@ func init() { Enabled: true, Status: apicaps.CapStatusExperimental, }) + + Caps.Init(apicaps.Cap{ + ID: CapGatewaySolveMetadata, + Name: "gateway metadata", + Enabled: true, + Status: apicaps.CapStatusExperimental, + }) } diff --git a/vendor/github.com/moby/buildkit/frontend/gateway/pb/gateway.proto b/vendor/github.com/moby/buildkit/frontend/gateway/pb/gateway.proto index 4413147736..8b4725e2a6 100644 --- a/vendor/github.com/moby/buildkit/frontend/gateway/pb/gateway.proto +++ b/vendor/github.com/moby/buildkit/frontend/gateway/pb/gateway.proto @@ -32,7 +32,7 @@ service LLBBridge { message Result { oneof result { - // Deprecated non-array refs. + // Deprecated non-array refs. string refDeprecated = 1; RefMapDeprecated refsDeprecated = 2; @@ -67,7 +67,7 @@ message InputsRequest { } message InputsResponse { - map Definitions = 1; + map Definitions = 1; } message ResolveImageConfigRequest { @@ -87,9 +87,9 @@ message SolveRequest { string Frontend = 2; map FrontendOpt = 3; // ImportCacheRefsDeprecated is deprecated in favor or the new Imports since BuildKit v0.4.0. - // When ImportCacheRefsDeprecated is set, the solver appends - // {.Type = "registry", .Attrs = {"ref": importCacheRef}} - // for each of the ImportCacheRefs entry to CacheImports for compatibility. (planned to be removed) + // When ImportCacheRefsDeprecated is set, the solver appends + // {.Type = "registry", .Attrs = {"ref": importCacheRef}} + // for each of the ImportCacheRefs entry to CacheImports for compatibility. 
(planned to be removed) repeated string ImportCacheRefsDeprecated = 4; bool allowResultReturn = 5; bool allowResultArrayRef = 6; diff --git a/vendor/github.com/moby/buildkit/go.mod b/vendor/github.com/moby/buildkit/go.mod index d01fdcda88..f92a2b7ce7 100644 --- a/vendor/github.com/moby/buildkit/go.mod +++ b/vendor/github.com/moby/buildkit/go.mod @@ -3,24 +3,22 @@ module github.com/moby/buildkit go 1.13 require ( - github.com/AkihiroSuda/containerd-fuse-overlayfs v0.0.0-20200220082720-bb896865146c + github.com/AkihiroSuda/containerd-fuse-overlayfs v0.0.0-20200512015515-32086ef23a5a + github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 // indirect github.com/BurntSushi/toml v0.3.1 github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5 - github.com/Microsoft/hcsshim v0.8.7 // indirect github.com/apache/thrift v0.0.0-20161221203622-b2a4d4ae21c7 // indirect github.com/codahale/hdrhistogram v0.0.0-20160425231609-f8ad88b59a58 // indirect - github.com/containerd/cgroups v0.0.0-20200217135630-d732e370d46d // indirect - github.com/containerd/console v0.0.0-20191219165238-8375c3424e4d + github.com/containerd/cgroups v0.0.0-20200327175542-b44481373989 // indirect + github.com/containerd/console v1.0.0 github.com/containerd/containerd v1.4.0-0 - github.com/containerd/continuity v0.0.0-20200107194136-26c1120b8d41 - github.com/containerd/fifo v0.0.0-20191213151349-ff969a566b00 // indirect + github.com/containerd/continuity v0.0.0-20200413184840-d3ef23f19fbb + github.com/containerd/fifo v0.0.0-20200410184934-f15a3290365b // indirect github.com/containerd/go-cni v0.0.0-20200107172653-c154a49e2c75 github.com/containerd/go-runc v0.0.0-20200220073739-7016d3ce2328 - github.com/containerd/ttrpc v0.0.0-20200121165050-0be804eadb15 // indirect - github.com/containerd/typeurl v0.0.0-20200205145503-b45ef1f1f737 // indirect github.com/coreos/go-systemd/v22 v22.0.0 github.com/docker/cli v0.0.0-20200227165822-2298e6a3fe24 - github.com/docker/distribution v0.0.0-20200223014041-6b972e50feee + github.com/docker/distribution v2.7.1+incompatible github.com/docker/docker v0.0.0 github.com/docker/docker-credential-helpers v0.6.0 // indirect github.com/docker/go-connections v0.3.0 @@ -29,55 +27,52 @@ require ( github.com/gogo/googleapis v1.3.2 github.com/gogo/protobuf v1.3.1 github.com/golang/protobuf v1.3.3 - github.com/google/go-cmp v0.3.1 + github.com/google/go-cmp v0.4.0 github.com/google/shlex v0.0.0-20150127133951-6f45313302b9 github.com/google/uuid v1.1.1 // indirect + github.com/gorilla/mux v1.7.4 // indirect + github.com/grpc-ecosystem/go-grpc-middleware v1.2.0 github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645 github.com/hashicorp/go-immutable-radix v1.0.0 github.com/hashicorp/golang-lru v0.5.1 github.com/hashicorp/uuid v0.0.0-20160311170451-ebb0a03e909c // indirect - github.com/imdario/mergo v0.3.7 // indirect + github.com/imdario/mergo v0.3.9 // indirect github.com/ishidawataru/sctp v0.0.0-20191218070446-00ab2ac2db07 // indirect github.com/jaguilar/vt100 v0.0.0-20150826170717-2703a27b14ea - github.com/mitchellh/hashstructure v0.0.0-20170609045927-2bca23e0e452 + github.com/mitchellh/hashstructure v1.0.0 github.com/morikuni/aec v0.0.0-20170113033406-39771216ff4c - github.com/opencontainers/go-digest v1.0.0-rc1 + github.com/opencontainers/go-digest v1.0.0 github.com/opencontainers/image-spec v1.0.1 - github.com/opencontainers/runc v1.0.0-rc9.0.20200221051241-688cf6d43cc4 - github.com/opencontainers/runtime-spec v1.0.1 - github.com/opencontainers/selinux v1.3.2 // 
indirect + github.com/opencontainers/runc v1.0.0-rc10 + github.com/opencontainers/runtime-spec v1.0.2 + github.com/opencontainers/selinux v1.5.1 // indirect github.com/opentracing-contrib/go-stdlib v0.0.0-20171029140428-b1a47cfbdd75 - github.com/opentracing/opentracing-go v0.0.0-20171003133519-1361b9cd60be + github.com/opentracing/opentracing-go v1.1.0 github.com/pkg/errors v0.9.1 github.com/pkg/profile v1.2.1 github.com/serialx/hashring v0.0.0-20190422032157-8b2912629002 github.com/sirupsen/logrus v1.4.2 - github.com/stretchr/testify v1.4.0 + github.com/stretchr/testify v1.5.1 github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2 // indirect - github.com/tonistiigi/fsutil v0.0.0-20200326231323-c2c7d7b0e144 + github.com/tonistiigi/fsutil v0.0.0-20200512175118-ae3a8d753069 github.com/tonistiigi/units v0.0.0-20180711220420-6950e57a87ea - github.com/uber/jaeger-client-go v0.0.0-20180103221425-e02c85f9069e + github.com/uber/jaeger-client-go v2.11.2+incompatible github.com/uber/jaeger-lib v1.2.1 // indirect github.com/urfave/cli v1.22.2 - github.com/vishvananda/netlink v1.0.0 // indirect - github.com/vishvananda/netns v0.0.0-20180720170159-13995c7128cc // indirect + github.com/vishvananda/netlink v1.1.0 // indirect go.etcd.io/bbolt v1.3.3 golang.org/x/crypto v0.0.0-20200221231518-2aa609cf4a9d golang.org/x/net v0.0.0-20200226121028-0de0cce0169b golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e - golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae + golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 golang.org/x/time v0.0.0-20191024005414-555d28b269f0 google.golang.org/genproto v0.0.0-20200227132054-3f1135a288c9 google.golang.org/grpc v1.27.1 - gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect - gotest.tools v2.2.0+incompatible - gotest.tools/v3 v3.0.2 // indirect ) -replace github.com/hashicorp/go-immutable-radix => github.com/tonistiigi/go-immutable-radix v0.0.0-20170803185627-826af9ccf0fe - -replace github.com/jaguilar/vt100 => github.com/tonistiigi/vt100 v0.0.0-20190402012908-ad4c4a574305 - -replace github.com/containerd/containerd => github.com/containerd/containerd v1.3.1-0.20200227195959-4d242818bf55 - -replace github.com/docker/docker => github.com/docker/docker v1.4.2-0.20200227233006-38f52c9fec82 +replace ( + github.com/containerd/containerd => github.com/containerd/containerd v1.3.1-0.20200512144102-f13ba8f2f2fd + github.com/docker/docker => github.com/docker/docker v17.12.0-ce-rc1.0.20200310163718-4634ce647cf2+incompatible + github.com/hashicorp/go-immutable-radix => github.com/tonistiigi/go-immutable-radix v0.0.0-20170803185627-826af9ccf0fe + github.com/jaguilar/vt100 => github.com/tonistiigi/vt100 v0.0.0-20190402012908-ad4c4a574305 +) diff --git a/vendor/github.com/moby/buildkit/session/auth/auth.go b/vendor/github.com/moby/buildkit/session/auth/auth.go index 5717455f8e..d40dbce7f7 100644 --- a/vendor/github.com/moby/buildkit/session/auth/auth.go +++ b/vendor/github.com/moby/buildkit/session/auth/auth.go @@ -4,9 +4,8 @@ import ( "context" "github.com/moby/buildkit/session" - "github.com/pkg/errors" + "github.com/moby/buildkit/util/grpcerrors" "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" ) func CredentialsFunc(ctx context.Context, c session.Caller) func(string) (string, string, error) { @@ -17,10 +16,10 @@ func CredentialsFunc(ctx context.Context, c session.Caller) func(string) (string Host: host, }) if err != nil { - if st, ok := status.FromError(errors.Cause(err)); ok && st.Code() == codes.Unimplemented { + if grpcerrors.Code(err) 
== codes.Unimplemented { return "", "", nil } - return "", "", errors.WithStack(err) + return "", "", err } return resp.Username, resp.Secret, nil } diff --git a/vendor/github.com/moby/buildkit/session/filesync/diffcopy.go b/vendor/github.com/moby/buildkit/session/filesync/diffcopy.go index f1d7d78ee3..ac255e4e1a 100644 --- a/vendor/github.com/moby/buildkit/session/filesync/diffcopy.go +++ b/vendor/github.com/moby/buildkit/session/filesync/diffcopy.go @@ -41,7 +41,7 @@ type streamWriterCloser struct { func (wc *streamWriterCloser) Write(dt []byte) (int, error) { if err := wc.ClientStream.SendMsg(&BytesMessage{Data: dt}); err != nil { // SendMsg return EOF on remote errors - if errors.Cause(err) == io.EOF { + if errors.Is(err, io.EOF) { if err := errors.WithStack(wc.ClientStream.RecvMsg(struct{}{})); err != nil { return 0, err } @@ -105,7 +105,7 @@ func writeTargetFile(ds grpc.Stream, wc io.WriteCloser) error { for { bm := BytesMessage{} if err := ds.RecvMsg(&bm); err != nil { - if errors.Cause(err) == io.EOF { + if errors.Is(err, io.EOF) { return nil } return errors.WithStack(err) diff --git a/vendor/github.com/moby/buildkit/session/filesync/filesync.go b/vendor/github.com/moby/buildkit/session/filesync/filesync.go index a45abe02e7..51dd3c5383 100644 --- a/vendor/github.com/moby/buildkit/session/filesync/filesync.go +++ b/vendor/github.com/moby/buildkit/session/filesync/filesync.go @@ -255,7 +255,7 @@ func (sp *fsSyncTarget) Register(server *grpc.Server) { RegisterFileSendServer(server, sp) } -func (sp *fsSyncTarget) DiffCopy(stream FileSend_DiffCopyServer) error { +func (sp *fsSyncTarget) DiffCopy(stream FileSend_DiffCopyServer) (err error) { if sp.outdir != "" { return syncTargetDiffCopy(stream, sp.outdir) } @@ -277,7 +277,12 @@ func (sp *fsSyncTarget) DiffCopy(stream FileSend_DiffCopyServer) error { if wc == nil { return status.Errorf(codes.AlreadyExists, "target already exists") } - defer wc.Close() + defer func() { + err1 := wc.Close() + if err != nil { + err = err1 + } + }() return writeTargetFile(stream, wc) } diff --git a/vendor/github.com/moby/buildkit/session/grpc.go b/vendor/github.com/moby/buildkit/session/grpc.go index 2798b6abba..02870ca16d 100644 --- a/vendor/github.com/moby/buildkit/session/grpc.go +++ b/vendor/github.com/moby/buildkit/session/grpc.go @@ -6,7 +6,9 @@ import ( "sync/atomic" "time" + grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" "github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc" + "github.com/moby/buildkit/util/grpcerrors" opentracing "github.com/opentracing/opentracing-go" "github.com/pkg/errors" "github.com/sirupsen/logrus" @@ -25,6 +27,9 @@ func serve(ctx context.Context, grpcServer *grpc.Server, conn net.Conn) { } func grpcClientConn(ctx context.Context, conn net.Conn) (context.Context, *grpc.ClientConn, error) { + var unary []grpc.UnaryClientInterceptor + var stream []grpc.StreamClientInterceptor + var dialCount int64 dialer := grpc.WithDialer(func(addr string, d time.Duration) (net.Conn, error) { if c := atomic.AddInt64(&dialCount, 1); c > 1 { @@ -40,10 +45,23 @@ func grpcClientConn(ctx context.Context, conn net.Conn) (context.Context, *grpc. 
if span := opentracing.SpanFromContext(ctx); span != nil { tracer := span.Tracer() - dialOpts = append(dialOpts, - grpc.WithUnaryInterceptor(otgrpc.OpenTracingClientInterceptor(tracer, traceFilter())), - grpc.WithStreamInterceptor(otgrpc.OpenTracingStreamClientInterceptor(tracer, traceFilter())), - ) + unary = append(unary, otgrpc.OpenTracingClientInterceptor(tracer, traceFilter())) + stream = append(stream, otgrpc.OpenTracingStreamClientInterceptor(tracer, traceFilter())) + } + + unary = append(unary, grpcerrors.UnaryClientInterceptor) + stream = append(stream, grpcerrors.StreamClientInterceptor) + + if len(unary) == 1 { + dialOpts = append(dialOpts, grpc.WithUnaryInterceptor(unary[0])) + } else if len(unary) > 1 { + dialOpts = append(dialOpts, grpc.WithUnaryInterceptor(grpc_middleware.ChainUnaryClient(unary...))) + } + + if len(stream) == 1 { + dialOpts = append(dialOpts, grpc.WithStreamInterceptor(stream[0])) + } else if len(stream) > 1 { + dialOpts = append(dialOpts, grpc.WithStreamInterceptor(grpc_middleware.ChainStreamClient(stream...))) } cc, err := grpc.DialContext(ctx, "", dialOpts...) diff --git a/vendor/github.com/moby/buildkit/session/secrets/secrets.go b/vendor/github.com/moby/buildkit/session/secrets/secrets.go index 3f3bb64483..604199df8e 100644 --- a/vendor/github.com/moby/buildkit/session/secrets/secrets.go +++ b/vendor/github.com/moby/buildkit/session/secrets/secrets.go @@ -4,9 +4,9 @@ import ( "context" "github.com/moby/buildkit/session" + "github.com/moby/buildkit/util/grpcerrors" "github.com/pkg/errors" "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" ) type SecretStore interface { @@ -21,10 +21,10 @@ func GetSecret(ctx context.Context, c session.Caller, id string) ([]byte, error) ID: id, }) if err != nil { - if st, ok := status.FromError(errors.Cause(err)); ok && (st.Code() == codes.Unimplemented || st.Code() == codes.NotFound) { + if code := grpcerrors.Code(err); code == codes.Unimplemented || code == codes.NotFound { return nil, errors.Wrapf(ErrNotFound, "secret %s not found", id) } - return nil, errors.WithStack(err) + return nil, err } return resp.Data, nil } diff --git a/vendor/github.com/moby/buildkit/session/session.go b/vendor/github.com/moby/buildkit/session/session.go index 5d04738ef7..02c7420a8e 100644 --- a/vendor/github.com/moby/buildkit/session/session.go +++ b/vendor/github.com/moby/buildkit/session/session.go @@ -5,8 +5,10 @@ import ( "net" "strings" + grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" "github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc" "github.com/moby/buildkit/identity" + "github.com/moby/buildkit/util/grpcerrors" opentracing "github.com/opentracing/opentracing-go" "github.com/pkg/errors" "google.golang.org/grpc" @@ -45,13 +47,29 @@ type Session struct { func NewSession(ctx context.Context, name, sharedKey string) (*Session, error) { id := identity.NewID() + var unary []grpc.UnaryServerInterceptor + var stream []grpc.StreamServerInterceptor + serverOpts := []grpc.ServerOption{} if span := opentracing.SpanFromContext(ctx); span != nil { tracer := span.Tracer() - serverOpts = []grpc.ServerOption{ - grpc.StreamInterceptor(otgrpc.OpenTracingStreamServerInterceptor(span.Tracer(), traceFilter())), - grpc.UnaryInterceptor(otgrpc.OpenTracingServerInterceptor(tracer, traceFilter())), - } + unary = append(unary, otgrpc.OpenTracingServerInterceptor(tracer, traceFilter())) + stream = append(stream, otgrpc.OpenTracingStreamServerInterceptor(span.Tracer(), traceFilter())) + } + + unary = append(unary, 
grpcerrors.UnaryServerInterceptor) + stream = append(stream, grpcerrors.StreamServerInterceptor) + + if len(unary) == 1 { + serverOpts = append(serverOpts, grpc.UnaryInterceptor(unary[0])) + } else if len(unary) > 1 { + serverOpts = append(serverOpts, grpc.UnaryInterceptor(grpc_middleware.ChainUnaryServer(unary...))) + } + + if len(stream) == 1 { + serverOpts = append(serverOpts, grpc.StreamInterceptor(stream[0])) + } else if len(stream) > 1 { + serverOpts = append(serverOpts, grpc.StreamInterceptor(grpc_middleware.ChainStreamServer(stream...))) } s := &Session{ diff --git a/vendor/github.com/moby/buildkit/snapshot/containerd/content.go b/vendor/github.com/moby/buildkit/snapshot/containerd/content.go index 8e42d4257e..1be21e9004 100644 --- a/vendor/github.com/moby/buildkit/snapshot/containerd/content.go +++ b/vendor/github.com/moby/buildkit/snapshot/containerd/content.go @@ -5,7 +5,7 @@ import ( "github.com/containerd/containerd/content" "github.com/containerd/containerd/namespaces" - "github.com/opencontainers/go-digest" + digest "github.com/opencontainers/go-digest" ocispec "github.com/opencontainers/image-spec/specs-go/v1" "github.com/pkg/errors" ) diff --git a/vendor/github.com/moby/buildkit/solver/edge.go b/vendor/github.com/moby/buildkit/solver/edge.go index 0a076a2168..52613adbc5 100644 --- a/vendor/github.com/moby/buildkit/solver/edge.go +++ b/vendor/github.com/moby/buildkit/solver/edge.go @@ -26,12 +26,13 @@ func (t edgeStatusType) String() string { func newEdge(ed Edge, op activeOp, index *edgeIndex) *edge { e := &edge{ - edge: ed, - op: op, - depRequests: map[pipe.Receiver]*dep{}, - keyMap: map[string]struct{}{}, - cacheRecords: map[string]*CacheRecord{}, - index: index, + edge: ed, + op: op, + depRequests: map[pipe.Receiver]*dep{}, + keyMap: map[string]struct{}{}, + cacheRecords: map[string]*CacheRecord{}, + cacheRecordsLoaded: map[string]struct{}{}, + index: index, } return e } @@ -44,14 +45,16 @@ type edge struct { depRequests map[pipe.Receiver]*dep deps []*dep - cacheMapReq pipe.Receiver - cacheMapDone bool - cacheMapIndex int - cacheMapDigests []digest.Digest - execReq pipe.Receiver - err error - cacheRecords map[string]*CacheRecord - keyMap map[string]struct{} + cacheMapReq pipe.Receiver + cacheMapDone bool + cacheMapIndex int + cacheMapDigests []digest.Digest + execReq pipe.Receiver + execCacheLoad bool + err error + cacheRecords map[string]*CacheRecord + cacheRecordsLoaded map[string]struct{} + keyMap map[string]struct{} noCacheMatchPossible bool allDepsCompletedCacheFast bool @@ -425,7 +428,11 @@ func (e *edge) processUpdate(upt pipe.Receiver) (depChanged bool) { if upt == e.execReq && upt.Status().Completed { if err := upt.Status().Err; err != nil { e.execReq = nil - if !upt.Status().Canceled && e.err == nil { + if e.execCacheLoad { + for k := range e.cacheRecordsLoaded { + delete(e.cacheRecords, k) + } + } else if !upt.Status().Canceled && e.err == nil { e.err = err } } else { @@ -561,7 +568,9 @@ func (e *edge) recalcCurrentState() { } for _, r := range records { - e.cacheRecords[r.ID] = r + if _, ok := e.cacheRecordsLoaded[r.ID]; !ok { + e.cacheRecords[r.ID] = r + } } e.keys = append(e.keys, e.makeExportable(mergedKey, records)) @@ -821,6 +830,7 @@ func (e *edge) execIfPossible(f *pipeFactory) bool { return true } e.execReq = f.NewFuncRequest(e.loadCache) + e.execCacheLoad = true for req := range e.depRequests { req.Cancel() } @@ -831,6 +841,7 @@ func (e *edge) execIfPossible(f *pipeFactory) bool { return true } e.execReq = f.NewFuncRequest(e.execOp) + 
e.execCacheLoad = false return true } return false @@ -851,6 +862,7 @@ func (e *edge) loadCache(ctx context.Context) (interface{}, error) { } rec := getBestResult(recs) + e.cacheRecordsLoaded[rec.ID] = struct{}{} logrus.Debugf("load cache for %s with %s", e.edge.Vertex.Name(), rec.ID) res, err := e.op.LoadCache(ctx, rec) diff --git a/vendor/github.com/moby/buildkit/solver/errdefs/errdefs.pb.go b/vendor/github.com/moby/buildkit/solver/errdefs/errdefs.pb.go new file mode 100644 index 0000000000..f179e9958a --- /dev/null +++ b/vendor/github.com/moby/buildkit/solver/errdefs/errdefs.pb.go @@ -0,0 +1,129 @@ +// Code generated by protoc-gen-gogo. DO NOT EDIT. +// source: errdefs.proto + +package errdefs + +import ( + fmt "fmt" + proto "github.com/gogo/protobuf/proto" + pb "github.com/moby/buildkit/solver/pb" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package + +type Vertex struct { + Digest string `protobuf:"bytes,1,opt,name=digest,proto3" json:"digest,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Vertex) Reset() { *m = Vertex{} } +func (m *Vertex) String() string { return proto.CompactTextString(m) } +func (*Vertex) ProtoMessage() {} +func (*Vertex) Descriptor() ([]byte, []int) { + return fileDescriptor_689dc58a5060aff5, []int{0} +} +func (m *Vertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Vertex.Unmarshal(m, b) +} +func (m *Vertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Vertex.Marshal(b, m, deterministic) +} +func (m *Vertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_Vertex.Merge(m, src) +} +func (m *Vertex) XXX_Size() int { + return xxx_messageInfo_Vertex.Size(m) +} +func (m *Vertex) XXX_DiscardUnknown() { + xxx_messageInfo_Vertex.DiscardUnknown(m) +} + +var xxx_messageInfo_Vertex proto.InternalMessageInfo + +func (m *Vertex) GetDigest() string { + if m != nil { + return m.Digest + } + return "" +} + +type Source struct { + Info *pb.SourceInfo `protobuf:"bytes,1,opt,name=info,proto3" json:"info,omitempty"` + Ranges []*pb.Range `protobuf:"bytes,2,rep,name=ranges,proto3" json:"ranges,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Source) Reset() { *m = Source{} } +func (m *Source) String() string { return proto.CompactTextString(m) } +func (*Source) ProtoMessage() {} +func (*Source) Descriptor() ([]byte, []int) { + return fileDescriptor_689dc58a5060aff5, []int{1} +} +func (m *Source) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Source.Unmarshal(m, b) +} +func (m *Source) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Source.Marshal(b, m, deterministic) +} +func (m *Source) XXX_Merge(src proto.Message) { + xxx_messageInfo_Source.Merge(m, src) +} +func (m *Source) XXX_Size() int { + return xxx_messageInfo_Source.Size(m) +} +func (m *Source) XXX_DiscardUnknown() { + xxx_messageInfo_Source.DiscardUnknown(m) +} + +var xxx_messageInfo_Source proto.InternalMessageInfo + +func 
(m *Source) GetInfo() *pb.SourceInfo { + if m != nil { + return m.Info + } + return nil +} + +func (m *Source) GetRanges() []*pb.Range { + if m != nil { + return m.Ranges + } + return nil +} + +func init() { + proto.RegisterType((*Vertex)(nil), "errdefs.Vertex") + proto.RegisterType((*Source)(nil), "errdefs.Source") +} + +func init() { proto.RegisterFile("errdefs.proto", fileDescriptor_689dc58a5060aff5) } + +var fileDescriptor_689dc58a5060aff5 = []byte{ + // 177 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x2c, 0xcd, 0xc1, 0x8a, 0x83, 0x30, + 0x10, 0x80, 0x61, 0xdc, 0x5d, 0xb2, 0x18, 0xd9, 0x3d, 0xe4, 0x50, 0xa4, 0x27, 0xeb, 0xc9, 0x43, + 0x49, 0xc0, 0x3e, 0x45, 0x4f, 0x85, 0x14, 0x7a, 0x6f, 0x74, 0xb4, 0xa1, 0xea, 0x84, 0x49, 0x2c, + 0xed, 0xdb, 0x17, 0x6d, 0x8e, 0xff, 0x7c, 0x33, 0x0c, 0xff, 0x03, 0xa2, 0x16, 0x3a, 0x2f, 0x1d, + 0x61, 0x40, 0xf1, 0x1b, 0x73, 0xbb, 0xef, 0x6d, 0xb8, 0xcd, 0x46, 0x36, 0x38, 0xaa, 0x11, 0xcd, + 0x4b, 0x99, 0xd9, 0x0e, 0xed, 0xdd, 0x06, 0xe5, 0x71, 0x78, 0x00, 0x29, 0x67, 0x14, 0xba, 0x78, + 0x56, 0x16, 0x9c, 0x5d, 0x80, 0x02, 0x3c, 0xc5, 0x86, 0xb3, 0xd6, 0xf6, 0xe0, 0x43, 0x9e, 0x14, + 0x49, 0x95, 0xea, 0x58, 0xe5, 0x89, 0xb3, 0x33, 0xce, 0xd4, 0x80, 0x28, 0xf9, 0x8f, 0x9d, 0x3a, + 0x5c, 0x3d, 0xab, 0xff, 0xa5, 0x33, 0xf2, 0x23, 0xc7, 0xa9, 0x43, 0xbd, 0x9a, 0xd8, 0x71, 0x46, + 0xd7, 0xa9, 0x07, 0x9f, 0x7f, 0x15, 0xdf, 0x55, 0x56, 0xa7, 0xcb, 0x96, 0x5e, 0x26, 0x3a, 0x82, + 0x61, 0xeb, 0xe7, 0xc3, 0x3b, 0x00, 0x00, 0xff, 0xff, 0x93, 0xb5, 0x8b, 0x2a, 0xc1, 0x00, 0x00, + 0x00, +} diff --git a/vendor/github.com/moby/buildkit/solver/errdefs/errdefs.proto b/vendor/github.com/moby/buildkit/solver/errdefs/errdefs.proto new file mode 100644 index 0000000000..7e808cbbb7 --- /dev/null +++ b/vendor/github.com/moby/buildkit/solver/errdefs/errdefs.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +package errdefs; + +import "github.com/moby/buildkit/solver/pb/ops.proto"; + +message Vertex { + string digest = 1; +} + +message Source { + pb.SourceInfo info = 1; + repeated pb.Range ranges = 2; +} diff --git a/vendor/github.com/moby/buildkit/solver/errdefs/generate.go b/vendor/github.com/moby/buildkit/solver/errdefs/generate.go new file mode 100644 index 0000000000..e0dc24cd3b --- /dev/null +++ b/vendor/github.com/moby/buildkit/solver/errdefs/generate.go @@ -0,0 +1,3 @@ +package errdefs + +//go:generate protoc -I=. -I=../../vendor/ -I=../../../../../ --gogo_out=. 
errdefs.proto diff --git a/vendor/github.com/moby/buildkit/solver/errdefs/source.go b/vendor/github.com/moby/buildkit/solver/errdefs/source.go new file mode 100644 index 0000000000..6c1f364957 --- /dev/null +++ b/vendor/github.com/moby/buildkit/solver/errdefs/source.go @@ -0,0 +1,128 @@ +package errdefs + +import ( + "fmt" + "io" + "strings" + + pb "github.com/moby/buildkit/solver/pb" + "github.com/moby/buildkit/util/grpcerrors" + "github.com/pkg/errors" +) + +func WithSource(err error, src Source) error { + if err == nil { + return nil + } + return &ErrorSource{Source: src, error: err} +} + +type ErrorSource struct { + Source + error +} + +func (e *ErrorSource) Unwrap() error { + return e.error +} + +func (e *ErrorSource) ToProto() grpcerrors.TypedErrorProto { + return &e.Source +} + +func Sources(err error) []*Source { + var out []*Source + var es *ErrorSource + if errors.As(err, &es) { + out = Sources(es.Unwrap()) + out = append(out, &es.Source) + } + return out +} + +func (s *Source) WrapError(err error) error { + return &ErrorSource{error: err, Source: *s} +} + +func (s *Source) Print(w io.Writer) error { + si := s.Info + if si == nil { + return nil + } + lines := strings.Split(string(si.Data), "\n") + + start, end, ok := getStartEndLine(s.Ranges) + if !ok { + return nil + } + if start > len(lines) || start < 1 { + return nil + } + if end > len(lines) { + end = len(lines) + } + + pad := 2 + if end == start { + pad = 4 + } + var p int + + prepadStart := start + for { + if p >= pad { + break + } + if start > 1 { + start-- + p++ + } + if end != len(lines) { + end++ + p++ + } + p++ + } + + fmt.Fprintf(w, "%s:%d\n--------------------\n", si.Filename, prepadStart) + for i := start; i <= end; i++ { + pfx := " " + if containsLine(s.Ranges, i) { + pfx = ">>>" + } + fmt.Fprintf(w, " %3d | %s %s\n", i, pfx, lines[i-1]) + } + fmt.Fprintf(w, "--------------------\n") + return nil +} + +func containsLine(rr []*pb.Range, l int) bool { + for _, r := range rr { + e := r.End.Line + if e < r.Start.Line { + e = r.Start.Line + } + if r.Start.Line <= int32(l) && e >= int32(l) { + return true + } + } + return false +} + +func getStartEndLine(rr []*pb.Range) (start int, end int, ok bool) { + first := true + for _, r := range rr { + e := r.End.Line + if e < r.Start.Line { + e = r.Start.Line + } + if first || int(r.Start.Line) < start { + start = int(r.Start.Line) + } + if int(e) > end { + end = int(e) + } + first = false + } + return start, end, !first +} diff --git a/vendor/github.com/moby/buildkit/solver/errdefs/vertex.go b/vendor/github.com/moby/buildkit/solver/errdefs/vertex.go new file mode 100644 index 0000000000..71fdb6cab5 --- /dev/null +++ b/vendor/github.com/moby/buildkit/solver/errdefs/vertex.go @@ -0,0 +1,36 @@ +package errdefs + +import ( + proto "github.com/golang/protobuf/proto" + "github.com/moby/buildkit/util/grpcerrors" + digest "github.com/opencontainers/go-digest" +) + +func init() { + proto.RegisterType((*Vertex)(nil), "errdefs.Vertex") + proto.RegisterType((*Source)(nil), "errdefs.Source") +} + +type VertexError struct { + Vertex + error +} + +func (e *VertexError) Unwrap() error { + return e.error +} + +func (e *VertexError) ToProto() grpcerrors.TypedErrorProto { + return &e.Vertex +} + +func WrapVertex(err error, dgst digest.Digest) error { + if err == nil { + return nil + } + return &VertexError{Vertex: Vertex{Digest: dgst.String()}, error: err} +} + +func (v *Vertex) WrapError(err error) error { + return &VertexError{error: err, Vertex: *v} +} diff --git 
a/vendor/github.com/moby/buildkit/solver/internal/pipe/pipe.go b/vendor/github.com/moby/buildkit/solver/internal/pipe/pipe.go index 81702f5236..a1a857f398 100644 --- a/vendor/github.com/moby/buildkit/solver/internal/pipe/pipe.go +++ b/vendor/github.com/moby/buildkit/solver/internal/pipe/pipe.go @@ -159,7 +159,7 @@ func (pw *sender) Finalize(v interface{}, err error) { } pw.status.Err = err pw.status.Completed = true - if errors.Cause(err) == context.Canceled && pw.req.Canceled { + if errors.Is(err, context.Canceled) && pw.req.Canceled { pw.status.Canceled = true } pw.sendChannel.Send(pw.status) diff --git a/vendor/github.com/moby/buildkit/solver/jobs.go b/vendor/github.com/moby/buildkit/solver/jobs.go index 925ae5c50a..2889e14613 100644 --- a/vendor/github.com/moby/buildkit/solver/jobs.go +++ b/vendor/github.com/moby/buildkit/solver/jobs.go @@ -9,6 +9,7 @@ import ( "github.com/moby/buildkit/client" "github.com/moby/buildkit/session" + "github.com/moby/buildkit/solver/errdefs" "github.com/moby/buildkit/util/flightcontrol" "github.com/moby/buildkit/util/progress" "github.com/moby/buildkit/util/tracing" @@ -53,6 +54,7 @@ type state struct { vtx Vertex clientVertex client.Vertex + origDigest digest.Digest // original LLB digest. TODO: probably better to use string ID so this isn't needed mu sync.Mutex op *sharedOp @@ -318,6 +320,7 @@ func (jl *Solver) loadUnlocked(v, parent Vertex, j *Job, cache map[Vertex]Vertex mainCache: jl.opts.DefaultCache, cache: map[string]CacheManager{}, solver: jl, + origDigest: origVtx.Digest(), } jl.actives[dgst] = st } @@ -564,7 +567,10 @@ func (s *sharedOp) LoadCache(ctx context.Context, rec *CacheRecord) (Result, err return res, err } -func (s *sharedOp) CalcSlowCache(ctx context.Context, index Index, f ResultBasedCacheFunc, res Result) (digest.Digest, error) { +func (s *sharedOp) CalcSlowCache(ctx context.Context, index Index, f ResultBasedCacheFunc, res Result) (dgst digest.Digest, err error) { + defer func() { + err = errdefs.WrapVertex(err, s.st.origDigest) + }() key, err := s.g.Do(ctx, fmt.Sprintf("slow-compute-%d", index), func(ctx context.Context) (interface{}, error) { s.slowMu.Lock() // TODO: add helpers for these stored values @@ -609,7 +615,10 @@ func (s *sharedOp) CalcSlowCache(ctx context.Context, index Index, f ResultBased return key.(digest.Digest), nil } -func (s *sharedOp) CacheMap(ctx context.Context, index int) (*cacheMapResp, error) { +func (s *sharedOp) CacheMap(ctx context.Context, index int) (resp *cacheMapResp, err error) { + defer func() { + err = errdefs.WrapVertex(err, s.st.origDigest) + }() op, err := s.getOp() if err != nil { return nil, err @@ -665,6 +674,9 @@ func (s *sharedOp) CacheMap(ctx context.Context, index int) (*cacheMapResp, erro } func (s *sharedOp) Exec(ctx context.Context, inputs []Result) (outputs []Result, exporters []ExportableCacheKey, err error) { + defer func() { + err = errdefs.WrapVertex(err, s.st.origDigest) + }() op, err := s.getOp() if err != nil { return nil, nil, err diff --git a/vendor/github.com/moby/buildkit/solver/llbsolver/bridge.go b/vendor/github.com/moby/buildkit/solver/llbsolver/bridge.go index c2d9ad2b7c..5990c29e72 100644 --- a/vendor/github.com/moby/buildkit/solver/llbsolver/bridge.go +++ b/vendor/github.com/moby/buildkit/solver/llbsolver/bridge.go @@ -18,12 +18,12 @@ import ( gw "github.com/moby/buildkit/frontend/gateway/client" "github.com/moby/buildkit/session" "github.com/moby/buildkit/solver" + "github.com/moby/buildkit/solver/errdefs" "github.com/moby/buildkit/solver/pb" 
"github.com/moby/buildkit/util/flightcontrol" "github.com/moby/buildkit/util/tracing" "github.com/moby/buildkit/worker" digest "github.com/opencontainers/go-digest" - specs "github.com/opencontainers/image-spec/specs-go/v1" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -36,7 +36,6 @@ type llbBridge struct { resolveCacheImporterFuncs map[string]remotecache.ResolveCacheImporterFunc cms map[string]solver.CacheManager cmsMu sync.Mutex - platforms []specs.Platform sm *session.Manager } @@ -88,7 +87,7 @@ func (b *llbBridge) loadResult(ctx context.Context, def *pb.Definition, cacheImp } dpc := &detectPrunedCacheID{} - edge, err := Load(def, dpc.Load, ValidateEntitlements(ent), WithCacheSources(cms), RuntimePlatforms(b.platforms), WithValidateCaps()) + edge, err := Load(def, dpc.Load, ValidateEntitlements(ent), WithCacheSources(cms), NormalizeRuntimePlatforms(), WithValidateCaps()) if err != nil { return nil, errors.Wrap(err, "failed to load LLB") } @@ -182,7 +181,31 @@ func (rp *resultProxy) Release(ctx context.Context) error { return nil } -func (rp *resultProxy) Result(ctx context.Context) (solver.CachedResult, error) { +func (rp *resultProxy) wrapError(err error) error { + if err == nil { + return nil + } + var ve *errdefs.VertexError + if errors.As(err, &ve) { + if rp.def.Source != nil { + locs, ok := rp.def.Source.Locations[string(ve.Digest)] + if ok { + for _, loc := range locs.Locations { + err = errdefs.WithSource(err, errdefs.Source{ + Info: rp.def.Source.Infos[loc.SourceIndex], + Ranges: loc.Ranges, + }) + } + } + } + } + return err +} + +func (rp *resultProxy) Result(ctx context.Context) (res solver.CachedResult, err error) { + defer func() { + err = rp.wrapError(err) + }() r, err := rp.g.Do(ctx, "result", func(ctx context.Context) (interface{}, error) { rp.mu.Lock() if rp.released { diff --git a/vendor/github.com/moby/buildkit/solver/llbsolver/file/backend.go b/vendor/github.com/moby/buildkit/solver/llbsolver/file/backend.go index ce0c5aaf9e..a690012287 100644 --- a/vendor/github.com/moby/buildkit/solver/llbsolver/file/backend.go +++ b/vendor/github.com/moby/buildkit/solver/llbsolver/file/backend.go @@ -34,16 +34,17 @@ func mapUserToChowner(user *copy.User, idmap *idtools.IdentityMapping) (copy.Cho return nil, nil } old = ©.User{} // root - } - if idmap != nil { - identity, err := idmap.ToHost(idtools.Identity{ - UID: old.Uid, - GID: old.Gid, - }) - if err != nil { - return nil, err + // non-nil old is already mapped + if idmap != nil { + identity, err := idmap.ToHost(idtools.Identity{ + UID: old.Uid, + GID: old.Gid, + }) + if err != nil { + return nil, err + } + return ©.User{Uid: identity.UID, Gid: identity.GID}, nil } - return ©.User{Uid: identity.UID, Gid: identity.GID}, nil } return old, nil }, nil @@ -82,7 +83,7 @@ func mkdir(ctx context.Context, d string, action pb.FileActionMkDir, user *copy. 
} } else { if err := os.Mkdir(p, os.FileMode(action.Mode)&0777); err != nil { - if os.IsExist(err) { + if errors.Is(err, os.ErrExist) { return nil } return err @@ -151,7 +152,7 @@ func rmPath(root, src string, allowNotFound bool) error { } if err := os.RemoveAll(p); err != nil { - if os.IsNotExist(errors.Cause(err)) && allowNotFound { + if errors.Is(err, os.ErrNotExist) && allowNotFound { return nil } return err @@ -293,6 +294,7 @@ func (fb *Backend) Mkfile(ctx context.Context, m, user, group fileoptypes.Mount, return mkfile(ctx, dir, action, u, mnt.m.IdentityMapping()) } + func (fb *Backend) Rm(ctx context.Context, m fileoptypes.Mount, action pb.FileActionRm) error { mnt, ok := m.(*Mount) if !ok { @@ -308,6 +310,7 @@ func (fb *Backend) Rm(ctx context.Context, m fileoptypes.Mount, action pb.FileAc return rm(ctx, dir, action) } + func (fb *Backend) Copy(ctx context.Context, m1, m2, user, group fileoptypes.Mount, action pb.FileActionCopy) error { mnt1, ok := m1.(*Mount) if !ok { diff --git a/vendor/github.com/moby/buildkit/solver/llbsolver/ops/exec.go b/vendor/github.com/moby/buildkit/solver/llbsolver/ops/exec.go index 04355e5493..72f5358b12 100644 --- a/vendor/github.com/moby/buildkit/solver/llbsolver/ops/exec.go +++ b/vendor/github.com/moby/buildkit/solver/llbsolver/ops/exec.go @@ -31,6 +31,7 @@ import ( "github.com/moby/buildkit/solver" "github.com/moby/buildkit/solver/llbsolver" "github.com/moby/buildkit/solver/pb" + "github.com/moby/buildkit/util/grpcerrors" "github.com/moby/buildkit/util/progress/logs" utilsystem "github.com/moby/buildkit/util/system" "github.com/moby/buildkit/worker" @@ -41,7 +42,6 @@ import ( "github.com/sirupsen/logrus" bolt "go.etcd.io/bbolt" "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" ) const execCacheType = "buildkit.exec.v0" @@ -293,7 +293,7 @@ func (g *cacheRefGetter) getRefCacheDirNoCache(ctx context.Context, key string, if mRef, err := g.cm.GetMutable(ctx, si.ID()); err == nil { logrus.Debugf("reusing ref for cache dir: %s", mRef.ID()) return mRef, nil - } else if errors.Cause(err) == cache.ErrLocked { + } else if errors.Is(err, cache.ErrLocked) { locked = true } } @@ -349,7 +349,7 @@ func (e *execOp) getSSHMountable(ctx context.Context, m *pb.Mount) (cache.Mounta if m.SSHOpt.Optional { return nil, nil } - if st, ok := status.FromError(errors.Cause(err)); ok && st.Code() == codes.Unimplemented { + if grpcerrors.Code(err) == codes.Unimplemented { return nil, errors.Errorf("no SSH key %q forwarded from the client", m.SSHOpt.ID) } return nil, err @@ -447,7 +447,7 @@ func (e *execOp) getSecretMountable(ctx context.Context, m *pb.Mount) (cache.Mou dt, err := secrets.GetSecret(ctx, caller, id) if err != nil { - if errors.Cause(err) == secrets.ErrNotFound && m.SecretOpt.Optional { + if errors.Is(err, secrets.ErrNotFound) && m.SecretOpt.Optional { return nil, nil } return nil, err @@ -708,6 +708,20 @@ func (e *execOp) Exec(ctx context.Context, inputs []solver.Result) ([]solver.Res return nil, err } + emu, err := getEmulator(e.platform, e.cm.IdentityMapping()) + if err == nil && emu != nil { + e.op.Meta.Args = append([]string{qemuMountName}, e.op.Meta.Args...) 
+ + mounts = append(mounts, executor.Mount{ + Readonly: true, + Src: emu, + Dest: qemuMountName, + }) + } + if err != nil { + logrus.Warn(err.Error()) // TODO: remove this with pull support + } + meta := executor.Meta{ Args: e.op.Meta.Args, Env: e.op.Meta.Env, diff --git a/vendor/github.com/moby/buildkit/solver/llbsolver/ops/exec_binfmt.go b/vendor/github.com/moby/buildkit/solver/llbsolver/ops/exec_binfmt.go new file mode 100644 index 0000000000..5eea8ff953 --- /dev/null +++ b/vendor/github.com/moby/buildkit/solver/llbsolver/ops/exec_binfmt.go @@ -0,0 +1,114 @@ +package ops + +import ( + "context" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + + "github.com/containerd/containerd/mount" + "github.com/containerd/containerd/platforms" + "github.com/docker/docker/pkg/idtools" + "github.com/moby/buildkit/snapshot" + "github.com/moby/buildkit/solver/pb" + "github.com/moby/buildkit/util/binfmt_misc" + specs "github.com/opencontainers/image-spec/specs-go/v1" + "github.com/pkg/errors" + copy "github.com/tonistiigi/fsutil/copy" +) + +const qemuMountName = "/dev/.buildkit_qemu_emulator" + +var qemuArchMap = map[string]string{ + "arm64": "aarch64", + "amd64": "x86_64", + "riscv64": "riscv64", + "arm": "arm", + "s390x": "s390x", + "ppc64le": "ppc64le", +} + +type emulator struct { + path string + idmap *idtools.IdentityMapping +} + +func (e *emulator) Mount(ctx context.Context, readonly bool) (snapshot.Mountable, error) { + return &staticEmulatorMount{path: e.path, idmap: e.idmap}, nil +} + +type staticEmulatorMount struct { + path string + idmap *idtools.IdentityMapping +} + +func (m *staticEmulatorMount) Mount() ([]mount.Mount, func() error, error) { + tmpdir, err := ioutil.TempDir("", "buildkit-qemu-emulator") + if err != nil { + return nil, nil, err + } + var ret bool + defer func() { + if !ret { + os.RemoveAll(tmpdir) + } + }() + + var uid, gid int + if m.idmap != nil { + root := m.idmap.RootPair() + uid = root.UID + gid = root.GID + } + if err := copy.Copy(context.TODO(), filepath.Dir(m.path), filepath.Base(m.path), tmpdir, qemuMountName, func(ci *copy.CopyInfo) { + m := 0555 + ci.Mode = &m + }, copy.WithChown(uid, gid)); err != nil { + return nil, nil, err + } + + ret = true + return []mount.Mount{{ + Type: "bind", + Source: filepath.Join(tmpdir, qemuMountName), + Options: []string{"ro", "bind"}, + }}, func() error { + return os.RemoveAll(tmpdir) + }, nil + +} +func (m *staticEmulatorMount) IdentityMapping() *idtools.IdentityMapping { + return m.idmap +} + +func getEmulator(p *pb.Platform, idmap *idtools.IdentityMapping) (*emulator, error) { + all := binfmt_misc.SupportedPlatforms(false) + m := make(map[string]struct{}, len(all)) + + for _, p := range all { + m[p] = struct{}{} + } + + pp := platforms.Normalize(specs.Platform{ + Architecture: p.Architecture, + OS: p.OS, + Variant: p.Variant, + }) + + if _, ok := m[platforms.Format(pp)]; ok { + return nil, nil + } + + a, ok := qemuArchMap[pp.Architecture] + if !ok { + a = pp.Architecture + } + + fn, err := exec.LookPath("buildkit-qemu-" + a) + if err != nil { + return nil, errors.Errorf("no emulator available for %v", pp.OS) + } + + return &emulator{path: fn}, nil +} diff --git a/vendor/github.com/moby/buildkit/solver/llbsolver/solver.go b/vendor/github.com/moby/buildkit/solver/llbsolver/solver.go index 640a973843..894e768380 100644 --- a/vendor/github.com/moby/buildkit/solver/llbsolver/solver.go +++ b/vendor/github.com/moby/buildkit/solver/llbsolver/solver.go @@ -20,7 +20,6 @@ import ( "github.com/moby/buildkit/util/progress" 
"github.com/moby/buildkit/worker" digest "github.com/opencontainers/go-digest" - specs "github.com/opencontainers/image-spec/specs-go/v1" "github.com/pkg/errors" "golang.org/x/sync/errgroup" ) @@ -43,7 +42,6 @@ type Solver struct { eachWorker func(func(worker.Worker) error) error frontends map[string]frontend.Frontend resolveCacheImporterFuncs map[string]remotecache.ResolveCacheImporterFunc - platforms []specs.Platform gatewayForwarder *controlgateway.GatewayForwarder sm *session.Manager entitlements []string @@ -61,13 +59,6 @@ func New(wc *worker.Controller, f map[string]frontend.Frontend, cache solver.Cac entitlements: ents, } - // executing is currently only allowed on default worker - w, err := wc.GetDefault() - if err != nil { - return nil, err - } - s.platforms = w.Platforms(false) - s.solver = solver.NewSolver(solver.SolverOpt{ ResolveOpFunc: s.resolver(), DefaultCache: cache, @@ -93,7 +84,6 @@ func (s *Solver) Bridge(b solver.Builder) frontend.FrontendLLBBridge { eachWorker: s.eachWorker, resolveCacheImporterFuncs: s.resolveCacheImporterFuncs, cms: map[string]solver.CacheManager{}, - platforms: s.platforms, sm: s.sm, } } diff --git a/vendor/github.com/moby/buildkit/solver/llbsolver/vertex.go b/vendor/github.com/moby/buildkit/solver/llbsolver/vertex.go index 2dbded12f5..f5aa0b9926 100644 --- a/vendor/github.com/moby/buildkit/solver/llbsolver/vertex.go +++ b/vendor/github.com/moby/buildkit/solver/llbsolver/vertex.go @@ -8,7 +8,6 @@ import ( "github.com/moby/buildkit/solver" "github.com/moby/buildkit/solver/pb" "github.com/moby/buildkit/source" - "github.com/moby/buildkit/util/binfmt_misc" "github.com/moby/buildkit/util/entitlements" digest "github.com/opencontainers/go-digest" specs "github.com/opencontainers/image-spec/specs-go/v1" @@ -69,12 +68,8 @@ func WithCacheSources(cms []solver.CacheManager) LoadOpt { } } -func RuntimePlatforms(p []specs.Platform) LoadOpt { +func NormalizeRuntimePlatforms() LoadOpt { var defaultPlatform *pb.Platform - pp := make([]specs.Platform, len(p)) - for i := range p { - pp[i] = platforms.Normalize(p[i]) - } return func(op *pb.Op, _ *pb.OpMetadata, opt *solver.VertexOptions) error { if op.Platform == nil { if defaultPlatform == nil { @@ -96,22 +91,6 @@ func RuntimePlatforms(p []specs.Platform) LoadOpt { Variant: normalizedPlatform.Variant, } - if _, ok := op.Op.(*pb.Op_Exec); ok { - var found bool - for _, pp := range pp { - if pp.OS == op.Platform.OS && pp.Architecture == op.Platform.Architecture && pp.Variant == op.Platform.Variant { - found = true - break - } - } - if !found { - if !binfmt_misc.Check(normalizedPlatform) { - return errors.Errorf("runtime execution on platform %s not supported", platforms.Format(specs.Platform{OS: op.Platform.OS, Architecture: op.Platform.Architecture, Variant: op.Platform.Variant})) - } else { - pp = append(pp, normalizedPlatform) - } - } - } return nil } } diff --git a/vendor/github.com/moby/buildkit/solver/pb/ops.pb.go b/vendor/github.com/moby/buildkit/solver/pb/ops.pb.go index bfab9e37bb..90e549d833 100644 --- a/vendor/github.com/moby/buildkit/solver/pb/ops.pb.go +++ b/vendor/github.com/moby/buildkit/solver/pb/ops.pb.go @@ -1054,6 +1054,300 @@ func (m *OpMetadata) GetCaps() map[github_com_moby_buildkit_util_apicaps.CapID]b return nil } +// Source is a source mapping description for a file +type Source struct { + Locations map[string]*Locations `protobuf:"bytes,1,rep,name=locations,proto3" json:"locations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Infos 
[]*SourceInfo `protobuf:"bytes,2,rep,name=infos,proto3" json:"infos,omitempty"` +} + +func (m *Source) Reset() { *m = Source{} } +func (m *Source) String() string { return proto.CompactTextString(m) } +func (*Source) ProtoMessage() {} +func (*Source) Descriptor() ([]byte, []int) { + return fileDescriptor_8de16154b2733812, []int{13} +} +func (m *Source) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Source) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *Source) XXX_Merge(src proto.Message) { + xxx_messageInfo_Source.Merge(m, src) +} +func (m *Source) XXX_Size() int { + return m.Size() +} +func (m *Source) XXX_DiscardUnknown() { + xxx_messageInfo_Source.DiscardUnknown(m) +} + +var xxx_messageInfo_Source proto.InternalMessageInfo + +func (m *Source) GetLocations() map[string]*Locations { + if m != nil { + return m.Locations + } + return nil +} + +func (m *Source) GetInfos() []*SourceInfo { + if m != nil { + return m.Infos + } + return nil +} + +// Locations is a list of ranges with a index to its source map. +type Locations struct { + Locations []*Location `protobuf:"bytes,1,rep,name=locations,proto3" json:"locations,omitempty"` +} + +func (m *Locations) Reset() { *m = Locations{} } +func (m *Locations) String() string { return proto.CompactTextString(m) } +func (*Locations) ProtoMessage() {} +func (*Locations) Descriptor() ([]byte, []int) { + return fileDescriptor_8de16154b2733812, []int{14} +} +func (m *Locations) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Locations) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *Locations) XXX_Merge(src proto.Message) { + xxx_messageInfo_Locations.Merge(m, src) +} +func (m *Locations) XXX_Size() int { + return m.Size() +} +func (m *Locations) XXX_DiscardUnknown() { + xxx_messageInfo_Locations.DiscardUnknown(m) +} + +var xxx_messageInfo_Locations proto.InternalMessageInfo + +func (m *Locations) GetLocations() []*Location { + if m != nil { + return m.Locations + } + return nil +} + +// Source info contains the shared metadata of a source mapping +type SourceInfo struct { + Filename string `protobuf:"bytes,1,opt,name=filename,proto3" json:"filename,omitempty"` + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + Definition *Definition `protobuf:"bytes,3,opt,name=definition,proto3" json:"definition,omitempty"` +} + +func (m *SourceInfo) Reset() { *m = SourceInfo{} } +func (m *SourceInfo) String() string { return proto.CompactTextString(m) } +func (*SourceInfo) ProtoMessage() {} +func (*SourceInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_8de16154b2733812, []int{15} +} +func (m *SourceInfo) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *SourceInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *SourceInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceInfo.Merge(m, src) +} +func (m *SourceInfo) XXX_Size() int { + return m.Size() +} +func (m *SourceInfo) XXX_DiscardUnknown() { + xxx_messageInfo_SourceInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceInfo proto.InternalMessageInfo + +func (m *SourceInfo) GetFilename() string 
{ + if m != nil { + return m.Filename + } + return "" +} + +func (m *SourceInfo) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +func (m *SourceInfo) GetDefinition() *Definition { + if m != nil { + return m.Definition + } + return nil +} + +// Location defines list of areas in to source file +type Location struct { + SourceIndex int32 `protobuf:"varint,1,opt,name=sourceIndex,proto3" json:"sourceIndex,omitempty"` + Ranges []*Range `protobuf:"bytes,2,rep,name=ranges,proto3" json:"ranges,omitempty"` +} + +func (m *Location) Reset() { *m = Location{} } +func (m *Location) String() string { return proto.CompactTextString(m) } +func (*Location) ProtoMessage() {} +func (*Location) Descriptor() ([]byte, []int) { + return fileDescriptor_8de16154b2733812, []int{16} +} +func (m *Location) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Location) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *Location) XXX_Merge(src proto.Message) { + xxx_messageInfo_Location.Merge(m, src) +} +func (m *Location) XXX_Size() int { + return m.Size() +} +func (m *Location) XXX_DiscardUnknown() { + xxx_messageInfo_Location.DiscardUnknown(m) +} + +var xxx_messageInfo_Location proto.InternalMessageInfo + +func (m *Location) GetSourceIndex() int32 { + if m != nil { + return m.SourceIndex + } + return 0 +} + +func (m *Location) GetRanges() []*Range { + if m != nil { + return m.Ranges + } + return nil +} + +// Range is an area in the source file +type Range struct { + Start Position `protobuf:"bytes,1,opt,name=start,proto3" json:"start"` + End Position `protobuf:"bytes,2,opt,name=end,proto3" json:"end"` +} + +func (m *Range) Reset() { *m = Range{} } +func (m *Range) String() string { return proto.CompactTextString(m) } +func (*Range) ProtoMessage() {} +func (*Range) Descriptor() ([]byte, []int) { + return fileDescriptor_8de16154b2733812, []int{17} +} +func (m *Range) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Range) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil +} +func (m *Range) XXX_Merge(src proto.Message) { + xxx_messageInfo_Range.Merge(m, src) +} +func (m *Range) XXX_Size() int { + return m.Size() +} +func (m *Range) XXX_DiscardUnknown() { + xxx_messageInfo_Range.DiscardUnknown(m) +} + +var xxx_messageInfo_Range proto.InternalMessageInfo + +func (m *Range) GetStart() Position { + if m != nil { + return m.Start + } + return Position{} +} + +func (m *Range) GetEnd() Position { + if m != nil { + return m.End + } + return Position{} +} + +// Position is single location in a source file +type Position struct { + Line int32 `protobuf:"varint,1,opt,name=Line,proto3" json:"Line,omitempty"` + Character int32 `protobuf:"varint,2,opt,name=Character,proto3" json:"Character,omitempty"` +} + +func (m *Position) Reset() { *m = Position{} } +func (m *Position) String() string { return proto.CompactTextString(m) } +func (*Position) ProtoMessage() {} +func (*Position) Descriptor() ([]byte, []int) { + return fileDescriptor_8de16154b2733812, []int{18} +} +func (m *Position) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + 
return b[:n], nil +} +func (m *Position) XXX_Merge(src proto.Message) { + xxx_messageInfo_Position.Merge(m, src) +} +func (m *Position) XXX_Size() int { + return m.Size() +} +func (m *Position) XXX_DiscardUnknown() { + xxx_messageInfo_Position.DiscardUnknown(m) +} + +var xxx_messageInfo_Position proto.InternalMessageInfo + +func (m *Position) GetLine() int32 { + if m != nil { + return m.Line + } + return 0 +} + +func (m *Position) GetCharacter() int32 { + if m != nil { + return m.Character + } + return 0 +} + type ExportCache struct { Value bool `protobuf:"varint,1,opt,name=Value,proto3" json:"Value,omitempty"` } @@ -1062,7 +1356,7 @@ func (m *ExportCache) Reset() { *m = ExportCache{} } func (m *ExportCache) String() string { return proto.CompactTextString(m) } func (*ExportCache) ProtoMessage() {} func (*ExportCache) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{13} + return fileDescriptor_8de16154b2733812, []int{19} } func (m *ExportCache) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1105,7 +1399,7 @@ func (m *ProxyEnv) Reset() { *m = ProxyEnv{} } func (m *ProxyEnv) String() string { return proto.CompactTextString(m) } func (*ProxyEnv) ProtoMessage() {} func (*ProxyEnv) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{14} + return fileDescriptor_8de16154b2733812, []int{20} } func (m *ProxyEnv) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1167,7 +1461,7 @@ func (m *WorkerConstraints) Reset() { *m = WorkerConstraints{} } func (m *WorkerConstraints) String() string { return proto.CompactTextString(m) } func (*WorkerConstraints) ProtoMessage() {} func (*WorkerConstraints) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{15} + return fileDescriptor_8de16154b2733812, []int{21} } func (m *WorkerConstraints) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1206,13 +1500,15 @@ type Definition struct { // metadata contains metadata for the each of the Op messages. // A key must be an LLB op digest string. Currently, empty string is not expected as a key, but it may change in the future. 
Metadata map[github_com_opencontainers_go_digest.Digest]OpMetadata `protobuf:"bytes,2,rep,name=metadata,proto3,castkey=github.com/opencontainers/go-digest.Digest" json:"metadata" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Source contains the source mapping information for the vertexes in the definition + Source *Source `protobuf:"bytes,3,opt,name=Source,proto3" json:"Source,omitempty"` } func (m *Definition) Reset() { *m = Definition{} } func (m *Definition) String() string { return proto.CompactTextString(m) } func (*Definition) ProtoMessage() {} func (*Definition) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{16} + return fileDescriptor_8de16154b2733812, []int{22} } func (m *Definition) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1251,6 +1547,13 @@ func (m *Definition) GetMetadata() map[github_com_opencontainers_go_digest.Diges return nil } +func (m *Definition) GetSource() *Source { + if m != nil { + return m.Source + } + return nil +} + type HostIP struct { Host string `protobuf:"bytes,1,opt,name=Host,proto3" json:"Host,omitempty"` IP string `protobuf:"bytes,2,opt,name=IP,proto3" json:"IP,omitempty"` @@ -1260,7 +1563,7 @@ func (m *HostIP) Reset() { *m = HostIP{} } func (m *HostIP) String() string { return proto.CompactTextString(m) } func (*HostIP) ProtoMessage() {} func (*HostIP) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{17} + return fileDescriptor_8de16154b2733812, []int{23} } func (m *HostIP) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1307,7 +1610,7 @@ func (m *FileOp) Reset() { *m = FileOp{} } func (m *FileOp) String() string { return proto.CompactTextString(m) } func (*FileOp) ProtoMessage() {} func (*FileOp) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{18} + return fileDescriptor_8de16154b2733812, []int{24} } func (m *FileOp) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1355,7 +1658,7 @@ func (m *FileAction) Reset() { *m = FileAction{} } func (m *FileAction) String() string { return proto.CompactTextString(m) } func (*FileAction) ProtoMessage() {} func (*FileAction) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{19} + return fileDescriptor_8de16154b2733812, []int{25} } func (m *FileAction) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1478,7 +1781,7 @@ func (m *FileActionCopy) Reset() { *m = FileActionCopy{} } func (m *FileActionCopy) String() string { return proto.CompactTextString(m) } func (*FileActionCopy) ProtoMessage() {} func (*FileActionCopy) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{20} + return fileDescriptor_8de16154b2733812, []int{26} } func (m *FileActionCopy) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1597,7 +1900,7 @@ func (m *FileActionMkFile) Reset() { *m = FileActionMkFile{} } func (m *FileActionMkFile) String() string { return proto.CompactTextString(m) } func (*FileActionMkFile) ProtoMessage() {} func (*FileActionMkFile) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{21} + return fileDescriptor_8de16154b2733812, []int{27} } func (m *FileActionMkFile) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1674,7 +1977,7 @@ func (m *FileActionMkDir) Reset() { *m = FileActionMkDir{} } func (m *FileActionMkDir) String() string { return proto.CompactTextString(m) } func (*FileActionMkDir) ProtoMessage() {} func (*FileActionMkDir) Descriptor() 
([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{22} + return fileDescriptor_8de16154b2733812, []int{28} } func (m *FileActionMkDir) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1747,7 +2050,7 @@ func (m *FileActionRm) Reset() { *m = FileActionRm{} } func (m *FileActionRm) String() string { return proto.CompactTextString(m) } func (*FileActionRm) ProtoMessage() {} func (*FileActionRm) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{23} + return fileDescriptor_8de16154b2733812, []int{29} } func (m *FileActionRm) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1802,7 +2105,7 @@ func (m *ChownOpt) Reset() { *m = ChownOpt{} } func (m *ChownOpt) String() string { return proto.CompactTextString(m) } func (*ChownOpt) ProtoMessage() {} func (*ChownOpt) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{24} + return fileDescriptor_8de16154b2733812, []int{30} } func (m *ChownOpt) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1852,7 +2155,7 @@ func (m *UserOpt) Reset() { *m = UserOpt{} } func (m *UserOpt) String() string { return proto.CompactTextString(m) } func (*UserOpt) ProtoMessage() {} func (*UserOpt) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{25} + return fileDescriptor_8de16154b2733812, []int{31} } func (m *UserOpt) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1931,7 +2234,7 @@ func (m *NamedUserOpt) Reset() { *m = NamedUserOpt{} } func (m *NamedUserOpt) String() string { return proto.CompactTextString(m) } func (*NamedUserOpt) ProtoMessage() {} func (*NamedUserOpt) Descriptor() ([]byte, []int) { - return fileDescriptor_8de16154b2733812, []int{26} + return fileDescriptor_8de16154b2733812, []int{32} } func (m *NamedUserOpt) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -1986,6 +2289,13 @@ func init() { proto.RegisterType((*OpMetadata)(nil), "pb.OpMetadata") proto.RegisterMapType((map[github_com_moby_buildkit_util_apicaps.CapID]bool)(nil), "pb.OpMetadata.CapsEntry") proto.RegisterMapType((map[string]string)(nil), "pb.OpMetadata.DescriptionEntry") + proto.RegisterType((*Source)(nil), "pb.Source") + proto.RegisterMapType((map[string]*Locations)(nil), "pb.Source.LocationsEntry") + proto.RegisterType((*Locations)(nil), "pb.Locations") + proto.RegisterType((*SourceInfo)(nil), "pb.SourceInfo") + proto.RegisterType((*Location)(nil), "pb.Location") + proto.RegisterType((*Range)(nil), "pb.Range") + proto.RegisterType((*Position)(nil), "pb.Position") proto.RegisterType((*ExportCache)(nil), "pb.ExportCache") proto.RegisterType((*ProxyEnv)(nil), "pb.ProxyEnv") proto.RegisterType((*WorkerConstraints)(nil), "pb.WorkerConstraints") @@ -2006,131 +2316,144 @@ func init() { func init() { proto.RegisterFile("ops.proto", fileDescriptor_8de16154b2733812) } var fileDescriptor_8de16154b2733812 = []byte{ - // 1978 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0x5f, 0x6f, 0x1b, 0xc7, - 0x11, 0xd7, 0x1d, 0xff, 0xde, 0x50, 0x92, 0xd9, 0x8d, 0x93, 0xb2, 0xaa, 0x2b, 0x29, 0x97, 0x34, - 0x90, 0x65, 0x5b, 0x02, 0x14, 0x20, 0x09, 0xf2, 0x50, 0x54, 0xfc, 0x63, 0x88, 0x49, 0x2c, 0x0a, - 0x4b, 0xdb, 0xe9, 0x9b, 0x71, 0xbc, 0x5b, 0x52, 0x07, 0xf2, 0x6e, 0x0f, 0x7b, 0x4b, 0x5b, 0x7c, - 0xe9, 0x83, 0x3f, 0x41, 0x80, 0x02, 0x7d, 0x6b, 0x81, 0xbe, 0x14, 0xe8, 0x87, 0xe8, 0x7b, 0x1e, - 0x83, 0xa2, 0x0f, 0x69, 0x1f, 0xd2, 0xc2, 0xfe, 0x22, 0xc5, 0xec, 0xee, 0xf1, 0x8e, 0xb4, 0x02, - 0xdb, 0x68, 0xd1, 
0x27, 0xce, 0xcd, 0xfc, 0x76, 0x76, 0x76, 0x66, 0x76, 0x66, 0x96, 0xe0, 0xf0, - 0x24, 0x3d, 0x4a, 0x04, 0x97, 0x9c, 0xd8, 0xc9, 0x68, 0xe7, 0xde, 0x24, 0x94, 0x97, 0xf3, 0xd1, - 0x91, 0xcf, 0xa3, 0xe3, 0x09, 0x9f, 0xf0, 0x63, 0x25, 0x1a, 0xcd, 0xc7, 0xea, 0x4b, 0x7d, 0x28, - 0x4a, 0x2f, 0x71, 0xff, 0x64, 0x83, 0x3d, 0x48, 0xc8, 0xfb, 0x50, 0x0d, 0xe3, 0x64, 0x2e, 0xd3, - 0x96, 0xb5, 0x5f, 0x3a, 0x68, 0x9c, 0x38, 0x47, 0xc9, 0xe8, 0xa8, 0x8f, 0x1c, 0x6a, 0x04, 0x64, - 0x1f, 0xca, 0xec, 0x8a, 0xf9, 0x2d, 0x7b, 0xdf, 0x3a, 0x68, 0x9c, 0x00, 0x02, 0x7a, 0x57, 0xcc, - 0x1f, 0x24, 0x67, 0x1b, 0x54, 0x49, 0xc8, 0x47, 0x50, 0x4d, 0xf9, 0x5c, 0xf8, 0xac, 0x55, 0x52, - 0x98, 0x4d, 0xc4, 0x0c, 0x15, 0x47, 0xa1, 0x8c, 0x14, 0x35, 0x8d, 0xc3, 0x19, 0x6b, 0x95, 0x73, - 0x4d, 0xf7, 0xc3, 0x99, 0xc6, 0x28, 0x09, 0xf9, 0x00, 0x2a, 0xa3, 0x79, 0x38, 0x0b, 0x5a, 0x15, - 0x05, 0x69, 0x20, 0xa4, 0x8d, 0x0c, 0x85, 0xd1, 0x32, 0x72, 0x00, 0xf5, 0x64, 0xe6, 0xc9, 0x31, - 0x17, 0x51, 0x0b, 0xf2, 0x0d, 0x2f, 0x0c, 0x8f, 0x2e, 0xa5, 0xe4, 0x53, 0x68, 0xf8, 0x3c, 0x4e, - 0xa5, 0xf0, 0xc2, 0x58, 0xa6, 0xad, 0x86, 0x02, 0xbf, 0x8b, 0xe0, 0xaf, 0xb9, 0x98, 0x32, 0xd1, - 0xc9, 0x85, 0xb4, 0x88, 0x6c, 0x97, 0xc1, 0xe6, 0x89, 0xfb, 0x7b, 0x0b, 0xea, 0x99, 0x56, 0xe2, - 0xc2, 0xe6, 0xa9, 0xf0, 0x2f, 0x43, 0xc9, 0x7c, 0x39, 0x17, 0xac, 0x65, 0xed, 0x5b, 0x07, 0x0e, - 0x5d, 0xe1, 0x91, 0x6d, 0xb0, 0x07, 0x43, 0xe5, 0x28, 0x87, 0xda, 0x83, 0x21, 0x69, 0x41, 0xed, - 0xb1, 0x27, 0x42, 0x2f, 0x96, 0xca, 0x33, 0x0e, 0xcd, 0x3e, 0xc9, 0x2d, 0x70, 0x06, 0xc3, 0xc7, - 0x4c, 0xa4, 0x21, 0x8f, 0x95, 0x3f, 0x1c, 0x9a, 0x33, 0xc8, 0x2e, 0xc0, 0x60, 0x78, 0x9f, 0x79, - 0xa8, 0x34, 0x6d, 0x55, 0xf6, 0x4b, 0x07, 0x0e, 0x2d, 0x70, 0xdc, 0xdf, 0x42, 0x45, 0xc5, 0x88, - 0x7c, 0x01, 0xd5, 0x20, 0x9c, 0xb0, 0x54, 0x6a, 0x73, 0xda, 0x27, 0xdf, 0xfe, 0xb0, 0xb7, 0xf1, - 0xcf, 0x1f, 0xf6, 0x0e, 0x0b, 0xc9, 0xc0, 0x13, 0x16, 0xfb, 0x3c, 0x96, 0x5e, 0x18, 0x33, 0x91, - 0x1e, 0x4f, 0xf8, 0x3d, 0xbd, 0xe4, 0xa8, 0xab, 0x7e, 0xa8, 0xd1, 0x40, 0x6e, 0x43, 0x25, 0x8c, - 0x03, 0x76, 0xa5, 0xec, 0x2f, 0xb5, 0xdf, 0x31, 0xaa, 0x1a, 0x83, 0xb9, 0x4c, 0xe6, 0xb2, 0x8f, - 0x22, 0xaa, 0x11, 0xee, 0x1f, 0x2d, 0xa8, 0xea, 0x1c, 0x20, 0xb7, 0xa0, 0x1c, 0x31, 0xe9, 0xa9, - 0xfd, 0x1b, 0x27, 0x75, 0xf4, 0xed, 0x03, 0x26, 0x3d, 0xaa, 0xb8, 0x98, 0x5e, 0x11, 0x9f, 0xa3, - 0xef, 0xed, 0x3c, 0xbd, 0x1e, 0x20, 0x87, 0x1a, 0x01, 0xf9, 0x25, 0xd4, 0x62, 0x26, 0x9f, 0x71, - 0x31, 0x55, 0x3e, 0xda, 0xd6, 0x41, 0x3f, 0x67, 0xf2, 0x01, 0x0f, 0x18, 0xcd, 0x64, 0xe4, 0x2e, - 0xd4, 0x53, 0xe6, 0xcf, 0x45, 0x28, 0x17, 0xca, 0x5f, 0xdb, 0x27, 0x4d, 0x95, 0x65, 0x86, 0xa7, - 0xc0, 0x4b, 0x84, 0xfb, 0x17, 0x0b, 0xca, 0x68, 0x06, 0x21, 0x50, 0xf6, 0xc4, 0x44, 0x67, 0xb7, - 0x43, 0x15, 0x4d, 0x9a, 0x50, 0x62, 0xf1, 0x53, 0x65, 0x91, 0x43, 0x91, 0x44, 0x8e, 0xff, 0x2c, - 0x30, 0x31, 0x42, 0x12, 0xd7, 0xcd, 0x53, 0x26, 0x4c, 0x68, 0x14, 0x4d, 0x6e, 0x83, 0x93, 0x08, - 0x7e, 0xb5, 0x78, 0x82, 0xab, 0x2b, 0x85, 0xc4, 0x43, 0x66, 0x2f, 0x7e, 0x4a, 0xeb, 0x89, 0xa1, - 0xc8, 0x21, 0x00, 0xbb, 0x92, 0xc2, 0x3b, 0xe3, 0xa9, 0x4c, 0x5b, 0x55, 0x75, 0x76, 0x95, 0xef, - 0xc8, 0xe8, 0x5f, 0xd0, 0x82, 0xd4, 0xfd, 0x9b, 0x0d, 0x15, 0xe5, 0x12, 0x72, 0x80, 0x11, 0x48, - 0xe6, 0x3a, 0x98, 0xa5, 0x36, 0x31, 0x11, 0x00, 0x15, 0xeb, 0x65, 0x00, 0x30, 0xee, 0x3b, 0xe8, - 0x8d, 0x19, 0xf3, 0x25, 0x17, 0x26, 0xdd, 0x96, 0xdf, 0x68, 0x7a, 0x80, 0x19, 0xa1, 0x4f, 0xa3, - 0x68, 0x72, 0x07, 0xaa, 0x5c, 0x85, 0x51, 0x1d, 0xe8, 0x47, 0x82, 0x6b, 0x20, 0xa8, 0x5c, 0x30, - 0x2f, 0xe0, 0xf1, 0x6c, 0xa1, 0x8e, 0x59, 
0xa7, 0xcb, 0x6f, 0x72, 0x07, 0x1c, 0x15, 0xb7, 0x87, - 0x8b, 0x84, 0xb5, 0xaa, 0x2a, 0x0e, 0x5b, 0xcb, 0x98, 0x22, 0x93, 0xe6, 0x72, 0xbc, 0xa8, 0xbe, - 0xe7, 0x5f, 0xb2, 0x41, 0x22, 0x5b, 0x37, 0x73, 0x7f, 0x75, 0x0c, 0x8f, 0x2e, 0xa5, 0xa8, 0x36, - 0x65, 0xbe, 0x60, 0x12, 0xa1, 0xef, 0x2a, 0xe8, 0x96, 0x09, 0xaf, 0x66, 0xd2, 0x5c, 0x4e, 0x5c, - 0xa8, 0x0e, 0x87, 0x67, 0x88, 0x7c, 0x2f, 0x2f, 0x24, 0x9a, 0x43, 0x8d, 0xc4, 0xed, 0x43, 0x3d, - 0xdb, 0x06, 0x6f, 0x65, 0xbf, 0x6b, 0xee, 0xab, 0xdd, 0xef, 0x92, 0x7b, 0x50, 0x4b, 0x2f, 0x3d, - 0x11, 0xc6, 0x13, 0xe5, 0xbb, 0xed, 0x93, 0x77, 0x96, 0x56, 0x0d, 0x35, 0x1f, 0x35, 0x65, 0x18, - 0x97, 0x83, 0xb3, 0x34, 0xe3, 0x15, 0x5d, 0x4d, 0x28, 0xcd, 0xc3, 0x40, 0xe9, 0xd9, 0xa2, 0x48, - 0x22, 0x67, 0x12, 0xea, 0x5c, 0xda, 0xa2, 0x48, 0x62, 0x40, 0x22, 0x1e, 0xe8, 0xb2, 0xb7, 0x45, - 0x15, 0x8d, 0x3e, 0xe6, 0x89, 0x0c, 0x79, 0xec, 0xcd, 0x32, 0x1f, 0x67, 0xdf, 0xee, 0x2c, 0x3b, - 0xdf, 0xff, 0x65, 0xb7, 0xdf, 0x59, 0x50, 0xcf, 0x6a, 0x35, 0x16, 0x9e, 0x30, 0x60, 0xb1, 0x0c, - 0xc7, 0x21, 0x13, 0x66, 0xe3, 0x02, 0x87, 0xdc, 0x83, 0x8a, 0x27, 0xa5, 0xc8, 0xae, 0xf3, 0x4f, - 0x8b, 0x85, 0xfe, 0xe8, 0x14, 0x25, 0xbd, 0x58, 0x8a, 0x05, 0xd5, 0xa8, 0x9d, 0xcf, 0x00, 0x72, - 0x26, 0xda, 0x3a, 0x65, 0x0b, 0xa3, 0x15, 0x49, 0x72, 0x13, 0x2a, 0x4f, 0xbd, 0xd9, 0x9c, 0x99, - 0x1c, 0xd6, 0x1f, 0x9f, 0xdb, 0x9f, 0x59, 0xee, 0x5f, 0x6d, 0xa8, 0x99, 0xc2, 0x4f, 0xee, 0x42, - 0x4d, 0x15, 0x7e, 0x63, 0xd1, 0xf5, 0x17, 0x23, 0x83, 0x90, 0xe3, 0x65, 0x47, 0x2b, 0xd8, 0x68, - 0x54, 0xe9, 0xce, 0x66, 0x6c, 0xcc, 0xfb, 0x5b, 0x29, 0x60, 0x63, 0xd3, 0xba, 0xb6, 0x11, 0xdd, - 0x65, 0xe3, 0x30, 0x0e, 0xd1, 0x3f, 0x14, 0x45, 0xe4, 0x6e, 0x76, 0xea, 0xb2, 0xd2, 0xf8, 0x5e, - 0x51, 0xe3, 0xab, 0x87, 0xee, 0x43, 0xa3, 0xb0, 0xcd, 0x35, 0xa7, 0xfe, 0xb0, 0x78, 0x6a, 0xb3, - 0xa5, 0x52, 0xa7, 0xfb, 0x6e, 0xee, 0x85, 0xff, 0xc2, 0x7f, 0x9f, 0x00, 0xe4, 0x2a, 0xdf, 0xbc, - 0xb0, 0xb8, 0xcf, 0x4b, 0x00, 0x83, 0x04, 0x4b, 0x67, 0xe0, 0xa9, 0xfa, 0xbd, 0x19, 0x4e, 0x62, - 0x2e, 0xd8, 0x13, 0x75, 0x55, 0xd5, 0xfa, 0x3a, 0x6d, 0x68, 0x9e, 0xba, 0x31, 0xe4, 0x14, 0x1a, - 0x01, 0x4b, 0x7d, 0x11, 0xaa, 0x84, 0x32, 0x4e, 0xdf, 0xc3, 0x33, 0xe5, 0x7a, 0x8e, 0xba, 0x39, - 0x42, 0xfb, 0xaa, 0xb8, 0x86, 0x9c, 0xc0, 0x26, 0xbb, 0x4a, 0xb8, 0x90, 0x66, 0x17, 0x3d, 0x1f, - 0xdc, 0xd0, 0x93, 0x06, 0xf2, 0xd5, 0x4e, 0xb4, 0xc1, 0xf2, 0x0f, 0xe2, 0x41, 0xd9, 0xf7, 0x12, - 0xdd, 0x1c, 0x1b, 0x27, 0xad, 0xb5, 0xfd, 0x3a, 0x5e, 0xa2, 0x9d, 0xd6, 0xfe, 0x18, 0xcf, 0xfa, - 0xfc, 0x5f, 0x7b, 0x77, 0x0a, 0x1d, 0x31, 0xe2, 0xa3, 0xc5, 0xb1, 0xca, 0x97, 0x69, 0x28, 0x8f, - 0xe7, 0x32, 0x9c, 0x1d, 0x7b, 0x49, 0x88, 0xea, 0x70, 0x61, 0xbf, 0x4b, 0x95, 0xea, 0x9d, 0x5f, - 0x41, 0x73, 0xdd, 0xee, 0xb7, 0x89, 0xc1, 0xce, 0xa7, 0xe0, 0x2c, 0xed, 0x78, 0xdd, 0xc2, 0x7a, - 0x31, 0x78, 0x1f, 0x40, 0xa3, 0x70, 0x6e, 0x04, 0x3e, 0x56, 0x40, 0xed, 0x7d, 0xfd, 0xe1, 0x3e, - 0xc7, 0xe1, 0x24, 0xeb, 0x37, 0xbf, 0x00, 0xb8, 0x94, 0x32, 0x79, 0xa2, 0x1a, 0x90, 0xd9, 0xc4, - 0x41, 0x8e, 0x42, 0x90, 0x3d, 0x68, 0xe0, 0x47, 0x6a, 0xe4, 0xda, 0x52, 0xb5, 0x22, 0xd5, 0x80, - 0x9f, 0x83, 0x33, 0x5e, 0x2e, 0xd7, 0x8d, 0xa3, 0x3e, 0xce, 0x56, 0xff, 0x0c, 0xea, 0x31, 0x37, - 0x32, 0xdd, 0x0f, 0x6b, 0x31, 0x57, 0x22, 0xf7, 0x0e, 0xfc, 0xe4, 0x95, 0x49, 0x8a, 0xbc, 0x07, - 0xd5, 0x71, 0x38, 0x93, 0xea, 0xba, 0x62, 0x8b, 0x35, 0x5f, 0xee, 0x3f, 0x2c, 0x80, 0xfc, 0x6a, - 0xa1, 0x47, 0xf0, 0xde, 0x21, 0x66, 0x53, 0xdf, 0xb3, 0x19, 0xd4, 0x23, 0x13, 0x41, 0x93, 0x47, - 0xb7, 0x56, 0xaf, 0xe3, 0x51, 0x16, 0x60, 0x1d, 0xdb, 0x13, 0x13, 
0xdb, 0xb7, 0x99, 0x76, 0x96, - 0x3b, 0xec, 0x7c, 0x09, 0x5b, 0x2b, 0xea, 0xde, 0xf0, 0xa6, 0xe6, 0x59, 0x56, 0x0c, 0xd9, 0x5d, - 0xa8, 0xea, 0xd6, 0x8e, 0xf5, 0x17, 0x29, 0xa3, 0x46, 0xd1, 0xaa, 0x8e, 0x5f, 0x64, 0x73, 0x61, - 0xff, 0xc2, 0x3d, 0x81, 0xaa, 0x1e, 0x7c, 0xc9, 0x01, 0xd4, 0x3c, 0x1f, 0x8f, 0x96, 0x95, 0xab, - 0xed, 0x6c, 0x2a, 0x3e, 0x55, 0x6c, 0x9a, 0x89, 0xdd, 0xbf, 0xdb, 0x00, 0x39, 0xff, 0x2d, 0x66, - 0x85, 0xcf, 0x61, 0x3b, 0x65, 0x3e, 0x8f, 0x03, 0x4f, 0x2c, 0x94, 0xd4, 0x0c, 0x78, 0xd7, 0x2d, - 0x59, 0x43, 0x16, 0xe6, 0x86, 0xd2, 0xeb, 0xe7, 0x86, 0x03, 0x28, 0xfb, 0x3c, 0x59, 0x98, 0xeb, - 0x4b, 0x56, 0x0f, 0xd2, 0xe1, 0xc9, 0x02, 0xc7, 0x7c, 0x44, 0x90, 0x23, 0xa8, 0x46, 0x53, 0xf5, - 0x14, 0xd0, 0x63, 0xd4, 0xcd, 0x55, 0xec, 0x83, 0x29, 0xd2, 0xf8, 0x70, 0xd0, 0x28, 0x72, 0x07, - 0x2a, 0xd1, 0x34, 0x08, 0x85, 0x9a, 0x38, 0x1a, 0xba, 0x5f, 0x17, 0xe1, 0xdd, 0x50, 0xe0, 0xf3, - 0x40, 0x61, 0x88, 0x0b, 0xb6, 0x88, 0x5a, 0x35, 0x85, 0x6c, 0xae, 0x79, 0x33, 0x3a, 0xdb, 0xa0, - 0xb6, 0x88, 0xda, 0x75, 0xa8, 0x6a, 0xbf, 0xba, 0x7f, 0x2e, 0xc1, 0xf6, 0xaa, 0x95, 0x98, 0x07, - 0xa9, 0xf0, 0xb3, 0x3c, 0x48, 0x85, 0xbf, 0x1c, 0xa9, 0xec, 0xc2, 0x48, 0xe5, 0x42, 0x85, 0x3f, - 0x8b, 0x99, 0x28, 0xbe, 0x79, 0x3a, 0x97, 0xfc, 0x59, 0x8c, 0xc3, 0x83, 0x16, 0xad, 0xf4, 0xe2, - 0x8a, 0xe9, 0xc5, 0x1f, 0xc2, 0xd6, 0x98, 0xcf, 0x66, 0xfc, 0xd9, 0x70, 0x11, 0xcd, 0xc2, 0x78, - 0x6a, 0x1a, 0xf2, 0x2a, 0x93, 0x1c, 0xc0, 0x8d, 0x20, 0x14, 0x68, 0x4e, 0x87, 0xc7, 0x92, 0xc5, - 0x6a, 0x8a, 0x44, 0xdc, 0x3a, 0x9b, 0x7c, 0x01, 0xfb, 0x9e, 0x94, 0x2c, 0x4a, 0xe4, 0xa3, 0x38, - 0xf1, 0xfc, 0x69, 0x97, 0xfb, 0xea, 0x3e, 0x46, 0x89, 0x27, 0xc3, 0x51, 0x38, 0xc3, 0x81, 0xb9, - 0xa6, 0x96, 0xbe, 0x16, 0x47, 0x3e, 0x82, 0x6d, 0x5f, 0x30, 0x4f, 0xb2, 0x2e, 0x4b, 0xe5, 0x85, - 0x27, 0x2f, 0x5b, 0x75, 0xb5, 0x72, 0x8d, 0x8b, 0x67, 0xf0, 0xd0, 0xda, 0xaf, 0xc3, 0x59, 0xe0, - 0x7b, 0x22, 0x68, 0x39, 0xfa, 0x0c, 0x2b, 0x4c, 0x72, 0x04, 0x44, 0x31, 0x7a, 0x51, 0x22, 0x17, - 0x4b, 0x28, 0x28, 0xe8, 0x35, 0x12, 0x7c, 0x13, 0xc9, 0x30, 0x62, 0xa9, 0xf4, 0xa2, 0x44, 0xbd, - 0xd5, 0x4a, 0x34, 0x67, 0xb8, 0xdf, 0x58, 0xd0, 0x5c, 0x4f, 0x11, 0x74, 0x70, 0x82, 0x66, 0x9a, - 0xcb, 0x86, 0xf4, 0xd2, 0xe9, 0x76, 0xc1, 0xe9, 0x18, 0x40, 0xac, 0x2a, 0x18, 0xab, 0x4d, 0xaa, - 0xe8, 0x3c, 0x80, 0xe5, 0x1f, 0x0f, 0xe0, 0x8a, 0x49, 0x95, 0x75, 0x93, 0xfe, 0x60, 0xc1, 0x8d, - 0xb5, 0x34, 0x7c, 0x63, 0x8b, 0xf6, 0xa1, 0x11, 0x79, 0x53, 0x76, 0xe1, 0x09, 0x15, 0xdc, 0x92, - 0x6e, 0xac, 0x05, 0xd6, 0xff, 0xc0, 0xbe, 0x18, 0x36, 0x8b, 0xb9, 0x7f, 0xad, 0x6d, 0x59, 0x28, - 0xcf, 0xb9, 0xbc, 0xcf, 0xe7, 0x71, 0x60, 0xba, 0xd1, 0x2a, 0xf3, 0xd5, 0x80, 0x97, 0xae, 0x09, - 0xb8, 0x7b, 0x0e, 0xf5, 0xcc, 0x40, 0xb2, 0x67, 0x1e, 0x50, 0x56, 0xfe, 0x90, 0x7f, 0x94, 0x32, - 0x81, 0xb6, 0xeb, 0xd7, 0xd4, 0xfb, 0x50, 0x99, 0x08, 0x3e, 0x4f, 0x4c, 0x6d, 0x5d, 0x41, 0x68, - 0x89, 0x3b, 0x84, 0x9a, 0xe1, 0x90, 0x43, 0xa8, 0x8e, 0x16, 0xe7, 0x5e, 0xc4, 0x8c, 0x42, 0x75, - 0xb1, 0xf1, 0x3b, 0x30, 0x08, 0xac, 0x16, 0x1a, 0x41, 0x6e, 0x42, 0x79, 0xb4, 0xe8, 0x77, 0xf5, - 0x98, 0x8c, 0x35, 0x07, 0xbf, 0xda, 0x55, 0x6d, 0x90, 0xfb, 0x15, 0x6c, 0x16, 0xd7, 0xa1, 0x53, - 0xe2, 0x4c, 0xaf, 0x43, 0x15, 0x9d, 0x17, 0x57, 0xfb, 0x35, 0xc5, 0xf5, 0xf0, 0x00, 0x6a, 0xe6, - 0xa9, 0x4a, 0x1c, 0xa8, 0x3c, 0x3a, 0x1f, 0xf6, 0x1e, 0x36, 0x37, 0x48, 0x1d, 0xca, 0x67, 0x83, - 0xe1, 0xc3, 0xa6, 0x85, 0xd4, 0xf9, 0xe0, 0xbc, 0xd7, 0xb4, 0x0f, 0x6f, 0xc3, 0x66, 0xf1, 0xb1, - 0x4a, 0x1a, 0x50, 0x1b, 0x9e, 0x9e, 0x77, 0xdb, 0x83, 0xdf, 0x34, 0x37, 0xc8, 0x26, 0xd4, 
0xfb, - 0xe7, 0xc3, 0x5e, 0xe7, 0x11, 0xed, 0x35, 0xad, 0xc3, 0x5f, 0x83, 0xb3, 0x7c, 0x4f, 0xa1, 0x86, - 0x76, 0xff, 0xbc, 0xdb, 0xdc, 0x20, 0x00, 0xd5, 0x61, 0xaf, 0x43, 0x7b, 0xa8, 0xb7, 0x06, 0xa5, - 0xe1, 0xf0, 0xac, 0x69, 0xe3, 0xae, 0x9d, 0xd3, 0xce, 0x59, 0xaf, 0x59, 0x42, 0xf2, 0xe1, 0x83, - 0x8b, 0xfb, 0xc3, 0x66, 0xf9, 0xf0, 0x13, 0xb8, 0xb1, 0xf6, 0x9e, 0x51, 0xab, 0xcf, 0x4e, 0x69, - 0x0f, 0x35, 0x35, 0xa0, 0x76, 0x41, 0xfb, 0x8f, 0x4f, 0x1f, 0xf6, 0x9a, 0x16, 0x0a, 0xbe, 0x1a, - 0x74, 0xbe, 0xec, 0x75, 0x9b, 0x76, 0xfb, 0xd6, 0xb7, 0x2f, 0x76, 0xad, 0xef, 0x5e, 0xec, 0x5a, - 0xdf, 0xbf, 0xd8, 0xb5, 0xfe, 0xfd, 0x62, 0xd7, 0xfa, 0xe6, 0xe5, 0xee, 0xc6, 0x77, 0x2f, 0x77, - 0x37, 0xbe, 0x7f, 0xb9, 0xbb, 0x31, 0xaa, 0xaa, 0xbf, 0x8e, 0x3e, 0xfe, 0x4f, 0x00, 0x00, 0x00, - 0xff, 0xff, 0x87, 0x95, 0x80, 0x20, 0x7a, 0x12, 0x00, 0x00, + // 2189 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0x4b, 0x6f, 0x1b, 0xc9, + 0xf1, 0x17, 0xdf, 0x64, 0x51, 0x92, 0xf9, 0xef, 0xf5, 0xee, 0x9f, 0xab, 0x38, 0x92, 0x76, 0xec, + 0x2c, 0x64, 0xd9, 0xa6, 0x00, 0x2d, 0xb0, 0x5e, 0x2c, 0x82, 0x20, 0xe2, 0xc3, 0x10, 0xd7, 0xb6, + 0x28, 0x34, 0xfd, 0xc8, 0xcd, 0x18, 0x0d, 0x9b, 0xd4, 0x40, 0xe4, 0xf4, 0xa0, 0xa7, 0x69, 0x8b, + 0x97, 0x1c, 0xfc, 0x09, 0x16, 0x08, 0x90, 0x5b, 0x02, 0xe4, 0x12, 0x20, 0xf7, 0x5c, 0x73, 0xdf, + 0xe3, 0x22, 0xc8, 0x61, 0x91, 0xc3, 0x26, 0xb0, 0x3f, 0x47, 0x80, 0xa0, 0xaa, 0x7b, 0x1e, 0x94, + 0x65, 0xd8, 0x46, 0x82, 0x9c, 0xd8, 0xfd, 0xab, 0x5f, 0x57, 0x57, 0x57, 0x55, 0xd7, 0x54, 0x13, + 0x6a, 0x32, 0x8c, 0x5a, 0xa1, 0x92, 0x5a, 0xb2, 0x7c, 0x78, 0xb2, 0x71, 0x67, 0xe2, 0xeb, 0xd3, + 0xf9, 0x49, 0xcb, 0x93, 0xb3, 0xbd, 0x89, 0x9c, 0xc8, 0x3d, 0x12, 0x9d, 0xcc, 0xc7, 0x34, 0xa3, + 0x09, 0x8d, 0xcc, 0x12, 0xe7, 0x0f, 0x79, 0xc8, 0x0f, 0x42, 0xf6, 0x19, 0x94, 0xfd, 0x20, 0x9c, + 0xeb, 0xa8, 0x99, 0xdb, 0x2e, 0xec, 0xd4, 0xf7, 0x6b, 0xad, 0xf0, 0xa4, 0xd5, 0x47, 0x84, 0x5b, + 0x01, 0xdb, 0x86, 0xa2, 0x38, 0x17, 0x5e, 0x33, 0xbf, 0x9d, 0xdb, 0xa9, 0xef, 0x03, 0x12, 0x7a, + 0xe7, 0xc2, 0x1b, 0x84, 0x87, 0x2b, 0x9c, 0x24, 0xec, 0x73, 0x28, 0x47, 0x72, 0xae, 0x3c, 0xd1, + 0x2c, 0x10, 0x67, 0x15, 0x39, 0x43, 0x42, 0x88, 0x65, 0xa5, 0xa8, 0x69, 0xec, 0x4f, 0x45, 0xb3, + 0x98, 0x6a, 0xba, 0xe7, 0x4f, 0x0d, 0x87, 0x24, 0xec, 0x3a, 0x94, 0x4e, 0xe6, 0xfe, 0x74, 0xd4, + 0x2c, 0x11, 0xa5, 0x8e, 0x94, 0x36, 0x02, 0xc4, 0x31, 0x32, 0xb6, 0x03, 0xd5, 0x70, 0xea, 0xea, + 0xb1, 0x54, 0xb3, 0x26, 0xa4, 0x1b, 0x1e, 0x5b, 0x8c, 0x27, 0x52, 0x76, 0x17, 0xea, 0x9e, 0x0c, + 0x22, 0xad, 0x5c, 0x3f, 0xd0, 0x51, 0xb3, 0x4e, 0xe4, 0x8f, 0x91, 0xfc, 0x54, 0xaa, 0x33, 0xa1, + 0x3a, 0xa9, 0x90, 0x67, 0x99, 0xed, 0x22, 0xe4, 0x65, 0xe8, 0xfc, 0x36, 0x07, 0xd5, 0x58, 0x2b, + 0x73, 0x60, 0xf5, 0x40, 0x79, 0xa7, 0xbe, 0x16, 0x9e, 0x9e, 0x2b, 0xd1, 0xcc, 0x6d, 0xe7, 0x76, + 0x6a, 0x7c, 0x09, 0x63, 0xeb, 0x90, 0x1f, 0x0c, 0xc9, 0x51, 0x35, 0x9e, 0x1f, 0x0c, 0x59, 0x13, + 0x2a, 0x4f, 0x5c, 0xe5, 0xbb, 0x81, 0x26, 0xcf, 0xd4, 0x78, 0x3c, 0x65, 0xd7, 0xa0, 0x36, 0x18, + 0x3e, 0x11, 0x2a, 0xf2, 0x65, 0x40, 0xfe, 0xa8, 0xf1, 0x14, 0x60, 0x9b, 0x00, 0x83, 0xe1, 0x3d, + 0xe1, 0xa2, 0xd2, 0xa8, 0x59, 0xda, 0x2e, 0xec, 0xd4, 0x78, 0x06, 0x71, 0x7e, 0x0d, 0x25, 0x8a, + 0x11, 0xfb, 0x06, 0xca, 0x23, 0x7f, 0x22, 0x22, 0x6d, 0xcc, 0x69, 0xef, 0x7f, 0xf7, 0xe3, 0xd6, + 0xca, 0xdf, 0x7f, 0xdc, 0xda, 0xcd, 0x24, 0x83, 0x0c, 0x45, 0xe0, 0xc9, 0x40, 0xbb, 0x7e, 0x20, + 0x54, 0xb4, 0x37, 0x91, 0x77, 0xcc, 0x92, 0x56, 0x97, 0x7e, 0xb8, 0xd5, 0xc0, 0x6e, 0x42, 0xc9, + 0x0f, 
0x46, 0xe2, 0x9c, 0xec, 0x2f, 0xb4, 0x3f, 0xb2, 0xaa, 0xea, 0x83, 0xb9, 0x0e, 0xe7, 0xba, + 0x8f, 0x22, 0x6e, 0x18, 0xce, 0xef, 0x73, 0x50, 0x36, 0x39, 0xc0, 0xae, 0x41, 0x71, 0x26, 0xb4, + 0x4b, 0xfb, 0xd7, 0xf7, 0xab, 0xe8, 0xdb, 0x87, 0x42, 0xbb, 0x9c, 0x50, 0x4c, 0xaf, 0x99, 0x9c, + 0xa3, 0xef, 0xf3, 0x69, 0x7a, 0x3d, 0x44, 0x84, 0x5b, 0x01, 0xfb, 0x19, 0x54, 0x02, 0xa1, 0x5f, + 0x48, 0x75, 0x46, 0x3e, 0x5a, 0x37, 0x41, 0x3f, 0x12, 0xfa, 0xa1, 0x1c, 0x09, 0x1e, 0xcb, 0xd8, + 0x6d, 0xa8, 0x46, 0xc2, 0x9b, 0x2b, 0x5f, 0x2f, 0xc8, 0x5f, 0xeb, 0xfb, 0x0d, 0xca, 0x32, 0x8b, + 0x11, 0x39, 0x61, 0x38, 0x7f, 0xca, 0x41, 0x11, 0xcd, 0x60, 0x0c, 0x8a, 0xae, 0x9a, 0x98, 0xec, + 0xae, 0x71, 0x1a, 0xb3, 0x06, 0x14, 0x44, 0xf0, 0x9c, 0x2c, 0xaa, 0x71, 0x1c, 0x22, 0xe2, 0xbd, + 0x18, 0xd9, 0x18, 0xe1, 0x10, 0xd7, 0xcd, 0x23, 0xa1, 0x6c, 0x68, 0x68, 0xcc, 0x6e, 0x42, 0x2d, + 0x54, 0xf2, 0x7c, 0xf1, 0x0c, 0x57, 0x97, 0x32, 0x89, 0x87, 0x60, 0x2f, 0x78, 0xce, 0xab, 0xa1, + 0x1d, 0xb1, 0x5d, 0x00, 0x71, 0xae, 0x95, 0x7b, 0x28, 0x23, 0x1d, 0x35, 0xcb, 0x74, 0x76, 0xca, + 0x77, 0x04, 0xfa, 0xc7, 0x3c, 0x23, 0x75, 0xfe, 0x9a, 0x87, 0x12, 0xb9, 0x84, 0xed, 0x60, 0x04, + 0xc2, 0xb9, 0x09, 0x66, 0xa1, 0xcd, 0x6c, 0x04, 0x80, 0x62, 0x9d, 0x04, 0x00, 0xe3, 0xbe, 0x81, + 0xde, 0x98, 0x0a, 0x4f, 0x4b, 0x65, 0xd3, 0x2d, 0x99, 0xa3, 0xe9, 0x23, 0xcc, 0x08, 0x73, 0x1a, + 0x1a, 0xb3, 0x5b, 0x50, 0x96, 0x14, 0x46, 0x3a, 0xd0, 0x5b, 0x82, 0x6b, 0x29, 0xa8, 0x5c, 0x09, + 0x77, 0x24, 0x83, 0xe9, 0x82, 0x8e, 0x59, 0xe5, 0xc9, 0x9c, 0xdd, 0x82, 0x1a, 0xc5, 0xed, 0xd1, + 0x22, 0x14, 0xcd, 0x32, 0xc5, 0x61, 0x2d, 0x89, 0x29, 0x82, 0x3c, 0x95, 0xe3, 0x45, 0xf5, 0x5c, + 0xef, 0x54, 0x0c, 0x42, 0xdd, 0xbc, 0x9a, 0xfa, 0xab, 0x63, 0x31, 0x9e, 0x48, 0x51, 0x6d, 0x24, + 0x3c, 0x25, 0x34, 0x52, 0x3f, 0x26, 0xea, 0x9a, 0x0d, 0xaf, 0x01, 0x79, 0x2a, 0x67, 0x0e, 0x94, + 0x87, 0xc3, 0x43, 0x64, 0x7e, 0x92, 0x16, 0x12, 0x83, 0x70, 0x2b, 0x71, 0xfa, 0x50, 0x8d, 0xb7, + 0xc1, 0x5b, 0xd9, 0xef, 0xda, 0xfb, 0x9a, 0xef, 0x77, 0xd9, 0x1d, 0xa8, 0x44, 0xa7, 0xae, 0xf2, + 0x83, 0x09, 0xf9, 0x6e, 0x7d, 0xff, 0xa3, 0xc4, 0xaa, 0xa1, 0xc1, 0x51, 0x53, 0xcc, 0x71, 0x24, + 0xd4, 0x12, 0x33, 0xde, 0xd0, 0xd5, 0x80, 0xc2, 0xdc, 0x1f, 0x91, 0x9e, 0x35, 0x8e, 0x43, 0x44, + 0x26, 0xbe, 0xc9, 0xa5, 0x35, 0x8e, 0x43, 0x0c, 0xc8, 0x4c, 0x8e, 0x4c, 0xd9, 0x5b, 0xe3, 0x34, + 0x46, 0x1f, 0xcb, 0x50, 0xfb, 0x32, 0x70, 0xa7, 0xb1, 0x8f, 0xe3, 0xb9, 0x33, 0x8d, 0xcf, 0xf7, + 0x3f, 0xd9, 0xed, 0x37, 0x39, 0xa8, 0xc6, 0xb5, 0x1a, 0x0b, 0x8f, 0x3f, 0x12, 0x81, 0xf6, 0xc7, + 0xbe, 0x50, 0x76, 0xe3, 0x0c, 0xc2, 0xee, 0x40, 0xc9, 0xd5, 0x5a, 0xc5, 0xd7, 0xf9, 0xff, 0xb3, + 0x85, 0xbe, 0x75, 0x80, 0x92, 0x5e, 0xa0, 0xd5, 0x82, 0x1b, 0xd6, 0xc6, 0x57, 0x00, 0x29, 0x88, + 0xb6, 0x9e, 0x89, 0x85, 0xd5, 0x8a, 0x43, 0x76, 0x15, 0x4a, 0xcf, 0xdd, 0xe9, 0x5c, 0xd8, 0x1c, + 0x36, 0x93, 0xaf, 0xf3, 0x5f, 0xe5, 0x9c, 0xbf, 0xe4, 0xa1, 0x62, 0x0b, 0x3f, 0xbb, 0x0d, 0x15, + 0x2a, 0xfc, 0xd6, 0xa2, 0xcb, 0x2f, 0x46, 0x4c, 0x61, 0x7b, 0xc9, 0x17, 0x2d, 0x63, 0xa3, 0x55, + 0x65, 0xbe, 0x6c, 0xd6, 0xc6, 0xf4, 0xfb, 0x56, 0x18, 0x89, 0xb1, 0xfd, 0x74, 0xad, 0x23, 0xbb, + 0x2b, 0xc6, 0x7e, 0xe0, 0xa3, 0x7f, 0x38, 0x8a, 0xd8, 0xed, 0xf8, 0xd4, 0x45, 0xd2, 0xf8, 0x49, + 0x56, 0xe3, 0x9b, 0x87, 0xee, 0x43, 0x3d, 0xb3, 0xcd, 0x25, 0xa7, 0xbe, 0x91, 0x3d, 0xb5, 0xdd, + 0x92, 0xd4, 0x99, 0xef, 0x6e, 0xea, 0x85, 0xff, 0xc0, 0x7f, 0x5f, 0x02, 0xa4, 0x2a, 0xdf, 0xbf, + 0xb0, 0x38, 0x2f, 0x0b, 0x00, 0x83, 0x10, 0x4b, 0xe7, 0xc8, 0xa5, 0xfa, 0xbd, 0xea, 0x4f, 0x02, + 0xa9, 0xc4, 0x33, 0xba, 0xaa, 
0xb4, 0xbe, 0xca, 0xeb, 0x06, 0xa3, 0x1b, 0xc3, 0x0e, 0xa0, 0x3e, + 0x12, 0x91, 0xa7, 0x7c, 0x4a, 0x28, 0xeb, 0xf4, 0x2d, 0x3c, 0x53, 0xaa, 0xa7, 0xd5, 0x4d, 0x19, + 0xc6, 0x57, 0xd9, 0x35, 0x6c, 0x1f, 0x56, 0xc5, 0x79, 0x28, 0x95, 0xb6, 0xbb, 0x98, 0xfe, 0xe0, + 0x8a, 0xe9, 0x34, 0x10, 0xa7, 0x9d, 0x78, 0x5d, 0xa4, 0x13, 0xe6, 0x42, 0xd1, 0x73, 0x43, 0xf3, + 0x71, 0xac, 0xef, 0x37, 0x2f, 0xec, 0xd7, 0x71, 0x43, 0xe3, 0xb4, 0xf6, 0x17, 0x78, 0xd6, 0x97, + 0xff, 0xd8, 0xba, 0x95, 0xf9, 0x22, 0xce, 0xe4, 0xc9, 0x62, 0x8f, 0xf2, 0xe5, 0xcc, 0xd7, 0x7b, + 0x73, 0xed, 0x4f, 0xf7, 0xdc, 0xd0, 0x47, 0x75, 0xb8, 0xb0, 0xdf, 0xe5, 0xa4, 0x7a, 0xe3, 0x17, + 0xd0, 0xb8, 0x68, 0xf7, 0x87, 0xc4, 0x60, 0xe3, 0x2e, 0xd4, 0x12, 0x3b, 0xde, 0xb5, 0xb0, 0x9a, + 0x0d, 0xde, 0x9f, 0x73, 0x50, 0x36, 0xb7, 0x8a, 0xdd, 0x85, 0xda, 0x54, 0x7a, 0x2e, 0x1a, 0x10, + 0xb7, 0x68, 0x9f, 0xa6, 0x97, 0xae, 0xf5, 0x20, 0x96, 0x19, 0xaf, 0xa6, 0x5c, 0x4c, 0x32, 0x3f, + 0x18, 0xcb, 0xf8, 0x16, 0xac, 0xa7, 0x8b, 0xfa, 0xc1, 0x58, 0x72, 0x23, 0xdc, 0xb8, 0x0f, 0xeb, + 0xcb, 0x2a, 0x2e, 0xb1, 0xf3, 0xfa, 0x72, 0xba, 0x52, 0x5d, 0x4e, 0x16, 0x65, 0xcd, 0xbe, 0x0b, + 0xb5, 0x04, 0x67, 0xbb, 0x6f, 0x1a, 0xbe, 0x9a, 0x5d, 0x99, 0xb1, 0xd5, 0x99, 0x02, 0xa4, 0xa6, + 0x61, 0xb1, 0xc2, 0x5e, 0x30, 0x70, 0x67, 0x71, 0x93, 0x95, 0xcc, 0xe9, 0xdb, 0xe6, 0x6a, 0x97, + 0x4c, 0x59, 0xe5, 0x34, 0x66, 0x2d, 0x80, 0x51, 0x72, 0x61, 0xdf, 0x72, 0x8d, 0x33, 0x0c, 0x67, + 0x00, 0xd5, 0xd8, 0x08, 0xb6, 0x0d, 0xf5, 0xc8, 0xee, 0x8c, 0x9d, 0x0f, 0x6e, 0x57, 0xe2, 0x59, + 0x08, 0x3b, 0x18, 0xe5, 0x06, 0x13, 0xb1, 0xd4, 0xc1, 0x70, 0x44, 0xb8, 0x15, 0x38, 0x4f, 0xa1, + 0x44, 0x00, 0x5e, 0xb3, 0x48, 0xbb, 0x4a, 0xdb, 0x66, 0xc8, 0x34, 0x07, 0x32, 0xa2, 0x6d, 0xdb, + 0x45, 0x4c, 0x44, 0x6e, 0x08, 0xec, 0x06, 0xb6, 0x20, 0x23, 0xeb, 0xd1, 0xcb, 0x78, 0x28, 0x76, + 0x7e, 0x0e, 0xd5, 0x18, 0xc6, 0x93, 0x3f, 0xf0, 0x03, 0x61, 0x4d, 0xa4, 0x31, 0x36, 0x91, 0x9d, + 0x53, 0x57, 0xb9, 0x9e, 0x16, 0xa6, 0x0d, 0x28, 0xf1, 0x14, 0x70, 0xae, 0x43, 0x3d, 0x73, 0x7b, + 0x30, 0xdd, 0x9e, 0x50, 0x18, 0xcd, 0x1d, 0x36, 0x13, 0xe7, 0x25, 0xb6, 0xb8, 0x71, 0xd7, 0xf2, + 0x53, 0x80, 0x53, 0xad, 0xc3, 0x67, 0xd4, 0xc6, 0x58, 0xdf, 0xd7, 0x10, 0x21, 0x06, 0xdb, 0x82, + 0x3a, 0x4e, 0x22, 0x2b, 0x37, 0xf9, 0x4e, 0x2b, 0x22, 0x43, 0xf8, 0x09, 0xd4, 0xc6, 0xc9, 0xf2, + 0x82, 0x0d, 0x5d, 0xbc, 0xfa, 0x53, 0xa8, 0x06, 0xd2, 0xca, 0x4c, 0x57, 0x55, 0x09, 0x24, 0x89, + 0x9c, 0x5b, 0xf0, 0x7f, 0x6f, 0xf4, 0xe3, 0xec, 0x13, 0x28, 0x8f, 0xfd, 0xa9, 0xa6, 0xa2, 0x8f, + 0x8d, 0x9a, 0x9d, 0x39, 0xff, 0xca, 0x01, 0xa4, 0x91, 0xc5, 0x7c, 0xc5, 0xea, 0x8d, 0x9c, 0x55, + 0x53, 0xad, 0xa7, 0x50, 0x9d, 0xd9, 0x3a, 0x60, 0x63, 0x76, 0x6d, 0x39, 0x1b, 0x5a, 0x71, 0x99, + 0x30, 0x15, 0x62, 0xdf, 0x56, 0x88, 0x0f, 0xe9, 0x99, 0x93, 0x1d, 0xa8, 0x19, 0xc9, 0xbe, 0x7d, + 0x20, 0xbd, 0x68, 0xdc, 0x4a, 0x36, 0xee, 0xc3, 0xda, 0xd2, 0x96, 0xef, 0xf9, 0x4d, 0x48, 0xeb, + 0x59, 0xf6, 0x96, 0xdd, 0x86, 0xb2, 0x69, 0x22, 0x31, 0x25, 0x70, 0x64, 0xd5, 0xd0, 0x98, 0x3a, + 0x86, 0xe3, 0xf8, 0x05, 0xd2, 0x3f, 0x76, 0xf6, 0xa1, 0x6c, 0x9e, 0x58, 0x6c, 0x07, 0x2a, 0xae, + 0x67, 0xae, 0x63, 0xa6, 0x24, 0xa0, 0xf0, 0x80, 0x60, 0x1e, 0x8b, 0x9d, 0xbf, 0xe5, 0x01, 0x52, + 0xfc, 0x03, 0xba, 0xd2, 0xaf, 0x61, 0x3d, 0x12, 0x9e, 0x0c, 0x46, 0xae, 0x5a, 0x90, 0xd4, 0x3e, + 0x25, 0x2e, 0x5b, 0x72, 0x81, 0x99, 0xe9, 0x50, 0x0b, 0xef, 0xee, 0x50, 0x77, 0xa0, 0xe8, 0xc9, + 0x70, 0x61, 0x3f, 0x14, 0x6c, 0xf9, 0x20, 0x1d, 0x19, 0x2e, 0xf0, 0x41, 0x89, 0x0c, 0xd6, 0x82, + 0xf2, 0xec, 0x8c, 0x1e, 0x9d, 0xa6, 0x61, 0xbf, 0xba, 
0xcc, 0x7d, 0x78, 0x86, 0x63, 0x7c, 0xa2, + 0x1a, 0x16, 0xbb, 0x05, 0xa5, 0xd9, 0xd9, 0xc8, 0x57, 0xd4, 0xdb, 0xd6, 0x4d, 0x67, 0x98, 0xa5, + 0x77, 0x7d, 0x85, 0x0f, 0x51, 0xe2, 0x30, 0x07, 0xf2, 0x6a, 0xd6, 0xac, 0x10, 0xb3, 0x71, 0xc1, + 0x9b, 0xb3, 0xc3, 0x15, 0x9e, 0x57, 0xb3, 0x76, 0x15, 0xca, 0xc6, 0xaf, 0xce, 0x1f, 0x0b, 0xb0, + 0xbe, 0x6c, 0x25, 0xe6, 0x41, 0xa4, 0xbc, 0x38, 0x0f, 0x22, 0xe5, 0x25, 0xcd, 0x7b, 0x3e, 0xd3, + 0xbc, 0x3b, 0x50, 0x92, 0x2f, 0x02, 0xa1, 0xb2, 0xaf, 0xeb, 0xce, 0xa9, 0x7c, 0x11, 0x60, 0x9b, + 0x6a, 0x44, 0x4b, 0x5d, 0x5f, 0xc9, 0x76, 0x7d, 0x37, 0x60, 0x6d, 0x2c, 0xa7, 0x53, 0xf9, 0x62, + 0xb8, 0x98, 0x4d, 0xfd, 0xe0, 0xcc, 0xb6, 0x7e, 0xcb, 0x20, 0xdb, 0x81, 0x2b, 0x23, 0x5f, 0xa1, + 0x39, 0x1d, 0x19, 0x68, 0x11, 0xd0, 0x7b, 0x05, 0x79, 0x17, 0x61, 0xf6, 0x0d, 0x6c, 0xbb, 0x5a, + 0x8b, 0x59, 0xa8, 0x1f, 0x07, 0xa1, 0xeb, 0x9d, 0x75, 0xa5, 0x47, 0x77, 0x76, 0x16, 0xba, 0xda, + 0x3f, 0xf1, 0xa7, 0xf8, 0x34, 0xab, 0xd0, 0xd2, 0x77, 0xf2, 0xd8, 0xe7, 0xb0, 0xee, 0x29, 0xe1, + 0x6a, 0xd1, 0x15, 0x91, 0x3e, 0x76, 0xf5, 0x69, 0xb3, 0x4a, 0x2b, 0x2f, 0xa0, 0x78, 0x06, 0x17, + 0xad, 0x7d, 0xea, 0x4f, 0x47, 0x9e, 0xab, 0x46, 0xcd, 0x9a, 0x39, 0xc3, 0x12, 0xc8, 0x5a, 0xc0, + 0x08, 0xe8, 0xcd, 0x42, 0xbd, 0x48, 0xa8, 0x40, 0xd4, 0x4b, 0x24, 0x58, 0x38, 0xb5, 0x3f, 0x13, + 0x91, 0x76, 0x67, 0x21, 0xfd, 0x2b, 0x50, 0xe0, 0x29, 0xe0, 0x7c, 0x9b, 0x83, 0xc6, 0xc5, 0x14, + 0x41, 0x07, 0x87, 0x68, 0xa6, 0xbd, 0x6c, 0x38, 0x4e, 0x9c, 0x9e, 0xcf, 0x38, 0x3d, 0xfe, 0x42, + 0x15, 0x32, 0x5f, 0xa8, 0x24, 0x80, 0xc5, 0xb7, 0x07, 0x70, 0xc9, 0xa4, 0xd2, 0x45, 0x93, 0x7e, + 0x97, 0x83, 0x2b, 0x17, 0xd2, 0xf0, 0xbd, 0x2d, 0xda, 0x86, 0xfa, 0xcc, 0x3d, 0x13, 0xc7, 0xae, + 0xa2, 0xe0, 0x16, 0x4c, 0x0b, 0x97, 0x81, 0xfe, 0x0b, 0xf6, 0x05, 0xb0, 0x9a, 0xcd, 0xfd, 0x4b, + 0x6d, 0x8b, 0x43, 0x79, 0x24, 0xf5, 0x3d, 0x39, 0xb7, 0x5f, 0xbf, 0x38, 0x94, 0x31, 0xf8, 0x66, + 0xc0, 0x0b, 0x97, 0x04, 0xdc, 0x39, 0x82, 0x6a, 0x6c, 0x20, 0xdb, 0xb2, 0x4f, 0xf5, 0x5c, 0xfa, + 0x97, 0xd1, 0xe3, 0x48, 0x28, 0xb4, 0xdd, 0xbc, 0xdb, 0x3f, 0x83, 0xd2, 0x44, 0xc9, 0x79, 0x68, + 0x6b, 0xeb, 0x12, 0xc3, 0x48, 0x9c, 0x21, 0x54, 0x2c, 0xc2, 0x76, 0xa1, 0x7c, 0xb2, 0x38, 0x8a, + 0x9b, 0x0f, 0x7b, 0xb1, 0x71, 0x3e, 0xb2, 0x0c, 0xac, 0x16, 0x86, 0xc1, 0xae, 0x42, 0xf1, 0x64, + 0xd1, 0xef, 0x9a, 0x07, 0x19, 0xd6, 0x1c, 0x9c, 0xb5, 0xcb, 0xc6, 0x20, 0xe7, 0x01, 0xac, 0x66, + 0xd7, 0xa1, 0x53, 0x32, 0x4d, 0x0d, 0x8d, 0xd3, 0xe2, 0x9a, 0x7f, 0x47, 0x71, 0xdd, 0xdd, 0x81, + 0x8a, 0xfd, 0x53, 0x84, 0xd5, 0xa0, 0xf4, 0xf8, 0x68, 0xd8, 0x7b, 0xd4, 0x58, 0x61, 0x55, 0x28, + 0x1e, 0x0e, 0x86, 0x8f, 0x1a, 0x39, 0x1c, 0x1d, 0x0d, 0x8e, 0x7a, 0x8d, 0xfc, 0xee, 0x4d, 0x58, + 0xcd, 0xfe, 0x2d, 0xc2, 0xea, 0x50, 0x19, 0x1e, 0x1c, 0x75, 0xdb, 0x83, 0x5f, 0x35, 0x56, 0xd8, + 0x2a, 0x54, 0xfb, 0x47, 0xc3, 0x5e, 0xe7, 0x31, 0xef, 0x35, 0x72, 0xbb, 0xbf, 0x84, 0x5a, 0xf2, + 0x72, 0x47, 0x0d, 0xed, 0xfe, 0x51, 0xb7, 0xb1, 0xc2, 0x00, 0xca, 0xc3, 0x5e, 0x87, 0xf7, 0x50, + 0x6f, 0x05, 0x0a, 0xc3, 0xe1, 0x61, 0x23, 0x8f, 0xbb, 0x76, 0x0e, 0x3a, 0x87, 0xbd, 0x46, 0x01, + 0x87, 0x8f, 0x1e, 0x1e, 0xdf, 0x1b, 0x36, 0x8a, 0xbb, 0x5f, 0xc2, 0x95, 0x0b, 0x2f, 0x67, 0x5a, + 0x7d, 0x78, 0xc0, 0x7b, 0xa8, 0xa9, 0x0e, 0x95, 0x63, 0xde, 0x7f, 0x72, 0xf0, 0xa8, 0xd7, 0xc8, + 0xa1, 0xe0, 0xc1, 0xa0, 0x73, 0xbf, 0xd7, 0x6d, 0xe4, 0xdb, 0xd7, 0xbe, 0x7b, 0xb5, 0x99, 0xfb, + 0xfe, 0xd5, 0x66, 0xee, 0x87, 0x57, 0x9b, 0xb9, 0x7f, 0xbe, 0xda, 0xcc, 0x7d, 0xfb, 0x7a, 0x73, + 0xe5, 0xfb, 0xd7, 0x9b, 0x2b, 0x3f, 0xbc, 0xde, 0x5c, 0x39, 0x29, 0xd3, 0x9f, 
0x94, 0x5f, 0xfc, + 0x3b, 0x00, 0x00, 0xff, 0xff, 0x7e, 0x60, 0x46, 0x7d, 0xe4, 0x14, 0x00, 0x00, } func (m *Op) Marshal() (dAtA []byte, err error) { @@ -3044,6 +3367,278 @@ func (m *OpMetadata) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *Source) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Source) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *Source) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Infos) > 0 { + for iNdEx := len(m.Infos) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Infos[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintOps(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + } + if len(m.Locations) > 0 { + keysForLocations := make([]string, 0, len(m.Locations)) + for k := range m.Locations { + keysForLocations = append(keysForLocations, string(k)) + } + github_com_gogo_protobuf_sortkeys.Strings(keysForLocations) + for iNdEx := len(keysForLocations) - 1; iNdEx >= 0; iNdEx-- { + v := m.Locations[string(keysForLocations[iNdEx])] + baseI := i + if v != nil { + { + size, err := v.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintOps(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + i -= len(keysForLocations[iNdEx]) + copy(dAtA[i:], keysForLocations[iNdEx]) + i = encodeVarintOps(dAtA, i, uint64(len(keysForLocations[iNdEx]))) + i-- + dAtA[i] = 0xa + i = encodeVarintOps(dAtA, i, uint64(baseI-i)) + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *Locations) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Locations) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *Locations) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Locations) > 0 { + for iNdEx := len(m.Locations) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Locations[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintOps(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + +func (m *SourceInfo) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *SourceInfo) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *SourceInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Definition != nil { + { + size, err := m.Definition.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintOps(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a + } + if len(m.Data) > 0 { + i -= len(m.Data) + copy(dAtA[i:], m.Data) + i = encodeVarintOps(dAtA, i, uint64(len(m.Data))) + i-- + dAtA[i] = 0x12 + } + if len(m.Filename) > 0 { + i -= len(m.Filename) + copy(dAtA[i:], m.Filename) + i = 
encodeVarintOps(dAtA, i, uint64(len(m.Filename))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *Location) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Location) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *Location) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Ranges) > 0 { + for iNdEx := len(m.Ranges) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Ranges[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintOps(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + } + if m.SourceIndex != 0 { + i = encodeVarintOps(dAtA, i, uint64(m.SourceIndex)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func (m *Range) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Range) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *Range) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + { + size, err := m.End.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintOps(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + { + size, err := m.Start.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintOps(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *Position) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Position) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *Position) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Character != 0 { + i = encodeVarintOps(dAtA, i, uint64(m.Character)) + i-- + dAtA[i] = 0x10 + } + if m.Line != 0 { + i = encodeVarintOps(dAtA, i, uint64(m.Line)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + func (m *ExportCache) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) @@ -3180,6 +3775,18 @@ func (m *Definition) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.Source != nil { + { + size, err := m.Source.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintOps(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a + } if len(m.Metadata) > 0 { keysForMetadata := make([]string, 0, len(m.Metadata)) for k := range m.Metadata { @@ -4268,6 +4875,116 @@ func (m *OpMetadata) Size() (n int) { return n } +func (m *Source) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Locations) > 0 { + for k, v := range m.Locations { + _ = k + _ = v + l = 0 + if v != nil { + l = v.Size() + l += 1 + sovOps(uint64(l)) + } + mapEntrySize := 1 + len(k) + sovOps(uint64(len(k))) + l + n += mapEntrySize + 1 + sovOps(uint64(mapEntrySize)) + } + } + if len(m.Infos) > 0 { + for _, e := range m.Infos { + l = e.Size() + n += 
1 + l + sovOps(uint64(l)) + } + } + return n +} + +func (m *Locations) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Locations) > 0 { + for _, e := range m.Locations { + l = e.Size() + n += 1 + l + sovOps(uint64(l)) + } + } + return n +} + +func (m *SourceInfo) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Filename) + if l > 0 { + n += 1 + l + sovOps(uint64(l)) + } + l = len(m.Data) + if l > 0 { + n += 1 + l + sovOps(uint64(l)) + } + if m.Definition != nil { + l = m.Definition.Size() + n += 1 + l + sovOps(uint64(l)) + } + return n +} + +func (m *Location) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.SourceIndex != 0 { + n += 1 + sovOps(uint64(m.SourceIndex)) + } + if len(m.Ranges) > 0 { + for _, e := range m.Ranges { + l = e.Size() + n += 1 + l + sovOps(uint64(l)) + } + } + return n +} + +func (m *Range) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.Start.Size() + n += 1 + l + sovOps(uint64(l)) + l = m.End.Size() + n += 1 + l + sovOps(uint64(l)) + return n +} + +func (m *Position) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Line != 0 { + n += 1 + sovOps(uint64(m.Line)) + } + if m.Character != 0 { + n += 1 + sovOps(uint64(m.Character)) + } + return n +} + func (m *ExportCache) Size() (n int) { if m == nil { return 0 @@ -4341,6 +5058,10 @@ func (m *Definition) Size() (n int) { n += mapEntrySize + 1 + sovOps(uint64(mapEntrySize)) } } + if m.Source != nil { + l = m.Source.Size() + n += 1 + l + sovOps(uint64(l)) + } return n } @@ -7391,6 +8112,780 @@ func (m *OpMetadata) Unmarshal(dAtA []byte) error { } return nil } +func (m *Source) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Source: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Source: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Locations", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthOps + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthOps + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Locations == nil { + m.Locations = make(map[string]*Locations) + } + var mapkey string + var mapvalue *Locations + for iNdEx < postIndex { + entryPreIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + if fieldNum == 1 { + var stringLenmapkey uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b 
:= dAtA[iNdEx] + iNdEx++ + stringLenmapkey |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapkey := int(stringLenmapkey) + if intStringLenmapkey < 0 { + return ErrInvalidLengthOps + } + postStringIndexmapkey := iNdEx + intStringLenmapkey + if postStringIndexmapkey < 0 { + return ErrInvalidLengthOps + } + if postStringIndexmapkey > l { + return io.ErrUnexpectedEOF + } + mapkey = string(dAtA[iNdEx:postStringIndexmapkey]) + iNdEx = postStringIndexmapkey + } else if fieldNum == 2 { + var mapmsglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + mapmsglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if mapmsglen < 0 { + return ErrInvalidLengthOps + } + postmsgIndex := iNdEx + mapmsglen + if postmsgIndex < 0 { + return ErrInvalidLengthOps + } + if postmsgIndex > l { + return io.ErrUnexpectedEOF + } + mapvalue = &Locations{} + if err := mapvalue.Unmarshal(dAtA[iNdEx:postmsgIndex]); err != nil { + return err + } + iNdEx = postmsgIndex + } else { + iNdEx = entryPreIndex + skippy, err := skipOps(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) > postIndex { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + m.Locations[mapkey] = mapvalue + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Infos", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthOps + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthOps + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Infos = append(m.Infos, &SourceInfo{}) + if err := m.Infos[len(m.Infos)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipOps(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *Locations) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Locations: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Locations: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Locations", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + 
return ErrInvalidLengthOps + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthOps + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Locations = append(m.Locations, &Location{}) + if err := m.Locations[len(m.Locations)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipOps(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *SourceInfo) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: SourceInfo: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: SourceInfo: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Filename", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthOps + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthOps + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Filename = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Data", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthOps + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthOps + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Data = append(m.Data[:0], dAtA[iNdEx:postIndex]...) 
+ if m.Data == nil { + m.Data = []byte{} + } + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Definition", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthOps + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthOps + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Definition == nil { + m.Definition = &Definition{} + } + if err := m.Definition.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipOps(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *Location) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Location: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Location: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field SourceIndex", wireType) + } + m.SourceIndex = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.SourceIndex |= int32(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Ranges", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthOps + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthOps + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Ranges = append(m.Ranges, &Range{}) + if err := m.Ranges[len(m.Ranges)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipOps(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *Range) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return 
io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Range: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Range: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Start", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthOps + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthOps + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Start.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field End", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthOps + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthOps + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.End.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipOps(dAtA[iNdEx:]) + if err != nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *Position) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Position: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Position: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Line", wireType) + } + m.Line = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Line |= int32(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Character", wireType) + } + m.Character = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Character |= int32(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipOps(dAtA[iNdEx:]) + if err 
!= nil { + return err + } + if skippy < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) < 0 { + return ErrInvalidLengthOps + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func (m *ExportCache) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 @@ -7920,6 +9415,42 @@ func (m *Definition) Unmarshal(dAtA []byte) error { } m.Metadata[github_com_opencontainers_go_digest.Digest(mapkey)] = *mapvalue iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Source", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowOps + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthOps + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthOps + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Source == nil { + m.Source = &Source{} + } + if err := m.Source.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipOps(dAtA[iNdEx:]) diff --git a/vendor/github.com/moby/buildkit/solver/pb/ops.proto b/vendor/github.com/moby/buildkit/solver/pb/ops.proto index a24aad12c4..087c346163 100644 --- a/vendor/github.com/moby/buildkit/solver/pb/ops.proto +++ b/vendor/github.com/moby/buildkit/solver/pb/ops.proto @@ -177,6 +177,42 @@ message OpMetadata { map caps = 5 [(gogoproto.castkey) = "github.com/moby/buildkit/util/apicaps.CapID", (gogoproto.nullable) = false]; } +// Source is a source mapping description for a file +message Source { + map locations = 1; + repeated SourceInfo infos = 2; +} + +// Locations is a list of ranges with a index to its source map. +message Locations { + repeated Location locations = 1; +} + +// Source info contains the shared metadata of a source mapping +message SourceInfo { + string filename = 1; + bytes data = 2; + Definition definition = 3; +} + +// Location defines list of areas in to source file +message Location { + int32 sourceIndex = 1; + repeated Range ranges = 2; +} + +// Range is an area in the source file +message Range { + Position start = 1 [(gogoproto.nullable) = false]; + Position end = 2 [(gogoproto.nullable) = false]; +} + +// Position is single location in a source file +message Position { + int32 Line = 1; + int32 Character = 2; +} + message ExportCache { bool Value = 1; } @@ -200,6 +236,8 @@ message Definition { // metadata contains metadata for the each of the Op messages. // A key must be an LLB op digest string. Currently, empty string is not expected as a key, but it may change in the future. 
map metadata = 2 [(gogoproto.castkey) = "github.com/opencontainers/go-digest.Digest", (gogoproto.nullable) = false]; + // Source contains the source mapping information for the vertexes in the definition + Source Source = 3; } message HostIP { @@ -302,4 +340,4 @@ message UserOpt { message NamedUserOpt { string name = 1; int64 input = 2 [(gogoproto.customtype) = "InputIndex", (gogoproto.nullable) = false]; -} \ No newline at end of file +} diff --git a/vendor/github.com/moby/buildkit/source/git/gitsource.go b/vendor/github.com/moby/buildkit/source/git/gitsource.go index 419bf3b6be..5c36a47dfd 100644 --- a/vendor/github.com/moby/buildkit/source/git/gitsource.go +++ b/vendor/github.com/moby/buildkit/source/git/gitsource.go @@ -72,7 +72,7 @@ func (gs *gitSource) mountRemote(ctx context.Context, remote string) (target str for _, si := range sis { remoteRef, err = gs.cache.GetMutable(ctx, si.ID()) if err != nil { - if cache.IsLocked(err) { + if errors.Is(err, cache.ErrLocked) { // should never really happen as no other function should access this metadata, but lets be graceful logrus.Warnf("mutable ref for %s %s was locked: %v", remote, si.ID(), err) continue diff --git a/vendor/github.com/moby/buildkit/util/binfmt_misc/check.go b/vendor/github.com/moby/buildkit/util/binfmt_misc/check.go deleted file mode 100644 index 4caf59449b..0000000000 --- a/vendor/github.com/moby/buildkit/util/binfmt_misc/check.go +++ /dev/null @@ -1,42 +0,0 @@ -package binfmt_misc - -import ( - "bytes" - "compress/gzip" - "io" - "io/ioutil" - "os" - "os/exec" - "path/filepath" -) - -func check(bin string) error { - tmpdir, err := ioutil.TempDir("", "qemu-check") - if err != nil { - return err - } - defer os.RemoveAll(tmpdir) - pp := filepath.Join(tmpdir, "check") - - r, err := gzip.NewReader(bytes.NewReader([]byte(bin))) - if err != nil { - return err - } - defer r.Close() - - f, err := os.OpenFile(pp, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0700) - if err != nil { - return err - } - - if _, err := io.Copy(f, r); err != nil { - f.Close() - return err - } - f.Close() - - cmd := exec.Command("/check") - withChroot(cmd, tmpdir) - err = cmd.Run() - return err -} diff --git a/vendor/github.com/moby/buildkit/util/binfmt_misc/check_unix.go b/vendor/github.com/moby/buildkit/util/binfmt_misc/check_unix.go index 22c7ce7cae..670e6d2c21 100644 --- a/vendor/github.com/moby/buildkit/util/binfmt_misc/check_unix.go +++ b/vendor/github.com/moby/buildkit/util/binfmt_misc/check_unix.go @@ -3,7 +3,13 @@ package binfmt_misc import ( + "bytes" + "compress/gzip" + "io" + "io/ioutil" + "os" "os/exec" + "path/filepath" "syscall" ) @@ -12,3 +18,34 @@ func withChroot(cmd *exec.Cmd, dir string) { Chroot: dir, } } + +func check(bin string) error { + tmpdir, err := ioutil.TempDir("", "qemu-check") + if err != nil { + return err + } + defer os.RemoveAll(tmpdir) + pp := filepath.Join(tmpdir, "check") + + r, err := gzip.NewReader(bytes.NewReader([]byte(bin))) + if err != nil { + return err + } + defer r.Close() + + f, err := os.OpenFile(pp, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0700) + if err != nil { + return err + } + + if _, err := io.Copy(f, r); err != nil { + f.Close() + return err + } + f.Close() + + cmd := exec.Command("/check") + withChroot(cmd, tmpdir) + err = cmd.Run() + return err +} diff --git a/vendor/github.com/moby/buildkit/util/binfmt_misc/check_windows.go b/vendor/github.com/moby/buildkit/util/binfmt_misc/check_windows.go index 3c28c8bdd3..f246184778 100644 --- a/vendor/github.com/moby/buildkit/util/binfmt_misc/check_windows.go +++ 
b/vendor/github.com/moby/buildkit/util/binfmt_misc/check_windows.go @@ -3,8 +3,13 @@ package binfmt_misc import ( + "errors" "os/exec" ) func withChroot(cmd *exec.Cmd, dir string) { } + +func check(bin string) error { + return errors.New("binfmt is not supported on Windows") +} diff --git a/vendor/github.com/moby/buildkit/util/entitlements/entitlements.go b/vendor/github.com/moby/buildkit/util/entitlements/entitlements.go index f30b8ccc9a..f65b426bb2 100644 --- a/vendor/github.com/moby/buildkit/util/entitlements/entitlements.go +++ b/vendor/github.com/moby/buildkit/util/entitlements/entitlements.go @@ -43,7 +43,7 @@ func WhiteList(allowed, supported []Entitlement) (Set, error) { } if supported != nil { if !supm.Allowed(e) { - return nil, errors.Errorf("entitlement %s is not allowed", e) + return nil, errors.Errorf("granting entitlement %s is not allowed by build daemon configuration", e) } } m[e] = struct{}{} diff --git a/vendor/github.com/moby/buildkit/util/flightcontrol/flightcontrol.go b/vendor/github.com/moby/buildkit/util/flightcontrol/flightcontrol.go index f06d4e8954..a543364c75 100644 --- a/vendor/github.com/moby/buildkit/util/flightcontrol/flightcontrol.go +++ b/vendor/github.com/moby/buildkit/util/flightcontrol/flightcontrol.go @@ -35,7 +35,7 @@ func (g *Group) Do(ctx context.Context, key string, fn func(ctx context.Context) var backoff time.Duration for { v, err = g.do(ctx, key, fn) - if err == nil || errors.Cause(err) != errRetry { + if err == nil || !errors.Is(err, errRetry) { return v, err } // backoff logic diff --git a/vendor/github.com/moby/buildkit/util/grpcerrors/grpcerrors.go b/vendor/github.com/moby/buildkit/util/grpcerrors/grpcerrors.go new file mode 100644 index 0000000000..acba2750ca --- /dev/null +++ b/vendor/github.com/moby/buildkit/util/grpcerrors/grpcerrors.go @@ -0,0 +1,188 @@ +package grpcerrors + +import ( + gogotypes "github.com/gogo/protobuf/types" + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes" + "github.com/golang/protobuf/ptypes/any" + "github.com/moby/buildkit/util/stack" + spb "google.golang.org/genproto/googleapis/rpc/status" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +type TypedError interface { + ToProto() TypedErrorProto +} + +type TypedErrorProto interface { + proto.Message + WrapError(error) error +} + +func ToGRPC(err error) error { + if err == nil { + return nil + } + st, ok := AsGRPCStatus(err) + if !ok || st == nil { + st = status.New(Code(err), err.Error()) + } + if st.Code() != Code(err) { + pb := st.Proto() + pb.Code = int32(Code(err)) + st = status.FromProto(pb) + } + + var details []proto.Message + + for _, st := range stack.Traces(err) { + details = append(details, st) + } + + each(err, func(err error) { + if te, ok := err.(TypedError); ok { + details = append(details, te.ToProto()) + } + }) + + if len(details) > 0 { + if st2, err := st.WithDetails(details...); err == nil { + st = st2 + } + } + + return st.Err() +} + +func Code(err error) codes.Code { + if se, ok := err.(interface { + Code() codes.Code + }); ok { + return se.Code() + } + + if se, ok := err.(interface { + GRPCStatus() *status.Status + }); ok { + return se.GRPCStatus().Code() + } + + wrapped, ok := err.(interface { + Unwrap() error + }) + if ok { + return Code(wrapped.Unwrap()) + } + + return status.FromContextError(err).Code() +} + +func WrapCode(err error, code codes.Code) error { + return &withCode{error: err, code: code} +} + +func AsGRPCStatus(err error) (*status.Status, bool) { + if err == nil { + return nil, 
true + } + if se, ok := err.(interface { + GRPCStatus() *status.Status + }); ok { + return se.GRPCStatus(), true + } + + wrapped, ok := err.(interface { + Unwrap() error + }) + if ok { + return AsGRPCStatus(wrapped.Unwrap()) + } + + return nil, false +} + +func FromGRPC(err error) error { + if err == nil { + return nil + } + st, ok := status.FromError(err) + if !ok { + return err + } + + pb := st.Proto() + + n := &spb.Status{ + Code: pb.Code, + Message: pb.Message, + } + + details := make([]TypedErrorProto, 0, len(pb.Details)) + stacks := make([]*stack.Stack, 0, len(pb.Details)) + + // details that we don't understand are copied as proto + for _, d := range pb.Details { + var m interface{} + detail := &ptypes.DynamicAny{} + if err := ptypes.UnmarshalAny(d, detail); err != nil { + detail := &gogotypes.DynamicAny{} + if err := gogotypes.UnmarshalAny(gogoAny(d), detail); err != nil { + n.Details = append(n.Details, d) + continue + } + m = detail.Message + } else { + m = detail.Message + } + + switch v := m.(type) { + case *stack.Stack: + stacks = append(stacks, v) + case TypedErrorProto: + details = append(details, v) + default: + n.Details = append(n.Details, d) + } + + } + + err = status.FromProto(n).Err() + + for _, s := range stacks { + if s != nil { + err = stack.Wrap(err, *s) + } + } + + for _, d := range details { + err = d.WrapError(err) + } + + return stack.Enable(err) +} + +type withCode struct { + code codes.Code + error +} + +func (e *withCode) Unwrap() error { + return e.error +} + +func each(err error, fn func(error)) { + fn(err) + if wrapped, ok := err.(interface { + Unwrap() error + }); ok { + each(wrapped.Unwrap(), fn) + } +} + +func gogoAny(in *any.Any) *gogotypes.Any { + return &gogotypes.Any{ + TypeUrl: in.TypeUrl, + Value: in.Value, + } +} diff --git a/vendor/github.com/moby/buildkit/util/grpcerrors/intercept.go b/vendor/github.com/moby/buildkit/util/grpcerrors/intercept.go new file mode 100644 index 0000000000..77618c1cee --- /dev/null +++ b/vendor/github.com/moby/buildkit/util/grpcerrors/intercept.go @@ -0,0 +1,28 @@ +package grpcerrors + +import ( + "context" + + "google.golang.org/grpc" +) + +func UnaryServerInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) { + resp, err = handler(ctx, req) + if err != nil { + err = ToGRPC(err) + } + return resp, err +} + +func StreamServerInterceptor(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error { + return ToGRPC(handler(srv, ss)) +} + +func UnaryClientInterceptor(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error { + return FromGRPC(invoker(ctx, method, req, reply, cc, opts...)) +} + +func StreamClientInterceptor(ctx context.Context, desc *grpc.StreamDesc, cc *grpc.ClientConn, method string, streamer grpc.Streamer, opts ...grpc.CallOption) (grpc.ClientStream, error) { + s, err := streamer(ctx, desc, cc, method, opts...) 
+ return s, ToGRPC(err) +} diff --git a/vendor/github.com/moby/buildkit/util/resolver/resolver.go b/vendor/github.com/moby/buildkit/util/resolver/resolver.go index 8d8b9f7fee..4add0b397d 100644 --- a/vendor/github.com/moby/buildkit/util/resolver/resolver.go +++ b/vendor/github.com/moby/buildkit/util/resolver/resolver.go @@ -50,7 +50,7 @@ func fillInsecureOpts(host string, c config.RegistryConfig, h *docker.RegistryHo func loadTLSConfig(c config.RegistryConfig) (*tls.Config, error) { for _, d := range c.TLSConfigDir { fs, err := ioutil.ReadDir(d) - if err != nil && !os.IsNotExist(err) && !os.IsPermission(err) { + if err != nil && !errors.Is(err, os.ErrNotExist) && !errors.Is(err, os.ErrPermission) { return nil, errors.WithStack(err) } for _, f := range fs { diff --git a/vendor/github.com/moby/buildkit/util/rootless/specconv/specconv_linux.go b/vendor/github.com/moby/buildkit/util/rootless/specconv/specconv_linux.go index 12646e430a..7118f8d6d2 100644 --- a/vendor/github.com/moby/buildkit/util/rootless/specconv/specconv_linux.go +++ b/vendor/github.com/moby/buildkit/util/rootless/specconv/specconv_linux.go @@ -3,7 +3,7 @@ package specconv import ( "strings" - "github.com/opencontainers/runtime-spec/specs-go" + specs "github.com/opencontainers/runtime-spec/specs-go" ) // ToRootless converts spec to be compatible with "rootless" runc. diff --git a/vendor/github.com/moby/buildkit/util/stack/generate.go b/vendor/github.com/moby/buildkit/util/stack/generate.go new file mode 100644 index 0000000000..97516baa99 --- /dev/null +++ b/vendor/github.com/moby/buildkit/util/stack/generate.go @@ -0,0 +1,3 @@ +package stack + +//go:generate protoc -I=. -I=../../vendor/ --go_out=. stack.proto diff --git a/vendor/github.com/moby/buildkit/util/stack/stack.go b/vendor/github.com/moby/buildkit/util/stack/stack.go new file mode 100644 index 0000000000..6d3cfc8332 --- /dev/null +++ b/vendor/github.com/moby/buildkit/util/stack/stack.go @@ -0,0 +1,151 @@ +package stack + +import ( + "fmt" + io "io" + "os" + "strconv" + "strings" + + "github.com/pkg/errors" +) + +var version string +var revision string + +func SetVersionInfo(v, r string) { + version = v + revision = r +} + +func Traces(err error) []*Stack { + var st []*Stack + + wrapped, ok := err.(interface { + Unwrap() error + }) + if ok { + st = Traces(wrapped.Unwrap()) + } + + if ste, ok := err.(interface { + StackTrace() errors.StackTrace + }); ok { + st = append(st, convertStack(ste.StackTrace())) + } + + if ste, ok := err.(interface { + StackTrace() *Stack + }); ok { + st = append(st, ste.StackTrace()) + } + + return st +} + +func Enable(err error) error { + if err == nil { + return nil + } + if !hasLocalStackTrace(err) { + return errors.WithStack(err) + } + return err +} + +func Wrap(err error, s Stack) error { + return &withStack{stack: s, error: err} +} + +func hasLocalStackTrace(err error) bool { + wrapped, ok := err.(interface { + Unwrap() error + }) + if ok && hasLocalStackTrace(wrapped.Unwrap()) { + return true + } + + _, ok = err.(interface { + StackTrace() errors.StackTrace + }) + return ok +} + +func Formatter(err error) fmt.Formatter { + return &formatter{err} +} + +type formatter struct { + error +} + +func (w *formatter) Format(s fmt.State, verb rune) { + if w.error == nil { + fmt.Fprintf(s, "%v", w.error) + return + } + switch verb { + case 'v': + if s.Flag('+') { + fmt.Fprintf(s, "%s\n", w.Error()) + for _, stack := range Traces(w.error) { + fmt.Fprintf(s, "%d %s %s\n", stack.Pid, stack.Version, strings.Join(stack.Cmdline, " ")) + for _, f := 
range stack.Frames { + fmt.Fprintf(s, "%s\n\t%s:%d\n", f.Name, f.File, f.Line) + } + fmt.Fprintln(s) + } + return + } + fallthrough + case 's': + io.WriteString(s, w.Error()) + case 'q': + fmt.Fprintf(s, "%q", w.Error()) + } +} + +func convertStack(s errors.StackTrace) *Stack { + var out Stack + for _, f := range s { + dt, err := f.MarshalText() + if err != nil { + continue + } + p := strings.SplitN(string(dt), " ", 2) + if len(p) != 2 { + continue + } + idx := strings.LastIndexByte(p[1], ':') + if idx == -1 { + continue + } + line, err := strconv.Atoi(p[1][idx+1:]) + if err != nil { + continue + } + out.Frames = append(out.Frames, &Frame{ + Name: p[0], + File: p[1][:idx], + Line: int32(line), + }) + } + out.Cmdline = os.Args + out.Pid = int32(os.Getpid()) + out.Version = version + out.Revision = revision + return &out +} + +type withStack struct { + stack Stack + error +} + +func (e *withStack) Unwrap() error { + return e.error +} + +func (e *withStack) StackTrace() *Stack { + return &e.stack +} diff --git a/vendor/github.com/moby/buildkit/util/stack/stack.pb.go b/vendor/github.com/moby/buildkit/util/stack/stack.pb.go new file mode 100644 index 0000000000..cefb30a8f5 --- /dev/null +++ b/vendor/github.com/moby/buildkit/util/stack/stack.pb.go @@ -0,0 +1,170 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: stack.proto + +package stack + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package + +type Stack struct { + Frames []*Frame `protobuf:"bytes,1,rep,name=frames,proto3" json:"frames,omitempty"` + Cmdline []string `protobuf:"bytes,2,rep,name=cmdline,proto3" json:"cmdline,omitempty"` + Pid int32 `protobuf:"varint,3,opt,name=pid,proto3" json:"pid,omitempty"` + Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` + Revision string `protobuf:"bytes,5,opt,name=revision,proto3" json:"revision,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Stack) Reset() { *m = Stack{} } +func (m *Stack) String() string { return proto.CompactTextString(m) } +func (*Stack) ProtoMessage() {} +func (*Stack) Descriptor() ([]byte, []int) { + return fileDescriptor_b44c07feb2ca0a5a, []int{0} +} + +func (m *Stack) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Stack.Unmarshal(m, b) +} +func (m *Stack) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Stack.Marshal(b, m, deterministic) +} +func (m *Stack) XXX_Merge(src proto.Message) { + xxx_messageInfo_Stack.Merge(m, src) +} +func (m *Stack) XXX_Size() int { + return xxx_messageInfo_Stack.Size(m) +} +func (m *Stack) XXX_DiscardUnknown() { + xxx_messageInfo_Stack.DiscardUnknown(m) +} + +var xxx_messageInfo_Stack proto.InternalMessageInfo + +func (m *Stack) GetFrames() []*Frame { + if m != nil { + return m.Frames + } + return nil +} + +func (m *Stack) GetCmdline() []string { + if m != nil { + return m.Cmdline + } + return nil +} + +func (m *Stack) GetPid() int32 { + if m != nil { + return m.Pid + } + return 0 +} + +func (m *Stack) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *Stack) GetRevision() string { + if m != nil { + return m.Revision + } + return "" +} + +type Frame struct { + Name string `protobuf:"bytes,1,opt,name=Name,proto3" json:"Name,omitempty"` + File string `protobuf:"bytes,2,opt,name=File,proto3" json:"File,omitempty"` + Line int32 `protobuf:"varint,3,opt,name=Line,proto3" json:"Line,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Frame) Reset() { *m = Frame{} } +func (m *Frame) String() string { return proto.CompactTextString(m) } +func (*Frame) ProtoMessage() {} +func (*Frame) Descriptor() ([]byte, []int) { + return fileDescriptor_b44c07feb2ca0a5a, []int{1} +} + +func (m *Frame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Frame.Unmarshal(m, b) +} +func (m *Frame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Frame.Marshal(b, m, deterministic) +} +func (m *Frame) XXX_Merge(src proto.Message) { + xxx_messageInfo_Frame.Merge(m, src) +} +func (m *Frame) XXX_Size() int { + return xxx_messageInfo_Frame.Size(m) +} +func (m *Frame) XXX_DiscardUnknown() { + xxx_messageInfo_Frame.DiscardUnknown(m) +} + +var xxx_messageInfo_Frame proto.InternalMessageInfo + +func (m *Frame) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Frame) GetFile() string { + if m != nil { + return m.File + } + return "" +} + +func (m *Frame) GetLine() int32 { + if m != nil { + return m.Line + } + return 0 +} + +func init() { + proto.RegisterType((*Stack)(nil), "stack.Stack") + proto.RegisterType((*Frame)(nil), "stack.Frame") +} + +func init() { proto.RegisterFile("stack.proto", fileDescriptor_b44c07feb2ca0a5a) } + +var 
fileDescriptor_b44c07feb2ca0a5a = []byte{ + // 185 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x3c, 0x8f, 0x3d, 0xce, 0x82, 0x40, + 0x10, 0x86, 0xb3, 0xdf, 0xb2, 0x7c, 0x3a, 0x58, 0x98, 0xa9, 0x36, 0x56, 0x1b, 0x62, 0x41, 0x45, + 0xa1, 0x47, 0x30, 0xa1, 0x32, 0x16, 0x78, 0x02, 0x84, 0x35, 0xd9, 0xc8, 0x5f, 0x76, 0x09, 0xd7, + 0xf0, 0xca, 0x66, 0x06, 0xb4, 0x7b, 0xde, 0x9f, 0xe4, 0x9d, 0x81, 0x24, 0x4c, 0x55, 0xfd, 0xca, + 0x47, 0x3f, 0x4c, 0x03, 0x2a, 0x16, 0xe9, 0x5b, 0x80, 0xba, 0x13, 0xe1, 0x11, 0xe2, 0xa7, 0xaf, + 0x3a, 0x1b, 0xb4, 0x30, 0x32, 0x4b, 0x4e, 0xbb, 0x7c, 0xa9, 0x17, 0x64, 0x96, 0x6b, 0x86, 0x1a, + 0xfe, 0xeb, 0xae, 0x69, 0x5d, 0x6f, 0xf5, 0x9f, 0x91, 0xd9, 0xb6, 0xfc, 0x4a, 0xdc, 0x83, 0x1c, + 0x5d, 0xa3, 0xa5, 0x11, 0x99, 0x2a, 0x09, 0xa9, 0x3b, 0x5b, 0x1f, 0xdc, 0xd0, 0xeb, 0xc8, 0x08, + 0xea, 0xae, 0x12, 0x0f, 0xb0, 0xf1, 0x76, 0x76, 0x1c, 0x29, 0x8e, 0x7e, 0x3a, 0xbd, 0x80, 0xe2, + 0x49, 0x44, 0x88, 0x6e, 0x55, 0x67, 0xb5, 0xe0, 0x02, 0x33, 0x79, 0x85, 0x6b, 0x69, 0x9b, 0x3d, + 0x62, 0xf2, 0xae, 0x74, 0xcf, 0xb2, 0xcc, 0xfc, 0x88, 0xf9, 0xc9, 0xf3, 0x27, 0x00, 0x00, 0xff, + 0xff, 0xfd, 0x2c, 0xbb, 0xfb, 0xf3, 0x00, 0x00, 0x00, +} diff --git a/vendor/github.com/moby/buildkit/util/stack/stack.proto b/vendor/github.com/moby/buildkit/util/stack/stack.proto new file mode 100644 index 0000000000..9c63bc3626 --- /dev/null +++ b/vendor/github.com/moby/buildkit/util/stack/stack.proto @@ -0,0 +1,17 @@ +syntax = "proto3"; + +package stack; + +message Stack { + repeated Frame frames = 1; + repeated string cmdline = 2; + int32 pid = 3; + string version = 4; + string revision = 5; +} + +message Frame { + string Name = 1; + string File = 2; + int32 Line = 3; +} \ No newline at end of file
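
Note on the new grpcerrors package vendored above: ToGRPC/FromGRPC and the four interceptors are intended to be installed symmetrically on both ends of a gRPC connection so that stack traces and typed error details survive the round trip. The sketch below is not part of this diff; the helper name newServerAndClient and the addr parameter are placeholders, and it assumes a plain grpc.NewServer / grpc.DialContext setup.

package main

import (
	"context"

	"github.com/moby/buildkit/util/grpcerrors"
	"google.golang.org/grpc"
)

// newServerAndClient is a hypothetical helper showing where the interceptors
// from intercept.go would plug in.
func newServerAndClient(ctx context.Context, addr string) (*grpc.Server, *grpc.ClientConn, error) {
	// Server side: errors returned by handlers are converted to *status.Status
	// with stack and typed-error details attached (ToGRPC).
	srv := grpc.NewServer(
		grpc.UnaryInterceptor(grpcerrors.UnaryServerInterceptor),
		grpc.StreamInterceptor(grpcerrors.StreamServerInterceptor),
	)

	// Client side: status errors coming back from the server are rebuilt into
	// Go errors carrying the embedded stacks and typed details (FromGRPC).
	cc, err := grpc.DialContext(ctx, addr,
		grpc.WithInsecure(), // no TLS, for the sketch only
		grpc.WithUnaryInterceptor(grpcerrors.UnaryClientInterceptor),
		grpc.WithStreamInterceptor(grpcerrors.StreamClientInterceptor),
	)
	if err != nil {
		return nil, nil, err
	}
	return srv, cc, nil
}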
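
The stack package above converts github.com/pkg/errors stack traces into the protobuf Stack message defined in stack.proto so they can travel as gRPC status details. A small hypothetical usage sketch, not part of this diff, with placeholder version/revision values:

package main

import (
	"fmt"

	"github.com/moby/buildkit/util/stack"
	"github.com/pkg/errors"
)

func loadConfig() error {
	return errors.New("config not found") // pkg/errors records the call stack here
}

func main() {
	stack.SetVersionInfo("v0.0.0-example", "0000000") // placeholder values for the sketch

	err := stack.Enable(loadConfig()) // no-op here: the error already carries a local stack

	// Traces walks the Unwrap chain and converts every recorded stack into the
	// protobuf Stack message (Frames, Cmdline, Pid, Version, Revision).
	for _, st := range stack.Traces(err) {
		fmt.Printf("pid=%d version=%s frames=%d\n", st.Pid, st.Version, len(st.Frames))
	}

	// %+v via stack.Formatter prints the error followed by each stack's frames.
	fmt.Printf("%+v\n", stack.Formatter(err))
}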
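
The flightcontrol and resolver hunks above replace errors.Cause and os.IsNotExist-style comparisons with errors.Is, which also matches errors wrapped via %w or via wrapper types that implement Unwrap, such as the withStack/withCode types introduced in this diff. A standalone illustration using only the standard library (errRetry here is a stand-in for the sentinel used in flightcontrol):

package main

import (
	"errors"
	"fmt"
)

var errRetry = errors.New("retry")

func do() error {
	// Wrapping with %w keeps errRetry discoverable via errors.Is, whereas a
	// direct equality check (or pkg/errors.Cause, which only follows the
	// Causer interface) would miss it.
	return fmt.Errorf("temporary failure: %w", errRetry)
}

func main() {
	err := do()
	fmt.Println(err == errRetry)          // false
	fmt.Println(errors.Is(err, errRetry)) // true
}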