commit d6f5e972de

@@ -34,6 +34,8 @@ import (
 	"github.com/moby/buildkit/session"
 	"github.com/moby/buildkit/session/secrets/secretsprovider"
 	"github.com/moby/buildkit/session/sshforward/sshprovider"
+	"github.com/moby/buildkit/solver/errdefs"
+	"github.com/moby/buildkit/solver/pb"
 	"github.com/moby/buildkit/util/contentutil"
 	"github.com/moby/buildkit/util/entitlements"
 	"github.com/moby/buildkit/util/testutil"
@@ -113,6 +115,8 @@ func TestIntegration(t *testing.T) {
 		testTarExporterWithSocket,
 		testTarExporterSymlink,
 		testMultipleRegistryCacheImportExport,
+		testSourceMap,
+		testSourceMapFromRef,
 	}, mirrors)
 
 	integration.Run(t, []integration.Test{
@@ -2681,6 +2685,126 @@ func testReadonlyRootFS(t *testing.T, sb integration.Sandbox) {
 	checkAllReleasable(t, c, sb, true)
 }
 
+func testSourceMap(t *testing.T, sb integration.Sandbox) {
+	c, err := New(context.TODO(), sb.Address())
+	require.NoError(t, err)
+	defer c.Close()
+
+	sm1 := llb.NewSourceMap(nil, "foo", []byte("data1"))
+	sm2 := llb.NewSourceMap(nil, "bar", []byte("data2"))
+
+	st := llb.Scratch().Run(
+		llb.Shlex("not-exist"),
+		sm1.Location([]*pb.Range{{Start: pb.Position{Line: 7}}}),
+		sm2.Location([]*pb.Range{{Start: pb.Position{Line: 8}}}),
+		sm1.Location([]*pb.Range{{Start: pb.Position{Line: 9}}}),
+	)
+
+	def, err := st.Marshal(context.TODO())
+	require.NoError(t, err)
+
+	_, err = c.Solve(context.TODO(), def, SolveOpt{}, nil)
+	require.Error(t, err)
+
+	srcs := errdefs.Sources(err)
+	require.Equal(t, 3, len(srcs))
+
+	// Source errors are wrapped in the order provided as llb.ConstraintOpts, so
+	// when they are unwrapped, the first unwrapped error is the last location
+	// provided.
+	require.Equal(t, "foo", srcs[0].Info.Filename)
+	require.Equal(t, []byte("data1"), srcs[0].Info.Data)
+	require.Nil(t, srcs[0].Info.Definition)
+
+	require.Equal(t, 1, len(srcs[0].Ranges))
+	require.Equal(t, int32(9), srcs[0].Ranges[0].Start.Line)
+	require.Equal(t, int32(0), srcs[0].Ranges[0].Start.Character)
+
+	require.Equal(t, "bar", srcs[1].Info.Filename)
+	require.Equal(t, []byte("data2"), srcs[1].Info.Data)
+	require.Nil(t, srcs[1].Info.Definition)
+
+	require.Equal(t, 1, len(srcs[1].Ranges))
+	require.Equal(t, int32(8), srcs[1].Ranges[0].Start.Line)
+	require.Equal(t, int32(0), srcs[1].Ranges[0].Start.Character)
+
+	require.Equal(t, "foo", srcs[2].Info.Filename)
+	require.Equal(t, []byte("data1"), srcs[2].Info.Data)
+	require.Nil(t, srcs[2].Info.Definition)
+
+	require.Equal(t, 1, len(srcs[2].Ranges))
+	require.Equal(t, int32(7), srcs[2].Ranges[0].Start.Line)
+	require.Equal(t, int32(0), srcs[2].Ranges[0].Start.Character)
+
+}
+
+func testSourceMapFromRef(t *testing.T, sb integration.Sandbox) {
+	requiresLinux(t)
+	c, err := New(context.TODO(), sb.Address())
+	require.NoError(t, err)
+	defer c.Close()
+
+	srcState := llb.Scratch().File(
+		llb.Mkfile("foo", 0600, []byte("data")))
+	sm := llb.NewSourceMap(&srcState, "bar", []byte("bardata"))
+
+	frontend := func(ctx context.Context, c gateway.Client) (*gateway.Result, error) {
+		st := llb.Scratch().File(
+			llb.Mkdir("foo/bar", 0600), //fails because /foo doesn't exist
+			sm.Location([]*pb.Range{{Start: pb.Position{Line: 3, Character: 1}}}),
+		)
+
+		def, err := st.Marshal(context.TODO())
+		if err != nil {
+			return nil, err
+		}
+
+		res, err := c.Solve(ctx, gateway.SolveRequest{
+			Definition: def.ToPB(),
+		})
+		if err != nil {
+			return nil, err
+		}
+
+		ref, err := res.SingleRef()
+		if err != nil {
+			return nil, err
+		}
+
+		st2, err := ref.ToState()
+		if err != nil {
+			return nil, err
+		}
+
+		st = llb.Scratch().File(
+			llb.Copy(st2, "foo", "foo2"),
+		)
+
+		def, err = st.Marshal(context.TODO())
+		if err != nil {
+			return nil, err
+		}
+
+		return c.Solve(ctx, gateway.SolveRequest{
+			Definition: def.ToPB(),
+		})
+	}
+
+	_, err = c.Build(context.TODO(), SolveOpt{}, "", frontend, nil)
+	require.Error(t, err)
+
+	srcs := errdefs.Sources(err)
+	require.Equal(t, 1, len(srcs))
+
+	require.Equal(t, "bar", srcs[0].Info.Filename)
+	require.Equal(t, []byte("bardata"), srcs[0].Info.Data)
+	require.NotNil(t, srcs[0].Info.Definition)
+
+	require.Equal(t, 1, len(srcs[0].Ranges))
+	require.Equal(t, int32(3), srcs[0].Ranges[0].Start.Line)
+	require.Equal(t, int32(1), srcs[0].Ranges[0].Start.Character)
+}
+
 func testProxyEnv(t *testing.T, sb integration.Sandbox) {
 	c, err := New(context.TODO(), sb.Address())
 	require.NoError(t, err)
@@ -85,8 +85,8 @@ type errVertex struct {
 func (v *errVertex) Validate(context.Context) error {
 	return v.err
 }
-func (v *errVertex) Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) {
-	return "", nil, nil, v.err
+func (v *errVertex) Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
+	return "", nil, nil, nil, v.err
 }
 func (v *errVertex) Output() Output {
 	return nil
@@ -20,6 +20,7 @@ type DefinitionOp struct {
 	ops map[digest.Digest]*pb.Op
 	defs map[digest.Digest][]byte
 	metas map[digest.Digest]pb.OpMetadata
+	sources map[digest.Digest][]*SourceLocation
 	platforms map[digest.Digest]*specs.Platform
 	dgst digest.Digest
 	index pb.OutputIndex
@@ -49,6 +50,38 @@ func NewDefinitionOp(def *pb.Definition) (*DefinitionOp, error) {
 		platforms[dgst] = platform
 	}
 
+	srcs := map[digest.Digest][]*SourceLocation{}
+
+	if def.Source != nil {
+		sourceMaps := make([]*SourceMap, len(def.Source.Infos))
+		for i, info := range def.Source.Infos {
+			var st *State
+			sdef := info.Definition
+			if sdef != nil {
+				op, err := NewDefinitionOp(sdef)
+				if err != nil {
+					return nil, err
+				}
+				state := NewState(op)
+				st = &state
+			}
+			sourceMaps[i] = NewSourceMap(st, info.Filename, info.Data)
+		}
+
+		for dgst, locs := range def.Source.Locations {
+			for _, loc := range locs.Locations {
+				if loc.SourceIndex < 0 || int(loc.SourceIndex) >= len(sourceMaps) {
+					return nil, errors.Errorf("failed to find source map with index %d", loc.SourceIndex)
+				}
+
+				srcs[digest.Digest(dgst)] = append(srcs[digest.Digest(dgst)], &SourceLocation{
+					SourceMap: sourceMaps[int(loc.SourceIndex)],
+					Ranges: loc.Ranges,
+				})
+			}
+		}
+	}
+
 	var index pb.OutputIndex
 	if dgst != "" {
 		index = ops[dgst].Inputs[0].Index
@@ -59,6 +92,7 @@ func NewDefinitionOp(def *pb.Definition) (*DefinitionOp, error) {
 		ops: ops,
 		defs: defs,
 		metas: def.Metadata,
+		sources: srcs,
 		platforms: platforms,
 		dgst: dgst,
 		index: index,
@@ -110,20 +144,20 @@ func (d *DefinitionOp) Validate(context.Context) error {
 	return nil
 }
 
-func (d *DefinitionOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) {
+func (d *DefinitionOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
 	if d.dgst == "" {
-		return "", nil, nil, errors.Errorf("cannot marshal empty definition op")
+		return "", nil, nil, nil, errors.Errorf("cannot marshal empty definition op")
 	}
 
 	if err := d.Validate(ctx); err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 
 	d.mu.Lock()
 	defer d.mu.Unlock()
 
 	meta := d.metas[d.dgst]
-	return d.dgst, d.defs[d.dgst], &meta, nil
+	return d.dgst, d.defs[d.dgst], &meta, d.sources[d.dgst], nil
 
 }
 
@@ -81,7 +81,7 @@ func (e *ExecOp) AddMount(target string, source Output, opt ...MountOption) Outp
 		}
 		m.output = o
 	}
-	e.Store(nil, nil, nil)
+	e.Store(nil, nil, nil, nil)
 	e.isValidated = false
 	return m.output
 }
@@ -124,12 +124,12 @@ func (e *ExecOp) Validate(ctx context.Context) error {
 	return nil
 }
 
-func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) {
+func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
 	if e.Cached(c) {
 		return e.Load()
 	}
 	if err := e.Validate(ctx); err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 	// make sure mounts are sorted
 	sort.Slice(e.mounts, func(i, j int) bool {
@@ -138,7 +138,7 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
 
 	env, err := getEnv(e.base)(ctx)
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 
 	if len(e.ssh) > 0 {
@@ -161,17 +161,17 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
 
 	args, err := getArgs(e.base)(ctx)
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 
 	cwd, err := getDir(e.base)(ctx)
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 
 	user, err := getUser(e.base)(ctx)
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 
 	meta := &pb.Meta{
@@ -182,7 +182,7 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
 	}
 	extraHosts, err := getExtraHosts(e.base)(ctx)
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 	if len(extraHosts) > 0 {
 		hosts := make([]*pb.HostIP, len(extraHosts))
@@ -194,12 +194,12 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
 
 	network, err := getNetwork(e.base)(ctx)
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 
 	security, err := getSecurity(e.base)(ctx)
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 
 	peo := &pb.ExecOp{
@@ -252,7 +252,7 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
 	if e.constraints.Platform == nil {
 		p, err := getPlatform(e.base)(ctx)
 		if err != nil {
-			return "", nil, nil, err
+			return "", nil, nil, nil, err
 		}
 		e.constraints.Platform = p
 	}
@@ -267,11 +267,11 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
 		inputIndex := pb.InputIndex(len(pop.Inputs))
 		if m.source != nil {
 			if m.tmpfs {
-				return "", nil, nil, errors.Errorf("tmpfs mounts must use scratch")
+				return "", nil, nil, nil, errors.Errorf("tmpfs mounts must use scratch")
 			}
 			inp, err := m.source.ToInput(ctx, c)
 			if err != nil {
-				return "", nil, nil, err
+				return "", nil, nil, nil, err
 			}
 
 			newInput := true
@@ -356,9 +356,9 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
 
 	dt, err := pop.Marshal()
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
-	e.Store(dt, md, c)
+	e.Store(dt, md, e.constraints.SourceLocations, c)
 	return e.Load()
 }
 
@@ -649,12 +649,12 @@ func (ms *marshalState) add(fa *FileAction, c *Constraints) (*fileActionState, e
 	return st, nil
 }
 
-func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) {
+func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
 	if f.Cached(c) {
 		return f.Load()
 	}
 	if err := f.Validate(ctx); err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 
 	addCap(&f.constraints, pb.CapFileBase)
@@ -669,7 +669,7 @@ func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
 	state := newMarshalState(ctx)
 	_, err := state.add(f.action, c)
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 	pop.Inputs = state.inputs
 
@@ -683,13 +683,13 @@ func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
 		if st.fa.state != nil {
 			parent, err = st.fa.state.GetDir(ctx)
 			if err != nil {
-				return "", nil, nil, err
+				return "", nil, nil, nil, err
 			}
 		}
 
 		action, err := st.action.toProtoAction(ctx, parent, st.base)
 		if err != nil {
-			return "", nil, nil, err
+			return "", nil, nil, nil, err
 		}
 
 		pfo.Actions = append(pfo.Actions, &pb.FileAction{
@@ -702,9 +702,9 @@ func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
 
 	dt, err := pop.Marshal()
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
-	f.Store(dt, md, c)
+	f.Store(dt, md, f.constraints.SourceLocations, c)
 	return f.Load()
 }
 
@@ -33,7 +33,7 @@ type build struct {
 }
 
 func (b *build) ToInput(ctx context.Context, c *llb.Constraints) (*pb.Input, error) {
-	dgst, _, _, err := b.Marshal(ctx, c)
+	dgst, _, _, _, err := b.Marshal(ctx, c)
 	if err != nil {
 		return nil, err
 	}
@@ -48,7 +48,7 @@ func (b *build) Validate(context.Context) error {
 	return nil
 }
 
-func (b *build) Marshal(ctx context.Context, c *llb.Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) {
+func (b *build) Marshal(ctx context.Context, c *llb.Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*llb.SourceLocation, error) {
 	if b.Cached(c) {
 		return b.Load()
 	}
@@ -76,16 +76,16 @@ func (b *build) Marshal(ctx context.Context, c *llb.Constraints) (digest.Digest,
 
 	inp, err := b.source.ToInput(ctx, c)
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 
 	pop.Inputs = append(pop.Inputs, inp)
 
 	dt, err := pop.Marshal()
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
-	b.Store(dt, md, c)
+	b.Store(dt, md, b.constraints.SourceLocations, c)
 	return b.Load()
 }
 
@@ -13,7 +13,7 @@ import (
 func TestMarshal(t *testing.T) {
 	t.Parallel()
 	b := NewBuildOp(newDummyOutput("foobar"), WithFilename("myfilename"))
-	dgst, dt, opMeta, err := b.Marshal(context.TODO(), &llb.Constraints{})
+	dgst, dt, opMeta, _, err := b.Marshal(context.TODO(), &llb.Constraints{})
 	_ = opMeta
 	require.NoError(t, err)
 
@@ -14,21 +14,24 @@ import (
 type Definition struct {
 	Def [][]byte
 	Metadata map[digest.Digest]pb.OpMetadata
+	Source *pb.Source
 }
 
 func (def *Definition) ToPB() *pb.Definition {
-	md := make(map[digest.Digest]pb.OpMetadata)
+	md := make(map[digest.Digest]pb.OpMetadata, len(def.Metadata))
 	for k, v := range def.Metadata {
 		md[k] = v
 	}
 	return &pb.Definition{
 		Def: def.Def,
+		Source: def.Source,
 		Metadata: md,
 	}
 }
 
 func (def *Definition) FromPB(x *pb.Definition) {
 	def.Def = x.Def
+	def.Source = x.Source
 	def.Metadata = make(map[digest.Digest]pb.OpMetadata)
 	for k, v := range x.Metadata {
 		def.Metadata[k] = v
@@ -95,18 +98,20 @@ type MarshalCache struct {
 	digest digest.Digest
 	dt []byte
 	md *pb.OpMetadata
+	srcs []*SourceLocation
 	constraints *Constraints
 }
 
 func (mc *MarshalCache) Cached(c *Constraints) bool {
 	return mc.dt != nil && mc.constraints == c
 }
-func (mc *MarshalCache) Load() (digest.Digest, []byte, *pb.OpMetadata, error) {
-	return mc.digest, mc.dt, mc.md, nil
+func (mc *MarshalCache) Load() (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
+	return mc.digest, mc.dt, mc.md, mc.srcs, nil
 }
-func (mc *MarshalCache) Store(dt []byte, md *pb.OpMetadata, c *Constraints) {
+func (mc *MarshalCache) Store(dt []byte, md *pb.OpMetadata, srcs []*SourceLocation, c *Constraints) {
 	mc.digest = digest.FromBytes(dt)
 	mc.dt = dt
 	mc.md = md
 	mc.constraints = c
+	mc.srcs = srcs
 }
@@ -44,12 +44,12 @@ func (s *SourceOp) Validate(ctx context.Context) error {
 	return nil
 }
 
-func (s *SourceOp) Marshal(ctx context.Context, constraints *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error) {
+func (s *SourceOp) Marshal(ctx context.Context, constraints *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
 	if s.Cached(constraints) {
 		return s.Load()
 	}
 	if err := s.Validate(ctx); err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 
 	if strings.HasPrefix(s.id, "local://") {
@@ -74,10 +74,10 @@ func (s *SourceOp) Marshal(ctx context.Context, constraints *Constraints) (diges
 
 	dt, err := proto.Marshal()
 	if err != nil {
-		return "", nil, nil, err
+		return "", nil, nil, nil, err
 	}
 
-	s.Store(dt, md, constraints)
+	s.Store(dt, md, s.constraints.SourceLocations, constraints)
 	return s.Load()
 }
 
@@ -0,0 +1,111 @@
+package llb
+
+import (
+	"context"
+
+	"github.com/moby/buildkit/solver/pb"
+	"github.com/opencontainers/go-digest"
+)
+
+type SourceMap struct {
+	State *State
+	Definition *Definition
+	Filename string
+	Data []byte
+}
+
+func NewSourceMap(st *State, filename string, dt []byte) *SourceMap {
+	return &SourceMap{
+		State: st,
+		Filename: filename,
+		Data: dt,
+	}
+}
+
+func (s *SourceMap) Location(r []*pb.Range) ConstraintsOpt {
+	return constraintsOptFunc(func(c *Constraints) {
+		if s == nil {
+			return
+		}
+		c.SourceLocations = append(c.SourceLocations, &SourceLocation{
+			SourceMap: s,
+			Ranges: r,
+		})
+	})
+}
+
+type SourceLocation struct {
+	SourceMap *SourceMap
+	Ranges []*pb.Range
+}
+
+type sourceMapCollector struct {
+	maps []*SourceMap
+	index map[*SourceMap]int
+	locations map[digest.Digest][]*SourceLocation
+}
+
+func newSourceMapCollector() *sourceMapCollector {
+	return &sourceMapCollector{
+		index: map[*SourceMap]int{},
+		locations: map[digest.Digest][]*SourceLocation{},
+	}
+}
+
+func (smc *sourceMapCollector) Add(dgst digest.Digest, ls []*SourceLocation) {
+	for _, l := range ls {
+		idx, ok := smc.index[l.SourceMap]
+		if !ok {
+			idx = len(smc.maps)
+			smc.maps = append(smc.maps, l.SourceMap)
+		}
+		smc.index[l.SourceMap] = idx
+	}
+	smc.locations[dgst] = ls
+}
+
+func (smc *sourceMapCollector) Marshal(ctx context.Context, co ...ConstraintsOpt) (*pb.Source, error) {
+	s := &pb.Source{
+		Locations: make(map[string]*pb.Locations),
+	}
+	for _, m := range smc.maps {
+		def := m.Definition
+		if def == nil && m.State != nil {
+			var err error
+			def, err = m.State.Marshal(ctx, co...)
+			if err != nil {
+				return nil, err
+			}
+			m.Definition = def
+		}
+
+		info := &pb.SourceInfo{
+			Data: m.Data,
+			Filename: m.Filename,
+		}
+
+		if def != nil {
+			info.Definition = def.ToPB()
+		}
+
+		s.Infos = append(s.Infos, info)
+	}
+
+	for dgst, locs := range smc.locations {
+		pbLocs, ok := s.Locations[dgst.String()]
+		if !ok {
+			pbLocs = &pb.Locations{}
+		}
+
+		for _, loc := range locs {
+			pbLocs.Locations = append(pbLocs.Locations, &pb.Location{
+				SourceIndex: int32(smc.index[loc.SourceMap]),
+				Ranges: loc.Ranges,
+			})
+		}
+
+		s.Locations[dgst.String()] = pbLocs
+	}
+
+	return s, nil
+}
@@ -24,7 +24,7 @@ type Output interface {
 
 type Vertex interface {
 	Validate(context.Context) error
-	Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, error)
+	Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error)
 	Output() Output
 	Inputs() []Output
 }
@@ -124,7 +124,9 @@ func (s State) Marshal(ctx context.Context, co ...ConstraintsOpt) (*Definition,
 		o.SetConstraintsOption(c)
 	}
 
-	def, err := marshal(ctx, s.Output().Vertex(ctx), def, map[digest.Digest]struct{}{}, map[Vertex]struct{}{}, c)
+	smc := newSourceMapCollector()
+
+	def, err := marshal(ctx, s.Output().Vertex(ctx), def, smc, map[digest.Digest]struct{}{}, map[Vertex]struct{}{}, c)
 	if err != nil {
 		return def, err
 	}
@@ -159,23 +161,28 @@ func (s State) Marshal(ctx context.Context, co ...ConstraintsOpt) (*Definition,
 	}
 
 	def.Metadata[dgst] = md
+	sm, err := smc.Marshal(ctx, co...)
+	if err != nil {
+		return nil, err
+	}
+	def.Source = sm
 
 	return def, nil
 }
 
-func marshal(ctx context.Context, v Vertex, def *Definition, cache map[digest.Digest]struct{}, vertexCache map[Vertex]struct{}, c *Constraints) (*Definition, error) {
+func marshal(ctx context.Context, v Vertex, def *Definition, s *sourceMapCollector, cache map[digest.Digest]struct{}, vertexCache map[Vertex]struct{}, c *Constraints) (*Definition, error) {
 	if _, ok := vertexCache[v]; ok {
 		return def, nil
 	}
 	for _, inp := range v.Inputs() {
 		var err error
-		def, err = marshal(ctx, inp.Vertex(ctx), def, cache, vertexCache, c)
+		def, err = marshal(ctx, inp.Vertex(ctx), def, s, cache, vertexCache, c)
 		if err != nil {
 			return def, err
 		}
 	}
 
-	dgst, dt, opMeta, err := v.Marshal(ctx, c)
+	dgst, dt, opMeta, sls, err := v.Marshal(ctx, c)
 	if err != nil {
 		return def, err
 	}
@@ -186,6 +193,7 @@ func marshal(ctx context.Context, v Vertex, def *Definition, cache map[digest.Di
 	if _, ok := cache[dgst]; ok {
 		return def, nil
 	}
+	s.Add(dgst, sls)
 	def.Def = append(def.Def, dt)
 	cache[dgst] = struct{}{}
 	return def, nil
@@ -367,7 +375,7 @@ func (o *output) ToInput(ctx context.Context, c *Constraints) (*pb.Input, error)
 			return nil, err
 		}
 	}
-	dgst, _, _, err := o.vertex.Marshal(ctx, c)
+	dgst, _, _, _, err := o.vertex.Marshal(ctx, c)
 	if err != nil {
 		return nil, err
 	}
@@ -514,6 +522,7 @@ type Constraints struct {
 	Metadata pb.OpMetadata
 	LocalUniqueID string
 	Caps *apicaps.CapSet
+	SourceLocations []*SourceLocation
 }
 
 func Platform(p specs.Platform) ConstraintsOpt {
@@ -4,6 +4,8 @@ import (
 	"context"
 	"testing"
 
+	"github.com/moby/buildkit/solver/pb"
+	digest "github.com/opencontainers/go-digest"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -53,6 +55,52 @@ func TestFormattingPatterns(t *testing.T) {
 	assert.Equal(t, "/foo/bar1", getDirHelper(t, s2))
 }
 
+func TestStateSourceMapMarshal(t *testing.T) {
+	t.Parallel()
+
+	sm1 := NewSourceMap(nil, "foo", []byte("data1"))
+	sm2 := NewSourceMap(nil, "bar", []byte("data2"))
+
+	s := Image(
+		"myimage",
+		sm1.Location([]*pb.Range{{Start: pb.Position{Line: 7}}}),
+		sm2.Location([]*pb.Range{{Start: pb.Position{Line: 8}}}),
+		sm1.Location([]*pb.Range{{Start: pb.Position{Line: 9}}}),
+	)
+
+	def, err := s.Marshal(context.TODO())
+	require.NoError(t, err)
+
+	require.Equal(t, 2, len(def.Def))
+	dgst := digest.FromBytes(def.Def[0])
+
+	require.Equal(t, 2, len(def.Source.Infos))
+	require.Equal(t, 1, len(def.Source.Locations))
+
+	require.Equal(t, "foo", def.Source.Infos[0].Filename)
+	require.Equal(t, []byte("data1"), def.Source.Infos[0].Data)
+	require.Nil(t, def.Source.Infos[0].Definition)
+
+	require.Equal(t, "bar", def.Source.Infos[1].Filename)
+	require.Equal(t, []byte("data2"), def.Source.Infos[1].Data)
+	require.Nil(t, def.Source.Infos[1].Definition)
+
+	require.NotNil(t, def.Source.Locations[dgst.String()])
+	require.Equal(t, 3, len(def.Source.Locations[dgst.String()].Locations))
+
+	require.Equal(t, int32(0), def.Source.Locations[dgst.String()].Locations[0].SourceIndex)
+	require.Equal(t, 1, len(def.Source.Locations[dgst.String()].Locations[0].Ranges))
+	require.Equal(t, int32(7), def.Source.Locations[dgst.String()].Locations[0].Ranges[0].Start.Line)
+
+	require.Equal(t, int32(1), def.Source.Locations[dgst.String()].Locations[1].SourceIndex)
+	require.Equal(t, 1, len(def.Source.Locations[dgst.String()].Locations[1].Ranges))
+	require.Equal(t, int32(8), def.Source.Locations[dgst.String()].Locations[1].Ranges[0].Start.Line)
+
+	require.Equal(t, int32(0), def.Source.Locations[dgst.String()].Locations[2].SourceIndex)
+	require.Equal(t, 1, len(def.Source.Locations[dgst.String()].Locations[2].Ranges))
+	require.Equal(t, int32(9), def.Source.Locations[dgst.String()].Locations[2].Ranges[0].Start.Line)
+}
+
 func getEnvHelper(t *testing.T, s State, k string) (string, bool) {
 	t.Helper()
 	v, ok, err := s.GetEnv(context.TODO(), k)
@@ -134,11 +134,9 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) {
 
 	var buildContext *llb.State
 	isNotLocalContext := false
-	isNotLocalDockerfile := false
 	if st, ok := detectGitContext(opts[localNameContext], opts[keyContextKeepGitDir]); ok {
 		if !forceLocalDockerfile {
 			src = *st
-			isNotLocalDockerfile = true
 		}
 		buildContext = st
 	} else if httpPrefix.MatchString(opts[localNameContext]) {
@@ -188,7 +186,6 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) {
 		bc := unpack.AddMount("/out", llb.Scratch())
 		if !forceLocalDockerfile {
 			src = bc
-			isNotLocalDockerfile = true
 		}
 		buildContext = &bc
 	}
@@ -196,7 +193,6 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) {
 		filename = "context"
 		if !forceLocalDockerfile {
 			src = httpContext
-			isNotLocalDockerfile = true
 		}
 		buildContext = &httpContext
 		isNotLocalContext = true
@@ -211,7 +207,6 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) {
 		inputDockerfile, ok := inputs[DefaultLocalNameDockerfile]
 		if ok {
 			src = inputDockerfile
-			isNotLocalDockerfile = true
 		}
 	}
 
@@ -233,6 +228,8 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) {
 		return nil, errors.Wrapf(err, "failed to marshal local source")
 	}
 
+	var sourceMap *llb.SourceMap
+
 	eg, ctx2 := errgroup.WithContext(ctx)
 	var dtDockerfile []byte
 	var dtDockerignore []byte
@@ -257,6 +254,9 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) {
 			return errors.Wrapf(err, "failed to read dockerfile")
 		}
 
+		sourceMap = llb.NewSourceMap(&src, filename, dtDockerfile)
+		sourceMap.Definition = def
+
 		dt, err := ref.ReadFile(ctx2, client.ReadRequest{
 			Filename: filename + ".dockerignore",
 		})
@@ -317,9 +317,13 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) {
 	}
 
 	if _, ok := opts["cmdline"]; !ok {
-		ref, cmdline, ok := dockerfile2llb.DetectSyntax(bytes.NewBuffer(dtDockerfile))
+		ref, cmdline, loc, ok := dockerfile2llb.DetectSyntax(bytes.NewBuffer(dtDockerfile))
 		if ok {
-			return forwardGateway(ctx, c, ref, cmdline)
+			res, err := forwardGateway(ctx, c, ref, cmdline)
+			if err != nil && len(errdefs.Sources(err)) == 0 {
+				return nil, wrapSource(err, sourceMap, loc)
+			}
+			return res, err
 		}
 	}
 
@@ -349,10 +353,7 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) {
 	defer func() {
 		var el *parser.ErrorLocation
 		if errors.As(err, &el) {
-			if isNotLocalDockerfile {
-				localNameDockerfile = ""
-			}
-			err = wrapSource(err, dtDockerfile, filename, localNameDockerfile, el.Location)
+			err = wrapSource(err, sourceMap, el.Location)
 		}
 	}()
 	st, img, err := dockerfile2llb.Dockerfile2LLB(ctx, dtDockerfile, dockerfile2llb.ConvertOpt{
@@ -373,6 +374,7 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) {
 		ForceNetMode: defaultNetMode,
 		OverrideCopyImage: opts[keyOverrideCopyImage],
 		LLBCaps: &caps,
+		SourceMap: sourceMap,
 	})
 
 	if err != nil {
@@ -656,22 +658,27 @@ func scopeToSubDir(c *llb.State, fileop bool, dir string) *llb.State {
 	return &bc
 }
 
-func wrapSource(err error, dt []byte, filename, local string, locations []parser.Range) error {
-	s := errdefs.Source{
-		Data: dt,
-		Filename: filename,
-		Local: local,
-		Locations: make([]*errdefs.Range, 0, len(locations)),
+func wrapSource(err error, sm *llb.SourceMap, ranges []parser.Range) error {
+	if sm == nil {
+		return err
 	}
-	for _, l := range locations {
-		s.Locations = append(s.Locations, &errdefs.Range{
-			Start: &errdefs.Position{
-				Line: int32(l.Start.Line),
-				Character: int32(l.Start.Character),
+	s := errdefs.Source{
+		Info: &pb.SourceInfo{
+			Data: sm.Data,
+			Filename: sm.Filename,
+			Definition: sm.Definition.ToPB(),
+		},
+		Ranges: make([]*pb.Range, 0, len(ranges)),
+	}
+	for _, r := range ranges {
+		s.Ranges = append(s.Ranges, &pb.Range{
+			Start: pb.Position{
+				Line: int32(r.Start.Line),
+				Character: int32(r.Start.Character),
 			},
-			End: &errdefs.Position{
-				Line: int32(l.End.Line),
-				Character: int32(l.End.Character),
+			End: pb.Position{
+				Line: int32(r.End.Line),
+				Character: int32(r.End.Character),
 			},
 		})
 	}
@@ -60,6 +60,7 @@ type ConvertOpt struct {
 	OverrideCopyImage string
 	LLBCaps *apicaps.CapSet
 	ContextLocalName string
+	SourceMap *llb.SourceMap
 }
 
 func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State, *Image, error) {
@@ -278,7 +279,13 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,
 			if isScratch {
 				d.state = llb.Scratch()
 			} else {
-				d.state = llb.Image(d.stage.BaseName, dfCmd(d.stage.SourceCode), llb.Platform(*platform), opt.ImageResolveMode, llb.WithCustomName(prefixCommand(d, "FROM "+d.stage.BaseName, opt.PrefixPlatform, platform)))
+				d.state = llb.Image(d.stage.BaseName,
+					dfCmd(d.stage.SourceCode),
+					llb.Platform(*platform),
+					opt.ImageResolveMode,
+					llb.WithCustomName(prefixCommand(d, "FROM "+d.stage.BaseName, opt.PrefixPlatform, platform)),
+					location(opt.SourceMap, d.stage.Location),
+				)
 			}
 			d.platform = platform
 			return nil
@@ -340,6 +347,7 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,
 		extraHosts: opt.ExtraHosts,
 		copyImage: opt.OverrideCopyImage,
 		llbCaps: opt.LLBCaps,
+		sourceMap: opt.SourceMap,
 	}
 	if opt.copyImage == "" {
 		opt.copyImage = DefaultCopyImage
@@ -421,7 +429,7 @@ func toCommand(ic instructions.Command, allDispatchStates *dispatchStates) (comm
 			stn, ok = allDispatchStates.findStateByName(c.From)
 			if !ok {
 				stn = &dispatchState{
-					stage: instructions.Stage{BaseName: c.From},
+					stage: instructions.Stage{BaseName: c.From, Location: ic.Location()},
 					deps: make(map[*dispatchState]struct{}),
 					unregistered: true,
 				}
@@ -457,6 +465,7 @@ type dispatchOpt struct {
 	extraHosts []llb.HostIP
 	copyImage string
 	llbCaps *apicaps.CapSet
+	sourceMap *llb.SourceMap
 }
 
 func dispatch(d *dispatchState, cmd command, opt dispatchOpt) error {
@@ -484,7 +493,7 @@ func dispatch(d *dispatchState, cmd command, opt dispatchOpt) error {
 	case *instructions.WorkdirCommand:
 		err = dispatchWorkdir(d, c, true, &opt)
 	case *instructions.AddCommand:
-		err = dispatchCopy(d, c.SourcesAndDest, opt.buildContext, true, c, c.Chown, opt)
+		err = dispatchCopy(d, c.SourcesAndDest, opt.buildContext, true, c, c.Chown, c.Location(), opt)
 		if err == nil {
 			for _, src := range c.Sources() {
 				if !strings.HasPrefix(src, "http://") && !strings.HasPrefix(src, "https://") {
@@ -519,7 +528,7 @@ func dispatch(d *dispatchState, cmd command, opt dispatchOpt) error {
 		if len(cmd.sources) != 0 {
 			l = cmd.sources[0].state
 		}
-		err = dispatchCopy(d, c.SourcesAndDest, l, false, c, c.Chown, opt)
+		err = dispatchCopy(d, c.SourcesAndDest, l, false, c, c.Chown, c.Location(), opt)
 		if err == nil && len(cmd.sources) == 0 {
 			for _, src := range c.Sources() {
 				d.ctxPaths[path.Join("/", filepath.ToSlash(src))] = struct{}{}
@@ -634,7 +643,7 @@ func dispatchRun(d *dispatchState, c *instructions.RunCommand, proxy *llb.ProxyE
 	if err != nil {
 		return err
 	}
-	opt := []llb.RunOption{llb.Args(args), dfCmd(c)}
+	opt := []llb.RunOption{llb.Args(args), dfCmd(c), location(dopt.sourceMap, c.Location())}
 	if d.ignoreCache {
 		opt = append(opt, llb.IgnoreCache)
 	}
@@ -702,7 +711,10 @@ func dispatchWorkdir(d *dispatchState, c *instructions.WorkdirCommand, commit bo
 		if err != nil {
 			return err
 		}
-		d.state = d.state.File(llb.Mkdir(wd, 0755, mkdirOpt...), llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, c.String(), env)), d.prefixPlatform, &platform)))
+		d.state = d.state.File(llb.Mkdir(wd, 0755, mkdirOpt...),
+			llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, c.String(), env)), d.prefixPlatform, &platform)),
+			location(opt.sourceMap, c.Location()),
+		)
 		withLayer = true
 	}
 	return commitToHistory(&d.image, "WORKDIR "+wd, withLayer, nil)
@@ -710,7 +722,7 @@ func dispatchWorkdir(d *dispatchState, c *instructions.WorkdirCommand, commit bo
 	return nil
 }
 
-func dispatchCopyFileOp(d *dispatchState, c instructions.SourcesAndDest, sourceState llb.State, isAddCommand bool, cmdToPrint fmt.Stringer, chown string, opt dispatchOpt) error {
+func dispatchCopyFileOp(d *dispatchState, c instructions.SourcesAndDest, sourceState llb.State, isAddCommand bool, cmdToPrint fmt.Stringer, chown string, loc []parser.Range, opt dispatchOpt) error {
 	pp, err := pathRelativeToWorkingDir(d.state, c.Dest())
 	if err != nil {
 		return err
@@ -796,7 +808,10 @@ func dispatchCopyFileOp(d *dispatchState, c instructions.SourcesAndDest, sourceS
 		return err
 	}
 
-	fileOpt := []llb.ConstraintsOpt{llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, cmdToPrint.String(), env)), d.prefixPlatform, &platform))}
+	fileOpt := []llb.ConstraintsOpt{
+		llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, cmdToPrint.String(), env)), d.prefixPlatform, &platform)),
+		location(opt.sourceMap, loc),
+	}
 	if d.ignoreCache {
 		fileOpt = append(fileOpt, llb.IgnoreCache)
 	}
@@ -805,9 +820,9 @@ func dispatchCopyFileOp(d *dispatchState, c instructions.SourcesAndDest, sourceS
 	return commitToHistory(&d.image, commitMessage.String(), true, &d.state)
 }
 
-func dispatchCopy(d *dispatchState, c instructions.SourcesAndDest, sourceState llb.State, isAddCommand bool, cmdToPrint fmt.Stringer, chown string, opt dispatchOpt) error {
+func dispatchCopy(d *dispatchState, c instructions.SourcesAndDest, sourceState llb.State, isAddCommand bool, cmdToPrint fmt.Stringer, chown string, loc []parser.Range, opt dispatchOpt) error {
 	if useFileOp(opt.buildArgValues, opt.llbCaps) {
-		return dispatchCopyFileOp(d, c, sourceState, isAddCommand, cmdToPrint, chown, opt)
+		return dispatchCopyFileOp(d, c, sourceState, isAddCommand, cmdToPrint, chown, loc, opt)
 	}
 
 	img := llb.Image(opt.copyImage, llb.MarkImageInternal, llb.Platform(opt.buildPlatforms[0]), WithInternalName("helper image for file operations"))
@@ -893,7 +908,14 @@ func dispatchCopy(d *dispatchState, c instructions.SourcesAndDest, sourceState l
 		return err
 	}
 
-	runOpt := []llb.RunOption{llb.Args(args), llb.Dir("/dest"), llb.ReadonlyRootFS(), dfCmd(cmdToPrint), llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, cmdToPrint.String(), env)), d.prefixPlatform, &platform))}
+	runOpt := []llb.RunOption{
+		llb.Args(args),
+		llb.Dir("/dest"),
+		llb.ReadonlyRootFS(),
+		dfCmd(cmdToPrint),
+		llb.WithCustomName(prefixCommand(d, uppercaseCmd(processCmdEnv(opt.shlex, cmdToPrint.String(), env)), d.prefixPlatform, &platform)),
+		location(opt.sourceMap, loc),
+	}
 	if d.ignoreCache {
 		runOpt = append(runOpt, llb.IgnoreCache)
 	}
@@ -1361,3 +1383,20 @@ func useFileOp(args map[string]string, caps *apicaps.CapSet) bool {
 	}
 	return enabled && caps != nil && caps.Supports(pb.CapFileBase) == nil
 }
+
+func location(sm *llb.SourceMap, locations []parser.Range) llb.ConstraintsOpt {
+	loc := make([]*pb.Range, 0, len(locations))
+	for _, l := range locations {
+		loc = append(loc, &pb.Range{
+			Start: pb.Position{
+				Line: int32(l.Start.Line),
+				Character: int32(l.Start.Character),
+			},
+			End: pb.Position{
+				Line: int32(l.End.Line),
+				Character: int32(l.End.Character),
+			},
+		})
+	}
+	return sm.Location(loc)
+}
@@ -5,34 +5,51 @@ import (
 	"io"
 	"regexp"
 	"strings"
+
+	"github.com/moby/buildkit/frontend/dockerfile/parser"
 )
 
 const keySyntax = "syntax"
 
 var reDirective = regexp.MustCompile(`^#\s*([a-zA-Z][a-zA-Z0-9]*)\s*=\s*(.+?)\s*$`)
 
-func DetectSyntax(r io.Reader) (string, string, bool) {
+type Directive struct {
+	Name string
+	Value string
+	Location []parser.Range
+}
+
+func DetectSyntax(r io.Reader) (string, string, []parser.Range, bool) {
 	directives := ParseDirectives(r)
 	if len(directives) == 0 {
-		return "", "", false
+		return "", "", nil, false
 	}
 	v, ok := directives[keySyntax]
 	if !ok {
-		return "", "", false
+		return "", "", nil, false
 	}
-	p := strings.SplitN(v, " ", 2)
-	return p[0], v, true
+	p := strings.SplitN(v.Value, " ", 2)
+	return p[0], v.Value, v.Location, true
 }
 
-func ParseDirectives(r io.Reader) map[string]string {
-	m := map[string]string{}
+func ParseDirectives(r io.Reader) map[string]Directive {
+	m := map[string]Directive{}
 	s := bufio.NewScanner(r)
+	var l int
 	for s.Scan() {
+		l++
 		match := reDirective.FindStringSubmatch(s.Text())
 		if len(match) == 0 {
 			return m
 		}
-		m[strings.ToLower(match[1])] = match[2]
+		m[strings.ToLower(match[1])] = Directive{
+			Name: match[1],
+			Value: match[2],
+			Location: []parser.Range{{
+				Start: parser.Position{Line: l},
+				End: parser.Position{Line: l},
+			}},
+		}
 	}
 	return m
 }
@@ -22,11 +22,11 @@ func TestDirectives(t *testing.T) {
 
 	v, ok := d["escape"]
 	require.True(t, ok)
-	require.Equal(t, v, "\\")
+	require.Equal(t, v.Value, "\\")
 
 	v, ok = d["key"]
 	require.True(t, ok)
-	require.Equal(t, v, "FOO bar")
+	require.Equal(t, v.Value, "FOO bar")
 
 	// for some reason Moby implementation in case insensitive for escape
 	dt = `# EScape=\
@@ -40,11 +40,11 @@ func TestDirectives(t *testing.T) {
 
 	v, ok = d["escape"]
 	require.True(t, ok)
-	require.Equal(t, v, "\\")
+	require.Equal(t, v.Value, "\\")
 
 	v, ok = d["key"]
 	require.True(t, ok)
-	require.Equal(t, v, "FOO bar")
+	require.Equal(t, v.Value, "FOO bar")
 }
 
 func TestSyntaxDirective(t *testing.T) {
@@ -54,15 +54,17 @@ func TestSyntaxDirective(t *testing.T) {
 FROM busybox
 `
 
-	ref, cmdline, ok := DetectSyntax(bytes.NewBuffer([]byte(dt)))
+	ref, cmdline, loc, ok := DetectSyntax(bytes.NewBuffer([]byte(dt)))
 	require.True(t, ok)
 	require.Equal(t, ref, "dockerfile:experimental")
 	require.Equal(t, cmdline, "dockerfile:experimental // opts")
+	require.Equal(t, 1, loc[0].Start.Line)
+	require.Equal(t, 1, loc[0].End.Line)
 
 	dt = `FROM busybox
 RUN ls
 `
-	ref, cmdline, ok = DetectSyntax(bytes.NewBuffer([]byte(dt)))
+	ref, cmdline, _, ok = DetectSyntax(bytes.NewBuffer([]byte(dt)))
 	require.False(t, ok)
 	require.Equal(t, ref, "")
 	require.Equal(t, cmdline, "")
@@ -100,6 +100,7 @@ var allTests = []integration.Test{
 	testOnBuildCleared,
 	testFrontendUseForwardedSolveResults,
 	testFrontendInputs,
+	testErrorsSourceMap,
 }
 
 var fileOpTests = []integration.Test{
@@ -0,0 +1,117 @@
+package dockerfile
+
+import (
+	"context"
+	"fmt"
+	"os"
+	"testing"
+
+	"github.com/containerd/continuity/fs/fstest"
+	"github.com/moby/buildkit/client"
+	"github.com/moby/buildkit/frontend/dockerfile/builder"
+	"github.com/moby/buildkit/solver/errdefs"
+	"github.com/moby/buildkit/util/testutil/integration"
+	"github.com/stretchr/testify/require"
+)
+
+func testErrorsSourceMap(t *testing.T, sb integration.Sandbox) {
+	f := getFrontend(t, sb)
+
+	tcases := []struct {
+		name string
+		dockerfile string
+		errorLine []int
+	}{
+		{
+			name: "invalidenv",
+			dockerfile: `from alpine
+env`,
+			errorLine: []int{2},
+		},
+		{
+			name: "invalidsyntax",
+			dockerfile: `#syntax=foobar
+from alpine`,
+			errorLine: []int{1},
+		},
+		{
+			name: "invalidrun",
+			dockerfile: `from scratch
+env foo=bar
+run what`,
+			errorLine: []int{3},
+		},
+		{
+			name: "invalidcopy",
+			dockerfile: `from scratch
+env foo=bar
+copy foo bar
+env bar=baz`,
+			errorLine: []int{3},
+		},
+		{
+			name: "invalidflag",
+			dockerfile: `from scratch
+env foo=bar
+copy --foo=bar / /
+env bar=baz`,
+			errorLine: []int{3},
+		},
+		{
+			name: "invalidcopyfrom",
+			dockerfile: `from scratch
+env foo=bar
+copy --from=invalid foo bar
+env bar=baz`,
+			errorLine: []int{3},
+		},
+		{
+			name: "invalidmultiline",
+			dockerfile: `from scratch
+run wh\
+at
+env bar=baz`,
+			errorLine: []int{2, 3},
+		},
+	}
+
+	for _, tc := range tcases {
+		t.Run(tc.name, func(t *testing.T) {
+			dir, err := tmpdir(
+				fstest.CreateFile("Dockerfile", []byte(tc.dockerfile), 0600),
+			)
+			require.NoError(t, err)
+			defer os.RemoveAll(dir)
+
+			c, err := client.New(context.TODO(), sb.Address())
+			require.NoError(t, err)
+			defer c.Close()
+
+			_, err = f.Solve(context.TODO(), c, client.SolveOpt{
+				LocalDirs: map[string]string{
+					builder.DefaultLocalNameDockerfile: dir,
+					builder.DefaultLocalNameContext: dir,
+				},
+			}, nil)
+			require.Error(t, err)
+
+			srcs := errdefs.Sources(err)
+			require.Equal(t, 1, len(srcs))
+
+			require.Equal(t, "Dockerfile", srcs[0].Info.Filename)
+			require.Equal(t, tc.dockerfile, string(srcs[0].Info.Data))
+			require.Equal(t, len(tc.errorLine), len(srcs[0].Ranges))
+			require.NotNil(t, srcs[0].Info.Definition)
+
+		next:
+			for _, l := range tc.errorLine {
+				for _, l2 := range srcs[0].Ranges {
+					if l2.Start.Line == int32(l) {
+						continue next
+					}
+				}
+				require.Fail(t, fmt.Sprintf("line %d not found", l))
+			}
+		})
+	}
+}
@@ -32,7 +32,7 @@ service LLBBridge {
 
 message Result {
     oneof result {
        // Deprecated non-array refs.
        string refDeprecated = 1;
        RefMapDeprecated refsDeprecated = 2;
 
@@ -67,7 +67,7 @@ message InputsRequest {
 }
 
 message InputsResponse {
    map<string, pb.Definition> Definitions = 1;
 }
 
 message ResolveImageConfigRequest {
@@ -87,9 +87,9 @@ message SolveRequest {
    string Frontend = 2;
    map<string, string> FrontendOpt = 3;
    // ImportCacheRefsDeprecated is deprecated in favor or the new Imports since BuildKit v0.4.0.
    // When ImportCacheRefsDeprecated is set, the solver appends
    // {.Type = "registry", .Attrs = {"ref": importCacheRef}}
    // for each of the ImportCacheRefs entry to CacheImports for compatibility. (planned to be removed)
    repeated string ImportCacheRefsDeprecated = 4;
    bool allowResultReturn = 5;
    bool allowResultArrayRef = 6;
@@ -1,11 +1,12 @@
-// Code generated by protoc-gen-go. DO NOT EDIT.
+// Code generated by protoc-gen-gogo. DO NOT EDIT.
 // source: errdefs.proto
 
 package errdefs
 
 import (
     fmt "fmt"
-    proto "github.com/golang/protobuf/proto"
+    proto "github.com/gogo/protobuf/proto"
+    pb "github.com/moby/buildkit/solver/pb"
     math "math"
 )
 
@@ -18,7 +19,7 @@ var _ = math.Inf
 // is compatible with the proto package it is being compiled against.
 // A compilation error at this line likely means your copy of the
 // proto package needs to be updated.
-const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
+const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
 
 type Vertex struct {
     Digest string `protobuf:"bytes,1,opt,name=digest,proto3" json:"digest,omitempty"`
@@ -33,7 +34,6 @@ func (*Vertex) ProtoMessage() {}
 func (*Vertex) Descriptor() ([]byte, []int) {
     return fileDescriptor_689dc58a5060aff5, []int{0}
 }
-
 func (m *Vertex) XXX_Unmarshal(b []byte) error {
     return xxx_messageInfo_Vertex.Unmarshal(m, b)
 }
@@ -60,13 +60,11 @@ func (m *Vertex) GetDigest() string {
 }
 
 type Source struct {
-    Locations []*Range `protobuf:"bytes,1,rep,name=locations,proto3" json:"locations,omitempty"`
-    Filename string `protobuf:"bytes,2,opt,name=filename,proto3" json:"filename,omitempty"`
-    Local string `protobuf:"bytes,3,opt,name=local,proto3" json:"local,omitempty"`
-    Data []byte `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"`
-    XXX_NoUnkeyedLiteral struct{} `json:"-"`
-    XXX_unrecognized []byte `json:"-"`
-    XXX_sizecache int32 `json:"-"`
+    Info *pb.SourceInfo `protobuf:"bytes,1,opt,name=info,proto3" json:"info,omitempty"`
+    Ranges []*pb.Range `protobuf:"bytes,2,rep,name=ranges,proto3" json:"ranges,omitempty"`
+    XXX_NoUnkeyedLiteral struct{} `json:"-"`
+    XXX_unrecognized []byte `json:"-"`
+    XXX_sizecache int32 `json:"-"`
 }
 
 func (m *Source) Reset() { *m = Source{} }
@@ -75,7 +73,6 @@ func (*Source) ProtoMessage() {}
 func (*Source) Descriptor() ([]byte, []int) {
     return fileDescriptor_689dc58a5060aff5, []int{1}
 }
-
 func (m *Source) XXX_Unmarshal(b []byte) error {
     return xxx_messageInfo_Source.Unmarshal(m, b)
 }
@@ -94,152 +91,39 @@ func (m *Source) XXX_DiscardUnknown() {
 
 var xxx_messageInfo_Source proto.InternalMessageInfo
 
-func (m *Source) GetLocations() []*Range {
+func (m *Source) GetInfo() *pb.SourceInfo {
     if m != nil {
-        return m.Locations
+        return m.Info
     }
     return nil
 }
 
-func (m *Source) GetFilename() string {
+func (m *Source) GetRanges() []*pb.Range {
     if m != nil {
-        return m.Filename
-    }
-    return ""
-}
-
-func (m *Source) GetLocal() string {
-    if m != nil {
-        return m.Local
-    }
-    return ""
-}
-
-func (m *Source) GetData() []byte {
-    if m != nil {
-        return m.Data
+        return m.Ranges
     }
     return nil
 }
 
-type Range struct {
-    Start *Position `protobuf:"bytes,1,opt,name=start,proto3" json:"start,omitempty"`
-    End *Position `protobuf:"bytes,2,opt,name=end,proto3" json:"end,omitempty"`
-    XXX_NoUnkeyedLiteral struct{} `json:"-"`
-    XXX_unrecognized []byte `json:"-"`
-    XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Range) Reset() { *m = Range{} }
-func (m *Range) String() string { return proto.CompactTextString(m) }
-func (*Range) ProtoMessage() {}
-func (*Range) Descriptor() ([]byte, []int) {
-    return fileDescriptor_689dc58a5060aff5, []int{2}
-}
-
-func (m *Range) XXX_Unmarshal(b []byte) error {
-    return xxx_messageInfo_Range.Unmarshal(m, b)
-}
-func (m *Range) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
-    return xxx_messageInfo_Range.Marshal(b, m, deterministic)
-}
-func (m *Range) XXX_Merge(src proto.Message) {
-    xxx_messageInfo_Range.Merge(m, src)
-}
-func (m *Range) XXX_Size() int {
-    return xxx_messageInfo_Range.Size(m)
-}
-func (m *Range) XXX_DiscardUnknown() {
-    xxx_messageInfo_Range.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Range proto.InternalMessageInfo
-
-func (m *Range) GetStart() *Position {
-    if m != nil {
-        return m.Start
-    }
-    return nil
-}
-
-func (m *Range) GetEnd() *Position {
-    if m != nil {
-        return m.End
-    }
-    return nil
-}
-
-type Position struct {
-    Line int32 `protobuf:"varint,1,opt,name=Line,proto3" json:"Line,omitempty"`
-    Character int32 `protobuf:"varint,2,opt,name=Character,proto3" json:"Character,omitempty"`
-    XXX_NoUnkeyedLiteral struct{} `json:"-"`
-    XXX_unrecognized []byte `json:"-"`
-    XXX_sizecache int32 `json:"-"`
-}
-
-func (m *Position) Reset() { *m = Position{} }
-func (m *Position) String() string { return proto.CompactTextString(m) }
-func (*Position) ProtoMessage() {}
-func (*Position) Descriptor() ([]byte, []int) {
-    return fileDescriptor_689dc58a5060aff5, []int{3}
-}
-
-func (m *Position) XXX_Unmarshal(b []byte) error {
-    return xxx_messageInfo_Position.Unmarshal(m, b)
-}
-func (m *Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
-    return xxx_messageInfo_Position.Marshal(b, m, deterministic)
-}
-func (m *Position) XXX_Merge(src proto.Message) {
-    xxx_messageInfo_Position.Merge(m, src)
-}
-func (m *Position) XXX_Size() int {
-    return xxx_messageInfo_Position.Size(m)
-}
-func (m *Position) XXX_DiscardUnknown() {
-    xxx_messageInfo_Position.DiscardUnknown(m)
-}
-
-var xxx_messageInfo_Position proto.InternalMessageInfo
-
-func (m *Position) GetLine() int32 {
-    if m != nil {
-        return m.Line
-    }
-    return 0
-}
-
-func (m *Position) GetCharacter() int32 {
-    if m != nil {
-        return m.Character
-    }
-    return 0
-}
-
 func init() {
     proto.RegisterType((*Vertex)(nil), "errdefs.Vertex")
     proto.RegisterType((*Source)(nil), "errdefs.Source")
-    proto.RegisterType((*Range)(nil), "errdefs.Range")
-    proto.RegisterType((*Position)(nil), "errdefs.Position")
 }
 
 func init() { proto.RegisterFile("errdefs.proto", fileDescriptor_689dc58a5060aff5) }
 
 var fileDescriptor_689dc58a5060aff5 = []byte{
-    // 237 bytes of a gzipped FileDescriptorProto
-    0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x90, 0xbb, 0x4b, 0x04, 0x31,
-    0x10, 0xc6, 0x59, 0xf7, 0xb2, 0xde, 0xce, 0xa9, 0xe0, 0x20, 0x12, 0xc4, 0x62, 0x59, 0x0b, 0xb7,
-    0x90, 0x2b, 0xce, 0xd6, 0xce, 0xd6, 0x42, 0x22, 0xda, 0xc7, 0xdd, 0xb9, 0x33, 0xb0, 0x26, 0x32,
-    0x89, 0x60, 0xe1, 0x1f, 0x2f, 0x99, 0xbd, 0x47, 0x63, 0x37, 0xdf, 0x83, 0xdf, 0x17, 0x02, 0xa7,
-    0xc4, 0x3c, 0xd0, 0x3a, 0x2e, 0xbf, 0x38, 0xa4, 0x80, 0xc7, 0x5b, 0xd9, 0x36, 0x50, 0xbd, 0x11,
-    0x27, 0xfa, 0xc1, 0x4b, 0xa8, 0x06, 0xb7, 0xa1, 0x98, 0x74, 0xd1, 0x14, 0x5d, 0x6d, 0xb6, 0xaa,
-    0xfd, 0x85, 0xea, 0x25, 0x7c, 0x73, 0x4f, 0x78, 0x07, 0xf5, 0x18, 0x7a, 0x9b, 0x5c, 0xf0, 0x51,
-    0x17, 0x4d, 0xd9, 0x2d, 0x56, 0x67, 0xcb, 0x1d, 0xd7, 0x58, 0xbf, 0x21, 0x73, 0x28, 0xe0, 0x15,
-    0xcc, 0xd7, 0x6e, 0x24, 0x6f, 0x3f, 0x49, 0x1f, 0x09, 0x71, 0xaf, 0xf1, 0x02, 0x54, 0x2e, 0x8e,
-    0xba, 0x94, 0x60, 0x12, 0x88, 0x30, 0x1b, 0x6c, 0xb2, 0x7a, 0xd6, 0x14, 0xdd, 0x89, 0x91, 0xbb,
-    0x7d, 0x05, 0x25, 0x64, 0xbc, 0x05, 0x15, 0x93, 0xe5, 0xe9, 0x75, 0x8b, 0xd5, 0xf9, 0x7e, 0xf8,
-    0x39, 0x44, 0x97, 0x17, 0xcd, 0x94, 0xe3, 0x0d, 0x94, 0xe4, 0x07, 0x99, 0xfc, 0xb7, 0x96, 0xd3,
-    0xf6, 0x01, 0xe6, 0x3b, 0x23, 0xcf, 0x3e, 0x39, 0x4f, 0x02, 0x56, 0x46, 0x6e, 0xbc, 0x86, 0xfa,
-    0xf1, 0xc3, 0xb2, 0xed, 0x13, 0xb1, 0xa0, 0x94, 0x39, 0x18, 0xef, 0x95, 0x7c, 0xe2, 0xfd, 0x5f,
-    0x00, 0x00, 0x00, 0xff, 0xff, 0x49, 0x7f, 0xe8, 0xa9, 0x55, 0x01, 0x00, 0x00,
+    // 177 bytes of a gzipped FileDescriptorProto
+    0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x2c, 0xcd, 0xc1, 0x8a, 0x83, 0x30,
+    0x10, 0x80, 0x61, 0xdc, 0x5d, 0xb2, 0x18, 0xd9, 0x3d, 0xe4, 0x50, 0xa4, 0x27, 0xeb, 0xc9, 0x43,
+    0x49, 0xc0, 0x3e, 0x45, 0x4f, 0x85, 0x14, 0x7a, 0x6f, 0x74, 0xb4, 0xa1, 0xea, 0x84, 0x49, 0x2c,
+    0xed, 0xdb, 0x17, 0x6d, 0x8e, 0xff, 0x7c, 0x33, 0x0c, 0xff, 0x03, 0xa2, 0x16, 0x3a, 0x2f, 0x1d,
+    0x61, 0x40, 0xf1, 0x1b, 0x73, 0xbb, 0xef, 0x6d, 0xb8, 0xcd, 0x46, 0x36, 0x38, 0xaa, 0x11, 0xcd,
+    0x4b, 0x99, 0xd9, 0x0e, 0xed, 0xdd, 0x06, 0xe5, 0x71, 0x78, 0x00, 0x29, 0x67, 0x14, 0xba, 0x78,
+    0x56, 0x16, 0x9c, 0x5d, 0x80, 0x02, 0x3c, 0xc5, 0x86, 0xb3, 0xd6, 0xf6, 0xe0, 0x43, 0x9e, 0x14,
+    0x49, 0x95, 0xea, 0x58, 0xe5, 0x89, 0xb3, 0x33, 0xce, 0xd4, 0x80, 0x28, 0xf9, 0x8f, 0x9d, 0x3a,
+    0x5c, 0x3d, 0xab, 0xff, 0xa5, 0x33, 0xf2, 0x23, 0xc7, 0xa9, 0x43, 0xbd, 0x9a, 0xd8, 0x71, 0x46,
+    0xd7, 0xa9, 0x07, 0x9f, 0x7f, 0x15, 0xdf, 0x55, 0x56, 0xa7, 0xcb, 0x96, 0x5e, 0x26, 0x3a, 0x82,
+    0x61, 0xeb, 0xe7, 0xc3, 0x3b, 0x00, 0x00, 0xff, 0xff, 0x93, 0xb5, 0x8b, 0x2a, 0xc1, 0x00, 0x00,
+    0x00,
 }
@@ -2,23 +2,13 @@ syntax = "proto3";
 
 package errdefs;
 
+import "github.com/moby/buildkit/solver/pb/ops.proto";
+
 message Vertex {
    string digest = 1;
 }
 
 message Source {
-   repeated Range locations = 1;
-   string filename = 2;
-   string local = 3;
-   bytes data = 4;
-}
-
-message Range {
-   Position start = 1;
-   Position end = 2;
-}
-
-message Position {
-   int32 Line = 1;
-   int32 Character = 2;
+   pb.SourceInfo info = 1;
+   repeated pb.Range ranges = 2;
 }
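With errdefs.Source now built from pb.SourceInfo and pb.Range, attaching a source to an error looks roughly like the sketch below. This is a hedged illustration assembled from the WithSource call and struct fields visible elsewhere in this diff; err and dockerfileBytes are placeholders, not names from the commit.

err = errdefs.WithSource(err, errdefs.Source{
    Info:   &pb.SourceInfo{Filename: "Dockerfile", Data: dockerfileBytes},
    Ranges: []*pb.Range{{Start: pb.Position{Line: 3}, End: pb.Position{Line: 3}}},
})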
@@ -1,3 +1,3 @@
 package errdefs
 
-//go:generate protoc -I=. -I=../../vendor/ --go_out=. errdefs.proto
+//go:generate protoc -I=. -I=../../vendor/ -I=../../../../../ --gogo_out=. errdefs.proto
@@ -5,6 +5,7 @@ import (
     "io"
     "strings"
 
+    pb "github.com/moby/buildkit/solver/pb"
     "github.com/moby/buildkit/util/grpcerrors"
     "github.com/pkg/errors"
 )
@@ -44,9 +45,13 @@ func (s *Source) WrapError(err error) error {
 }
 
 func (s *Source) Print(w io.Writer) error {
-    lines := strings.Split(string(s.Data), "\n")
+    si := s.Info
+    if si == nil {
+        return nil
+    }
+    lines := strings.Split(string(si.Data), "\n")
 
-    start, end, ok := getStartEndLine(s.Locations)
+    start, end, ok := getStartEndLine(s.Ranges)
     if !ok {
         return nil
     }
@@ -79,10 +84,10 @@ func (s *Source) Print(w io.Writer) error {
         p++
     }
 
-    fmt.Fprintf(w, "%s:%d\n--------------------\n", s.Filename, prepadStart)
+    fmt.Fprintf(w, "%s:%d\n--------------------\n", si.Filename, prepadStart)
     for i := start; i <= end; i++ {
         pfx := " "
-        if containsLine(s.Locations, i) {
+        if containsLine(s.Ranges, i) {
             pfx = ">>>"
         }
         fmt.Fprintf(w, " %3d | %s %s\n", i, pfx, lines[i-1])
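A short usage sketch for the updated Print: iterate the sources attached to a failed solve and render each highlighted region. Sources and Print are the functions shown in these hunks; err, os.Stderr and the log call are illustrative assumptions.

for _, src := range errdefs.Sources(err) {
    // Print writes "<filename>:<line>" plus the marked lines (">>>" prefix) to w.
    if perr := src.Print(os.Stderr); perr != nil {
        log.Printf("failed to print source: %v", perr)
    }
}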
@@ -91,42 +96,33 @@ func (s *Source) Print(w io.Writer) error {
     return nil
 }
 
-func containsLine(rr []*Range, l int) bool {
+func containsLine(rr []*pb.Range, l int) bool {
     for _, r := range rr {
-        var s, e int
-        if r.Start == nil {
-            continue
+        e := r.End.Line
+        if e < r.Start.Line {
+            e = r.Start.Line
         }
-        s = int(r.Start.Line)
-        if r.End != nil {
-            e = int(r.End.Line)
-        }
-        if e < s {
-            e = s
-        }
-        if s <= l && e >= l {
+        if r.Start.Line <= int32(l) && e >= int32(l) {
             return true
         }
     }
     return false
 }
 
-func getStartEndLine(rr []*Range) (start int, end int, ok bool) {
+func getStartEndLine(rr []*pb.Range) (start int, end int, ok bool) {
+    first := true
     for _, r := range rr {
-        if r.Start != nil {
-            if !ok || start > int(r.Start.Line) {
-                start = int(r.Start.Line)
-            }
-            if end < start {
-                end = start
-            }
-            ok = true
+        e := r.End.Line
+        if e < r.Start.Line {
+            e = r.Start.Line
         }
-        if r.End != nil {
-            if end < int(r.End.Line) {
-                end = int(r.End.Line)
-            }
+        if first || int(r.Start.Line) < start {
+            start = int(r.Start.Line)
         }
+        if int(e) > end {
+            end = int(e)
+        }
+        first = false
     }
-    return
+    return start, end, !first
 }
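A worked example of the rewritten helpers, assuming it runs inside the errdefs package (both functions are unexported): for the two ranges below, getStartEndLine spans lines 7 through 9, while containsLine reports that line 8 is not marked.

rr := []*pb.Range{
    {Start: pb.Position{Line: 9}, End: pb.Position{Line: 9}},
    {Start: pb.Position{Line: 7}, End: pb.Position{Line: 7}},
}
start, end, ok := getStartEndLine(rr) // start == 7, end == 9, ok == true
marked := containsLine(rr, 8)         // false: line 8 sits between the two ranges
_, _, _, _ = start, end, ok, marked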
@@ -1,10 +1,16 @@
 package errdefs
 
 import (
+    proto "github.com/golang/protobuf/proto"
     "github.com/moby/buildkit/util/grpcerrors"
     digest "github.com/opencontainers/go-digest"
 )
 
+func init() {
+    proto.RegisterType((*Vertex)(nil), "errdefs.Vertex")
+    proto.RegisterType((*Source)(nil), "errdefs.Source")
+}
+
 type VertexError struct {
     Vertex
     error
@@ -18,6 +18,7 @@ import (
     gw "github.com/moby/buildkit/frontend/gateway/client"
     "github.com/moby/buildkit/session"
     "github.com/moby/buildkit/solver"
+    "github.com/moby/buildkit/solver/errdefs"
     "github.com/moby/buildkit/solver/pb"
     "github.com/moby/buildkit/util/flightcontrol"
     "github.com/moby/buildkit/util/tracing"
@@ -182,7 +183,31 @@ func (rp *resultProxy) Release(ctx context.Context) error {
     return nil
 }
 
-func (rp *resultProxy) Result(ctx context.Context) (solver.CachedResult, error) {
+func (rp *resultProxy) wrapError(err error) error {
+    if err == nil {
+        return nil
+    }
+    var ve *errdefs.VertexError
+    if errors.As(err, &ve) {
+        if rp.def.Source != nil {
+            locs, ok := rp.def.Source.Locations[string(ve.Digest)]
+            if ok {
+                for _, loc := range locs.Locations {
+                    err = errdefs.WithSource(err, errdefs.Source{
+                        Info:   rp.def.Source.Infos[loc.SourceIndex],
+                        Ranges: loc.Ranges,
+                    })
+                }
+            }
+        }
+    }
+    return err
+}
+
+func (rp *resultProxy) Result(ctx context.Context) (res solver.CachedResult, err error) {
+    defer func() {
+        err = rp.wrapError(err)
+    }()
     r, err := rp.g.Do(ctx, "result", func(ctx context.Context) (interface{}, error) {
         rp.mu.Lock()
         if rp.released {
solver/pb/ops.pb.go (1809 lines changed): file diff suppressed because it is too large.
@@ -177,6 +177,42 @@ message OpMetadata {
    map<string, bool> caps = 5 [(gogoproto.castkey) = "github.com/moby/buildkit/util/apicaps.CapID", (gogoproto.nullable) = false];
 }
 
+// Source is a source mapping description for a file
+message Source {
+   map<string, Locations> locations = 1;
+   repeated SourceInfo infos = 2;
+}
+
+// Locations is a list of ranges with a index to its source map.
+message Locations {
+   repeated Location locations = 1;
+}
+
+// Source info contains the shared metadata of a source mapping
+message SourceInfo {
+   string filename = 1;
+   bytes data = 2;
+   Definition definition = 3;
+}
+
+// Location defines list of areas in to source file
+message Location {
+   int32 sourceIndex = 1;
+   repeated Range ranges = 2;
+}
+
+// Range is an area in the source file
+message Range {
+   Position start = 1 [(gogoproto.nullable) = false];
+   Position end = 2 [(gogoproto.nullable) = false];
+}
+
+// Position is single location in a source file
+message Position {
+   int32 Line = 1;
+   int32 Character = 2;
+}
+
 message ExportCache {
    bool Value = 1;
 }
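A hedged sketch of how these messages fit together: Source.Locations keys on an LLB vertex digest, each Location.SourceIndex points into Source.Infos, and the Ranges mark lines in that file. Field names follow the generated Go types used by wrapError earlier in this diff; vertexDigest and dockerfileBytes are placeholders, not names from the commit.

src := &pb.Source{
    Infos: []*pb.SourceInfo{{Filename: "Dockerfile", Data: dockerfileBytes}},
    Locations: map[string]*pb.Locations{
        vertexDigest: {Locations: []*pb.Location{{
            SourceIndex: 0, // index into Infos above
            Ranges:      []*pb.Range{{Start: pb.Position{Line: 3}, End: pb.Position{Line: 3}}},
        }}},
    },
}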
@@ -200,6 +236,8 @@ message Definition {
    // metadata contains metadata for the each of the Op messages.
    // A key must be an LLB op digest string. Currently, empty string is not expected as a key, but it may change in the future.
    map<string, OpMetadata> metadata = 2 [(gogoproto.castkey) = "github.com/opencontainers/go-digest.Digest", (gogoproto.nullable) = false];
+   // Source contains the source mapping information for the vertexes in the definition
+   Source Source = 3;
 }
 
 message HostIP {
@@ -302,4 +340,4 @@ message UserOpt {
 message NamedUserOpt {
    string name = 1;
    int64 input = 2 [(gogoproto.customtype) = "InputIndex", (gogoproto.nullable) = false];
 }
@@ -1,8 +1,10 @@
 package grpcerrors
 
 import (
+    gogotypes "github.com/gogo/protobuf/types"
     "github.com/golang/protobuf/proto"
     "github.com/golang/protobuf/ptypes"
+    "github.com/golang/protobuf/ptypes/any"
     "github.com/moby/buildkit/util/stack"
     spb "google.golang.org/genproto/googleapis/rpc/status"
     "google.golang.org/grpc/codes"
@@ -121,13 +123,20 @@ func FromGRPC(err error) error {
 
     // details that we don't understand are copied as proto
     for _, d := range pb.Details {
+        var m interface{}
         detail := &ptypes.DynamicAny{}
         if err := ptypes.UnmarshalAny(d, detail); err != nil {
-            n.Details = append(n.Details, d)
-            continue
+            detail := &gogotypes.DynamicAny{}
+            if err := gogotypes.UnmarshalAny(gogoAny(d), detail); err != nil {
+                n.Details = append(n.Details, d)
+                continue
+            }
+            m = detail.Message
+        } else {
+            m = detail.Message
         }
 
-        switch v := detail.Message.(type) {
+        switch v := m.(type) {
         case *stack.Stack:
             stacks = append(stacks, v)
         case TypedErrorProto:
@@ -170,3 +179,10 @@ func each(err error, fn func(error)) {
         each(wrapped.Unwrap(), fn)
     }
 }
+
+func gogoAny(in *any.Any) *gogotypes.Any {
+    return &gogotypes.Any{
+        TypeUrl: in.TypeUrl,
+        Value:   in.Value,
+    }
+}