Allow multiple source locations to be added as a constraint

Signed-off-by: Edgar Lee <edgarl@netflix.com>
v0.8
Edgar Lee 2020-05-20 16:44:57 -07:00
parent 7a90a36b46
commit 59fa9fc9a0
11 changed files with 63 additions and 46 deletions
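
Before this change a vertex could carry only one source location: Constraints.Source held a single *SourceLocation, so the last SourceMap.Location option applied to an operation won. With this commit the field becomes SourceLocations []*SourceLocation and each Location call appends to it, letting one operation report positions in several source files. A minimal usage sketch mirroring the updated test below (the package name, helper name, file names, and data are illustrative only):

package example

import (
	"context"

	"github.com/moby/buildkit/client/llb"
	"github.com/moby/buildkit/solver/pb"
)

// buildWithTwoLocations marshals an ExecOp that carries locations from two
// different source maps; each Location option now appends to
// Constraints.SourceLocations instead of overwriting a single value.
func buildWithTwoLocations() (*llb.Definition, error) {
	sm1 := llb.NewSourceMap(nil, "foo", []byte("data1"))
	sm2 := llb.NewSourceMap(nil, "bar", []byte("data2"))

	st := llb.Scratch().Run(
		llb.Shlex("not-exist"),
		sm1.Location([]*pb.Range{{Start: pb.Position{Line: 7}}}),
		sm2.Location([]*pb.Range{{Start: pb.Position{Line: 8}}}),
	)
	return st.Marshal(context.TODO())
}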

View File

@@ -2690,9 +2690,14 @@ func testSourceMap(t *testing.T, sb integration.Sandbox) {
require.NoError(t, err)
defer c.Close()
-sm := llb.NewSourceMap(nil, "foo", []byte("data"))
+sm1 := llb.NewSourceMap(nil, "foo", []byte("data1"))
+sm2 := llb.NewSourceMap(nil, "bar", []byte("data2"))
-st := llb.Scratch().Run(llb.Shlex("not-exist"), sm.Location([]*pb.Range{{Start: pb.Position{Line: 7}}}))
+st := llb.Scratch().Run(
+llb.Shlex("not-exist"),
+sm1.Location([]*pb.Range{{Start: pb.Position{Line: 7}}}),
+sm2.Location([]*pb.Range{{Start: pb.Position{Line: 8}}}),
+)
def, err := st.Marshal(context.TODO())
require.NoError(t, err)
@@ -2701,15 +2706,27 @@ func testSourceMap(t *testing.T, sb integration.Sandbox) {
require.Error(t, err)
srcs := errdefs.Sources(err)
-require.Equal(t, 1, len(srcs))
+require.Equal(t, 2, len(srcs))
-require.Equal(t, "foo", srcs[0].Info.Filename)
-require.Equal(t, []byte("data"), srcs[0].Info.Data)
+// Source errors are wrapped in the order provided as llb.ConstraintOpts, so
+// when they are unwrapped, the first unwrapped error is the last location
+// provided.
+require.Equal(t, "bar", srcs[0].Info.Filename)
+require.Equal(t, []byte("data2"), srcs[0].Info.Data)
require.Nil(t, srcs[0].Info.Definition)
-require.Equal(t, 1, len(srcs[0].Locations))
-require.Equal(t, int32(7), srcs[0].Locations[0].Start.Line)
-require.Equal(t, int32(0), srcs[0].Locations[0].Start.Character)
+require.Equal(t, 1, len(srcs[0].Ranges))
+require.Equal(t, int32(8), srcs[0].Ranges[0].Start.Line)
+require.Equal(t, int32(0), srcs[0].Ranges[0].Start.Character)
+require.Equal(t, "foo", srcs[1].Info.Filename)
+require.Equal(t, []byte("data1"), srcs[1].Info.Data)
+require.Nil(t, srcs[1].Info.Definition)
+require.Equal(t, 1, len(srcs[1].Ranges))
+require.Equal(t, int32(7), srcs[1].Ranges[0].Start.Line)
+require.Equal(t, int32(0), srcs[1].Ranges[0].Start.Character)
}
func testSourceMapFromRef(t *testing.T, sb integration.Sandbox) {
@@ -2774,9 +2791,9 @@ func testSourceMapFromRef(t *testing.T, sb integration.Sandbox) {
require.Equal(t, []byte("bardata"), srcs[0].Info.Data)
require.NotNil(t, srcs[0].Info.Definition)
-require.Equal(t, 1, len(srcs[0].Locations))
-require.Equal(t, int32(3), srcs[0].Locations[0].Start.Line)
-require.Equal(t, int32(1), srcs[0].Locations[0].Start.Character)
+require.Equal(t, 1, len(srcs[0].Ranges))
+require.Equal(t, int32(3), srcs[0].Ranges[0].Start.Line)
+require.Equal(t, int32(1), srcs[0].Ranges[0].Start.Character)
}
func testProxyEnv(t *testing.T, sb integration.Sandbox) {
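
As the comment added to testSourceMap notes, the per-vertex source errors are wrapped in the order their constraint options were supplied, so unwrapping with errdefs.Sources yields the last-attached location first ("bar" at line 8 before "foo" at line 7). A short consumer-side sketch under that assumption; printSources and solveErr are illustrative names, not part of this change:

package example

import (
	"fmt"

	"github.com/moby/buildkit/solver/errdefs"
)

// printSources walks the sources attached to a failed solve, much as the
// test above does; with two locations attached, the last one provided is
// printed first.
func printSources(solveErr error) {
	for _, src := range errdefs.Sources(solveErr) {
		if src.Info == nil || len(src.Ranges) == 0 {
			continue
		}
		fmt.Printf("%s: line %d\n", src.Info.Filename, src.Ranges[0].Start.Line)
	}
}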

View File

@@ -85,7 +85,7 @@ type errVertex struct {
func (v *errVertex) Validate(context.Context) error {
return v.err
}
-func (v *errVertex) Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, *SourceLocation, error) {
+func (v *errVertex) Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
return "", nil, nil, nil, v.err
}
func (v *errVertex) Output() Output {

View File

@@ -20,7 +20,7 @@ type DefinitionOp struct {
ops map[digest.Digest]*pb.Op
defs map[digest.Digest][]byte
metas map[digest.Digest]pb.OpMetadata
-sources map[digest.Digest]*SourceLocation
+sources map[digest.Digest][]*SourceLocation
platforms map[digest.Digest]*specs.Platform
dgst digest.Digest
index pb.OutputIndex
@@ -50,7 +50,7 @@ func NewDefinitionOp(def *pb.Definition) (*DefinitionOp, error) {
platforms[dgst] = platform
}
-srcs := map[digest.Digest]*SourceLocation{}
+srcs := map[digest.Digest][]*SourceLocation{}
if def.Source != nil {
sourceMaps := make([]*SourceMap, len(def.Source.Infos))
@@ -74,10 +74,10 @@ func NewDefinitionOp(def *pb.Definition) (*DefinitionOp, error) {
return nil, errors.Errorf("failed to find source map with index %d", loc.SourceIndex)
}
-srcs[digest.Digest(dgst)] = &SourceLocation{
+srcs[digest.Digest(dgst)] = append(srcs[digest.Digest(dgst)], &SourceLocation{
SourceMap: sourceMaps[int(loc.SourceIndex)],
Ranges: loc.Ranges,
-}
+})
}
}
}
@@ -144,7 +144,7 @@ func (d *DefinitionOp) Validate(context.Context) error {
return nil
}
-func (d *DefinitionOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, *SourceLocation, error) {
+func (d *DefinitionOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
if d.dgst == "" {
return "", nil, nil, nil, errors.Errorf("cannot marshal empty definition op")
}

View File

@@ -124,7 +124,7 @@ func (e *ExecOp) Validate(ctx context.Context) error {
return nil
}
-func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, *SourceLocation, error) {
+func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
if e.Cached(c) {
return e.Load()
}
@@ -358,7 +358,7 @@ func (e *ExecOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
if err != nil {
return "", nil, nil, nil, err
}
-e.Store(dt, md, e.constraints.Source, c)
+e.Store(dt, md, e.constraints.SourceLocations, c)
return e.Load()
}

View File

@@ -649,7 +649,7 @@ func (ms *marshalState) add(fa *FileAction, c *Constraints) (*fileActionState, e
return st, nil
}
-func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, *SourceLocation, error) {
+func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
if f.Cached(c) {
return f.Load()
}
@@ -704,7 +704,7 @@ func (f *FileOp) Marshal(ctx context.Context, c *Constraints) (digest.Digest, []
if err != nil {
return "", nil, nil, nil, err
}
-f.Store(dt, md, f.constraints.Source, c)
+f.Store(dt, md, f.constraints.SourceLocations, c)
return f.Load()
}

View File

@@ -48,7 +48,7 @@ func (b *build) Validate(context.Context) error {
return nil
}
-func (b *build) Marshal(ctx context.Context, c *llb.Constraints) (digest.Digest, []byte, *pb.OpMetadata, *llb.SourceLocation, error) {
+func (b *build) Marshal(ctx context.Context, c *llb.Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*llb.SourceLocation, error) {
if b.Cached(c) {
return b.Load()
}
@@ -85,7 +85,7 @@ func (b *build) Marshal(ctx context.Context, c *llb.Constraints) (digest.Digest,
if err != nil {
return "", nil, nil, nil, err
}
-b.Store(dt, md, b.constraints.Source, c)
+b.Store(dt, md, b.constraints.SourceLocations, c)
return b.Load()
}

View File

@@ -98,20 +98,20 @@ type MarshalCache struct {
digest digest.Digest
dt []byte
md *pb.OpMetadata
-src *SourceLocation
+srcs []*SourceLocation
constraints *Constraints
}
func (mc *MarshalCache) Cached(c *Constraints) bool {
return mc.dt != nil && mc.constraints == c
}
-func (mc *MarshalCache) Load() (digest.Digest, []byte, *pb.OpMetadata, *SourceLocation, error) {
-return mc.digest, mc.dt, mc.md, mc.src, nil
+func (mc *MarshalCache) Load() (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
+return mc.digest, mc.dt, mc.md, mc.srcs, nil
}
-func (mc *MarshalCache) Store(dt []byte, md *pb.OpMetadata, src *SourceLocation, c *Constraints) {
+func (mc *MarshalCache) Store(dt []byte, md *pb.OpMetadata, srcs []*SourceLocation, c *Constraints) {
mc.digest = digest.FromBytes(dt)
mc.dt = dt
mc.md = md
mc.constraints = c
-mc.src = src
+mc.srcs = srcs
}

View File

@@ -44,7 +44,7 @@ func (s *SourceOp) Validate(ctx context.Context) error {
return nil
}
-func (s *SourceOp) Marshal(ctx context.Context, constraints *Constraints) (digest.Digest, []byte, *pb.OpMetadata, *SourceLocation, error) {
+func (s *SourceOp) Marshal(ctx context.Context, constraints *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error) {
if s.Cached(constraints) {
return s.Load()
}
@@ -77,7 +77,7 @@ func (s *SourceOp) Marshal(ctx context.Context, constraints *Constraints) (diges
return "", nil, nil, nil, err
}
-s.Store(dt, md, s.constraints.Source, constraints)
+s.Store(dt, md, s.constraints.SourceLocations, constraints)
return s.Load()
}

View File

@@ -27,10 +27,10 @@ func (s *SourceMap) Location(r []*pb.Range) ConstraintsOpt {
if s == nil {
return
}
-c.Source = &SourceLocation{
+c.SourceLocations = append(c.SourceLocations, &SourceLocation{
SourceMap: s,
Ranges: r,
-}
+})
})
}
@@ -51,14 +51,16 @@ func newSourceMapCollector() *sourceMapCollector {
}
}
-func (smc *sourceMapCollector) Add(dgst digest.Digest, l *SourceLocation) {
-idx, ok := smc.index[l.SourceMap]
-if !ok {
-idx = len(smc.maps)
-smc.maps = append(smc.maps, l.SourceMap)
-smc.ranges = append(smc.ranges, map[digest.Digest][]*pb.Range{})
+func (smc *sourceMapCollector) Add(dgst digest.Digest, ls []*SourceLocation) {
+for _, l := range ls {
+idx, ok := smc.index[l.SourceMap]
+if !ok {
+idx = len(smc.maps)
+smc.maps = append(smc.maps, l.SourceMap)
+smc.ranges = append(smc.ranges, map[digest.Digest][]*pb.Range{})
+}
+smc.ranges[idx][dgst] = l.Ranges
}
-smc.ranges[idx][dgst] = l.Ranges
}
func (smc *sourceMapCollector) Marshal(ctx context.Context, co ...ConstraintsOpt) (*pb.Source, error) {

View File

@@ -24,7 +24,7 @@ type Output interface {
type Vertex interface {
Validate(context.Context) error
-Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, *SourceLocation, error)
+Marshal(context.Context, *Constraints) (digest.Digest, []byte, *pb.OpMetadata, []*SourceLocation, error)
Output() Output
Inputs() []Output
}
@@ -182,7 +182,7 @@ func marshal(ctx context.Context, v Vertex, def *Definition, s *sourceMapCollect
}
}
-dgst, dt, opMeta, sl, err := v.Marshal(ctx, c)
+dgst, dt, opMeta, sls, err := v.Marshal(ctx, c)
if err != nil {
return def, err
}
@@ -193,9 +193,7 @@ func marshal(ctx context.Context, v Vertex, def *Definition, s *sourceMapCollect
if _, ok := cache[dgst]; ok {
return def, nil
}
-if sl != nil {
-s.Add(dgst, sl)
-}
+s.Add(dgst, sls)
def.Def = append(def.Def, dt)
cache[dgst] = struct{}{}
return def, nil
@@ -524,7 +522,7 @@ type Constraints struct {
Metadata pb.OpMetadata
LocalUniqueID string
Caps *apicaps.CapSet
-Source *SourceLocation
+SourceLocations []*SourceLocation
}
func Platform(p specs.Platform) ConstraintsOpt {

View File

@@ -100,7 +100,7 @@ env bar=baz`,
require.Equal(t, "Dockerfile", srcs[0].Info.Filename)
require.Equal(t, tc.dockerfile, string(srcs[0].Info.Data))
-require.Equal(t, len(tc.errorLine), len(srcs[0].Locations))
+require.Equal(t, len(tc.errorLine), len(srcs[0].Ranges))
require.NotNil(t, srcs[0].Info.Definition)
next: