buildinfo: refactor

Signed-off-by: CrazyMax <crazy-max@users.noreply.github.com>
master
CrazyMax 2021-11-20 19:10:45 +01:00
parent 58bac77c86
commit b4e37a867f
No known key found for this signature in database
GPG Key ID: 3248E46B6BB8C7F7
14 changed files with 307 additions and 169 deletions

View File

@ -1,7 +1,6 @@
package exptypes package exptypes
import ( import (
srctypes "github.com/moby/buildkit/source/types"
digest "github.com/opencontainers/go-digest" digest "github.com/opencontainers/go-digest"
ocispecs "github.com/opencontainers/image-spec/specs-go/v1" ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
) )
@ -27,29 +26,3 @@ type Platform struct {
ID string ID string
Platform ocispecs.Platform Platform ocispecs.Platform
} }
// BuildInfo defines build dependencies that will be added to image config as
// moby.buildkit.buildinfo.v1 key and returned in solver ExporterResponse as
// ExporterBuildInfo key.
type BuildInfo struct {
// Type defines the BuildInfoType source type (docker-image, git, http).
Type BuildInfoType `json:"type,omitempty"`
// Ref is the reference of the source.
Ref string `json:"ref,omitempty"`
// Alias is a special field used to match with the actual source ref
// because frontend might have already transformed a string user typed
// before generating LLB.
Alias string `json:"alias,omitempty"`
// Pin is the source digest.
Pin string `json:"pin,omitempty"`
}
// BuildInfoType contains source type.
type BuildInfoType string
// List of source types.
const (
BuildInfoTypeDockerImage BuildInfoType = srctypes.DockerImageScheme
BuildInfoTypeGit BuildInfoType = srctypes.GitScheme
BuildInfoTypeHTTP BuildInfoType = srctypes.HTTPScheme
)

View File

@ -21,6 +21,7 @@ import (
"github.com/moby/buildkit/solver" "github.com/moby/buildkit/solver"
"github.com/moby/buildkit/util/bklog" "github.com/moby/buildkit/util/bklog"
"github.com/moby/buildkit/util/buildinfo" "github.com/moby/buildkit/util/buildinfo"
binfotypes "github.com/moby/buildkit/util/buildinfo/types"
"github.com/moby/buildkit/util/compression" "github.com/moby/buildkit/util/compression"
"github.com/moby/buildkit/util/progress" "github.com/moby/buildkit/util/progress"
"github.com/moby/buildkit/util/system" "github.com/moby/buildkit/util/system"
@ -416,9 +417,9 @@ func patchImageConfig(dt []byte, descs []ocispecs.Descriptor, history []ocispecs
if err != nil { if err != nil {
return nil, err return nil, err
} }
m[buildinfo.ImageConfigField] = dt m[binfotypes.ImageConfigField] = dt
} else if _, ok := m[buildinfo.ImageConfigField]; ok { } else if _, ok := m[binfotypes.ImageConfigField]; ok {
delete(m, buildinfo.ImageConfigField) delete(m, binfotypes.ImageConfigField)
} }
dt, err = json.Marshal(m) dt, err = json.Marshal(m)

View File

@ -19,12 +19,12 @@ import (
"github.com/docker/go-connections/nat" "github.com/docker/go-connections/nat"
"github.com/moby/buildkit/client/llb" "github.com/moby/buildkit/client/llb"
"github.com/moby/buildkit/client/llb/imagemetaresolver" "github.com/moby/buildkit/client/llb/imagemetaresolver"
"github.com/moby/buildkit/exporter/containerimage/exptypes"
"github.com/moby/buildkit/frontend/dockerfile/instructions" "github.com/moby/buildkit/frontend/dockerfile/instructions"
"github.com/moby/buildkit/frontend/dockerfile/parser" "github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/moby/buildkit/frontend/dockerfile/shell" "github.com/moby/buildkit/frontend/dockerfile/shell"
"github.com/moby/buildkit/solver/pb" "github.com/moby/buildkit/solver/pb"
"github.com/moby/buildkit/util/apicaps" "github.com/moby/buildkit/util/apicaps"
binfotypes "github.com/moby/buildkit/util/buildinfo/types"
"github.com/moby/buildkit/util/suggest" "github.com/moby/buildkit/util/suggest"
"github.com/moby/buildkit/util/system" "github.com/moby/buildkit/util/system"
"github.com/moby/sys/signal" "github.com/moby/sys/signal"
@ -346,10 +346,10 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,
} }
} }
if !isScratch { if !isScratch {
// image not scratch set original image name as ref // if image not scratch set original image name as ref
// and actual reference as alias in BuildInfo // and actual reference as alias in BuildSource
d.buildInfo = &exptypes.BuildInfo{ d.buildSource = &binfotypes.Source{
Type: exptypes.BuildInfoTypeDockerImage, Type: binfotypes.SourceTypeDockerImage,
Ref: origName, Ref: origName,
Alias: ref.String(), Alias: ref.String(),
Pin: dgst.String(), Pin: dgst.String(),
@ -381,7 +381,7 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,
buildContext := &mutableOutput{} buildContext := &mutableOutput{}
ctxPaths := map[string]struct{}{} ctxPaths := map[string]struct{}{}
var buildInfos []exptypes.BuildInfo var buildSources []binfotypes.Source
for _, d := range allDispatchStates.states { for _, d := range allDispatchStates.states {
if !isReachable(target, d) { if !isReachable(target, d) {
@ -389,8 +389,8 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,
} }
// collect build dependencies // collect build dependencies
if d.buildInfo != nil { if d.buildSource != nil {
buildInfos = append(buildInfos, *d.buildInfo) buildSources = append(buildSources, *d.buildSource)
} }
if d.base != nil { if d.base != nil {
@ -469,11 +469,13 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,
// set target with gathered build dependencies // set target with gathered build dependencies
target.image.BuildInfo = []byte{} target.image.BuildInfo = []byte{}
if len(buildInfos) > 0 { if len(buildSources) > 0 {
sort.Slice(buildInfos, func(i, j int) bool { sort.Slice(buildSources, func(i, j int) bool {
return buildInfos[i].Ref < buildInfos[j].Ref return buildSources[i].Ref < buildSources[j].Ref
})
target.image.BuildInfo, err = json.Marshal(&binfotypes.BuildInfo{
Sources: buildSources,
}) })
target.image.BuildInfo, err = json.Marshal(buildInfos)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -685,7 +687,7 @@ type dispatchState struct {
cmdIndex int cmdIndex int
cmdTotal int cmdTotal int
prefixPlatform bool prefixPlatform bool
buildInfo *exptypes.BuildInfo buildSource *binfotypes.Source
} }
type dispatchStates struct { type dispatchStates struct {

View File

@ -5,10 +5,10 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/moby/buildkit/exporter/containerimage/exptypes"
"github.com/moby/buildkit/frontend/dockerfile/instructions" "github.com/moby/buildkit/frontend/dockerfile/instructions"
"github.com/moby/buildkit/frontend/dockerfile/shell" "github.com/moby/buildkit/frontend/dockerfile/shell"
"github.com/moby/buildkit/util/appcontext" "github.com/moby/buildkit/util/appcontext"
binfotypes "github.com/moby/buildkit/util/buildinfo/types"
ocispecs "github.com/opencontainers/image-spec/specs-go/v1" ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -202,11 +202,13 @@ ADD https://raw.githubusercontent.com/moby/buildkit/master/README.md /
` `
_, image, err := Dockerfile2LLB(appcontext.Context(), []byte(df), ConvertOpt{ _, image, err := Dockerfile2LLB(appcontext.Context(), []byte(df), ConvertOpt{
TargetPlatform: &ocispecs.Platform{ TargetPlatform: &ocispecs.Platform{
Architecture: "amd64", OS: "linux", Architecture: "amd64",
OS: "linux",
}, },
BuildPlatforms: []ocispecs.Platform{ BuildPlatforms: []ocispecs.Platform{
{ {
Architecture: "amd64", OS: "linux", Architecture: "amd64",
OS: "linux",
}, },
}, },
}) })
@ -214,13 +216,15 @@ ADD https://raw.githubusercontent.com/moby/buildkit/master/README.md /
require.NoError(t, err) require.NoError(t, err)
require.NotNil(t, image.BuildInfo) require.NotNil(t, image.BuildInfo)
var bi []exptypes.BuildInfo var bi binfotypes.BuildInfo
err = json.Unmarshal(image.BuildInfo, &bi) err = json.Unmarshal(image.BuildInfo, &bi)
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, 1, len(bi))
assert.Equal(t, exptypes.BuildInfoTypeDockerImage, bi[0].Type) sources := bi.Sources
assert.Equal(t, "busybox", bi[0].Ref) require.Equal(t, 1, len(sources))
assert.True(t, strings.HasPrefix(bi[0].Alias, "docker.io/library/busybox@"))
assert.NotEmpty(t, bi[0].Pin) assert.Equal(t, binfotypes.SourceTypeDockerImage, sources[0].Type)
assert.Equal(t, "busybox", sources[0].Ref)
assert.True(t, strings.HasPrefix(sources[0].Alias, "docker.io/library/busybox@"))
assert.NotEmpty(t, sources[0].Pin)
} }

View File

@ -4,6 +4,7 @@ import (
"time" "time"
"github.com/docker/docker/api/types/strslice" "github.com/docker/docker/api/types/strslice"
binfotypes "github.com/moby/buildkit/util/buildinfo/types"
"github.com/moby/buildkit/util/system" "github.com/moby/buildkit/util/system"
ocispecs "github.com/opencontainers/image-spec/specs-go/v1" ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
) )
@ -54,8 +55,8 @@ type Image struct {
// Variant defines platform variant. To be added to OCI. // Variant defines platform variant. To be added to OCI.
Variant string `json:"variant,omitempty"` Variant string `json:"variant,omitempty"`
// BuildInfo defines build dependencies. // binfotypes.ImageConfig defines build dependencies.
BuildInfo []byte `json:"moby.buildkit.buildinfo.v1,omitempty"` binfotypes.ImageConfig
} }
func clone(src Image) Image { func clone(src Image) Image {

View File

@ -40,6 +40,7 @@ import (
"github.com/moby/buildkit/session/upload/uploadprovider" "github.com/moby/buildkit/session/upload/uploadprovider"
"github.com/moby/buildkit/solver/errdefs" "github.com/moby/buildkit/solver/errdefs"
"github.com/moby/buildkit/solver/pb" "github.com/moby/buildkit/solver/pb"
binfotypes "github.com/moby/buildkit/util/buildinfo/types"
"github.com/moby/buildkit/util/contentutil" "github.com/moby/buildkit/util/contentutil"
"github.com/moby/buildkit/util/testutil" "github.com/moby/buildkit/util/testutil"
"github.com/moby/buildkit/util/testutil/httpserver" "github.com/moby/buildkit/util/testutil/httpserver"
@ -115,7 +116,7 @@ var allTests = integration.TestFuncs(
testExportCacheLoop, testExportCacheLoop,
testWildcardRenameCache, testWildcardRenameCache,
testDockerfileInvalidInstruction, testDockerfileInvalidInstruction,
testBuildInfo, testBuildSources,
testShmSize, testShmSize,
testUlimit, testUlimit,
testCgroupParent, testCgroupParent,
@ -5184,7 +5185,7 @@ RUN echo $(hostname) | grep foo
} }
// moby/buildkit#2311 // moby/buildkit#2311
func testBuildInfo(t *testing.T, sb integration.Sandbox) { func testBuildSources(t *testing.T, sb integration.Sandbox) {
f := getFrontend(t, sb) f := getFrontend(t, sb)
gitDir, err := ioutil.TempDir("", "buildkit") gitDir, err := ioutil.TempDir("", "buildkit")
@ -5247,29 +5248,28 @@ COPY --from=buildx /buildx /usr/libexec/docker/cli-plugins/docker-buildx
dtbi, err := base64.StdEncoding.DecodeString(res.ExporterResponse[exptypes.ExporterBuildInfo]) dtbi, err := base64.StdEncoding.DecodeString(res.ExporterResponse[exptypes.ExporterBuildInfo])
require.NoError(t, err) require.NoError(t, err)
var bi map[string][]exptypes.BuildInfo var bi binfotypes.BuildInfo
err = json.Unmarshal(dtbi, &bi) err = json.Unmarshal(dtbi, &bi)
require.NoError(t, err) require.NoError(t, err)
_, ok := bi["sources"] sources := bi.Sources
require.True(t, ok) require.Equal(t, 4, len(sources))
require.Equal(t, 4, len(bi["sources"]))
assert.Equal(t, exptypes.BuildInfoTypeDockerImage, bi["sources"][0].Type) assert.Equal(t, binfotypes.SourceTypeDockerImage, sources[0].Type)
assert.Equal(t, "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0", bi["sources"][0].Ref) assert.Equal(t, "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0", sources[0].Ref)
assert.Equal(t, "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0", bi["sources"][0].Pin) assert.Equal(t, "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0", sources[0].Pin)
assert.Equal(t, exptypes.BuildInfoTypeDockerImage, bi["sources"][1].Type) assert.Equal(t, binfotypes.SourceTypeDockerImage, sources[1].Type)
assert.Equal(t, "docker.io/docker/dockerfile-upstream:1.3.0", bi["sources"][1].Ref) assert.Equal(t, "docker.io/docker/dockerfile-upstream:1.3.0", sources[1].Ref)
assert.Equal(t, "sha256:9e2c9eca7367393aecc68795c671f93466818395a2693498debe831fd67f5e89", bi["sources"][1].Pin) assert.Equal(t, "sha256:9e2c9eca7367393aecc68795c671f93466818395a2693498debe831fd67f5e89", sources[1].Pin)
assert.Equal(t, exptypes.BuildInfoTypeDockerImage, bi["sources"][2].Type) assert.Equal(t, binfotypes.SourceTypeDockerImage, sources[2].Type)
assert.Equal(t, "docker.io/library/busybox:latest", bi["sources"][2].Ref) assert.Equal(t, "docker.io/library/busybox:latest", sources[2].Ref)
assert.NotEmpty(t, bi["sources"][2].Pin) assert.NotEmpty(t, sources[2].Pin)
assert.Equal(t, exptypes.BuildInfoTypeHTTP, bi["sources"][3].Type) assert.Equal(t, binfotypes.SourceTypeHTTP, sources[3].Type)
assert.Equal(t, "https://raw.githubusercontent.com/moby/moby/master/README.md", bi["sources"][3].Ref) assert.Equal(t, "https://raw.githubusercontent.com/moby/moby/master/README.md", sources[3].Ref)
assert.Equal(t, "sha256:419455202b0ef97e480d7f8199b26a721a417818bc0e2d106975f74323f25e6c", bi["sources"][3].Pin) assert.Equal(t, "sha256:419455202b0ef97e480d7f8199b26a721a417818bc0e2d106975f74323f25e6c", sources[3].Pin)
} }
func testShmSize(t *testing.T, sb integration.Sandbox) { func testShmSize(t *testing.T, sb integration.Sandbox) {

View File

@ -22,7 +22,7 @@ import (
type ResolveOpFunc func(Vertex, Builder) (Op, error) type ResolveOpFunc func(Vertex, Builder) (Op, error)
type Builder interface { type Builder interface {
Build(ctx context.Context, e Edge) (CachedResult, BuildInfo, error) Build(ctx context.Context, e Edge) (CachedResult, BuildSources, error)
InContext(ctx context.Context, f func(ctx context.Context, g session.Group) error) error InContext(ctx context.Context, f func(ctx context.Context, g session.Group) error) error
EachValue(ctx context.Context, key string, fn func(interface{}) error) error EachValue(ctx context.Context, key string, fn func(interface{}) error) error
} }
@ -197,7 +197,7 @@ type subBuilder struct {
exporters []ExportableCacheKey exporters []ExportableCacheKey
} }
func (sb *subBuilder) Build(ctx context.Context, e Edge) (CachedResult, BuildInfo, error) { func (sb *subBuilder) Build(ctx context.Context, e Edge) (CachedResult, BuildSources, error) {
// TODO(@crazy-max): Handle BuildInfo from subbuild // TODO(@crazy-max): Handle BuildInfo from subbuild
res, err := sb.solver.subBuild(ctx, e, sb.vtx) res, err := sb.solver.subBuild(ctx, e, sb.vtx)
if err != nil { if err != nil {
@ -496,7 +496,7 @@ func (jl *Solver) deleteIfUnreferenced(k digest.Digest, st *state) {
} }
} }
func (j *Job) Build(ctx context.Context, e Edge) (CachedResult, BuildInfo, error) { func (j *Job) Build(ctx context.Context, e Edge) (CachedResult, BuildSources, error) {
if span := trace.SpanFromContext(ctx); span.SpanContext().IsValid() { if span := trace.SpanFromContext(ctx); span.SpanContext().IsValid() {
j.span = span j.span = span
} }
@ -514,25 +514,25 @@ func (j *Job) Build(ctx context.Context, e Edge) (CachedResult, BuildInfo, error
j.list.mu.Lock() j.list.mu.Lock()
defer j.list.mu.Unlock() defer j.list.mu.Unlock()
return res, j.walkBuildInfo(ctx, e, make(BuildInfo)), nil return res, j.walkBuildSources(ctx, e, make(BuildSources)), nil
} }
func (j *Job) walkBuildInfo(ctx context.Context, e Edge, bi BuildInfo) BuildInfo { func (j *Job) walkBuildSources(ctx context.Context, e Edge, bsrc BuildSources) BuildSources {
for _, inp := range e.Vertex.Inputs() { for _, inp := range e.Vertex.Inputs() {
if st, ok := j.list.actives[inp.Vertex.Digest()]; ok { if st, ok := j.list.actives[inp.Vertex.Digest()]; ok {
st.mu.Lock() st.mu.Lock()
for _, cacheRes := range st.op.cacheRes { for _, cacheRes := range st.op.cacheRes {
for key, val := range cacheRes.BuildInfo { for key, val := range cacheRes.BuildSources {
if _, ok := bi[key]; !ok { if _, ok := bsrc[key]; !ok {
bi[key] = val bsrc[key] = val
} }
} }
} }
st.mu.Unlock() st.mu.Unlock()
bi = j.walkBuildInfo(ctx, inp, bi) bsrc = j.walkBuildSources(ctx, inp, bsrc)
} }
} }
return bi return bsrc
} }
func (j *Job) Discard() error { func (j *Job) Discard() error {

View File

@ -62,7 +62,7 @@ func (b *llbBridge) Warn(ctx context.Context, dgst digest.Digest, msg string, op
}) })
} }
func (b *llbBridge) loadResult(ctx context.Context, def *pb.Definition, cacheImports []gw.CacheOptionsEntry) (solver.CachedResult, solver.BuildInfo, error) { func (b *llbBridge) loadResult(ctx context.Context, def *pb.Definition, cacheImports []gw.CacheOptionsEntry) (solver.CachedResult, solver.BuildSources, error) {
w, err := b.resolveWorker() w, err := b.resolveWorker()
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
@ -162,13 +162,13 @@ func (b *llbBridge) Solve(ctx context.Context, req frontend.SolveRequest, sid st
} }
type resultProxy struct { type resultProxy struct {
cb func(context.Context) (solver.CachedResult, solver.BuildInfo, error) cb func(context.Context) (solver.CachedResult, solver.BuildSources, error)
def *pb.Definition def *pb.Definition
g flightcontrol.Group g flightcontrol.Group
mu sync.Mutex mu sync.Mutex
released bool released bool
v solver.CachedResult v solver.CachedResult
bi solver.BuildInfo bsrc solver.BuildSources
err error err error
errResults []solver.Result errResults []solver.Result
} }
@ -177,8 +177,8 @@ func newResultProxy(b *llbBridge, req frontend.SolveRequest) *resultProxy {
rp := &resultProxy{ rp := &resultProxy{
def: req.Definition, def: req.Definition,
} }
rp.cb = func(ctx context.Context) (solver.CachedResult, solver.BuildInfo, error) { rp.cb = func(ctx context.Context) (solver.CachedResult, solver.BuildSources, error) {
res, bi, err := b.loadResult(ctx, req.Definition, req.CacheImports) res, bsrc, err := b.loadResult(ctx, req.Definition, req.CacheImports)
var ee *llberrdefs.ExecError var ee *llberrdefs.ExecError
if errors.As(err, &ee) { if errors.As(err, &ee) {
ee.EachRef(func(res solver.Result) error { ee.EachRef(func(res solver.Result) error {
@ -188,7 +188,7 @@ func newResultProxy(b *llbBridge, req frontend.SolveRequest) *resultProxy {
// acquire ownership so ExecError finalizer doesn't attempt to release as well // acquire ownership so ExecError finalizer doesn't attempt to release as well
ee.OwnerBorrowed = true ee.OwnerBorrowed = true
} }
return res, bi, err return res, bsrc, err
} }
return rp return rp
} }
@ -197,8 +197,8 @@ func (rp *resultProxy) Definition() *pb.Definition {
return rp.def return rp.def
} }
func (rp *resultProxy) BuildInfo() solver.BuildInfo { func (rp *resultProxy) BuildSources() solver.BuildSources {
return rp.bi return rp.bsrc
} }
func (rp *resultProxy) Release(ctx context.Context) (err error) { func (rp *resultProxy) Release(ctx context.Context) (err error) {
@ -259,7 +259,7 @@ func (rp *resultProxy) Result(ctx context.Context) (res solver.CachedResult, err
return rp.v, rp.err return rp.v, rp.err
} }
rp.mu.Unlock() rp.mu.Unlock()
v, bi, err := rp.cb(ctx) v, bsrc, err := rp.cb(ctx)
if err != nil { if err != nil {
select { select {
case <-ctx.Done(): case <-ctx.Done():
@ -278,7 +278,7 @@ func (rp *resultProxy) Result(ctx context.Context) (res solver.CachedResult, err
return nil, errors.Errorf("evaluating released result") return nil, errors.Errorf("evaluating released result")
} }
rp.v = v rp.v = v
rp.bi = bi rp.bsrc = bsrc
rp.err = err rp.err = err
rp.mu.Unlock() rp.mu.Unlock()
return v, err return v, err

View File

@ -78,16 +78,16 @@ func (s *sourceOp) CacheMap(ctx context.Context, g session.Group, index int) (*s
dgst = digest.Digest("random:" + strings.TrimPrefix(dgst.String(), dgst.Algorithm().String()+":")) dgst = digest.Digest("random:" + strings.TrimPrefix(dgst.String(), dgst.Algorithm().String()+":"))
} }
var buildInfo map[string]string var buildSources map[string]string
if !strings.HasPrefix(s.op.Source.GetIdentifier(), "local://") { if !strings.HasPrefix(s.op.Source.GetIdentifier(), "local://") {
buildInfo = map[string]string{s.op.Source.GetIdentifier(): pin} buildSources = map[string]string{s.op.Source.GetIdentifier(): pin}
} }
return &solver.CacheMap{ return &solver.CacheMap{
// TODO: add os/arch // TODO: add os/arch
Digest: dgst, Digest: dgst,
Opts: cacheOpts, Opts: cacheOpts,
BuildInfo: buildInfo, BuildSources: buildSources,
}, done, nil }, done, nil
} }

View File

@ -176,7 +176,7 @@ func (s *Solver) Solve(ctx context.Context, id string, sessionID string, req fro
} }
inp.Ref = workerRef.ImmutableRef inp.Ref = workerRef.ImmutableRef
dtbi, err := buildinfo.Merge(ctx, res.BuildInfo(), inp.Metadata[exptypes.ExporterImageConfigKey]) dtbi, err := buildinfo.Merge(ctx, res.BuildSources(), inp.Metadata[exptypes.ExporterImageConfigKey])
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -208,7 +208,7 @@ func (s *Solver) Solve(ctx context.Context, id string, sessionID string, req fro
} }
m[k] = workerRef.ImmutableRef m[k] = workerRef.ImmutableRef
dtbi, err := buildinfo.Merge(ctx, res.BuildInfo(), inp.Metadata[fmt.Sprintf("%s/%s", exptypes.ExporterImageConfigKey, k)]) dtbi, err := buildinfo.Merge(ctx, res.BuildSources(), inp.Metadata[fmt.Sprintf("%s/%s", exptypes.ExporterImageConfigKey, k)])
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@ -76,7 +76,7 @@ type ResultProxy interface {
Result(context.Context) (CachedResult, error) Result(context.Context) (CachedResult, error)
Release(context.Context) error Release(context.Context) error
Definition() *pb.Definition Definition() *pb.Definition
BuildInfo() BuildInfo BuildSources() BuildSources
} }
// CacheExportMode is the type for setting cache exporting modes // CacheExportMode is the type for setting cache exporting modes
@ -197,13 +197,13 @@ type CacheMap struct {
// the cache. Opts should not have any impact on the computed cache key. // the cache. Opts should not have any impact on the computed cache key.
Opts CacheOpts Opts CacheOpts
// BuildInfo contains build dependencies that will be set from source // BuildSources contains build dependencies that will be set from source
// operation. // operation.
BuildInfo map[string]string BuildSources BuildSources
} }
// BuildInfo contains solved build dependencies. // BuildSources contains solved build dependencies.
type BuildInfo map[string]string type BuildSources map[string]string
// ExportableCacheKey is a cache key connected with an exporter that can export // ExportableCacheKey is a cache key connected with an exporter that can export
// a chain of cacherecords pointing to that key // a chain of cacherecords pointing to that key

View File

@ -2,136 +2,142 @@ package buildinfo
import ( import (
"context" "context"
"encoding/base64"
"encoding/json" "encoding/json"
"sort" "sort"
"github.com/docker/distribution/reference" "github.com/docker/distribution/reference"
"github.com/moby/buildkit/exporter/containerimage/exptypes"
"github.com/moby/buildkit/source" "github.com/moby/buildkit/source"
binfotypes "github.com/moby/buildkit/util/buildinfo/types"
"github.com/moby/buildkit/util/urlutil" "github.com/moby/buildkit/util/urlutil"
"github.com/pkg/errors" "github.com/pkg/errors"
) )
const ImageConfigField = "moby.buildkit.buildinfo.v1" // Decode decodes a base64 encoded build info.
func Decode(enc string) (bi binfotypes.BuildInfo, _ error) {
dec, err := base64.StdEncoding.DecodeString(enc)
if err != nil {
return bi, err
}
err = json.Unmarshal(dec, &bi)
return bi, err
}
// Merge combines and fixes build info from image config // Merge combines and fixes build sources from image config
// key moby.buildkit.buildinfo.v1. // key binfotypes.ImageConfigField.
func Merge(ctx context.Context, buildInfo map[string]string, imageConfig []byte) ([]byte, error) { func Merge(ctx context.Context, buildSources map[string]string, imageConfig []byte) ([]byte, error) {
icbi, err := imageConfigBuildInfo(imageConfig) icbi, err := FromImageConfig(imageConfig)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// Iterate and combine build sources // Iterate and combine build sources
mbis := map[string]exptypes.BuildInfo{} mbs := map[string]binfotypes.Source{}
for srcs, di := range buildInfo { for buildSource, pin := range buildSources {
src, err := source.FromString(srcs) src, err := source.FromString(buildSource)
if err != nil { if err != nil {
return nil, err return nil, err
} }
switch sid := src.(type) { switch sourceID := src.(type) {
case *source.ImageIdentifier: case *source.ImageIdentifier:
for idx, bi := range icbi { for i, ics := range icbi.Sources {
// Use original user input from image config // Use original user input from image config
if bi.Type == exptypes.BuildInfoTypeDockerImage && bi.Alias == sid.Reference.String() { if ics.Type == binfotypes.SourceTypeDockerImage && ics.Alias == sourceID.Reference.String() {
if _, ok := mbis[bi.Alias]; !ok { if _, ok := mbs[ics.Alias]; !ok {
parsed, err := reference.ParseNormalizedNamed(bi.Ref) parsed, err := reference.ParseNormalizedNamed(ics.Ref)
if err != nil { if err != nil {
return nil, errors.Wrapf(err, "failed to parse %s", bi.Ref) return nil, errors.Wrapf(err, "failed to parse %s", ics.Ref)
} }
mbis[bi.Alias] = exptypes.BuildInfo{ mbs[ics.Alias] = binfotypes.Source{
Type: exptypes.BuildInfoTypeDockerImage, Type: binfotypes.SourceTypeDockerImage,
Ref: reference.TagNameOnly(parsed).String(), Ref: reference.TagNameOnly(parsed).String(),
Pin: di, Pin: pin,
} }
icbi = append(icbi[:idx], icbi[idx+1:]...) icbi.Sources = append(icbi.Sources[:i], icbi.Sources[i+1:]...)
} }
break break
} }
} }
if _, ok := mbis[sid.Reference.String()]; !ok { if _, ok := mbs[sourceID.Reference.String()]; !ok {
mbis[sid.Reference.String()] = exptypes.BuildInfo{ mbs[sourceID.Reference.String()] = binfotypes.Source{
Type: exptypes.BuildInfoTypeDockerImage, Type: binfotypes.SourceTypeDockerImage,
Ref: sid.Reference.String(), Ref: sourceID.Reference.String(),
Pin: di, Pin: pin,
} }
} }
case *source.GitIdentifier: case *source.GitIdentifier:
sref := sid.Remote sref := sourceID.Remote
if len(sid.Ref) > 0 { if len(sourceID.Ref) > 0 {
sref += "#" + sid.Ref sref += "#" + sourceID.Ref
} }
if len(sid.Subdir) > 0 { if len(sourceID.Subdir) > 0 {
sref += ":" + sid.Subdir sref += ":" + sourceID.Subdir
} }
if _, ok := mbis[sref]; !ok { if _, ok := mbs[sref]; !ok {
mbis[sref] = exptypes.BuildInfo{ mbs[sref] = binfotypes.Source{
Type: exptypes.BuildInfoTypeGit, Type: binfotypes.SourceTypeGit,
Ref: urlutil.RedactCredentials(sref), Ref: urlutil.RedactCredentials(sref),
Pin: di, Pin: pin,
} }
} }
case *source.HTTPIdentifier: case *source.HTTPIdentifier:
if _, ok := mbis[sid.URL]; !ok { if _, ok := mbs[sourceID.URL]; !ok {
mbis[sid.URL] = exptypes.BuildInfo{ mbs[sourceID.URL] = binfotypes.Source{
Type: exptypes.BuildInfoTypeHTTP, Type: binfotypes.SourceTypeHTTP,
Ref: urlutil.RedactCredentials(sid.URL), Ref: urlutil.RedactCredentials(sourceID.URL),
Pin: di, Pin: pin,
} }
} }
} }
} }
// Leftovers build deps in image config. Mostly duplicated ones we // Leftovers build deps in image config. Mostly duplicated ones we
// don't need but there is an edge case if no instruction except source's // don't need but there is an edge case if no instruction except sources
// one is defined (eg. FROM ...) that can be valid so take it into account. // one is defined (e.g. FROM ...) that can be valid so take it into account.
for _, bi := range icbi { for _, ics := range icbi.Sources {
if bi.Type != exptypes.BuildInfoTypeDockerImage { if ics.Type != binfotypes.SourceTypeDockerImage {
continue continue
} }
if _, ok := mbis[bi.Alias]; !ok { if _, ok := mbs[ics.Alias]; !ok {
parsed, err := reference.ParseNormalizedNamed(bi.Ref) parsed, err := reference.ParseNormalizedNamed(ics.Ref)
if err != nil { if err != nil {
return nil, errors.Wrapf(err, "failed to parse %s", bi.Ref) return nil, errors.Wrapf(err, "failed to parse %s", ics.Ref)
} }
mbis[bi.Alias] = exptypes.BuildInfo{ mbs[ics.Alias] = binfotypes.Source{
Type: exptypes.BuildInfoTypeDockerImage, Type: binfotypes.SourceTypeDockerImage,
Ref: reference.TagNameOnly(parsed).String(), Ref: reference.TagNameOnly(parsed).String(),
Pin: bi.Pin, Pin: ics.Pin,
} }
} }
} }
bis := make([]exptypes.BuildInfo, 0, len(mbis)) srcs := make([]binfotypes.Source, 0, len(mbs))
for _, bi := range mbis { for _, bs := range mbs {
bis = append(bis, bi) srcs = append(srcs, bs)
} }
sort.Slice(bis, func(i, j int) bool { sort.Slice(srcs, func(i, j int) bool {
return bis[i].Ref < bis[j].Ref return srcs[i].Ref < srcs[j].Ref
}) })
return json.Marshal(map[string][]exptypes.BuildInfo{ return json.Marshal(binfotypes.BuildInfo{
"sources": bis, Sources: srcs,
}) })
} }
// imageConfigBuildInfo returns build dependencies from image config // FromImageConfig returns build dependencies from image config.
func imageConfigBuildInfo(imageConfig []byte) ([]exptypes.BuildInfo, error) { func FromImageConfig(imageConfig []byte) (bi binfotypes.BuildInfo, _ error) {
if len(imageConfig) == 0 { if len(imageConfig) == 0 {
return nil, nil return bi, nil
}
var config struct {
BuildInfo []byte `json:"moby.buildkit.buildinfo.v1,omitempty"`
} }
var config binfotypes.ImageConfig
if err := json.Unmarshal(imageConfig, &config); err != nil { if err := json.Unmarshal(imageConfig, &config); err != nil {
return nil, errors.Wrap(err, "failed to unmarshal buildinfo from config") return bi, errors.Wrap(err, "failed to unmarshal buildinfo from image config")
} }
if len(config.BuildInfo) == 0 { if len(config.BuildInfo) == 0 {
return nil, nil return bi, nil
} }
var bi []exptypes.BuildInfo
if err := json.Unmarshal(config.BuildInfo, &bi); err != nil { if err := json.Unmarshal(config.BuildInfo, &bi); err != nil {
return nil, errors.Wrapf(err, "failed to unmarshal %s", ImageConfigField) return bi, errors.Wrap(err, "failed to unmarshal buildinfo")
} }
return bi, nil return bi, nil
} }

View File

@ -0,0 +1,105 @@
package buildinfo
import (
"context"
"encoding/base64"
"encoding/json"
"testing"
binfotypes "github.com/moby/buildkit/util/buildinfo/types"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestDecode(t *testing.T) {
bi, err := Decode("eyJzb3VyY2VzIjpbeyJ0eXBlIjoiaW1hZ2UiLCJyZWYiOiJkb2NrZXIuaW8vZG9ja2VyL2J1aWxkeC1iaW46MC42LjFAc2hhMjU2OmE2NTJjZWQ0YTQxNDE5NzdjN2RhYWVkMGEwNzRkY2Q5ODQ0YTc4ZDdkMjYxNTQ2NWIxMmY0MzNhZTZkZDI5ZjAiLCJwaW4iOiJzaGEyNTY6YTY1MmNlZDRhNDE0MTk3N2M3ZGFhZWQwYTA3NGRjZDk4NDRhNzhkN2QyNjE1NDY1YjEyZjQzM2FlNmRkMjlmMCJ9LHsidHlwZSI6ImltYWdlIiwicmVmIjoiZG9ja2VyLmlvL2xpYnJhcnkvYWxwaW5lOjMuMTMiLCJwaW4iOiJzaGEyNTY6MWQzMGQxYmEzY2I5MDk2MjA2N2U5YjI5NDkxZmJkNTY5OTc5NzlkNTQzNzZmMjNmMDE0NDhiNWM1Y2Q4YjQ2MiJ9LHsidHlwZSI6ImltYWdlIiwicmVmIjoiZG9ja2VyLmlvL21vYnkvYnVpbGRraXQ6djAuOS4wIiwicGluIjoic2hhMjU2OjhkYzY2OGU3ZjY2ZGIxYzA0NGFhZGJlZDMwNjAyMDc0MzUxNmE5NDg0ODc5M2UwZjgxZjk0YTA4N2VlNzhjYWIifSx7InR5cGUiOiJpbWFnZSIsInJlZiI6ImRvY2tlci5pby90b25pc3RpaWdpL3h4QHNoYTI1NjoyMWE2MWJlNDc0NGY2NTMxY2I1ZjMzYjBlNmY0MGVkZTQxZmEzYTFiOGM4MmQ1OTQ2MTc4ZjgwY2M4NGJmYzA0IiwicGluIjoic2hhMjU2OjIxYTYxYmU0NzQ0ZjY1MzFjYjVmMzNiMGU2ZjQwZWRlNDFmYTNhMWI4YzgyZDU5NDYxNzhmODBjYzg0YmZjMDQifSx7InR5cGUiOiJnaXQiLCJyZWYiOiJodHRwczovL2dpdGh1Yi5jb20vY3JhenktbWF4L2J1aWxka2l0LWJ1aWxkc291cmNlcy10ZXN0LmdpdCNtYXN0ZXIiLCJwaW4iOiIyNTlhNWFhNWFhNWJiMzU2MmQxMmNjNjMxZmUzOTlmNDc4ODY0MmMxIn0seyJ0eXBlIjoiaHR0cCIsInJlZiI6Imh0dHBzOi8vcmF3LmdpdGh1YnVzZXJjb250ZW50LmNvbS9tb2J5L21vYnkvbWFzdGVyL1JFQURNRS5tZCIsInBpbiI6InNoYTI1Njo0MTk0NTUyMDJiMGVmOTdlNDgwZDdmODE5OWIyNmE3MjFhNDE3ODE4YmMwZTJkMTA2OTc1Zjc0MzIzZjI1ZTZjIn1dfQ==")
require.NoError(t, err)
assert.Equal(t, 6, len(bi.Sources))
}
// TestMerge checks that Merge reconciles the build sources captured from
// LLB against the buildinfo embedded in an image config: aliased image
// refs are matched and deduplicated (keeping the user-facing ref without
// the Alias field), while git and http sources present only in the LLB
// map are appended to the result.
func TestMerge(t *testing.T) {
	// Sources as recorded during the LLB solve: scheme-prefixed ref -> pin
	// (image digest, git commit, or http content digest).
	buildSourcesLLB := map[string]string{
		"docker-image://docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0": "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
		"docker-image://docker.io/library/alpine:3.13@sha256:1d30d1ba3cb90962067e9b29491fbd56997979d54376f23f01448b5c5cd8b462":      "sha256:1d30d1ba3cb90962067e9b29491fbd56997979d54376f23f01448b5c5cd8b462",
		"docker-image://docker.io/moby/buildkit:v0.9.0@sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab":     "sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab",
		"docker-image://docker.io/tonistiigi/xx@sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04":            "sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
		"git://https://github.com/crazy-max/buildkit-buildsources-test.git#master":                                                  "259a5aa5aa5bb3562d12cc631fe399f4788642c1",
		"https://raw.githubusercontent.com/moby/moby/master/README.md":                                                              "sha256:419455202b0ef97e480d7f8199b26a721a417818bc0e2d106975f74323f25e6c",
	}
	// Frontend-produced buildinfo: image sources only, each carrying an
	// Alias so Merge can match it to the digest-suffixed LLB ref above.
	buildInfoImageConfig, err := json.Marshal(binfotypes.BuildInfo{
		Sources: []binfotypes.Source{
			{
				Type:  binfotypes.SourceTypeDockerImage,
				Ref:   "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
				Alias: "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
				Pin:   "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
			},
			{
				Type:  binfotypes.SourceTypeDockerImage,
				Ref:   "docker.io/library/alpine:3.13",
				Alias: "docker.io/library/alpine:3.13@sha256:1d30d1ba3cb90962067e9b29491fbd56997979d54376f23f01448b5c5cd8b462",
				Pin:   "sha256:1d30d1ba3cb90962067e9b29491fbd56997979d54376f23f01448b5c5cd8b462",
			},
			{
				Type:  binfotypes.SourceTypeDockerImage,
				Ref:   "docker.io/moby/buildkit:v0.9.0",
				Alias: "docker.io/moby/buildkit:v0.9.0@sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab",
				Pin:   "sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab",
			},
			{
				Type:  binfotypes.SourceTypeDockerImage,
				Ref:   "docker.io/tonistiigi/xx@sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
				Alias: "docker.io/tonistiigi/xx@sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
				Pin:   "sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
			},
		},
	})
	require.NoError(t, err)
	// Wrap the buildinfo payload in an image config, as Merge consumes it.
	bic, err := json.Marshal(binfotypes.ImageConfig{
		BuildInfo: buildInfoImageConfig,
	})
	require.NoError(t, err)
	ret, err := Merge(context.Background(), buildSourcesLLB, bic)
	require.NoError(t, err)
	// Round-trip through Decode (which takes base64 input) to get a
	// structured view of the merged result.
	dec, err := Decode(base64.StdEncoding.EncodeToString(ret))
	require.NoError(t, err)
	// Expected: the four image sources with Alias stripped, plus the git
	// and http sources picked up from the LLB map.
	assert.Equal(t, binfotypes.BuildInfo{
		Sources: []binfotypes.Source{
			{
				Type: binfotypes.SourceTypeDockerImage,
				Ref:  "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
				Pin:  "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
			},
			{
				Type: binfotypes.SourceTypeDockerImage,
				Ref:  "docker.io/library/alpine:3.13",
				Pin:  "sha256:1d30d1ba3cb90962067e9b29491fbd56997979d54376f23f01448b5c5cd8b462",
			},
			{
				Type: binfotypes.SourceTypeDockerImage,
				Ref:  "docker.io/moby/buildkit:v0.9.0",
				Pin:  "sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab",
			},
			{
				Type: binfotypes.SourceTypeDockerImage,
				Ref:  "docker.io/tonistiigi/xx@sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
				Pin:  "sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
			},
			{
				Type: binfotypes.SourceTypeGit,
				Ref:  "https://github.com/crazy-max/buildkit-buildsources-test.git#master",
				Pin:  "259a5aa5aa5bb3562d12cc631fe399f4788642c1",
			},
			{
				Type: binfotypes.SourceTypeHTTP,
				Ref:  "https://raw.githubusercontent.com/moby/moby/master/README.md",
				Pin:  "sha256:419455202b0ef97e480d7f8199b26a721a417818bc0e2d106975f74323f25e6c",
			},
		},
	}, dec)
}

View File

@ -0,0 +1,46 @@
package binfotypes
import (
srctypes "github.com/moby/buildkit/source/types"
)
// ImageConfigField is the image-config key under which JSON-encoded
// build dependencies (BuildInfo) are stored.
const ImageConfigField = "moby.buildkit.buildinfo.v1"
// ImageConfig defines the structure of build dependencies
// inside image config.
type ImageConfig struct {
	// BuildInfo holds the raw JSON-encoded BuildInfo payload, stored
	// under the ImageConfigField key. As a []byte it is serialized as a
	// base64 string by encoding/json.
	BuildInfo []byte `json:"moby.buildkit.buildinfo.v1,omitempty"`
}
// BuildInfo defines the main structure added to image config as
// ImageConfigField key and returned in solver ExporterResponse as
// exptypes.ExporterBuildInfo key.
type BuildInfo struct {
	// Sources defines the build dependencies (images, git repos, http
	// resources) consumed while producing the image.
	Sources []Source `json:"sources,omitempty"`
}
// Source defines a build dependency.
type Source struct {
	// Type defines the SourceType source type (docker-image, git, http).
	Type SourceType `json:"type,omitempty"`
	// Ref is the reference of the source as the user expressed it.
	Ref string `json:"ref,omitempty"`
	// Alias is a special field used to match with the actual source ref
	// because frontend might have already transformed a string user typed
	// before generating LLB.
	Alias string `json:"alias,omitempty"`
	// Pin is the source digest pinning the dependency to exact content
	// (e.g. an image digest or a git commit).
	Pin string `json:"pin,omitempty"`
}
// SourceType contains source type.
type SourceType string

// List of source types, mirroring the schemes declared in
// github.com/moby/buildkit/source/types.
const (
	SourceTypeDockerImage SourceType = srctypes.DockerImageScheme
	SourceTypeGit         SourceType = srctypes.GitScheme
	SourceTypeHTTP        SourceType = srctypes.HTTPScheme
)