buildinfo: refactor
Signed-off-by: CrazyMax <crazy-max@users.noreply.github.com>

parent 58bac77c86
commit b4e37a867f
@@ -1,7 +1,6 @@
 package exptypes

 import (
-  srctypes "github.com/moby/buildkit/source/types"
   digest "github.com/opencontainers/go-digest"
   ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
 )

@@ -27,29 +26,3 @@ type Platform struct {
   ID       string
   Platform ocispecs.Platform
 }
-
-// BuildInfo defines build dependencies that will be added to image config as
-// moby.buildkit.buildinfo.v1 key and returned in solver ExporterResponse as
-// ExporterBuildInfo key.
-type BuildInfo struct {
-  // Type defines the BuildInfoType source type (docker-image, git, http).
-  Type BuildInfoType `json:"type,omitempty"`
-  // Ref is the reference of the source.
-  Ref string `json:"ref,omitempty"`
-  // Alias is a special field used to match with the actual source ref
-  // because frontend might have already transformed a string user typed
-  // before generating LLB.
-  Alias string `json:"alias,omitempty"`
-  // Pin is the source digest.
-  Pin string `json:"pin,omitempty"`
-}
-
-// BuildInfoType contains source type.
-type BuildInfoType string
-
-// List of source types.
-const (
-  BuildInfoTypeDockerImage BuildInfoType = srctypes.DockerImageScheme
-  BuildInfoTypeGit         BuildInfoType = srctypes.GitScheme
-  BuildInfoTypeHTTP        BuildInfoType = srctypes.HTTPScheme
-)

@@ -21,6 +21,7 @@ import (
   "github.com/moby/buildkit/solver"
   "github.com/moby/buildkit/util/bklog"
   "github.com/moby/buildkit/util/buildinfo"
+  binfotypes "github.com/moby/buildkit/util/buildinfo/types"
   "github.com/moby/buildkit/util/compression"
   "github.com/moby/buildkit/util/progress"
   "github.com/moby/buildkit/util/system"

@@ -416,9 +417,9 @@ func patchImageConfig(dt []byte, descs []ocispecs.Descriptor, history []ocispecs
     if err != nil {
       return nil, err
     }
-    m[buildinfo.ImageConfigField] = dt
-  } else if _, ok := m[buildinfo.ImageConfigField]; ok {
-    delete(m, buildinfo.ImageConfigField)
+    m[binfotypes.ImageConfigField] = dt
+  } else if _, ok := m[binfotypes.ImageConfigField]; ok {
+    delete(m, binfotypes.ImageConfigField)
   }

   dt, err = json.Marshal(m)

@@ -19,12 +19,12 @@ import (
   "github.com/docker/go-connections/nat"
   "github.com/moby/buildkit/client/llb"
   "github.com/moby/buildkit/client/llb/imagemetaresolver"
-  "github.com/moby/buildkit/exporter/containerimage/exptypes"
   "github.com/moby/buildkit/frontend/dockerfile/instructions"
   "github.com/moby/buildkit/frontend/dockerfile/parser"
   "github.com/moby/buildkit/frontend/dockerfile/shell"
   "github.com/moby/buildkit/solver/pb"
   "github.com/moby/buildkit/util/apicaps"
+  binfotypes "github.com/moby/buildkit/util/buildinfo/types"
   "github.com/moby/buildkit/util/suggest"
   "github.com/moby/buildkit/util/system"
   "github.com/moby/sys/signal"

@@ -346,10 +346,10 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,
       }
     }
     if !isScratch {
-      // image not scratch set original image name as ref
-      // and actual reference as alias in BuildInfo
-      d.buildInfo = &exptypes.BuildInfo{
-        Type:  exptypes.BuildInfoTypeDockerImage,
+      // if image not scratch set original image name as ref
+      // and actual reference as alias in BuildSource
+      d.buildSource = &binfotypes.Source{
+        Type:  binfotypes.SourceTypeDockerImage,
         Ref:   origName,
         Alias: ref.String(),
         Pin:   dgst.String(),

@@ -381,7 +381,7 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,

   buildContext := &mutableOutput{}
   ctxPaths := map[string]struct{}{}
-  var buildInfos []exptypes.BuildInfo
+  var buildSources []binfotypes.Source

   for _, d := range allDispatchStates.states {
     if !isReachable(target, d) {

@@ -389,8 +389,8 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,
     }

     // collect build dependencies
-    if d.buildInfo != nil {
-      buildInfos = append(buildInfos, *d.buildInfo)
+    if d.buildSource != nil {
+      buildSources = append(buildSources, *d.buildSource)
     }

     if d.base != nil {

@@ -469,11 +469,13 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,

   // set target with gathered build dependencies
   target.image.BuildInfo = []byte{}
-  if len(buildInfos) > 0 {
-    sort.Slice(buildInfos, func(i, j int) bool {
-      return buildInfos[i].Ref < buildInfos[j].Ref
+  if len(buildSources) > 0 {
+    sort.Slice(buildSources, func(i, j int) bool {
+      return buildSources[i].Ref < buildSources[j].Ref
     })
-    target.image.BuildInfo, err = json.Marshal(buildInfos)
+    target.image.BuildInfo, err = json.Marshal(&binfotypes.BuildInfo{
+      Sources: buildSources,
+    })
     if err != nil {
       return nil, nil, err
     }

@@ -685,7 +687,7 @@ type dispatchState struct {
   cmdIndex       int
   cmdTotal       int
   prefixPlatform bool
-  buildInfo      *exptypes.BuildInfo
+  buildSource    *binfotypes.Source
 }

 type dispatchStates struct {

@@ -5,10 +5,10 @@ import (
   "strings"
   "testing"

-  "github.com/moby/buildkit/exporter/containerimage/exptypes"
   "github.com/moby/buildkit/frontend/dockerfile/instructions"
   "github.com/moby/buildkit/frontend/dockerfile/shell"
   "github.com/moby/buildkit/util/appcontext"
+  binfotypes "github.com/moby/buildkit/util/buildinfo/types"
   ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
   "github.com/stretchr/testify/assert"
   "github.com/stretchr/testify/require"

@@ -202,11 +202,13 @@ ADD https://raw.githubusercontent.com/moby/buildkit/master/README.md /
 `
   _, image, err := Dockerfile2LLB(appcontext.Context(), []byte(df), ConvertOpt{
     TargetPlatform: &ocispecs.Platform{
-      Architecture: "amd64", OS: "linux",
+      Architecture: "amd64",
+      OS:           "linux",
     },
     BuildPlatforms: []ocispecs.Platform{
       {
-        Architecture: "amd64", OS: "linux",
+        Architecture: "amd64",
+        OS:           "linux",
       },
     },
   })

@@ -214,13 +216,15 @@ ADD https://raw.githubusercontent.com/moby/buildkit/master/README.md /
   require.NoError(t, err)
   require.NotNil(t, image.BuildInfo)

-  var bi []exptypes.BuildInfo
+  var bi binfotypes.BuildInfo
   err = json.Unmarshal(image.BuildInfo, &bi)
   require.NoError(t, err)
-  require.Equal(t, 1, len(bi))

-  assert.Equal(t, exptypes.BuildInfoTypeDockerImage, bi[0].Type)
-  assert.Equal(t, "busybox", bi[0].Ref)
-  assert.True(t, strings.HasPrefix(bi[0].Alias, "docker.io/library/busybox@"))
-  assert.NotEmpty(t, bi[0].Pin)
+  sources := bi.Sources
+  require.Equal(t, 1, len(sources))
+
+  assert.Equal(t, binfotypes.SourceTypeDockerImage, sources[0].Type)
+  assert.Equal(t, "busybox", sources[0].Ref)
+  assert.True(t, strings.HasPrefix(sources[0].Alias, "docker.io/library/busybox@"))
+  assert.NotEmpty(t, sources[0].Pin)
 }

@@ -4,6 +4,7 @@ import (
   "time"

   "github.com/docker/docker/api/types/strslice"
+  binfotypes "github.com/moby/buildkit/util/buildinfo/types"
   "github.com/moby/buildkit/util/system"
   ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
 )

@@ -54,8 +55,8 @@ type Image struct {
   // Variant defines platform variant. To be added to OCI.
   Variant string `json:"variant,omitempty"`

-  // BuildInfo defines build dependencies.
-  BuildInfo []byte `json:"moby.buildkit.buildinfo.v1,omitempty"`
+  // binfotypes.ImageConfig defines build dependencies.
+  binfotypes.ImageConfig
 }

 func clone(src Image) Image {

@@ -40,6 +40,7 @@ import (
   "github.com/moby/buildkit/session/upload/uploadprovider"
   "github.com/moby/buildkit/solver/errdefs"
   "github.com/moby/buildkit/solver/pb"
+  binfotypes "github.com/moby/buildkit/util/buildinfo/types"
   "github.com/moby/buildkit/util/contentutil"
   "github.com/moby/buildkit/util/testutil"
   "github.com/moby/buildkit/util/testutil/httpserver"

@@ -115,7 +116,7 @@ var allTests = integration.TestFuncs(
   testExportCacheLoop,
   testWildcardRenameCache,
   testDockerfileInvalidInstruction,
-  testBuildInfo,
+  testBuildSources,
   testShmSize,
   testUlimit,
   testCgroupParent,

@@ -5184,7 +5185,7 @@ RUN echo $(hostname) | grep foo
 }

 // moby/buildkit#2311
-func testBuildInfo(t *testing.T, sb integration.Sandbox) {
+func testBuildSources(t *testing.T, sb integration.Sandbox) {
   f := getFrontend(t, sb)

   gitDir, err := ioutil.TempDir("", "buildkit")

@@ -5247,29 +5248,28 @@ COPY --from=buildx /buildx /usr/libexec/docker/cli-plugins/docker-buildx
   dtbi, err := base64.StdEncoding.DecodeString(res.ExporterResponse[exptypes.ExporterBuildInfo])
   require.NoError(t, err)

-  var bi map[string][]exptypes.BuildInfo
+  var bi binfotypes.BuildInfo
   err = json.Unmarshal(dtbi, &bi)
   require.NoError(t, err)

-  _, ok := bi["sources"]
-  require.True(t, ok)
-  require.Equal(t, 4, len(bi["sources"]))
+  sources := bi.Sources
+  require.Equal(t, 4, len(sources))

-  assert.Equal(t, exptypes.BuildInfoTypeDockerImage, bi["sources"][0].Type)
-  assert.Equal(t, "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0", bi["sources"][0].Ref)
-  assert.Equal(t, "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0", bi["sources"][0].Pin)
+  assert.Equal(t, binfotypes.SourceTypeDockerImage, sources[0].Type)
+  assert.Equal(t, "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0", sources[0].Ref)
+  assert.Equal(t, "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0", sources[0].Pin)

-  assert.Equal(t, exptypes.BuildInfoTypeDockerImage, bi["sources"][1].Type)
-  assert.Equal(t, "docker.io/docker/dockerfile-upstream:1.3.0", bi["sources"][1].Ref)
-  assert.Equal(t, "sha256:9e2c9eca7367393aecc68795c671f93466818395a2693498debe831fd67f5e89", bi["sources"][1].Pin)
+  assert.Equal(t, binfotypes.SourceTypeDockerImage, sources[1].Type)
+  assert.Equal(t, "docker.io/docker/dockerfile-upstream:1.3.0", sources[1].Ref)
+  assert.Equal(t, "sha256:9e2c9eca7367393aecc68795c671f93466818395a2693498debe831fd67f5e89", sources[1].Pin)

-  assert.Equal(t, exptypes.BuildInfoTypeDockerImage, bi["sources"][2].Type)
-  assert.Equal(t, "docker.io/library/busybox:latest", bi["sources"][2].Ref)
-  assert.NotEmpty(t, bi["sources"][2].Pin)
+  assert.Equal(t, binfotypes.SourceTypeDockerImage, sources[2].Type)
+  assert.Equal(t, "docker.io/library/busybox:latest", sources[2].Ref)
+  assert.NotEmpty(t, sources[2].Pin)

-  assert.Equal(t, exptypes.BuildInfoTypeHTTP, bi["sources"][3].Type)
-  assert.Equal(t, "https://raw.githubusercontent.com/moby/moby/master/README.md", bi["sources"][3].Ref)
-  assert.Equal(t, "sha256:419455202b0ef97e480d7f8199b26a721a417818bc0e2d106975f74323f25e6c", bi["sources"][3].Pin)
+  assert.Equal(t, binfotypes.SourceTypeHTTP, sources[3].Type)
+  assert.Equal(t, "https://raw.githubusercontent.com/moby/moby/master/README.md", sources[3].Ref)
+  assert.Equal(t, "sha256:419455202b0ef97e480d7f8199b26a721a417818bc0e2d106975f74323f25e6c", sources[3].Pin)
 }

 func testShmSize(t *testing.T, sb integration.Sandbox) {

@@ -22,7 +22,7 @@ import (
 type ResolveOpFunc func(Vertex, Builder) (Op, error)

 type Builder interface {
-  Build(ctx context.Context, e Edge) (CachedResult, BuildInfo, error)
+  Build(ctx context.Context, e Edge) (CachedResult, BuildSources, error)
   InContext(ctx context.Context, f func(ctx context.Context, g session.Group) error) error
   EachValue(ctx context.Context, key string, fn func(interface{}) error) error
 }

@@ -197,7 +197,7 @@ type subBuilder struct {
   exporters []ExportableCacheKey
 }

-func (sb *subBuilder) Build(ctx context.Context, e Edge) (CachedResult, BuildInfo, error) {
+func (sb *subBuilder) Build(ctx context.Context, e Edge) (CachedResult, BuildSources, error) {
   // TODO(@crazy-max): Handle BuildInfo from subbuild
   res, err := sb.solver.subBuild(ctx, e, sb.vtx)
   if err != nil {

@@ -496,7 +496,7 @@ func (jl *Solver) deleteIfUnreferenced(k digest.Digest, st *state) {
   }
 }

-func (j *Job) Build(ctx context.Context, e Edge) (CachedResult, BuildInfo, error) {
+func (j *Job) Build(ctx context.Context, e Edge) (CachedResult, BuildSources, error) {
   if span := trace.SpanFromContext(ctx); span.SpanContext().IsValid() {
     j.span = span
   }

@@ -514,25 +514,25 @@ func (j *Job) Build(ctx context.Context, e Edge) (CachedResult, BuildInfo, error

   j.list.mu.Lock()
   defer j.list.mu.Unlock()
-  return res, j.walkBuildInfo(ctx, e, make(BuildInfo)), nil
+  return res, j.walkBuildSources(ctx, e, make(BuildSources)), nil
 }

-func (j *Job) walkBuildInfo(ctx context.Context, e Edge, bi BuildInfo) BuildInfo {
+func (j *Job) walkBuildSources(ctx context.Context, e Edge, bsrc BuildSources) BuildSources {
   for _, inp := range e.Vertex.Inputs() {
     if st, ok := j.list.actives[inp.Vertex.Digest()]; ok {
       st.mu.Lock()
       for _, cacheRes := range st.op.cacheRes {
-        for key, val := range cacheRes.BuildInfo {
-          if _, ok := bi[key]; !ok {
-            bi[key] = val
+        for key, val := range cacheRes.BuildSources {
+          if _, ok := bsrc[key]; !ok {
+            bsrc[key] = val
           }
         }
       }
       st.mu.Unlock()
-      bi = j.walkBuildInfo(ctx, inp, bi)
+      bsrc = j.walkBuildSources(ctx, inp, bsrc)
     }
   }
-  return bi
+  return bsrc
 }

 func (j *Job) Discard() error {

@@ -62,7 +62,7 @@ func (b *llbBridge) Warn(ctx context.Context, dgst digest.Digest, msg string, op
   })
 }

-func (b *llbBridge) loadResult(ctx context.Context, def *pb.Definition, cacheImports []gw.CacheOptionsEntry) (solver.CachedResult, solver.BuildInfo, error) {
+func (b *llbBridge) loadResult(ctx context.Context, def *pb.Definition, cacheImports []gw.CacheOptionsEntry) (solver.CachedResult, solver.BuildSources, error) {
   w, err := b.resolveWorker()
   if err != nil {
     return nil, nil, err

@@ -162,13 +162,13 @@ func (b *llbBridge) Solve(ctx context.Context, req frontend.SolveRequest, sid st
 }

 type resultProxy struct {
-  cb         func(context.Context) (solver.CachedResult, solver.BuildInfo, error)
+  cb         func(context.Context) (solver.CachedResult, solver.BuildSources, error)
   def        *pb.Definition
   g          flightcontrol.Group
   mu         sync.Mutex
   released   bool
   v          solver.CachedResult
-  bi         solver.BuildInfo
+  bsrc       solver.BuildSources
   err        error
   errResults []solver.Result
 }

@@ -177,8 +177,8 @@ func newResultProxy(b *llbBridge, req frontend.SolveRequest) *resultProxy {
   rp := &resultProxy{
     def: req.Definition,
   }
-  rp.cb = func(ctx context.Context) (solver.CachedResult, solver.BuildInfo, error) {
-    res, bi, err := b.loadResult(ctx, req.Definition, req.CacheImports)
+  rp.cb = func(ctx context.Context) (solver.CachedResult, solver.BuildSources, error) {
+    res, bsrc, err := b.loadResult(ctx, req.Definition, req.CacheImports)
     var ee *llberrdefs.ExecError
     if errors.As(err, &ee) {
       ee.EachRef(func(res solver.Result) error {

@@ -188,7 +188,7 @@ func newResultProxy(b *llbBridge, req frontend.SolveRequest) *resultProxy {
       // acquire ownership so ExecError finalizer doesn't attempt to release as well
       ee.OwnerBorrowed = true
     }
-    return res, bi, err
+    return res, bsrc, err
   }
   return rp
 }

@@ -197,8 +197,8 @@ func (rp *resultProxy) Definition() *pb.Definition {
   return rp.def
 }

-func (rp *resultProxy) BuildInfo() solver.BuildInfo {
-  return rp.bi
+func (rp *resultProxy) BuildSources() solver.BuildSources {
+  return rp.bsrc
 }

 func (rp *resultProxy) Release(ctx context.Context) (err error) {

@@ -259,7 +259,7 @@ func (rp *resultProxy) Result(ctx context.Context) (res solver.CachedResult, err
     return rp.v, rp.err
   }
   rp.mu.Unlock()
-  v, bi, err := rp.cb(ctx)
+  v, bsrc, err := rp.cb(ctx)
   if err != nil {
     select {
     case <-ctx.Done():

@@ -278,7 +278,7 @@ func (rp *resultProxy) Result(ctx context.Context) (res solver.CachedResult, err
     return nil, errors.Errorf("evaluating released result")
   }
   rp.v = v
-  rp.bi = bi
+  rp.bsrc = bsrc
   rp.err = err
   rp.mu.Unlock()
   return v, err

@@ -78,16 +78,16 @@ func (s *sourceOp) CacheMap(ctx context.Context, g session.Group, index int) (*s
     dgst = digest.Digest("random:" + strings.TrimPrefix(dgst.String(), dgst.Algorithm().String()+":"))
   }

-  var buildInfo map[string]string
+  var buildSources map[string]string
   if !strings.HasPrefix(s.op.Source.GetIdentifier(), "local://") {
-    buildInfo = map[string]string{s.op.Source.GetIdentifier(): pin}
+    buildSources = map[string]string{s.op.Source.GetIdentifier(): pin}
   }

   return &solver.CacheMap{
     // TODO: add os/arch
     Digest: dgst,
     Opts:   cacheOpts,
-    BuildInfo: buildInfo,
+    BuildSources: buildSources,
   }, done, nil
 }

@@ -176,7 +176,7 @@ func (s *Solver) Solve(ctx context.Context, id string, sessionID string, req fro
     }
     inp.Ref = workerRef.ImmutableRef

-    dtbi, err := buildinfo.Merge(ctx, res.BuildInfo(), inp.Metadata[exptypes.ExporterImageConfigKey])
+    dtbi, err := buildinfo.Merge(ctx, res.BuildSources(), inp.Metadata[exptypes.ExporterImageConfigKey])
     if err != nil {
       return nil, err
     }

@@ -208,7 +208,7 @@ func (s *Solver) Solve(ctx context.Context, id string, sessionID string, req fro
     }
     m[k] = workerRef.ImmutableRef

-    dtbi, err := buildinfo.Merge(ctx, res.BuildInfo(), inp.Metadata[fmt.Sprintf("%s/%s", exptypes.ExporterImageConfigKey, k)])
+    dtbi, err := buildinfo.Merge(ctx, res.BuildSources(), inp.Metadata[fmt.Sprintf("%s/%s", exptypes.ExporterImageConfigKey, k)])
     if err != nil {
       return nil, err
     }

@@ -76,7 +76,7 @@ type ResultProxy interface {
   Result(context.Context) (CachedResult, error)
   Release(context.Context) error
   Definition() *pb.Definition
-  BuildInfo() BuildInfo
+  BuildSources() BuildSources
 }

 // CacheExportMode is the type for setting cache exporting modes

@@ -197,13 +197,13 @@ type CacheMap struct {
   // the cache. Opts should not have any impact on the computed cache key.
   Opts CacheOpts

-  // BuildInfo contains build dependencies that will be set from source
+  // BuildSources contains build dependencies that will be set from source
   // operation.
-  BuildInfo map[string]string
+  BuildSources BuildSources
 }

-// BuildInfo contains solved build dependencies.
-type BuildInfo map[string]string
+// BuildSources contains solved build dependencies.
+type BuildSources map[string]string

 // ExportableCacheKey is a cache key connected with an exporter that can export
 // a chain of cacherecords pointing to that key

@@ -2,136 +2,142 @@ package buildinfo

 import (
   "context"
+  "encoding/base64"
   "encoding/json"
   "sort"

   "github.com/docker/distribution/reference"
-  "github.com/moby/buildkit/exporter/containerimage/exptypes"
   "github.com/moby/buildkit/source"
   binfotypes "github.com/moby/buildkit/util/buildinfo/types"
   "github.com/moby/buildkit/util/urlutil"
   "github.com/pkg/errors"
 )

-const ImageConfigField = "moby.buildkit.buildinfo.v1"
+// Decode decodes a base64 encoded build info.
+func Decode(enc string) (bi binfotypes.BuildInfo, _ error) {
+  dec, err := base64.StdEncoding.DecodeString(enc)
+  if err != nil {
+    return bi, err
+  }
+  err = json.Unmarshal(dec, &bi)
+  return bi, err
+}

-// Merge combines and fixes build info from image config
-// key moby.buildkit.buildinfo.v1.
-func Merge(ctx context.Context, buildInfo map[string]string, imageConfig []byte) ([]byte, error) {
-  icbi, err := imageConfigBuildInfo(imageConfig)
+// Merge combines and fixes build sources from image config
+// key binfotypes.ImageConfigField.
+func Merge(ctx context.Context, buildSources map[string]string, imageConfig []byte) ([]byte, error) {
+  icbi, err := FromImageConfig(imageConfig)
   if err != nil {
     return nil, err
   }

   // Iterate and combine build sources
-  mbis := map[string]exptypes.BuildInfo{}
-  for srcs, di := range buildInfo {
-    src, err := source.FromString(srcs)
+  mbs := map[string]binfotypes.Source{}
+  for buildSource, pin := range buildSources {
+    src, err := source.FromString(buildSource)
     if err != nil {
       return nil, err
     }
-    switch sid := src.(type) {
+    switch sourceID := src.(type) {
     case *source.ImageIdentifier:
-      for idx, bi := range icbi {
+      for i, ics := range icbi.Sources {
         // Use original user input from image config
-        if bi.Type == exptypes.BuildInfoTypeDockerImage && bi.Alias == sid.Reference.String() {
-          if _, ok := mbis[bi.Alias]; !ok {
-            parsed, err := reference.ParseNormalizedNamed(bi.Ref)
+        if ics.Type == binfotypes.SourceTypeDockerImage && ics.Alias == sourceID.Reference.String() {
+          if _, ok := mbs[ics.Alias]; !ok {
+            parsed, err := reference.ParseNormalizedNamed(ics.Ref)
             if err != nil {
-              return nil, errors.Wrapf(err, "failed to parse %s", bi.Ref)
+              return nil, errors.Wrapf(err, "failed to parse %s", ics.Ref)
             }
-            mbis[bi.Alias] = exptypes.BuildInfo{
-              Type: exptypes.BuildInfoTypeDockerImage,
+            mbs[ics.Alias] = binfotypes.Source{
+              Type: binfotypes.SourceTypeDockerImage,
              Ref:  reference.TagNameOnly(parsed).String(),
-              Pin:  di,
+              Pin:  pin,
             }
-            icbi = append(icbi[:idx], icbi[idx+1:]...)
+            icbi.Sources = append(icbi.Sources[:i], icbi.Sources[i+1:]...)
           }
           break
         }
       }
-      if _, ok := mbis[sid.Reference.String()]; !ok {
-        mbis[sid.Reference.String()] = exptypes.BuildInfo{
-          Type: exptypes.BuildInfoTypeDockerImage,
-          Ref:  sid.Reference.String(),
-          Pin:  di,
+      if _, ok := mbs[sourceID.Reference.String()]; !ok {
+        mbs[sourceID.Reference.String()] = binfotypes.Source{
+          Type: binfotypes.SourceTypeDockerImage,
+          Ref:  sourceID.Reference.String(),
+          Pin:  pin,
         }
       }
     case *source.GitIdentifier:
-      sref := sid.Remote
-      if len(sid.Ref) > 0 {
-        sref += "#" + sid.Ref
+      sref := sourceID.Remote
+      if len(sourceID.Ref) > 0 {
+        sref += "#" + sourceID.Ref
       }
-      if len(sid.Subdir) > 0 {
-        sref += ":" + sid.Subdir
+      if len(sourceID.Subdir) > 0 {
+        sref += ":" + sourceID.Subdir
       }
-      if _, ok := mbis[sref]; !ok {
-        mbis[sref] = exptypes.BuildInfo{
-          Type: exptypes.BuildInfoTypeGit,
+      if _, ok := mbs[sref]; !ok {
+        mbs[sref] = binfotypes.Source{
+          Type: binfotypes.SourceTypeGit,
           Ref:  urlutil.RedactCredentials(sref),
-          Pin:  di,
+          Pin:  pin,
         }
       }
     case *source.HTTPIdentifier:
-      if _, ok := mbis[sid.URL]; !ok {
-        mbis[sid.URL] = exptypes.BuildInfo{
-          Type: exptypes.BuildInfoTypeHTTP,
-          Ref:  urlutil.RedactCredentials(sid.URL),
-          Pin:  di,
+      if _, ok := mbs[sourceID.URL]; !ok {
+        mbs[sourceID.URL] = binfotypes.Source{
+          Type: binfotypes.SourceTypeHTTP,
+          Ref:  urlutil.RedactCredentials(sourceID.URL),
+          Pin:  pin,
         }
       }
     }
   }

   // Leftovers build deps in image config. Mostly duplicated ones we
-  // don't need but there is an edge case if no instruction except source's
-  // one is defined (eg. FROM ...) that can be valid so take it into account.
-  for _, bi := range icbi {
-    if bi.Type != exptypes.BuildInfoTypeDockerImage {
+  // don't need but there is an edge case if no instruction except sources
+  // one is defined (e.g. FROM ...) that can be valid so take it into account.
+  for _, ics := range icbi.Sources {
+    if ics.Type != binfotypes.SourceTypeDockerImage {
       continue
     }
-    if _, ok := mbis[bi.Alias]; !ok {
-      parsed, err := reference.ParseNormalizedNamed(bi.Ref)
+    if _, ok := mbs[ics.Alias]; !ok {
+      parsed, err := reference.ParseNormalizedNamed(ics.Ref)
       if err != nil {
-        return nil, errors.Wrapf(err, "failed to parse %s", bi.Ref)
+        return nil, errors.Wrapf(err, "failed to parse %s", ics.Ref)
       }
-      mbis[bi.Alias] = exptypes.BuildInfo{
-        Type: exptypes.BuildInfoTypeDockerImage,
+      mbs[ics.Alias] = binfotypes.Source{
+        Type: binfotypes.SourceTypeDockerImage,
         Ref:  reference.TagNameOnly(parsed).String(),
-        Pin:  bi.Pin,
+        Pin:  ics.Pin,
       }
     }
   }

-  bis := make([]exptypes.BuildInfo, 0, len(mbis))
-  for _, bi := range mbis {
-    bis = append(bis, bi)
+  srcs := make([]binfotypes.Source, 0, len(mbs))
+  for _, bs := range mbs {
+    srcs = append(srcs, bs)
   }
-  sort.Slice(bis, func(i, j int) bool {
-    return bis[i].Ref < bis[j].Ref
+  sort.Slice(srcs, func(i, j int) bool {
+    return srcs[i].Ref < srcs[j].Ref
   })

-  return json.Marshal(map[string][]exptypes.BuildInfo{
-    "sources": bis,
+  return json.Marshal(binfotypes.BuildInfo{
+    Sources: srcs,
   })
 }

-// imageConfigBuildInfo returns build dependencies from image config
-func imageConfigBuildInfo(imageConfig []byte) ([]exptypes.BuildInfo, error) {
+// FromImageConfig returns build dependencies from image config.
+func FromImageConfig(imageConfig []byte) (bi binfotypes.BuildInfo, _ error) {
   if len(imageConfig) == 0 {
-    return nil, nil
+    return bi, nil
   }
-  var config struct {
-    BuildInfo []byte `json:"moby.buildkit.buildinfo.v1,omitempty"`
-  }
+  var config binfotypes.ImageConfig
   if err := json.Unmarshal(imageConfig, &config); err != nil {
-    return nil, errors.Wrap(err, "failed to unmarshal buildinfo from config")
+    return bi, errors.Wrap(err, "failed to unmarshal buildinfo from image config")
   }
   if len(config.BuildInfo) == 0 {
-    return nil, nil
+    return bi, nil
   }
-  var bi []exptypes.BuildInfo
   if err := json.Unmarshal(config.BuildInfo, &bi); err != nil {
-    return nil, errors.Wrapf(err, "failed to unmarshal %s", ImageConfigField)
+    return bi, errors.Wrap(err, "failed to unmarshal buildinfo")
   }
   return bi, nil
 }

@@ -0,0 +1,105 @@
+package buildinfo
+
+import (
+  "context"
+  "encoding/base64"
+  "encoding/json"
+  "testing"
+
+  binfotypes "github.com/moby/buildkit/util/buildinfo/types"
+  "github.com/stretchr/testify/assert"
+  "github.com/stretchr/testify/require"
+)
+
+func TestDecode(t *testing.T) {
+  bi, err := Decode("eyJzb3VyY2VzIjpbeyJ0eXBlIjoiaW1hZ2UiLCJyZWYiOiJkb2NrZXIuaW8vZG9ja2VyL2J1aWxkeC1iaW46MC42LjFAc2hhMjU2OmE2NTJjZWQ0YTQxNDE5NzdjN2RhYWVkMGEwNzRkY2Q5ODQ0YTc4ZDdkMjYxNTQ2NWIxMmY0MzNhZTZkZDI5ZjAiLCJwaW4iOiJzaGEyNTY6YTY1MmNlZDRhNDE0MTk3N2M3ZGFhZWQwYTA3NGRjZDk4NDRhNzhkN2QyNjE1NDY1YjEyZjQzM2FlNmRkMjlmMCJ9LHsidHlwZSI6ImltYWdlIiwicmVmIjoiZG9ja2VyLmlvL2xpYnJhcnkvYWxwaW5lOjMuMTMiLCJwaW4iOiJzaGEyNTY6MWQzMGQxYmEzY2I5MDk2MjA2N2U5YjI5NDkxZmJkNTY5OTc5NzlkNTQzNzZmMjNmMDE0NDhiNWM1Y2Q4YjQ2MiJ9LHsidHlwZSI6ImltYWdlIiwicmVmIjoiZG9ja2VyLmlvL21vYnkvYnVpbGRraXQ6djAuOS4wIiwicGluIjoic2hhMjU2OjhkYzY2OGU3ZjY2ZGIxYzA0NGFhZGJlZDMwNjAyMDc0MzUxNmE5NDg0ODc5M2UwZjgxZjk0YTA4N2VlNzhjYWIifSx7InR5cGUiOiJpbWFnZSIsInJlZiI6ImRvY2tlci5pby90b25pc3RpaWdpL3h4QHNoYTI1NjoyMWE2MWJlNDc0NGY2NTMxY2I1ZjMzYjBlNmY0MGVkZTQxZmEzYTFiOGM4MmQ1OTQ2MTc4ZjgwY2M4NGJmYzA0IiwicGluIjoic2hhMjU2OjIxYTYxYmU0NzQ0ZjY1MzFjYjVmMzNiMGU2ZjQwZWRlNDFmYTNhMWI4YzgyZDU5NDYxNzhmODBjYzg0YmZjMDQifSx7InR5cGUiOiJnaXQiLCJyZWYiOiJodHRwczovL2dpdGh1Yi5jb20vY3JhenktbWF4L2J1aWxka2l0LWJ1aWxkc291cmNlcy10ZXN0LmdpdCNtYXN0ZXIiLCJwaW4iOiIyNTlhNWFhNWFhNWJiMzU2MmQxMmNjNjMxZmUzOTlmNDc4ODY0MmMxIn0seyJ0eXBlIjoiaHR0cCIsInJlZiI6Imh0dHBzOi8vcmF3LmdpdGh1YnVzZXJjb250ZW50LmNvbS9tb2J5L21vYnkvbWFzdGVyL1JFQURNRS5tZCIsInBpbiI6InNoYTI1Njo0MTk0NTUyMDJiMGVmOTdlNDgwZDdmODE5OWIyNmE3MjFhNDE3ODE4YmMwZTJkMTA2OTc1Zjc0MzIzZjI1ZTZjIn1dfQ==")
+  require.NoError(t, err)
+  assert.Equal(t, 6, len(bi.Sources))
+}
+
+func TestMerge(t *testing.T) {
+  buildSourcesLLB := map[string]string{
+    "docker-image://docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0": "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
+    "docker-image://docker.io/library/alpine:3.13@sha256:1d30d1ba3cb90962067e9b29491fbd56997979d54376f23f01448b5c5cd8b462": "sha256:1d30d1ba3cb90962067e9b29491fbd56997979d54376f23f01448b5c5cd8b462",
+    "docker-image://docker.io/moby/buildkit:v0.9.0@sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab": "sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab",
+    "docker-image://docker.io/tonistiigi/xx@sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04": "sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
+    "git://https://github.com/crazy-max/buildkit-buildsources-test.git#master": "259a5aa5aa5bb3562d12cc631fe399f4788642c1",
+    "https://raw.githubusercontent.com/moby/moby/master/README.md": "sha256:419455202b0ef97e480d7f8199b26a721a417818bc0e2d106975f74323f25e6c",
+  }
+
+  buildInfoImageConfig, err := json.Marshal(binfotypes.BuildInfo{
+    Sources: []binfotypes.Source{
+      {
+        Type:  binfotypes.SourceTypeDockerImage,
+        Ref:   "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
+        Alias: "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
+        Pin:   "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
+      },
+      {
+        Type:  binfotypes.SourceTypeDockerImage,
+        Ref:   "docker.io/library/alpine:3.13",
+        Alias: "docker.io/library/alpine:3.13@sha256:1d30d1ba3cb90962067e9b29491fbd56997979d54376f23f01448b5c5cd8b462",
+        Pin:   "sha256:1d30d1ba3cb90962067e9b29491fbd56997979d54376f23f01448b5c5cd8b462",
+      },
+      {
+        Type:  binfotypes.SourceTypeDockerImage,
+        Ref:   "docker.io/moby/buildkit:v0.9.0",
+        Alias: "docker.io/moby/buildkit:v0.9.0@sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab",
+        Pin:   "sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab",
+      },
+      {
+        Type:  binfotypes.SourceTypeDockerImage,
+        Ref:   "docker.io/tonistiigi/xx@sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
+        Alias: "docker.io/tonistiigi/xx@sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
+        Pin:   "sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
+      },
+    },
+  })
+  require.NoError(t, err)
+
+  bic, err := json.Marshal(binfotypes.ImageConfig{
+    BuildInfo: buildInfoImageConfig,
+  })
+  require.NoError(t, err)
+
+  ret, err := Merge(context.Background(), buildSourcesLLB, bic)
+  require.NoError(t, err)
+
+  dec, err := Decode(base64.StdEncoding.EncodeToString(ret))
+  require.NoError(t, err)
+
+  assert.Equal(t, binfotypes.BuildInfo{
+    Sources: []binfotypes.Source{
+      {
+        Type: binfotypes.SourceTypeDockerImage,
+        Ref:  "docker.io/docker/buildx-bin:0.6.1@sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
+        Pin:  "sha256:a652ced4a4141977c7daaed0a074dcd9844a78d7d2615465b12f433ae6dd29f0",
+      },
+      {
+        Type: binfotypes.SourceTypeDockerImage,
+        Ref:  "docker.io/library/alpine:3.13",
+        Pin:  "sha256:1d30d1ba3cb90962067e9b29491fbd56997979d54376f23f01448b5c5cd8b462",
+      },
+      {
+        Type: binfotypes.SourceTypeDockerImage,
+        Ref:  "docker.io/moby/buildkit:v0.9.0",
+        Pin:  "sha256:8dc668e7f66db1c044aadbed306020743516a94848793e0f81f94a087ee78cab",
+      },
+      {
+        Type: binfotypes.SourceTypeDockerImage,
+        Ref:  "docker.io/tonistiigi/xx@sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
+        Pin:  "sha256:21a61be4744f6531cb5f33b0e6f40ede41fa3a1b8c82d5946178f80cc84bfc04",
+      },
+      {
+        Type: binfotypes.SourceTypeGit,
+        Ref:  "https://github.com/crazy-max/buildkit-buildsources-test.git#master",
+        Pin:  "259a5aa5aa5bb3562d12cc631fe399f4788642c1",
+      },
+      {
+        Type: binfotypes.SourceTypeHTTP,
+        Ref:  "https://raw.githubusercontent.com/moby/moby/master/README.md",
+        Pin:  "sha256:419455202b0ef97e480d7f8199b26a721a417818bc0e2d106975f74323f25e6c",
+      },
+    },
+  }, dec)
+}

@@ -0,0 +1,46 @@
+package binfotypes
+
+import (
+  srctypes "github.com/moby/buildkit/source/types"
+)
+
+// ImageConfigField defines the key of build dependencies.
+const ImageConfigField = "moby.buildkit.buildinfo.v1"
+
+// ImageConfig defines the structure of build dependencies
+// inside image config.
+type ImageConfig struct {
+  BuildInfo []byte `json:"moby.buildkit.buildinfo.v1,omitempty"`
+}
+
+// BuildInfo defines the main structure added to image config as
+// ImageConfigField key and returned in solver ExporterResponse as
+// exptypes.ExporterBuildInfo key.
+type BuildInfo struct {
+  // Sources defines build dependencies.
+  Sources []Source `json:"sources,omitempty"`
+}
+
+// Source defines a build dependency.
+type Source struct {
+  // Type defines the SourceType source type (docker-image, git, http).
+  Type SourceType `json:"type,omitempty"`
+  // Ref is the reference of the source.
+  Ref string `json:"ref,omitempty"`
+  // Alias is a special field used to match with the actual source ref
+  // because frontend might have already transformed a string user typed
+  // before generating LLB.
+  Alias string `json:"alias,omitempty"`
+  // Pin is the source digest.
+  Pin string `json:"pin,omitempty"`
+}
+
+// SourceType contains source type.
+type SourceType string
+
+// List of source types.
+const (
+  SourceTypeDockerImage SourceType = srctypes.DockerImageScheme
+  SourceTypeGit         SourceType = srctypes.GitScheme
+  SourceTypeHTTP        SourceType = srctypes.HTTPScheme
+)