cache: base code for cache importing

Signed-off-by: Tonis Tiigi <tonistiigi@gmail.com>
docker-18.09
Tonis Tiigi 2017-10-12 17:53:28 -07:00
parent 8e611643f7
commit 1b3f191b37
5 changed files with 336 additions and 94 deletions

129
cache/cacheimport/export.go vendored Normal file
View File

@ -0,0 +1,129 @@
package cacheimport
import (
"bytes"
gocontext "context"
"encoding/json"
"github.com/containerd/containerd/content"
"github.com/containerd/containerd/rootfs"
"github.com/moby/buildkit/cache"
"github.com/moby/buildkit/cache/blobs"
"github.com/moby/buildkit/snapshot"
digest "github.com/opencontainers/go-digest"
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"golang.org/x/net/context"
)
// blobmapper is an optional capability of the snapshotter: it associates a
// snapshot chain ID with the content blob it was created from. The importer
// type-asserts its Snapshotter to this interface when filling blob mappings.
type blobmapper interface {
	GetBlob(ctx gocontext.Context, key string) (digest.Digest, error)
	SetBlob(ctx gocontext.Context, key string, blob digest.Digest) error
}
// CacheRecord is a single exportable cache entry: the solver cache key, the
// snapshot reference holding the result (may be nil for key-only records),
// and a content-based key.
type CacheRecord struct {
	CacheKey   digest.Digest
	Reference  cache.ImmutableRef
	ContentKey digest.Digest
}
// ExporterOpt holds the dependencies needed to construct a CacheExporter.
type ExporterOpt struct {
	Snapshotter  snapshot.Snapshotter
	ContentStore content.Store
	Differ       rootfs.MountDiffer
}
// NewCacheExporter creates a CacheExporter backed by the given dependencies.
func NewCacheExporter(opt ExporterOpt) *CacheExporter {
	ce := &CacheExporter{
		opt: opt,
	}
	return ce
}
// CacheExporter writes build cache records out as an OCI index manifest in
// the content store.
type CacheExporter struct {
	opt ExporterOpt
}
// Export serializes the given cache records into an OCI index and writes it
// to the configured content store.
//
// For every record with a reference, the backing layer blobs are resolved
// via blobs.GetDiffPairs. Ancestor blobs that no record points at directly
// are also listed so the manifest is self-contained; records without a
// reference are emitted with an empty digest so their cache keys survive
// the round trip. Each manifest entry carries the cache key, content key,
// parent blobsum, and uncompressed diff ID as annotations.
func (ce *CacheExporter) Export(ctx context.Context, rec []CacheRecord) error {
	type entry struct {
		CacheRecord
		dgst digest.Digest
	}

	chainByBlob := map[digest.Digest][]blobs.DiffPair{} // blobsum -> chain ending at that blob
	topLevel := map[digest.Digest]struct{}{}            // blobs a record points at directly

	entries := make([]entry, 0, len(rec))
	for _, r := range rec {
		if r.Reference == nil {
			entries = append(entries, entry{CacheRecord: r})
			continue
		}
		dpairs, err := blobs.GetDiffPairs(ctx, ce.opt.Snapshotter, ce.opt.Differ, r.Reference)
		if err != nil {
			return err
		}
		for i := range dpairs {
			// Remember the chain up to and including this blob.
			chainByBlob[dpairs[i].Blobsum] = dpairs[:i+1]
		}
		last := dpairs[len(dpairs)-1].Blobsum
		entries = append(entries, entry{CacheRecord: r, dgst: last})
		topLevel[last] = struct{}{}
	}

	// Ancestor blobs with no record of their own still need entries.
	for b := range chainByBlob {
		if _, ok := topLevel[b]; !ok {
			entries = append(entries, entry{dgst: b})
		}
	}

	var mfst ocispec.Index
	mfst.SchemaVersion = 2

	for _, e := range entries {
		var (
			size   int64
			parent string
			diffID string
		)
		if e.dgst != "" {
			info, err := ce.opt.ContentStore.Info(ctx, e.dgst)
			if err != nil {
				return err
			}
			size = info.Size
			chain := chainByBlob[e.dgst]
			if len(chain) > 1 {
				parent = chain[len(chain)-2].Blobsum.String()
			}
			diffID = chain[len(chain)-1].DiffID.String()
		}
		mfst.Manifests = append(mfst.Manifests, ocispec.Descriptor{
			MediaType: ocispec.MediaTypeImageLayerGzip,
			Size:      size,
			Digest:    e.dgst,
			Annotations: map[string]string{
				"buildkit.cachekey":   e.CacheKey.String(),
				"buildkit.contentkey": e.ContentKey.String(),
				"buildkit.parent":     parent,
				"buildkit.diffid":     diffID,
			},
		})
	}

	dt, err := json.Marshal(mfst)
	if err != nil {
		return errors.Wrap(err, "failed to marshal manifest")
	}
	dgst := digest.FromBytes(dt)
	if err := content.WriteBlob(ctx, ce.opt.ContentStore, dgst.String(), bytes.NewReader(dt), int64(len(dt)), dgst); err != nil {
		return errors.Wrap(err, "error writing manifest blob")
	}
	logrus.Debugf("cache-manifest: %s", dgst)
	return nil
}

View File

@ -1,7 +1,6 @@
package cacheimport
import (
"bytes"
"context"
"encoding/json"
@ -11,110 +10,212 @@ import (
"github.com/moby/buildkit/cache/blobs"
"github.com/moby/buildkit/snapshot"
digest "github.com/opencontainers/go-digest"
"github.com/opencontainers/image-spec/identity"
ocispec "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
)
type CacheRecord struct {
CacheKey digest.Digest
Reference cache.ImmutableRef
ContentKey digest.Digest
}
type Opt struct {
Snapshotter snapshot.Snapshotter
type ImportOpt struct {
ContentStore content.Store
Differ rootfs.MountDiffer
Snapshotter snapshot.Snapshotter
Applier rootfs.Applier
CacheAccessor cache.Accessor
}
func NewCacheExporter(opt Opt) *CacheExporter {
return &CacheExporter{opt: opt}
func NewCacheImporter(opt ImportOpt) *CacheImporter {
return &CacheImporter{opt: opt}
}
type CacheExporter struct {
opt Opt
type CacheImporter struct {
opt ImportOpt
}
func (ce *CacheExporter) Export(ctx context.Context, rec []CacheRecord) error {
allBlobs := map[digest.Digest][]blobs.DiffPair{}
currentBlobs := map[digest.Digest]struct{}{}
type cr struct {
CacheRecord
dgst digest.Digest
}
list := make([]cr, 0, len(rec))
for _, r := range rec {
ref := r.Reference
if ref == nil {
list = append(list, cr{CacheRecord: r})
continue
}
dpairs, err := blobs.GetDiffPairs(ctx, ce.opt.Snapshotter, ce.opt.Differ, ref)
func (ci *CacheImporter) Import(ctx context.Context, dgst digest.Digest) (InstructionCache, error) {
dt, err := content.ReadBlob(ctx, ci.opt.ContentStore, dgst)
if err != nil {
return err
return nil, err
}
for i, dp := range dpairs {
allBlobs[dp.Blobsum] = dpairs[:i]
var mfst ocispec.Index
if err := json.Unmarshal(dt, &mfst); err != nil {
return nil, err
}
dgst := dpairs[len(dpairs)-1].Blobsum
list = append(list, cr{CacheRecord: r, dgst: dgst})
currentBlobs[dgst] = struct{}{}
}
allBlobs := map[digest.Digest]ocispec.Descriptor{}
byCacheKey := map[digest.Digest]ocispec.Descriptor{}
byContentKey := map[digest.Digest][]digest.Digest{}
for b := range allBlobs {
if _, ok := currentBlobs[b]; !ok {
list = append(list, cr{dgst: b})
for _, m := range mfst.Manifests {
if m.Digest != "" {
allBlobs[m.Digest] = m
}
}
mfst := ocispec.Index{}
mfst.SchemaVersion = 2
for _, l := range list {
var size int64
parent := ""
if l.dgst != "" {
info, err := ce.opt.ContentStore.Info(ctx, l.dgst)
if m.Annotations != nil {
if cacheKey := m.Annotations["buildkit.cachekey"]; cacheKey != "" {
cacheKeyDigest, err := digest.Parse(cacheKey)
if err != nil {
return err
return nil, err
}
size = info.Size
parents := allBlobs[l.dgst]
if len(parents) > 0 {
parent = parents[len(parents)-1].Blobsum.String()
}
}
mfst.Manifests = append(mfst.Manifests, ocispec.Descriptor{
MediaType: ocispec.MediaTypeImageLayerGzip,
Size: size,
Digest: l.dgst,
Annotations: map[string]string{
"buildkit.cachekey": l.CacheKey.String(),
"buildkit.contentkey": l.ContentKey.String(),
"buildkit.parent": parent,
},
})
}
dt, err := json.Marshal(mfst)
byCacheKey[cacheKeyDigest] = m
if contentKey := m.Annotations["buildkit.contentkey"]; contentKey != "" {
contentKeyDigest, err := digest.Parse(contentKey)
if err != nil {
return errors.Wrap(err, "failed to marshal manifest")
return nil, err
}
byContentKey[contentKeyDigest] = append(byContentKey[contentKeyDigest], cacheKeyDigest)
}
}
}
}
dgst := digest.FromBytes(dt)
if err := content.WriteBlob(ctx, ce.opt.ContentStore, dgst.String(), bytes.NewReader(dt), int64(len(dt)), dgst); err != nil {
return errors.Wrap(err, "error writing manifest blob")
return &importInfo{
CacheImporter: ci,
byCacheKey: byCacheKey,
byContentKey: byContentKey,
allBlobs: allBlobs,
}, nil
}
logrus.Debugf("cache-manifest: %s", dgst)
// importInfo is the InstructionCache implementation returned by Import. It
// answers lookups from the index maps built while parsing the imported
// manifest.
type importInfo struct {
	*CacheImporter
	byCacheKey   map[digest.Digest]ocispec.Descriptor
	byContentKey map[digest.Digest][]digest.Digest
	allBlobs     map[digest.Digest]ocispec.Descriptor
}
// Probe reports whether the imported manifest contains an entry for the
// given cache key.
func (ii *importInfo) Probe(ctx context.Context, key digest.Digest) (bool, error) {
	if _, found := ii.byCacheKey[key]; found {
		return true, nil
	}
	return false, nil
}
// getChain reconstructs the full layer chain ending at dgst by recursively
// following the "buildkit.parent" annotations of the imported descriptors.
// The returned slice is ordered base-first, with dgst last.
func (ii *importInfo) getChain(dgst digest.Digest) ([]blobs.DiffPair, error) {
	desc, found := ii.allBlobs[dgst]
	if !found {
		return nil, errors.Errorf("blob %s not found in cache", dgst)
	}
	if desc.Annotations == nil {
		return nil, errors.Errorf("missing annotations")
	}

	// Recurse into the parent chain first so ancestors come before dgst.
	var chain []blobs.DiffPair
	if p := desc.Annotations["buildkit.parent"]; p != "" {
		parentDigest, err := digest.Parse(p)
		if err != nil {
			return nil, err
		}
		parentChain, err := ii.getChain(parentDigest)
		if err != nil {
			return nil, err
		}
		chain = parentChain
	}

	diffID, err := digest.Parse(desc.Annotations["buildkit.diffid"])
	if err != nil {
		return nil, err
	}
	return append(chain, blobs.DiffPair{Blobsum: dgst, DiffID: diffID}), nil
}
// Lookup resolves a cache key to an unpacked snapshot reference. It returns
// (nil, nil) when the key is unknown or maps to a key-only record with no
// blob behind it.
func (ii *importInfo) Lookup(ctx context.Context, key digest.Digest) (interface{}, error) {
	desc, found := ii.byCacheKey[key]
	if !found {
		return nil, nil
	}
	if desc.Digest == "" {
		// Key-only record: nothing to materialize.
		return nil, nil
	}
	chain, err := ii.getChain(desc.Digest)
	if err != nil {
		return nil, err
	}
	return ii.fetch(ctx, chain)
}
// Set is a no-op: an imported cache is read-only, so new results are not
// recorded back into it.
func (ii *importInfo) Set(key digest.Digest, ref interface{}) error {
	return nil
}
// SetContentMapping is a no-op: content mappings of an imported cache are
// fixed by the manifest and cannot be extended.
func (ii *importInfo) SetContentMapping(contentKey, key digest.Digest) error {
	return nil
}
// GetContentMapping returns the cache keys recorded for the given content
// key, or (nil, nil) when the content key is unknown.
func (ii *importInfo) GetContentMapping(dgst digest.Digest) ([]digest.Digest, error) {
	keys, found := ii.byContentKey[dgst]
	if !found {
		return nil, nil
	}
	return keys, nil
}
// fetch unpacks the given layer chain and returns an immutable reference to
// the resulting snapshot.
func (ii *importInfo) fetch(ctx context.Context, chain []blobs.DiffPair) (cache.ImmutableRef, error) {
	chainID, err := ii.unpack(ctx, chain)
	if err != nil {
		return nil, err
	}
	// TODO: more descriptive name
	return ii.opt.CacheAccessor.Get(ctx, chainID, cache.WithDescription("imported cache"))
}
// unpack applies the layer chain described by dpairs onto the snapshotter
// and returns the resulting snapshot chain ID.
func (ii *importInfo) unpack(ctx context.Context, dpairs []blobs.DiffPair) (string, error) {
	layers, err := ii.getLayers(ctx, dpairs)
	if err != nil {
		return "", err
	}
	chainID, err := rootfs.ApplyLayers(ctx, layers, ii.opt.Snapshotter, ii.opt.Applier)
	if err != nil {
		return "", err
	}
	// Record the chainID -> blob digest mapping on the snapshotter (see
	// blobmapper) for each layer of the applied chain.
	if err := ii.fillBlobMapping(ctx, layers); err != nil {
		return "", err
	}
	return string(chainID), nil
}
// fillBlobMapping stores, for every prefix of the applied layer chain, the
// mapping from its chain ID to the compressed blob digest of its top layer.
// Requires the snapshotter to implement blobmapper.
func (ii *importInfo) fillBlobMapping(ctx context.Context, layers []rootfs.Layer) error {
	diffs := make([]digest.Digest, 0, len(layers))
	for _, layer := range layers {
		diffs = append(diffs, layer.Diff.Digest)
		chainID := identity.ChainID(diffs)
		if err := ii.opt.Snapshotter.(blobmapper).SetBlob(ctx, string(chainID), layer.Blob.Digest); err != nil {
			return err
		}
	}
	return nil
}
// getLayers converts diff pairs into rootfs.Layer descriptors, looking up
// each compressed blob's size in the content store.
func (ii *importInfo) getLayers(ctx context.Context, dpairs []blobs.DiffPair) ([]rootfs.Layer, error) {
	out := make([]rootfs.Layer, len(dpairs))
	for i, dp := range dpairs {
		info, err := ii.opt.ContentStore.Info(ctx, dp.Blobsum)
		if err != nil {
			return nil, err
		}
		out[i] = rootfs.Layer{
			Diff: ocispec.Descriptor{
				// TODO: derive media type from compressed type
				MediaType: ocispec.MediaTypeImageLayer,
				Digest:    dp.DiffID,
			},
			Blob: ocispec.Descriptor{
				// TODO: derive media type from compressed type
				MediaType: ocispec.MediaTypeImageLayerGzip,
				Digest:    dp.Blobsum,
				Size:      info.Size,
			},
		}
	}
	return out, nil
}
// InstructionCache is the solver-facing interface over an imported cache:
// key probes and lookups, plus content-key to cache-key mappings.
type InstructionCache interface {
	// Probe reports whether an entry exists for the given cache key.
	Probe(ctx context.Context, key digest.Digest) (bool, error)
	// Lookup resolves a cache key to a result, or (nil, nil) when absent.
	Lookup(ctx context.Context, key digest.Digest) (interface{}, error) // TODO: regular ref
	Set(key digest.Digest, ref interface{}) error
	SetContentMapping(contentKey, key digest.Digest) error
	GetContentMapping(dgst digest.Digest) ([]digest.Digest, error)
}

View File

@ -31,6 +31,7 @@ type Opt struct {
Frontends map[string]frontend.Frontend
ImageSource source.Source
CacheExporter *cacheimport.CacheExporter
CacheImporter *cacheimport.CacheImporter
}
type Controller struct { // TODO: ControlService
@ -49,6 +50,7 @@ func NewController(opt Opt) (*Controller, error) {
ImageSource: opt.ImageSource,
Frontends: opt.Frontends,
CacheExporter: opt.CacheExporter,
CacheImporter: opt.CacheImporter,
}),
}
return c, nil

View File

@ -131,12 +131,19 @@ func defaultControllerOpts(root string, pd pullDeps) (*Opt, error) {
frontends["dockerfile.v0"] = dockerfile.NewDockerfileFrontend()
frontends["gateway.v0"] = gateway.NewGatewayFrontend()
ce := cacheimport.NewCacheExporter(cacheimport.Opt{
ce := cacheimport.NewCacheExporter(cacheimport.ExporterOpt{
Snapshotter: snapshotter,
ContentStore: pd.ContentStore,
Differ: pd.Differ,
})
ci := cacheimport.NewCacheImporter(cacheimport.ImportOpt{
Snapshotter: snapshotter,
ContentStore: pd.ContentStore,
Applier: pd.Applier,
CacheAccessor: cm,
})
return &Opt{
Snapshotter: snapshotter,
CacheManager: cm,
@ -147,5 +154,6 @@ func defaultControllerOpts(root string, pd pullDeps) (*Opt, error) {
Frontends: frontends,
ImageSource: is,
CacheExporter: ce,
CacheImporter: ci,
}, nil
}

View File

@ -33,6 +33,7 @@ type LLBOpt struct {
ImageSource source.Source
Frontends map[string]frontend.Frontend // used by nested invocations
CacheExporter *cacheimport.CacheExporter
CacheImporter *cacheimport.CacheImporter
}
func NewLLBSolver(opt LLBOpt) *Solver {
@ -48,7 +49,7 @@ func NewLLBSolver(opt LLBOpt) *Solver {
default:
return nil, nil
}
}, opt.InstructionCache, opt.ImageSource, opt.Worker, opt.CacheManager, opt.Frontends, opt.CacheExporter)
}, opt.InstructionCache, opt.ImageSource, opt.Worker, opt.CacheManager, opt.Frontends, opt.CacheExporter, opt.CacheImporter)
return s
}
@ -89,10 +90,11 @@ type Solver struct {
cm cache.Manager // TODO: remove with immutableRef.New()
frontends map[string]frontend.Frontend
ce *cacheimport.CacheExporter
ci *cacheimport.CacheImporter
}
func New(resolve ResolveOpFunc, cache InstructionCache, imageSource source.Source, worker worker.Worker, cm cache.Manager, f map[string]frontend.Frontend, ce *cacheimport.CacheExporter) *Solver {
return &Solver{resolve: resolve, jobs: newJobList(), cache: cache, imageSource: imageSource, worker: worker, cm: cm, frontends: f, ce: ce}
func New(resolve ResolveOpFunc, cache InstructionCache, imageSource source.Source, worker worker.Worker, cm cache.Manager, f map[string]frontend.Frontend, ce *cacheimport.CacheExporter, ci *cacheimport.CacheImporter) *Solver {
return &Solver{resolve: resolve, jobs: newJobList(), cache: cache, imageSource: imageSource, worker: worker, cm: cm, frontends: f, ce: ce, ci: ci}
}
type SolveRequest struct {