solver: improve multi-key root caching

Signed-off-by: Tonis Tiigi <tonistiigi@gmail.com>
docker-18.09
Author: Tonis Tiigi, 2018-07-23 14:27:35 -07:00
Parent: 77b648bf8d
Commit: 97ea41ee6a
7 changed files with 140 additions and 32 deletions
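
The commit replaces the single-key accessor on CachedResult with a slice, so an operation whose cache maps produce several root keys keeps all of them attached to its result. Condensed from the interface hunk at the end of this commit (a summary of the change, not additional API):

```go
// CachedResult now carries every root cache key of the result instead of one.
type CachedResult interface {
	Result
	// Before this commit: CacheKey() ExportableCacheKey
	CacheKeys() []ExportableCacheKey
}
```

Because execOp attaches the same merged exporter to every key (see the execOp hunk below), callers that only need one export chain can keep using index 0.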


@@ -144,7 +144,9 @@ func (e *edge) commitOptions() ([]*CacheKey, []CachedResult) {
inputs := make([][]CacheKeyWithSelector, len(e.deps))
results := make([]CachedResult, len(e.deps))
for i, dep := range e.deps {
- inputs[i] = append(inputs[i], CacheKeyWithSelector{CacheKey: dep.result.CacheKey(), Selector: e.cacheMap.Deps[i].Selector})
+ for _, k := range dep.result.CacheKeys() {
+ inputs[i] = append(inputs[i], CacheKeyWithSelector{CacheKey: k, Selector: e.cacheMap.Deps[i].Selector})
+ }
if dep.slowCacheKey != nil {
inputs[i] = append(inputs[i], CacheKeyWithSelector{CacheKey: *dep.slowCacheKey})
}
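
The expansion above repeats in several places in this commit (currentIndexKey and recalcCurrentState below, and the scheduler's mergeTo): every cache key of a dependency result is paired with the selector configured for that dependency slot. A hypothetical helper capturing the pattern — keysWithSelector is not part of the commit, and it assumes the solver package's CachedResult and CacheKeyWithSelector types plus the go-digest package:

```go
// keysWithSelector is an illustrative sketch only; the commit inlines this
// loop at each call site. It pairs every root cache key of a dependency
// result with the selector of that dependency slot.
func keysWithSelector(res CachedResult, sel digest.Digest) []CacheKeyWithSelector {
	out := make([]CacheKeyWithSelector, 0, len(res.CacheKeys()))
	for _, k := range res.CacheKeys() {
		out = append(out, CacheKeyWithSelector{CacheKey: k, Selector: sel})
	}
	return out
}
```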
@@ -245,7 +247,9 @@ func (e *edge) currentIndexKey() *CacheKey {
keys[i] = append(keys[i], CacheKeyWithSelector{Selector: e.cacheMap.Deps[i].Selector, CacheKey: k})
}
if d.result != nil {
- keys[i] = append(keys[i], CacheKeyWithSelector{Selector: e.cacheMap.Deps[i].Selector, CacheKey: d.result.CacheKey()})
+ for _, rk := range d.result.CacheKeys() {
+ keys[i] = append(keys[i], CacheKeyWithSelector{Selector: e.cacheMap.Deps[i].Selector, CacheKey: rk})
+ }
if d.slowCacheKey != nil {
keys[i] = append(keys[i], CacheKeyWithSelector{CacheKey: ExportableCacheKey{CacheKey: d.slowCacheKey.CacheKey, Exporter: &exporter{k: d.slowCacheKey.CacheKey}}})
}
@@ -461,11 +465,14 @@ func (e *edge) processUpdate(upt pipe.Receiver) (depChanged bool) {
k := NewCacheKey(upt.Status().Value.(digest.Digest), -1)
dep.slowCacheKey = &ExportableCacheKey{CacheKey: k, Exporter: &exporter{k: k}}
slowKeyExp := CacheKeyWithSelector{CacheKey: *dep.slowCacheKey}
- defKeyExp := CacheKeyWithSelector{CacheKey: dep.result.CacheKey(), Selector: e.cacheMap.Deps[i].Selector}
+ defKeys := make([]CacheKeyWithSelector, 0, len(dep.result.CacheKeys()))
+ for _, dk := range dep.result.CacheKeys() {
+ defKeys = append(defKeys, CacheKeyWithSelector{CacheKey: dk, Selector: e.cacheMap.Deps[i].Selector})
+ }
dep.slowCacheFoundKey = e.probeCache(dep, []CacheKeyWithSelector{slowKeyExp})
// connect def key to slow key
- e.op.Cache().Query([]CacheKeyWithSelector{defKeyExp, slowKeyExp}, dep.index, e.cacheMap.Digest, e.edge.Index)
+ e.op.Cache().Query(append(defKeys, slowKeyExp), dep.index, e.cacheMap.Digest, e.edge.Index)
dep.slowCacheComplete = true
e.keysDidChange = true
@@ -510,7 +517,9 @@ func (e *edge) recalcCurrentState() {
mergedKey.deps = make([][]CacheKeyWithSelector, len(e.deps))
for i, dep := range e.deps {
if dep.result != nil {
- mergedKey.deps[i] = append(mergedKey.deps[i], CacheKeyWithSelector{Selector: e.cacheMap.Deps[i].Selector, CacheKey: dep.result.CacheKey()})
+ for _, dk := range dep.result.CacheKeys() {
+ mergedKey.deps[i] = append(mergedKey.deps[i], CacheKeyWithSelector{Selector: e.cacheMap.Deps[i].Selector, CacheKey: dk})
+ }
if dep.slowCacheKey != nil {
mergedKey.deps[i] = append(mergedKey.deps[i], CacheKeyWithSelector{CacheKey: *dep.slowCacheKey})
}
@@ -789,7 +798,7 @@ func (e *edge) loadCache(ctx context.Context) (interface{}, error) {
return nil, err
}
- return NewCachedResult(res, ExportableCacheKey{CacheKey: rec.key, Exporter: &exporter{k: rec.key, record: rec, edge: e}}), nil
+ return NewCachedResult(res, []ExportableCacheKey{{CacheKey: rec.key, Exporter: &exporter{k: rec.key, record: rec, edge: e}}}), nil
}
// execOp creates a request to execute the vertex operation
@@ -834,12 +843,15 @@ func (e *edge) execOp(ctx context.Context) (interface{}, error) {
exporters = append(exporters, exps...)
}
- ck := &ExportableCacheKey{
- CacheKey: cacheKeys[0],
- Exporter: &mergedExporter{exporters: exporters},
- }
+ ek := make([]ExportableCacheKey, 0, len(cacheKeys))
+ for _, ck := range cacheKeys {
+ ek = append(ek, ExportableCacheKey{
+ CacheKey: ck,
+ Exporter: &mergedExporter{exporters: exporters},
+ })
+ }
- return NewCachedResult(res, *ck), nil
+ return NewCachedResult(res, ek), nil
}
func toResultSlice(cres []CachedResult) (out []Result) {


@@ -161,7 +161,7 @@ func (sb *subBuilder) Build(ctx context.Context, e Edge) (CachedResult, error) {
return nil, err
}
sb.mu.Lock()
- sb.exporters = append(sb.exporters, res.CacheKey())
+ sb.exporters = append(sb.exporters, res.CacheKeys()[0]) // all keys already have full export chain
sb.mu.Unlock()
return res, nil
}


@@ -125,7 +125,8 @@ func (s *Solver) Solve(ctx context.Context, id string, req frontend.SolveRequest
if err := j.Call(ctx, "exporting cache", func(ctx context.Context) error {
prepareDone := oneOffProgress(ctx, "preparing build cache for export")
if err := res.EachRef(func(res solver.CachedResult) error {
- _, err := res.CacheKey().Exporter.ExportTo(ctx, e, solver.CacheExportOpt{
+ // all keys have same export chain so exporting others is not needed
+ _, err := res.CacheKeys()[0].Exporter.ExportTo(ctx, e, solver.CacheExportOpt{
Convert: workerRefConverter,
Mode: exp.CacheExportMode,
})
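
The comment above leans on the execOp change earlier in this commit: every ExportableCacheKey returned by CacheKeys() is wrapped with the same mergedExporter, so walking the exporter of any one key already covers the full chain. A caller-side sketch under that assumption, with target and opt standing in for whatever exporter target and options the caller passes:

```go
// Export once via the first key; the remaining keys share the same merged
// exporter, so exporting them would only repeat the same records.
keys := res.CacheKeys()
if len(keys) > 0 {
	if _, err := keys[0].Exporter.ExportTo(ctx, target, opt); err != nil {
		return err
	}
}
```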


@@ -58,16 +58,16 @@ func (r *splitResult) Release(ctx context.Context) error {
}
// NewCachedResult combines a result and cache key into cached result
- func NewCachedResult(res Result, k ExportableCacheKey) CachedResult {
+ func NewCachedResult(res Result, k []ExportableCacheKey) CachedResult {
return &cachedResult{res, k}
}
type cachedResult struct {
Result
- k ExportableCacheKey
+ k []ExportableCacheKey
}
- func (cr *cachedResult) CacheKey() ExportableCacheKey {
+ func (cr *cachedResult) CacheKeys() []ExportableCacheKey {
return cr.k
}
@@ -95,8 +95,8 @@ func (r *clonedCachedResult) ID() string {
return r.Result.ID()
}
- func (cr *clonedCachedResult) CacheKey() ExportableCacheKey {
- return cr.cr.CacheKey()
+ func (cr *clonedCachedResult) CacheKeys() []ExportableCacheKey {
+ return cr.cr.CacheKeys()
}
type SharedCachedResult struct {
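
A minimal usage sketch of the updated constructor; dgst, res, and the exporter wiring are placeholders for values the solver derives elsewhere:

```go
// Build one exportable key and attach it to a result. Callers that hold
// several root keys pass them all, as execOp now does.
k := NewCacheKey(dgst, 0)
ck := ExportableCacheKey{CacheKey: k, Exporter: &exporter{k: k}}
cr := NewCachedResult(res, []ExportableCacheKey{ck})
_ = cr.CacheKeys() // every root key stays attached to the cached result
```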


@@ -306,7 +306,9 @@ func (s *scheduler) mergeTo(target, src *edge) bool {
target.secondaryExporters = append(target.secondaryExporters, expDep{i, CacheKeyWithSelector{CacheKey: *d.slowCacheKey}})
}
if d.result != nil {
- target.secondaryExporters = append(target.secondaryExporters, expDep{i, CacheKeyWithSelector{CacheKey: d.result.CacheKey(), Selector: src.cacheMap.Deps[i].Selector}})
+ for _, dk := range d.result.CacheKeys() {
+ target.secondaryExporters = append(target.secondaryExporters, expDep{i, CacheKeyWithSelector{CacheKey: dk, Selector: src.cacheMap.Deps[i].Selector}})
+ }
}
}


@@ -2131,7 +2131,7 @@ func TestCacheExporting(t *testing.T) {
expTarget := newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
require.NoError(t, err)
expTarget.normalize()
@@ -2162,7 +2162,7 @@ func TestCacheExporting(t *testing.T) {
expTarget = newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
require.NoError(t, err)
expTarget.normalize()
@@ -2219,7 +2219,7 @@ func TestCacheExportingModeMin(t *testing.T) {
expTarget := newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(false))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(false))
require.NoError(t, err)
expTarget.normalize()
@@ -2252,7 +2252,7 @@ func TestCacheExportingModeMin(t *testing.T) {
expTarget = newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(false))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(false))
require.NoError(t, err)
expTarget.normalize()
@@ -2286,7 +2286,7 @@ func TestCacheExportingModeMin(t *testing.T) {
expTarget = newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
require.NoError(t, err)
expTarget.normalize()
@@ -2431,7 +2431,7 @@ func TestCacheMultipleMaps(t *testing.T) {
expTarget := newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
require.NoError(t, err)
expTarget.normalize()
@@ -2467,7 +2467,7 @@ func TestCacheMultipleMaps(t *testing.T) {
expTarget = newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
require.NoError(t, err)
require.Equal(t, len(expTarget.records), 3)
@@ -2502,13 +2502,106 @@ func TestCacheMultipleMaps(t *testing.T) {
expTarget = newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
require.NoError(t, err)
require.Equal(t, len(expTarget.records), 3)
require.Equal(t, called, true)
}
+ func TestCacheInputMultipleMaps(t *testing.T) {
+ t.Parallel()
+ ctx := context.TODO()
+ cacheManager := newTrackingCacheManager(NewInMemoryCacheManager())
+ l := NewSolver(SolverOpt{
+ ResolveOpFunc: testOpResolver,
+ DefaultCache: cacheManager,
+ })
+ defer l.Close()
+ j0, err := l.NewJob("j0")
+ require.NoError(t, err)
+ defer func() {
+ if j0 != nil {
+ j0.Discard()
+ }
+ }()
+ g0 := Edge{
+ Vertex: vtx(vtxOpt{
+ name: "v0",
+ cacheKeySeed: "seed0",
+ value: "result0",
+ inputs: []Edge{{
+ Vertex: vtx(vtxOpt{
+ name: "v1",
+ cacheKeySeed: "seed1",
+ cacheKeySeeds: []func() string{
+ func() string { return "seed2" },
+ },
+ value: "result1",
+ }),
+ }},
+ }),
+ }
+ res, err := j0.Build(ctx, g0)
+ require.NoError(t, err)
+ require.Equal(t, unwrap(res), "result0")
+ expTarget := newTestExporterTarget()
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ require.NoError(t, err)
+ expTarget.normalize()
+ require.Equal(t, len(expTarget.records), 3)
+ require.NoError(t, j0.Discard())
+ j0 = nil
+ j1, err := l.NewJob("j1")
+ require.NoError(t, err)
+ defer func() {
+ if j1 != nil {
+ j1.Discard()
+ }
+ }()
+ g1 := Edge{
+ Vertex: vtx(vtxOpt{
+ name: "v0",
+ cacheKeySeed: "seed0",
+ value: "result0-no-cache",
+ inputs: []Edge{{
+ Vertex: vtx(vtxOpt{
+ name: "v1",
+ cacheKeySeed: "seed1.changed",
+ cacheKeySeeds: []func() string{
+ func() string { return "seed2" },
+ },
+ value: "result1-no-cache",
+ }),
+ }},
+ }),
+ }
+ res, err = j1.Build(ctx, g1)
+ require.NoError(t, err)
+ require.Equal(t, unwrap(res), "result0")
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ require.NoError(t, err)
+ expTarget.normalize()
+ require.Equal(t, len(expTarget.records), 3)
+ require.NoError(t, j1.Discard())
+ j1 = nil
+ }
func TestCacheExportingPartialSelector(t *testing.T) {
t.Parallel()
ctx := context.TODO()
@@ -2560,7 +2653,7 @@ func TestCacheExportingPartialSelector(t *testing.T) {
expTarget := newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
require.NoError(t, err)
expTarget.normalize()
@@ -2593,7 +2686,7 @@ func TestCacheExportingPartialSelector(t *testing.T) {
expTarget = newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
require.NoError(t, err)
expTarget.normalize()
@@ -2647,7 +2740,7 @@ func TestCacheExportingPartialSelector(t *testing.T) {
expTarget = newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
require.NoError(t, err)
expTarget.normalize()
@@ -2693,7 +2786,7 @@ func TestCacheExportingPartialSelector(t *testing.T) {
expTarget = newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
require.NoError(t, err)
expTarget.normalize()
@@ -2791,7 +2884,7 @@ func TestCacheExportingMergedKey(t *testing.T) {
expTarget := newTestExporterTarget()
- _, err = res.CacheKey().Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
+ _, err = res.CacheKeys()[0].Exporter.ExportTo(ctx, expTarget, testExporterOpts(true))
require.NoError(t, err)
expTarget.normalize()


@@ -50,7 +50,7 @@ type Result interface {
// CachedResult is a result connected with its cache key
type CachedResult interface {
Result
- CacheKey() ExportableCacheKey
+ CacheKeys() []ExportableCacheKey
}
// CacheExportMode is the type for setting cache exporting modes