Merge pull request #339 from tonistiigi/dockerfile-copy
dockerfile: copy only files that are used
docker-18.09
commit
fc2b7c558a
|
@ -421,7 +421,7 @@ func (cc *cacheContext) checksum(ctx context.Context, root *iradix.Node, txn *ir
|
|||
v, ok := root.Get(k)
|
||||
|
||||
if !ok {
|
||||
return nil, false, errors.Wrapf(errNotFound, "%s not found", string(k))
|
||||
return nil, false, errors.Wrapf(errNotFound, "%s not found", convertKeyToPath(k))
|
||||
}
|
||||
cr := v.(*CacheRecord)
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@ import (
|
|||
"net/url"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
@ -120,8 +121,10 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,
|
|||
}
|
||||
}
|
||||
|
||||
ctxPaths := map[string]struct{}{}
|
||||
eg, ctx := errgroup.WithContext(ctx)
|
||||
for i, d := range allDispatchStates {
|
||||
reachable := isReachable(target, d)
|
||||
// resolve image config for every stage
|
||||
if d.base == nil {
|
||||
if d.stage.BaseName == emptyImageName {
|
||||
|
@ -136,7 +139,7 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,
|
|||
return err
|
||||
}
|
||||
d.stage.BaseName = reference.TagNameOnly(ref).String()
|
||||
if metaResolver != nil && isReachable(target, d) {
|
||||
if metaResolver != nil && reachable {
|
||||
dgst, dt, err := metaResolver.ResolveImageConfig(ctx, d.stage.BaseName)
|
||||
if err == nil { // handle the error while builder is actually running
|
||||
var img Image
|
||||
|
@ -157,21 +160,47 @@ func Dockerfile2LLB(ctx context.Context, dt []byte, opt ConvertOpt) (*llb.State,
|
|||
})
|
||||
}(i, d)
|
||||
}
|
||||
|
||||
if reachable {
|
||||
for _, cmd := range d.commands {
|
||||
switch c := cmd.Command.(type) {
|
||||
case *instructions.CopyCommand:
|
||||
if c.From != "" {
|
||||
continue
|
||||
}
|
||||
for _, src := range c.Sources() {
|
||||
ctxPaths[path.Join("/", filepath.ToSlash(src))] = struct{}{}
|
||||
}
|
||||
case *instructions.AddCommand:
|
||||
for _, src := range c.Sources() {
|
||||
ctxPaths[path.Join("/", filepath.ToSlash(src))] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err := eg.Wait(); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
buildContext := llb.Local(localNameContext,
|
||||
|
||||
opts := []llb.LocalOption{
|
||||
llb.SessionID(opt.SessionID),
|
||||
llb.ExcludePatterns(opt.Excludes),
|
||||
llb.SharedKeyHint(localNameContext),
|
||||
)
|
||||
}
|
||||
if includePatterns := normalizeContextPaths(ctxPaths); includePatterns != nil {
|
||||
opts = append(opts, llb.IncludePatterns(includePatterns))
|
||||
}
|
||||
buildContext := llb.Local(localNameContext, opts...)
|
||||
if opt.BuildContext != nil {
|
||||
buildContext = *opt.BuildContext
|
||||
}
|
||||
|
||||
for _, d := range allDispatchStates {
|
||||
if !isReachable(target, d) {
|
||||
continue
|
||||
}
|
||||
if d.base != nil {
|
||||
d.state = d.base.state
|
||||
d.image = clone(d.base.image)
|
||||
|
@ -757,3 +786,36 @@ func parseUID(str string) (uint32, error) {
|
|||
}
|
||||
return uint32(uid), nil
|
||||
}
|
||||
|
||||
// normalizeContextPaths converts a set of absolute context paths into a
// sorted, deduplicated list of relative include patterns.
//
// It returns nil when any path is "/" — the whole build context is
// needed, so no include filtering should be applied. Paths nested under
// another path in the set (e.g. "/a/b" when "/a" is present) are
// dropped, and the remainder are rebased onto "." and sorted.
func normalizeContextPaths(paths map[string]struct{}) []string {
	pathSlice := make([]string, 0, len(paths))
	for p := range paths {
		if p == "/" {
			return nil
		}
		pathSlice = append(pathSlice, p)
	}

	// Mark every path that is covered by a shorter prefix in the set.
	// Previously this deleted entries from the caller's map while
	// leaving toDelete unused; recording into toDelete keeps the input
	// map untouched and gives the map its intended role.
	toDelete := map[string]struct{}{}
	for i := range pathSlice {
		for j := range pathSlice {
			if i == j {
				continue
			}
			if strings.HasPrefix(pathSlice[j], pathSlice[i]+"/") {
				toDelete[pathSlice[j]] = struct{}{}
			}
		}
	}

	toSort := make([]string, 0, len(paths))
	for p := range paths {
		if _, ok := toDelete[p]; !ok {
			toSort = append(toSort, path.Join(".", p))
		}
	}
	sort.Strings(toSort)
	return toSort
}
|
||||
|
|
|
@ -39,7 +39,7 @@ github.com/BurntSushi/locker a6e239ea1c69bff1cfdb20c4b73dadf52f784b6a
|
|||
github.com/docker/docker 53a58da551e961b3710bbbdfabbc162c3f5f30f6
|
||||
github.com/pkg/profile 5b67d428864e92711fcbd2f8629456121a56d91f
|
||||
|
||||
github.com/tonistiigi/fsutil 0b8b62deeb18fecebc272676425c1416d1e86fec
|
||||
github.com/tonistiigi/fsutil 93a0fd10b669d389e349ff54c48f13829708c9b0
|
||||
github.com/hashicorp/go-immutable-radix 826af9ccf0feeee615d546d69b11f8e98da8c8f1 git://github.com/tonistiigi/go-immutable-radix.git
|
||||
github.com/hashicorp/golang-lru a0d98a5f288019575c6d1f4bb1573fef2d1fcdc4
|
||||
github.com/mitchellh/hashstructure 2bca23e0e452137f789efbc8610126fd8b94f73b
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
// +build !linux
|
||||
|
||||
package fsutil
|
||||
|
||||
import (
|
||||
"os"
|
||||
"time"
|
||||
)
|
||||
|
||||
func chtimes(path string, un int64) error {
|
||||
mtime := time.Unix(0, un)
|
||||
return os.Chtimes(path, mtime, mtime)
|
||||
}
|
|
@ -1,7 +0,0 @@
|
|||
// +build darwin
|
||||
|
||||
package fsutil
|
||||
|
||||
// chtimes is a no-op on this platform: timestamp metadata is
// intentionally not rewritten, and the call always reports success
// without touching path or un.
func chtimes(path string, un int64) error {
	return nil
}
|
|
@ -3,9 +3,6 @@
|
|||
package fsutil
|
||||
|
||||
import (
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
|
@ -13,11 +10,6 @@ func rewriteMetadata(p string, stat *Stat) error {
|
|||
return chtimes(p, stat.ModTime)
|
||||
}
|
||||
|
||||
func chtimes(path string, un int64) error {
|
||||
mtime := time.Unix(0, un)
|
||||
return os.Chtimes(path, mtime, mtime)
|
||||
}
|
||||
|
||||
// handleTarTypeBlockCharFifo is an OS-specific helper function used by
|
||||
// createTarFile to handle the following types of header: Block; Char; Fifo
|
||||
func handleTarTypeBlockCharFifo(path string, stat *Stat) error {
|
||||
|
|
|
@ -39,6 +39,9 @@ func Walk(ctx context.Context, p string, opt *WalkOpt, fn filepath.WalkFunc) err
|
|||
}
|
||||
}
|
||||
|
||||
var lastIncludedDir string
|
||||
var includePatternPrefixes []string
|
||||
|
||||
seenFiles := make(map[uint64]string)
|
||||
return filepath.Walk(root, func(path string, fi os.FileInfo, err error) (retErr error) {
|
||||
if err != nil {
|
||||
|
@ -64,18 +67,34 @@ func Walk(ctx context.Context, p string, opt *WalkOpt, fn filepath.WalkFunc) err
|
|||
|
||||
if opt != nil {
|
||||
if opt.IncludePatterns != nil {
|
||||
if includePatternPrefixes == nil {
|
||||
includePatternPrefixes = patternPrefixes(opt.IncludePatterns)
|
||||
}
|
||||
matched := false
|
||||
for _, p := range opt.IncludePatterns {
|
||||
if m, _ := filepath.Match(p, path); m {
|
||||
if lastIncludedDir != "" {
|
||||
if strings.HasPrefix(path, lastIncludedDir+string(filepath.Separator)) {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
if fi.IsDir() {
|
||||
return filepath.SkipDir
|
||||
for _, p := range opt.IncludePatterns {
|
||||
if m, _ := filepath.Match(p, path); m {
|
||||
matched = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if matched && fi.IsDir() {
|
||||
lastIncludedDir = path
|
||||
}
|
||||
}
|
||||
if !matched {
|
||||
if !fi.IsDir() {
|
||||
return nil
|
||||
} else {
|
||||
if noPossiblePrefixMatch(path, includePatternPrefixes) {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
if pm != nil {
|
||||
|
@ -179,3 +198,30 @@ func (s *StatInfo) IsDir() bool {
|
|||
func (s *StatInfo) Sys() interface{} {
|
||||
return s.Stat
|
||||
}
|
||||
|
||||
// patternPrefixes returns, for each glob pattern, the literal leading
// portion that precedes the first filepath.Match metacharacter
// ('*', '?', '[' or '\\'). A pattern containing no metacharacters
// contributes itself unchanged.
func patternPrefixes(patterns []string) []string {
	prefixes := make([]string, 0, len(patterns))
	for _, pattern := range patterns {
		cut := strings.IndexAny(pattern, `*?[\`)
		if cut < 0 {
			cut = len(pattern)
		}
		prefixes = append(prefixes, pattern[:cut])
	}
	return prefixes
}
|
||||
|
||||
// noPossiblePrefixMatch reports whether none of the literal pattern
// prefixes pfxs could still match at or below path p. It returns true
// only when, for every prefix, p truncated to that prefix's length
// already diverges from the prefix — meaning the walk can safely skip
// the subtree rooted at p.
func noPossiblePrefixMatch(p string, pfxs []string) bool {
	for _, pfx := range pfxs {
		n := len(p)
		if len(pfx) < n {
			n = len(pfx)
		}
		if strings.HasPrefix(pfx, p[:n]) {
			return false
		}
	}
	return true
}
|
||||
|
|
Loading…
Reference in New Issue