dockerfile: keep mapping on #syntax error
Signed-off-by: Tonis Tiigi <tonistiigi@gmail.com>
parent
6073e6cff3
commit
e536302180
|
@ -317,9 +317,13 @@ func Build(ctx context.Context, c client.Client) (*client.Result, error) {
|
|||
}
|
||||
|
||||
if _, ok := opts["cmdline"]; !ok {
|
||||
ref, cmdline, ok := dockerfile2llb.DetectSyntax(bytes.NewBuffer(dtDockerfile))
|
||||
ref, cmdline, loc, ok := dockerfile2llb.DetectSyntax(bytes.NewBuffer(dtDockerfile))
|
||||
if ok {
|
||||
return forwardGateway(ctx, c, ref, cmdline)
|
||||
res, err := forwardGateway(ctx, c, ref, cmdline)
|
||||
if err != nil && len(errdefs.Sources(err)) == 0 {
|
||||
return nil, wrapSource(err, sourceMap, loc)
|
||||
}
|
||||
return res, err
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -5,34 +5,51 @@ import (
|
|||
"io"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/moby/buildkit/frontend/dockerfile/parser"
|
||||
)
|
||||
|
||||
const keySyntax = "syntax"
|
||||
|
||||
var reDirective = regexp.MustCompile(`^#\s*([a-zA-Z][a-zA-Z0-9]*)\s*=\s*(.+?)\s*$`)
|
||||
|
||||
func DetectSyntax(r io.Reader) (string, string, bool) {
|
||||
type Directive struct {
|
||||
Name string
|
||||
Value string
|
||||
Location []parser.Range
|
||||
}
|
||||
|
||||
func DetectSyntax(r io.Reader) (string, string, []parser.Range, bool) {
|
||||
directives := ParseDirectives(r)
|
||||
if len(directives) == 0 {
|
||||
return "", "", false
|
||||
return "", "", nil, false
|
||||
}
|
||||
v, ok := directives[keySyntax]
|
||||
if !ok {
|
||||
return "", "", false
|
||||
return "", "", nil, false
|
||||
}
|
||||
p := strings.SplitN(v, " ", 2)
|
||||
return p[0], v, true
|
||||
p := strings.SplitN(v.Value, " ", 2)
|
||||
return p[0], v.Value, v.Location, true
|
||||
}
|
||||
|
||||
func ParseDirectives(r io.Reader) map[string]string {
|
||||
m := map[string]string{}
|
||||
func ParseDirectives(r io.Reader) map[string]Directive {
|
||||
m := map[string]Directive{}
|
||||
s := bufio.NewScanner(r)
|
||||
var l int
|
||||
for s.Scan() {
|
||||
l++
|
||||
match := reDirective.FindStringSubmatch(s.Text())
|
||||
if len(match) == 0 {
|
||||
return m
|
||||
}
|
||||
m[strings.ToLower(match[1])] = match[2]
|
||||
m[strings.ToLower(match[1])] = Directive{
|
||||
Name: match[1],
|
||||
Value: match[2],
|
||||
Location: []parser.Range{{
|
||||
Start: parser.Position{Line: l},
|
||||
End: parser.Position{Line: l},
|
||||
}},
|
||||
}
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
|
|
@ -22,11 +22,11 @@ func TestDirectives(t *testing.T) {
|
|||
|
||||
v, ok := d["escape"]
|
||||
require.True(t, ok)
|
||||
require.Equal(t, v, "\\")
|
||||
require.Equal(t, v.Value, "\\")
|
||||
|
||||
v, ok = d["key"]
|
||||
require.True(t, ok)
|
||||
require.Equal(t, v, "FOO bar")
|
||||
require.Equal(t, v.Value, "FOO bar")
|
||||
|
||||
// for some reason Moby implementation in case insensitive for escape
|
||||
dt = `# EScape=\
|
||||
|
@ -40,11 +40,11 @@ func TestDirectives(t *testing.T) {
|
|||
|
||||
v, ok = d["escape"]
|
||||
require.True(t, ok)
|
||||
require.Equal(t, v, "\\")
|
||||
require.Equal(t, v.Value, "\\")
|
||||
|
||||
v, ok = d["key"]
|
||||
require.True(t, ok)
|
||||
require.Equal(t, v, "FOO bar")
|
||||
require.Equal(t, v.Value, "FOO bar")
|
||||
}
|
||||
|
||||
func TestSyntaxDirective(t *testing.T) {
|
||||
|
@ -54,15 +54,17 @@ func TestSyntaxDirective(t *testing.T) {
|
|||
FROM busybox
|
||||
`
|
||||
|
||||
ref, cmdline, ok := DetectSyntax(bytes.NewBuffer([]byte(dt)))
|
||||
ref, cmdline, loc, ok := DetectSyntax(bytes.NewBuffer([]byte(dt)))
|
||||
require.True(t, ok)
|
||||
require.Equal(t, ref, "dockerfile:experimental")
|
||||
require.Equal(t, cmdline, "dockerfile:experimental // opts")
|
||||
require.Equal(t, 1, loc[0].Start.Line)
|
||||
require.Equal(t, 1, loc[0].End.Line)
|
||||
|
||||
dt = `FROM busybox
|
||||
RUN ls
|
||||
`
|
||||
ref, cmdline, ok = DetectSyntax(bytes.NewBuffer([]byte(dt)))
|
||||
ref, cmdline, _, ok = DetectSyntax(bytes.NewBuffer([]byte(dt)))
|
||||
require.False(t, ok)
|
||||
require.Equal(t, ref, "")
|
||||
require.Equal(t, cmdline, "")
|
||||
|
|
Loading…
Reference in New Issue