2015-09-05 19:49:06 +00:00
|
|
|
// Package parser implements a parser and parse tree dumper for Dockerfiles.
|
2018-06-02 00:30:18 +00:00
|
|
|
package parser
|
2015-09-05 19:49:06 +00:00
|
|
|
|
|
|
|
import (
|
|
|
|
"bufio"
|
2016-06-03 12:32:37 +00:00
|
|
|
"bytes"
|
2016-04-22 22:04:46 +00:00
|
|
|
"fmt"
|
2015-09-05 19:49:06 +00:00
|
|
|
"io"
|
|
|
|
"regexp"
|
2017-03-10 21:23:46 +00:00
|
|
|
"strconv"
|
2015-09-05 19:49:06 +00:00
|
|
|
"strings"
|
|
|
|
"unicode"
|
|
|
|
|
2018-06-02 00:30:18 +00:00
|
|
|
"github.com/moby/buildkit/frontend/dockerfile/command"
|
2017-04-12 19:40:16 +00:00
|
|
|
"github.com/pkg/errors"
|
2015-09-05 19:49:06 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
// Node is a structure used to represent a parse tree.
//
// In the node there are three fields, Value, Next, and Children. Value is the
// current token's string value. Next is always the next non-child token, and
// children contains all the children. Here's an example:
//
// (value next (child child-next child-next-next) next-next)
//
// This data structure is frankly pretty lousy for handling complex languages,
// but lucky for us the Dockerfile isn't very complicated. This structure
// works a little more effectively than a "proper" parse tree for our needs.
//
type Node struct {
	Value string // actual content
	Next *Node // the next item in the current sexp
	Children []*Node // the children of this sexp
	Attributes map[string]bool // special attributes for this node
	Original string // original line used before parsing
	Flags []string // only top Node should have this set
	StartLine int // the line in the original dockerfile where the node begins
	EndLine int // the line in the original dockerfile where the node ends
	PrevComment []string // comment lines collected immediately before this node's instruction
}
|
|
|
|
|
2020-04-22 05:56:14 +00:00
|
|
|
// Location return the location of node in source code
|
|
|
|
func (node *Node) Location() []Range {
|
|
|
|
return toRanges(node.StartLine, node.EndLine)
|
|
|
|
}
|
|
|
|
|
2017-03-10 21:23:46 +00:00
|
|
|
// Dump dumps the AST defined by `node` as a list of sexps.
|
|
|
|
// Returns a string suitable for printing.
|
|
|
|
func (node *Node) Dump() string {
|
|
|
|
str := ""
|
|
|
|
str += node.Value
|
|
|
|
|
|
|
|
if len(node.Flags) > 0 {
|
|
|
|
str += fmt.Sprintf(" %q", node.Flags)
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, n := range node.Children {
|
|
|
|
str += "(" + n.Dump() + ")\n"
|
|
|
|
}
|
|
|
|
|
|
|
|
for n := node.Next; n != nil; n = n.Next {
|
|
|
|
if len(n.Children) > 0 {
|
|
|
|
str += " " + n.Dump()
|
|
|
|
} else {
|
|
|
|
str += " " + strconv.Quote(n.Value)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return strings.TrimSpace(str)
|
|
|
|
}
|
|
|
|
|
2017-04-12 22:00:55 +00:00
|
|
|
func (node *Node) lines(start, end int) {
|
|
|
|
node.StartLine = start
|
2019-05-31 03:44:40 +00:00
|
|
|
node.EndLine = end
|
2017-04-12 22:00:55 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// AddChild adds a new child node, and updates line information
|
|
|
|
func (node *Node) AddChild(child *Node, startLine, endLine int) {
|
|
|
|
child.lines(startLine, endLine)
|
|
|
|
if node.StartLine < 0 {
|
|
|
|
node.StartLine = startLine
|
|
|
|
}
|
2019-05-31 03:44:40 +00:00
|
|
|
node.EndLine = endLine
|
2017-04-12 22:00:55 +00:00
|
|
|
node.Children = append(node.Children, child)
|
|
|
|
}
|
|
|
|
|
2015-09-05 19:49:06 +00:00
|
|
|
var (
	// dispatch maps each Dockerfile command name to its line parser; it is
	// populated in init (see the dispatch table there).
	dispatch map[string]func(string, *directives) (*Node, map[string]bool, error)

	// reWhitespace matches runs of horizontal whitespace (not newlines).
	reWhitespace = regexp.MustCompile(`[\t\v\f\r ]+`)
	// reDirectives matches a parser directive comment, e.g. "# escape=`",
	// capturing the name and the value.
	reDirectives = regexp.MustCompile(`^#\s*([a-zA-Z][a-zA-Z0-9]*)\s*=\s*(.+?)\s*$`)
	// reComment matches a whole-line comment starting with '#'.
	reComment = regexp.MustCompile(`^#.*$`)
)
|
|
|
|
|
2016-06-27 20:20:47 +00:00
|
|
|
// DefaultEscapeToken is the default escape token
const DefaultEscapeToken = '\\'

// validDirectives is the set of recognized parser directive names; any other
// "# name=value" comment is ignored as a plain comment.
var validDirectives = map[string]struct{}{
	"escape": {},
	"syntax": {},
}
|
|
|
|
|
|
|
|
// directive is the structure used during a build run to hold the state of
// parsing directives.
type directives struct {
	escapeToken rune // Current escape token
	lineContinuationRegex *regexp.Regexp // Current line continuation regex
	done bool // Whether we are done looking for directives
	seen map[string]struct{} // Names of parser directives already seen; each may appear only once
}
|
|
|
|
|
2017-04-12 17:47:19 +00:00
|
|
|
// setEscapeToken sets the default token for escaping characters in a Dockerfile.
|
2020-04-15 03:30:57 +00:00
|
|
|
func (d *directives) setEscapeToken(s string) error {
|
2016-04-22 22:04:46 +00:00
|
|
|
if s != "`" && s != "\\" {
|
2020-04-15 03:30:57 +00:00
|
|
|
return errors.Errorf("invalid escape token '%s' does not match ` or \\", s)
|
2016-04-22 22:04:46 +00:00
|
|
|
}
|
2017-04-11 19:07:02 +00:00
|
|
|
d.escapeToken = rune(s[0])
|
|
|
|
d.lineContinuationRegex = regexp.MustCompile(`\` + s + `[ \t]*$`)
|
2016-04-22 22:04:46 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-10-03 18:38:37 +00:00
|
|
|
// possibleParserDirective looks for parser directives, eg '# escapeToken=<char>'.
|
|
|
|
// Parser directives must precede any builder instruction or other comments,
|
|
|
|
// and cannot be repeated.
|
2020-04-15 03:30:57 +00:00
|
|
|
func (d *directives) possibleParserDirective(line string) error {
|
|
|
|
if d.done {
|
2017-04-12 19:40:16 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-04-15 03:30:57 +00:00
|
|
|
match := reDirectives.FindStringSubmatch(line)
|
|
|
|
if len(match) == 0 {
|
|
|
|
d.done = true
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
k := strings.ToLower(match[1])
|
|
|
|
_, ok := validDirectives[k]
|
|
|
|
if !ok {
|
|
|
|
d.done = true
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
if _, ok := d.seen[k]; ok {
|
|
|
|
return errors.Errorf("only one %s parser directive can be used", k)
|
|
|
|
}
|
|
|
|
d.seen[k] = struct{}{}
|
|
|
|
|
|
|
|
if k == "escape" {
|
|
|
|
return d.setEscapeToken(match[2])
|
2017-04-12 19:40:16 +00:00
|
|
|
}
|
2017-05-30 16:45:27 +00:00
|
|
|
|
2017-04-12 19:40:16 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2020-04-15 03:30:57 +00:00
|
|
|
// newDefaultDirectives returns a new directives structure with the default escapeToken token
|
|
|
|
func newDefaultDirectives() *directives {
|
|
|
|
d := &directives{
|
|
|
|
seen: map[string]struct{}{},
|
|
|
|
}
|
|
|
|
d.setEscapeToken(string(DefaultEscapeToken))
|
|
|
|
return d
|
2017-04-11 19:07:02 +00:00
|
|
|
}
|
|
|
|
|
2015-09-05 19:49:06 +00:00
|
|
|
func init() {
	// Dispatch Table. see line_parsers.go for the parse functions.
	// The command is parsed and mapped to the line parser. The line parser
	// receives the arguments but not the command, and returns an AST after
	// reformulating the arguments according to the rules in the parser
	// functions. Errors are propagated up by Parse() and the resulting AST can
	// be incorporated directly into the existing AST as a next.
	//
	// NOTE(review): the table is filled in init rather than in the var
	// declaration — presumably to avoid an initialization cycle with the
	// parse functions; confirm before restructuring.
	dispatch = map[string]func(string, *directives) (*Node, map[string]bool, error){
		command.Add: parseMaybeJSONToList,
		command.Arg: parseNameOrNameVal,
		command.Cmd: parseMaybeJSON,
		command.Copy: parseMaybeJSONToList,
		command.Entrypoint: parseMaybeJSON,
		command.Env: parseEnv,
		command.Expose: parseStringsWhitespaceDelimited,
		command.From: parseStringsWhitespaceDelimited,
		command.Healthcheck: parseHealthConfig,
		command.Label: parseLabel,
		command.Maintainer: parseString,
		command.Onbuild: parseSubCommand,
		command.Run: parseMaybeJSON,
		command.Shell: parseMaybeJSON,
		command.StopSignal: parseString,
		command.User: parseString,
		command.Volume: parseMaybeJSONToList,
		command.Workdir: parseString,
	}
}
|
|
|
|
|
2017-03-10 21:23:46 +00:00
|
|
|
// newNodeFromLine splits the line into parts, and dispatches to a function
|
|
|
|
// based on the command and command arguments. A Node is created from the
|
|
|
|
// result of the dispatch.
|
2020-09-22 03:54:27 +00:00
|
|
|
func newNodeFromLine(line string, d *directives, comments []string) (*Node, error) {
|
2015-09-05 19:49:06 +00:00
|
|
|
cmd, flags, args, err := splitCommand(line)
|
|
|
|
if err != nil {
|
2017-03-10 21:23:46 +00:00
|
|
|
return nil, err
|
2015-09-05 19:49:06 +00:00
|
|
|
}
|
|
|
|
|
2017-03-10 21:23:46 +00:00
|
|
|
fn := dispatch[cmd]
|
|
|
|
// Ignore invalid Dockerfile instructions
|
|
|
|
if fn == nil {
|
|
|
|
fn = parseIgnore
|
|
|
|
}
|
2020-04-15 03:30:57 +00:00
|
|
|
next, attrs, err := fn(args, d)
|
2015-09-05 19:49:06 +00:00
|
|
|
if err != nil {
|
2017-03-10 21:23:46 +00:00
|
|
|
return nil, err
|
2015-09-05 19:49:06 +00:00
|
|
|
}
|
|
|
|
|
2017-03-10 21:23:46 +00:00
|
|
|
return &Node{
|
2020-09-22 03:54:27 +00:00
|
|
|
Value: cmd,
|
|
|
|
Original: line,
|
|
|
|
Flags: flags,
|
|
|
|
Next: next,
|
|
|
|
Attributes: attrs,
|
|
|
|
PrevComment: comments,
|
2017-03-10 21:23:46 +00:00
|
|
|
}, nil
|
|
|
|
}
|
2015-09-05 19:49:06 +00:00
|
|
|
|
2017-04-12 17:47:19 +00:00
|
|
|
// Result is the result of parsing a Dockerfile
type Result struct {
	AST *Node // root of the parse tree; each instruction is one of its Children
	EscapeToken rune // escape token in effect when parsing finished
	Warnings []string // non-fatal problems found while parsing
}
|
|
|
|
|
2017-06-16 22:05:30 +00:00
|
|
|
// PrintWarnings to the writer
|
|
|
|
func (r *Result) PrintWarnings(out io.Writer) {
|
|
|
|
if len(r.Warnings) == 0 {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
fmt.Fprintf(out, strings.Join(r.Warnings, "\n")+"\n")
|
|
|
|
}
|
|
|
|
|
2017-04-12 17:47:19 +00:00
|
|
|
// Parse reads lines from a Reader, parses the lines into an AST and returns
// the AST and escape token
//
// Each physical line is scanned once; lines ending in the current escape
// token are joined with the following lines into a single logical line
// before being dispatched to newNodeFromLine. Errors are annotated with the
// line range they occurred on via withLocation.
func Parse(rwc io.Reader) (*Result, error) {
	d := newDefaultDirectives()
	currentLine := 0
	// StartLine stays -1 until the first instruction is added; it doubles
	// as the "no instructions seen" sentinel checked at the end.
	root := &Node{StartLine: -1}
	scanner := bufio.NewScanner(rwc)
	warnings := []string{}
	// comments accumulates comment text to attach to the next instruction
	// as PrevComment.
	var comments []string

	var err error
	for scanner.Scan() {
		bytesRead := scanner.Bytes()
		if currentLine == 0 {
			// First line, strip the byte-order-marker if present
			bytesRead = bytes.TrimPrefix(bytesRead, utf8bom)
		}
		if isComment(bytesRead) {
			// A blank comment line ("#") resets the accumulated comments;
			// otherwise the comment text (minus the first character) is
			// collected for the next instruction.
			//
			// NOTE(review): bytesRead[1:] assumes '#' is the first byte,
			// but isComment tolerates leading whitespace — an indented
			// comment would keep its '#' in the collected text. Confirm
			// whether that is intended.
			comment := strings.TrimSpace(string(bytesRead[1:]))
			if comment == "" {
				comments = nil
			} else {
				comments = append(comments, comment)
			}
		}
		// processLine records any parser directive and strips comments.
		bytesRead, err = processLine(d, bytesRead, true)
		if err != nil {
			return nil, withLocation(err, currentLine, 0)
		}
		currentLine++

		startLine := currentLine
		line, isEndOfLine := trimContinuationCharacter(string(bytesRead), d)
		if isEndOfLine && line == "" {
			continue
		}

		// Keep consuming physical lines while the logical line is continued.
		var hasEmptyContinuationLine bool
		for !isEndOfLine && scanner.Scan() {
			bytesRead, err := processLine(d, scanner.Bytes(), false)
			if err != nil {
				return nil, withLocation(err, currentLine, 0)
			}
			currentLine++

			if isComment(scanner.Bytes()) {
				// original line was a comment (processLine strips comments)
				continue
			}
			if isEmptyContinuationLine(bytesRead) {
				// Remember this to emit a deprecation warning below.
				hasEmptyContinuationLine = true
				continue
			}

			continuationLine := string(bytesRead)
			continuationLine, isEndOfLine = trimContinuationCharacter(continuationLine, d)
			line += continuationLine
		}

		if hasEmptyContinuationLine {
			warnings = append(warnings, "[WARNING]: Empty continuation line found in:\n "+line)
		}

		child, err := newNodeFromLine(line, d, comments)
		if err != nil {
			return nil, withLocation(err, startLine, currentLine)
		}
		// Comments were consumed by this instruction; start fresh.
		comments = nil
		root.AddChild(child, startLine, currentLine)
	}

	if len(warnings) > 0 {
		warnings = append(warnings, "[WARNING]: Empty continuation lines will become errors in a future release.")
	}

	if root.StartLine < 0 {
		return nil, withLocation(errors.New("file with no instructions"), currentLine, 0)
	}

	// scanner.Err() is checked last so a partially-built AST is still
	// returned alongside any scan error (e.g. a line exceeding the buffer).
	return &Result{
		AST: root,
		Warnings: warnings,
		EscapeToken: d.escapeToken,
	}, withLocation(handleScannerError(scanner.Err()), currentLine, 0)
}
|
2017-03-10 21:23:46 +00:00
|
|
|
|
2017-04-12 22:00:55 +00:00
|
|
|
func trimComments(src []byte) []byte {
|
2020-04-15 03:30:57 +00:00
|
|
|
return reComment.ReplaceAll(src, []byte{})
|
2017-04-12 22:00:55 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// trimWhitespace strips leading Unicode whitespace from src; trailing
// whitespace is left untouched.
func trimWhitespace(src []byte) []byte {
	trimmed := bytes.TrimLeftFunc(src, unicode.IsSpace)
	return trimmed
}
|
|
|
|
|
2017-09-27 20:18:24 +00:00
|
|
|
func isComment(line []byte) bool {
|
2020-04-15 03:30:57 +00:00
|
|
|
return reComment.Match(trimWhitespace(line))
|
2017-09-27 20:18:24 +00:00
|
|
|
}
|
|
|
|
|
2017-04-12 22:00:55 +00:00
|
|
|
func isEmptyContinuationLine(line []byte) bool {
|
2017-09-27 20:18:24 +00:00
|
|
|
return len(trimWhitespace(line)) == 0
|
2017-04-12 22:00:55 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// utf8bom is the UTF-8 encoding of the Unicode byte-order mark, stripped
// from the first line of input if present.
var utf8bom = []byte{0xEF, 0xBB, 0xBF}
|
|
|
|
|
2020-04-15 03:30:57 +00:00
|
|
|
func trimContinuationCharacter(line string, d *directives) (string, bool) {
|
2017-04-12 22:00:55 +00:00
|
|
|
if d.lineContinuationRegex.MatchString(line) {
|
|
|
|
line = d.lineContinuationRegex.ReplaceAllString(line, "")
|
|
|
|
return line, false
|
2017-03-10 21:23:46 +00:00
|
|
|
}
|
2017-04-12 22:00:55 +00:00
|
|
|
return line, true
|
|
|
|
}
|
|
|
|
|
|
|
|
// TODO: remove stripLeftWhitespace after deprecation period. It seems silly
|
|
|
|
// to preserve whitespace on continuation lines. Why is that done?
|
2020-04-15 03:30:57 +00:00
|
|
|
func processLine(d *directives, token []byte, stripLeftWhitespace bool) ([]byte, error) {
|
2017-04-12 22:00:55 +00:00
|
|
|
if stripLeftWhitespace {
|
|
|
|
token = trimWhitespace(token)
|
|
|
|
}
|
2016-12-06 02:55:07 +00:00
|
|
|
return trimComments(token), d.possibleParserDirective(string(token))
|
2017-03-10 21:23:46 +00:00
|
|
|
}
|
2017-11-07 23:27:49 +00:00
|
|
|
|
|
|
|
func handleScannerError(err error) error {
|
|
|
|
switch err {
|
|
|
|
case bufio.ErrTooLong:
|
|
|
|
return errors.Errorf("dockerfile line greater than max allowed size of %d", bufio.MaxScanTokenSize-1)
|
|
|
|
default:
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|