Variable expansion bug.

This commit is contained in:
Daniel Jones 2013-03-10 00:34:42 -08:00
parent 8cbbe9c53c
commit 587bdd6c63
8 changed files with 99 additions and 79 deletions

View file

@ -52,6 +52,9 @@ improvements.
1. A clean, modern implementation in Go, that doesn't depend on the whole Plan 1. A clean, modern implementation in Go, that doesn't depend on the whole Plan
9 stack. 9 stack.
1. Parallel by default. Modern computers can build more than one C file at a
time. Cases that should not be run in parallel are the exception. Use
`-p=1` if this is the case.
1. Use Go regular expressions, which are perl-like. The original mk used plan9 1. Use Go regular expressions, which are perl-like. The original mk used plan9
regex, which few people know or care to learn. regex, which few people know or care to learn.
1. Allow blank lines in recipes. A recipe is any indented block of text, and 1. Allow blank lines in recipes. A recipe is any indented block of text, and
@ -60,8 +63,6 @@ improvements.
1. Add an 'S' attribute to execute recipes with programs other than sh. This 1. Add an 'S' attribute to execute recipes with programs other than sh. This
way, you don't have to separate your six line python script into its own way, you don't have to separate your six line python script into its own
file. Just stick it directly in the mkfile. file. Just stick it directly in the mkfile.
1. Use sh syntax for command insertion (i.e. backticks) rather than rc shell
syntax.
1. Pretty colors. 1. Pretty colors.
# Current State # Current State

20
TODO.md Normal file
View file

@ -0,0 +1,20 @@
# Short-term
* Unit tests.
* Expanding regex matches in targets.
* Dummy rule for multiple explicit targets
* Expand `$newprereq`.
* Expand `$alltargets`.
* Man page.
* Namelist syntax.
* Environment variables.
# Long-term
* Nicer syntax for alternative-shell rules.
* An attribute to demand n processors for a particular rule. This way
resource hog rules can be run on their own without disabling parallel
make.
* A switch that prints the rules that will be executed and prompts the user
to do so. I often find myself doing `mk -n` before `mk` to make sure my
rules aren't bogus.

View file

@ -13,12 +13,13 @@ func expand(input string, vars map[string][]string, expandBackticks bool) []stri
expanded := "" expanded := ""
var i, j int var i, j int
for i = 0; i < len(input); { for i = 0; i < len(input); {
j = i + strings.IndexAny(input[i:], "\"'`$\\") j = strings.IndexAny(input[i:], "\"'`$\\")
if j < 0 { if j < 0 {
expanded += input[i:] expanded += input[i:]
break break
} }
j += i
expanded += input[i:j] expanded += input[i:j]
c, w := utf8.DecodeRuneInString(input[j:]) c, w := utf8.DecodeRuneInString(input[j:])
@ -155,8 +156,8 @@ func expandSigil(input string, vars map[string][]string) ([]string, int) {
if ok { if ok {
return varvals, offset return varvals, offset
} else { } else {
return []string{"$" + input[:offset]}, offset return []string{"$" + input[:offset]}, offset
} }
} }
return []string{"$" + input}, len(input) return []string{"$" + input}, len(input)

View file

@ -56,7 +56,6 @@ type node struct {
flags nodeFlag // bitwise combination of node flags flags nodeFlag // bitwise combination of node flags
} }
// Update a node's timestamp and 'exists' flag. // Update a node's timestamp and 'exists' flag.
func (u *node) updateTimestamp() { func (u *node) updateTimestamp() {
info, err := os.Stat(u.name) info, err := os.Stat(u.name)
@ -67,7 +66,7 @@ func (u *node) updateTimestamp() {
} else { } else {
_, ok := err.(*os.PathError) _, ok := err.(*os.PathError)
if ok { if ok {
u.t = time.Unix(0, 0) u.t = time.Unix(0, 0)
u.exists = false u.exists = false
} else { } else {
mkError(err.Error()) mkError(err.Error())
@ -78,7 +77,7 @@ func (u *node) updateTimestamp() {
// Create a new node // Create a new node
func (g *graph) newnode(name string) *node { func (g *graph) newnode(name string) *node {
u := &node{name: name} u := &node{name: name}
u.updateTimestamp() u.updateTimestamp()
g.nodes[name] = u g.nodes[name] = u
return u return u
} }
@ -110,10 +109,10 @@ func buildgraph(rs *ruleSet, target string) *graph {
// keep track of how many times each rule is visited, to avoid cycles. // keep track of how many times each rule is visited, to avoid cycles.
rulecnt := make([]int, len(rs.rules)) rulecnt := make([]int, len(rs.rules))
g.root = applyrules(rs, g, target, rulecnt) g.root = applyrules(rs, g, target, rulecnt)
g.cyclecheck(g.root) g.cyclecheck(g.root)
g.root.flags |= nodeFlagProbable g.root.flags |= nodeFlagProbable
g.vacuous(g.root) g.vacuous(g.root)
g.ambiguous(g.root) g.ambiguous(g.root)
return g return g
} }
@ -283,16 +282,16 @@ func (g *graph) vacuous(u *node) bool {
// Check for cycles // Check for cycles
func (g *graph) cyclecheck(u *node) { func (g *graph) cyclecheck(u *node) {
if u.flags & nodeFlagCycle != 0 && len(u.prereqs) > 0 { if u.flags&nodeFlagCycle != 0 && len(u.prereqs) > 0 {
mkError(fmt.Sprintf("cycle in the graph detected at target %s", u.name)) mkError(fmt.Sprintf("cycle in the graph detected at target %s", u.name))
} }
u.flags |= nodeFlagCycle u.flags |= nodeFlagCycle
for i := range u.prereqs { for i := range u.prereqs {
if u.prereqs[i].v != nil { if u.prereqs[i].v != nil {
g.cyclecheck(u.prereqs[i].v) g.cyclecheck(u.prereqs[i].v)
} }
} }
u.flags &= ^nodeFlagCycle u.flags &= ^nodeFlagCycle
} }
@ -314,11 +313,11 @@ func (g *graph) ambiguous(u *node) {
} else { } else {
if le.r.equivRecipe(e.r) { if le.r.equivRecipe(e.r) {
if le.r.ismeta && !e.r.ismeta { if le.r.ismeta && !e.r.ismeta {
mkPrintRecipe(u.name, le.r.recipe) mkPrintRecipe(u.name, le.r.recipe)
le.togo = true le.togo = true
le = e le = e
} else if !le.r.ismeta && e.r.ismeta { } else if !le.r.ismeta && e.r.ismeta {
mkPrintRecipe(u.name, e.r.recipe) mkPrintRecipe(u.name, e.r.recipe)
e.togo = true e.togo = true
continue continue
} }

41
lex.go
View file

@ -1,10 +1,9 @@
package main package main
import ( import (
"fmt"
"strings" "strings"
"unicode/utf8" "unicode/utf8"
"fmt"
) )
type tokenType int type tokenType int
@ -16,7 +15,7 @@ const nonBareRunes = " \t\n\r\\=:#'\""
// Return true if the string contains whitespace only. // Return true if the string contains whitespace only.
func onlyWhitespace(s string) bool { func onlyWhitespace(s string) bool {
return strings.IndexAny(s, " \t\r\n") < 0 return strings.IndexAny(s, " \t\r\n") < 0
} }
const ( const (
@ -86,9 +85,9 @@ type lexer struct {
type lexerStateFun func(*lexer) lexerStateFun type lexerStateFun func(*lexer) lexerStateFun
func (l *lexer) lexerror(what string) { func (l *lexer) lexerror(what string) {
if l.errmsg == "" { if l.errmsg == "" {
l.errmsg = what l.errmsg = what
} }
l.emit(tokenError) l.emit(tokenError)
} }
@ -183,9 +182,9 @@ func (l *lexer) acceptUntil(invalid string) {
l.next() l.next()
} }
if l.peek() == eof { if l.peek() == eof {
l.lexerror(fmt.Sprintf("end of file encountered while looking for one of: %s", invalid)) l.lexerror(fmt.Sprintf("end of file encountered while looking for one of: %s", invalid))
} }
} }
// Skip characters from the valid string until the next is not. // Skip characters from the valid string until the next is not.
@ -203,9 +202,9 @@ func (l *lexer) skipUntil(invalid string) {
l.skip() l.skip()
} }
if l.peek() == eof { if l.peek() == eof {
l.lexerror(fmt.Sprintf("end of file encountered while looking for one of: %s", invalid)) l.lexerror(fmt.Sprintf("end of file encountered while looking for one of: %s", invalid))
} }
} }
// Start a new lexer to lex the given input. // Start a new lexer to lex the given input.
@ -269,9 +268,9 @@ func lexTopLevel(l *lexer) lexerStateFun {
return lexBackQuotedWord return lexBackQuotedWord
} }
if strings.IndexRune(nonBareRunes, c) >= 0 { if strings.IndexRune(nonBareRunes, c) >= 0 {
l.lexerror(fmt.Sprintf("expected a unquoted string, but found '%c'", c)) l.lexerror(fmt.Sprintf("expected a unquoted string, but found '%c'", c))
} }
return lexBareWord return lexBareWord
} }
@ -313,9 +312,9 @@ func lexDoubleQuotedWord(l *lexer) lexerStateFun {
} }
} }
if l.peek() == eof { if l.peek() == eof {
l.lexerror("end of file encountered while parsing a quoted string.") l.lexerror("end of file encountered while parsing a quoted string.")
} }
l.next() // '"' l.next() // '"'
return lexBareWord return lexBareWord
@ -344,9 +343,9 @@ func lexRecipe(l *lexer) lexerStateFun {
} }
} }
if !onlyWhitespace(l.input[l.start:l.pos]) { if !onlyWhitespace(l.input[l.start:l.pos]) {
l.emit(tokenRecipe) l.emit(tokenRecipe)
} }
return lexTopLevel return lexTopLevel
} }

30
mk.go
View file

@ -63,7 +63,7 @@ func mk(rs *ruleSet, target string, dryrun bool) {
if g.root.exists && !rebuildall { if g.root.exists && !rebuildall {
return return
} }
mkNode(g, g.root) mkNode(g, g.root)
} }
// Build a target in the graph. // Build a target in the graph.
@ -128,7 +128,7 @@ func mkNode(g *graph, u *node) {
for i := range prereqs { for i := range prereqs {
prereqs[i].mutex.Lock() prereqs[i].mutex.Lock()
// needs to be built? // needs to be built?
u.updateTimestamp() u.updateTimestamp()
if !prereqs[i].exists || e.r.attributes.virtual || rebuildall || (u.exists && u.t.Before(prereqs[i].t)) { if !prereqs[i].exists || e.r.attributes.virtual || rebuildall || (u.exists && u.t.Before(prereqs[i].t)) {
switch prereqs[i].status { switch prereqs[i].status {
case nodeStatusReady: case nodeStatusReady:
@ -233,16 +233,16 @@ func main() {
targets := flag.Args() targets := flag.Args()
// build the first non-meta rule in the makefile, if none are given explicitly // build the first non-meta rule in the makefile, if none are given explicitly
if len(targets) == 0 { if len(targets) == 0 {
for i := range rs.rules { for i := range rs.rules {
if !rs.rules[i].ismeta { if !rs.rules[i].ismeta {
for j := range rs.rules[i].targets { for j := range rs.rules[i].targets {
targets = append(targets, rs.rules[i].targets[j].spat) targets = append(targets, rs.rules[i].targets[j].spat)
} }
break break
} }
} }
} }
if len(targets) == 0 { if len(targets) == 0 {
fmt.Println("mk: nothing to mk") fmt.Println("mk: nothing to mk")
@ -251,7 +251,7 @@ func main() {
// TODO: For multiple targets, we should add a dummy rule that depends on // TODO: For multiple targets, we should add a dummy rule that depends on
// all let mk handle executing each. // all let mk handle executing each.
for _, target := range targets { for _, target := range targets {
mk(rs, target, dryrun) mk(rs, target, dryrun)
} }
} }

View file

@ -5,10 +5,10 @@ package main
import ( import (
"fmt" "fmt"
"io/ioutil"
"os"
"regexp" "regexp"
"strings" "strings"
"io/ioutil"
"os"
) )
type parser struct { type parser struct {
@ -20,10 +20,10 @@ type parser struct {
// Pretty errors. // Pretty errors.
func (p *parser) parseError(context string, expected string, found token) { func (p *parser) parseError(context string, expected string, found token) {
mkPrintError(fmt.Sprintf("%s:%d: syntax error: ", p.name, found.line)) mkPrintError(fmt.Sprintf("%s:%d: syntax error: ", p.name, found.line))
mkPrintError(fmt.Sprintf("while %s, expected %s but found '%s'.\n", mkPrintError(fmt.Sprintf("while %s, expected %s but found '%s'.\n",
context, expected, found.String())) context, expected, found.String()))
mkError("") mkError("")
} }
// More basic errors. // More basic errors.
@ -65,7 +65,7 @@ func parseInto(input string, name string, rules *ruleSet) {
state := parseTopLevel state := parseTopLevel
for t := range tokens { for t := range tokens {
if t.typ == tokenError { if t.typ == tokenError {
p.basicErrorAtLine(l.errmsg, t.line) p.basicErrorAtLine(l.errmsg, t.line)
break break
} }
@ -135,7 +135,7 @@ func parsePipeInclude(p *parser, t token) parserStateFun {
p.tokenbuf = append(p.tokenbuf, t) p.tokenbuf = append(p.tokenbuf, t)
default: default:
p.parseError("parsing piped include", "a shell command", t) p.parseError("parsing piped include", "a shell command", t)
} }
return parsePipeInclude return parsePipeInclude
@ -145,25 +145,25 @@ func parsePipeInclude(p *parser, t token) parserStateFun {
func parseRedirInclude(p *parser, t token) parserStateFun { func parseRedirInclude(p *parser, t token) parserStateFun {
switch t.typ { switch t.typ {
case tokenNewline: case tokenNewline:
filename := "" filename := ""
for i := range p.tokenbuf { for i := range p.tokenbuf {
filename += p.tokenbuf[i].val filename += p.tokenbuf[i].val
} }
file, err := os.Open(filename) file, err := os.Open(filename)
if err != nil { if err != nil {
p.basicErrorAtToken(fmt.Sprintf("cannot open %s", filename), p.tokenbuf[0]) p.basicErrorAtToken(fmt.Sprintf("cannot open %s", filename), p.tokenbuf[0])
} }
input, _ := ioutil.ReadAll(file) input, _ := ioutil.ReadAll(file)
parseInto(string(input), filename, p.rules) parseInto(string(input), filename, p.rules)
p.clear() p.clear()
return parseTopLevel return parseTopLevel
case tokenWord: case tokenWord:
p.tokenbuf = append(p.tokenbuf, t) p.tokenbuf = append(p.tokenbuf, t)
default: default:
p.parseError("parsing include", "a file name", t) p.parseError("parsing include", "a file name", t)
} }
return parseRedirInclude return parseRedirInclude

View file

@ -152,7 +152,7 @@ func subprocess(program string,
var outbytes []byte var outbytes []byte
outbytes, err = cmd.Output() outbytes, err = cmd.Output()
output = string(outbytes) output = string(outbytes)
if output[len(output)-1] == '\n' { if len(output) > 0 && output[len(output)-1] == '\n' {
output = output[:len(output)-1] output = output[:len(output)-1]
} }
} else { } else {