Fix a number of things.

Daniel Jones 2013-03-09 20:54:13 -08:00
parent 6e0d8979be
commit 8cbbe9c53c
7 changed files with 105 additions and 61 deletions


@@ -1,14 +1,28 @@
 # Mk
-Mk is a reboot of the Plan 9 mk command, which itself is a replacement for make.
-This tool is for anyone who loves make, but hates all its stupid bullshit.
+Mk is a reboot of the Plan 9 mk command, which itself is [a successor to
+make]($). This tool is for anyone who loves make, but hates all its stupid
+bullshit.
+# Installation
+1. Install Go.
+2. Run `go install github.com/dcjones/mk`
 # Why Plan 9 mk is better than make
-Plan 9 mk blows make out of the water. Yet tragically, few use or have even heard
-of it. Put simply, mk takes make, keeps its simple direct syntax, but fixes
-basically everything that's annoyed you over the years. To name a few things:
+Way back in the 90s, some smart guys at Bell Labs got together and decided to
+write a new operating system to replace Unix. The idea was to keep everything that
+was great about Unix, but totally disregard backwards compatibility in a quest
+for something better. The operating system they designed, Plan 9, had a lot of
+terrific ideas, and though some were cherry picked, the OS as a whole never
+really caught on.
+Among the gems in Plan 9 was a rewrite of the venerable Unix make
+command, in the form of mk. Simply put, mk is make, but with a large collection
+of relatively minor improvements, adding up to something more consistent,
+elegant, and powerful. To name a few specifics:
 1. Recipes are delimited by any indentation, not tab characters in particular.
 1. Phony targets are handled separately from file targets. Your mkfile won't
@@ -29,29 +43,30 @@ basically everything that's annoyed you over the years. To name a few things:
 1. Variables are expanded in recipes only if they are defined. That way you
    usually don't have to escape `$`.
-And much more! For more, read the original mk paper: ["Mk: a successor to
-make"](#).
+And much more!
 # Improvements over Plan 9 mk
 This mk stays mostly faithful to Plan 9, but makes a few (in my opinion)
 improvements.
-1. A clean, modern implementation in go, that doesn't depend on the whole plan
-   9 for userspace stack.
-1. Use go regular expressions, which are perl-like. The original mk used plan9
+1. A clean, modern implementation in Go, that doesn't depend on the whole Plan
+   9 stack.
+1. Use Go regular expressions, which are perl-like. The original mk used plan9
    regex, which few people know or care to learn.
 1. Allow blank lines in recipes. A recipe is any indented block of text, and
-   continues until a non-indented character or the end of the file.
+   continues until a non-indented character or the end of the file. (Similar
+   to blocks in Python.)
 1. Add an 'S' attribute to execute recipes with programs other than sh. This
    way, you don't have to separate your six line python script into its own
-   file. Just stick it in the mkfile.
+   file. Just stick it directly in the mkfile.
 1. Use sh syntax for command insertion (i.e. backticks) rather than rc shell
    syntax.
+1. Pretty colors.
 # Current State
-Totally non-functional. Check back later!
+Functional, but with some bugs and some unimplemented minor features. Give it a
+try and see what you think!


@@ -154,6 +154,8 @@ func expandSigil(input string, vars map[string][]string) ([]string, int) {
     varvals, ok := vars[varname]
     if ok {
         return varvals, offset
+    } else {
+        return []string{"$" + input[:offset]}, offset
     }
 }
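The new else branch above makes an undefined variable expand to its literal `$name` text instead of vanishing. A minimal standalone sketch of that behavior, using a hypothetical `expand` helper rather than the repository's `expandSigil`:

    package main

    import "fmt"

    // expand looks a variable up and, like the change above, falls back to the
    // literal "$name" text when the variable is undefined.
    func expand(name string, vars map[string][]string) []string {
        if vals, ok := vars[name]; ok {
            return vals
        }
        return []string{"$" + name}
    }

    func main() {
        vars := map[string][]string{"CC": {"gcc"}}
        fmt.Println(expand("CC", vars))     // [gcc]
        fmt.Println(expand("CFLAGS", vars)) // [$CFLAGS]
    }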


@@ -56,10 +56,10 @@ type node struct {
     flags nodeFlag // bitwise combination of node flags
 }
-// Create a new node
-func (g *graph) newnode(name string) *node {
-    u := &node{name: name}
-    info, err := os.Stat(name)
+// Update a node's timestamp and 'exists' flag.
+func (u *node) updateTimestamp() {
+    info, err := os.Stat(u.name)
     if err == nil {
         u.t = info.ModTime()
         u.exists = true
@@ -67,11 +67,18 @@ func (g *graph) newnode(name string) *node {
     } else {
         _, ok := err.(*os.PathError)
         if ok {
+            u.t = time.Unix(0, 0)
             u.exists = false
         } else {
            mkError(err.Error())
         }
     }
 }
+// Create a new node
+func (g *graph) newnode(name string) *node {
+    u := &node{name: name}
+    u.updateTimestamp()
     g.nodes[name] = u
     return u
 }
@@ -103,14 +110,10 @@ func buildgraph(rs *ruleSet, target string) *graph {
     // keep track of how many times each rule is visited, to avoid cycles.
     rulecnt := make([]int, len(rs.rules))
     g.root = applyrules(rs, g, target, rulecnt)
-    println("cyclecheck")
     g.cyclecheck(g.root)
     g.root.flags |= nodeFlagProbable
-    println("vacuous")
     g.vacuous(g.root)
-    println("ambiguous")
     g.ambiguous(g.root)
-    println("done")
     return g
 }
@@ -192,7 +195,6 @@ func applyrules(rs *ruleSet, g *graph, target string, rulecnt []int) *node {
     }
     rulecnt[k] += 1
-    fmt.Println(rulecnt)
     if len(r.prereqs) == 0 {
         e := u.newedge(nil, r)
         e.stem = stem
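Pulling the os.Stat call into updateTimestamp lets callers refresh a node's modification time and `exists` flag on demand; the mk.go change below calls it right before deciding whether a target needs rebuilding. As an illustration only (a hypothetical `outOfDate` helper, not the repository's graph code), this is the kind of mtime comparison that refresh supports:

    package main

    import (
        "fmt"
        "os"
    )

    // outOfDate reports whether target is missing or older than prereq,
    // judged by filesystem modification times.
    func outOfDate(target, prereq string) bool {
        tinfo, err := os.Stat(target)
        if err != nil {
            return true // target missing: it must be built
        }
        pinfo, err := os.Stat(prereq)
        if err != nil {
            return true // prerequisite missing: rebuild conservatively
        }
        return tinfo.ModTime().Before(pinfo.ModTime())
    }

    func main() {
        fmt.Println(outOfDate("prog", "prog.go"))
    }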

lex.go

@@ -1,17 +1,24 @@
-// TODO: Backquoted strings.
-// TODO: Comments
 package main
 import (
     "strings"
     "unicode/utf8"
+    "fmt"
 )
 type tokenType int
 const eof rune = '\000'
+// Runes that cannot be part of a bare (unquoted) string.
+const nonBareRunes = " \t\n\r\\=:#'\""
+// Return true if the string contains whitespace only.
+func onlyWhitespace(s string) bool {
+    return strings.IndexAny(s, " \t\r\n") < 0
+}
 const (
     tokenError tokenType = iota
     tokenNewline
@@ -79,7 +86,9 @@ type lexer struct {
 type lexerStateFun func(*lexer) lexerStateFun
 func (l *lexer) lexerror(what string) {
+    if l.errmsg == "" {
         l.errmsg = what
+    }
     l.emit(tokenError)
 }
@@ -173,6 +182,10 @@ func (l *lexer) acceptUntil(invalid string) {
     for l.pos < len(l.input) && strings.IndexRune(invalid, l.peek()) < 0 {
         l.next()
     }
+    if l.peek() == eof {
+        l.lexerror(fmt.Sprintf("end of file encountered while looking for one of: %s", invalid))
+    }
 }
 // Skip characters from the valid string until the next is not.
@@ -189,6 +202,10 @@ func (l *lexer) skipUntil(invalid string) {
     for l.pos < len(l.input) && strings.IndexRune(invalid, l.peek()) < 0 {
         l.skip()
     }
+    if l.peek() == eof {
+        l.lexerror(fmt.Sprintf("end of file encountered while looking for one of: %s", invalid))
+    }
 }
 // Start a new lexer to lex the given input.
@@ -252,8 +269,10 @@ func lexTopLevel(l *lexer) lexerStateFun {
         return lexBackQuotedWord
     }
-    // TODO: No! The lexer can get stuck in a loop this way.
-    // Check if the next charar is a valid bare string chacter. If not, error.
+    if strings.IndexRune(nonBareRunes, c) >= 0 {
+        l.lexerror(fmt.Sprintf("expected an unquoted string, but found '%c'", c))
+    }
     return lexBareWord
 }
@@ -287,12 +306,17 @@ func lexInclude(l *lexer) lexerStateFun {
 func lexDoubleQuotedWord(l *lexer) lexerStateFun {
     l.next() // '"'
-    for l.peek() != '"' {
+    for l.peek() != '"' && l.peek() != eof {
         l.acceptUntil("\\\"")
         if l.accept("\\") {
             l.accept("\"")
         }
     }
+    if l.peek() == eof {
+        l.lexerror("end of file encountered while parsing a quoted string.")
+    }
     l.next() // '"'
     return lexBareWord
 }
@@ -320,13 +344,14 @@ func lexRecipe(l *lexer) lexerStateFun {
         }
     }
-    // TODO: don't emit if there is only whitespace in the recipe
+    if !onlyWhitespace(l.input[l.start:l.pos]) {
         l.emit(tokenRecipe)
+    }
     return lexTopLevel
 }
 func lexBareWord(l *lexer) lexerStateFun {
-    l.acceptUntil(" \t\n\r\\=:#'\"")
+    l.acceptUntil(nonBareRunes)
     if l.peek() == '"' {
         return lexDoubleQuotedWord
     } else if l.peek() == '\'' {
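For context, the lexer uses the state-function style visible above (`type lexerStateFun func(*lexer) lexerStateFun`): each state consumes some input and returns the next state to run. A minimal, self-contained sketch of that pattern, with hypothetical names and none of the repository's token handling:

    package main

    import "fmt"

    type lexer struct {
        input string
        pos   int
    }

    // stateFun mirrors the shape of lexerStateFun: a state returns the next state.
    type stateFun func(*lexer) stateFun

    func lexStart(l *lexer) stateFun {
        if l.pos >= len(l.input) {
            return nil // end of input stops the driver loop
        }
        fmt.Printf("lexing %q\n", l.input[l.pos])
        l.pos++
        return lexStart
    }

    func main() {
        l := &lexer{input: "abc"}
        for state := lexStart; state != nil; {
            state = state(l)
        }
    }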

mk.go

@@ -128,6 +128,7 @@ func mkNode(g *graph, u *node) {
     for i := range prereqs {
         prereqs[i].mutex.Lock()
         // needs to be built?
+        u.updateTimestamp()
         if !prereqs[i].exists || e.r.attributes.virtual || rebuildall || (u.exists && u.t.Before(prereqs[i].t)) {
             switch prereqs[i].status {
             case nodeStatusReady:
@@ -143,12 +144,7 @@ func mkNode(g *graph, u *node) {
     e.r.mutex.Unlock()
     // wait until all the prereqs are built
-    //fmt.Printf("%s: %d\n", u.name, pending)
     for pending > 0 {
-        //for i := range prereqs {
-        //fmt.Println(prereqs[i].name)
-        //}
         s := <-prereqstat
         pending--
         if s == nodeStatusFailed {
@@ -164,8 +160,6 @@ func mkNode(g *graph, u *node) {
         }
         finishSubproc()
     }
-    //mkPrintSuccess("finished mking " + u.name)
 }
 func mkError(msg string) {
@@ -177,7 +171,7 @@ func mkPrintError(msg string) {
     if !nocolor {
         os.Stderr.WriteString(ansiTermRed)
     }
-    fmt.Fprintf(os.Stderr, "mk: %s\n", msg)
+    fmt.Fprintf(os.Stderr, "%s", msg)
     if !nocolor {
         os.Stderr.WriteString(ansiTermDefault)
     }
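mkPrintError wraps its output in color codes unless color is disabled; the change above drops the hard-coded "mk: " prefix and newline so callers control the exact text. A small standalone sketch of the same color-guard pattern, with hypothetical names and the standard ANSI red/reset escape codes rather than the repository's `ansiTermRed`/`ansiTermDefault` constants:

    package main

    import (
        "fmt"
        "os"
    )

    const (
        ansiRed     = "\x1b[31m"
        ansiDefault = "\x1b[0m"
    )

    // printError writes msg to stderr, in red unless nocolor is set.
    func printError(msg string, nocolor bool) {
        if !nocolor {
            os.Stderr.WriteString(ansiRed)
        }
        fmt.Fprintf(os.Stderr, "mk: %s\n", msg)
        if !nocolor {
            os.Stderr.WriteString(ansiDefault)
        }
    }

    func main() {
        printError("no rule to make target 'foo'", false)
    }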


@@ -5,9 +5,10 @@ package main
 import (
     "fmt"
-    "os"
     "regexp"
     "strings"
+    "io/ioutil"
+    "os"
 )
 type parser struct {
@@ -19,10 +20,10 @@ type parser struct {
 // Pretty errors.
 func (p *parser) parseError(context string, expected string, found token) {
-    fmt.Fprintf(os.Stderr, "%s:%d: syntax error: ", p.name, found.line)
-    fmt.Fprintf(os.Stderr, "while %s, expected %s but found \"%s\".\n",
-        context, expected, found.String())
-    os.Exit(1)
+    mkPrintError(fmt.Sprintf("%s:%d: syntax error: ", p.name, found.line))
+    mkPrintError(fmt.Sprintf("while %s, expected %s but found '%s'.\n",
+        context, expected, found.String()))
+    mkError("")
 }
 // More basic errors.
@@ -31,9 +32,7 @@ func (p *parser) basicErrorAtToken(what string, found token) {
 }
 func (p *parser) basicErrorAtLine(what string, line int) {
-    fmt.Fprintf(os.Stderr, "%s:%d: syntax error: %s\n",
-        p.name, line, what)
-    os.Exit(1)
+    mkError(fmt.Sprintf("%s:%d: syntax error: %s\n", p.name, line, what))
 }
 // Accept a token for use in the current statement being parsed.
@@ -66,8 +65,7 @@ func parseInto(input string, name string, rules *ruleSet) {
     state := parseTopLevel
     for t := range tokens {
         if t.typ == tokenError {
-            // TODO: fancier error messages
-            fmt.Fprintf(os.Stderr, "Error: %s", l.errmsg)
+            p.basicErrorAtLine(l.errmsg, t.line)
             break
         }
@@ -137,7 +135,7 @@ func parsePipeInclude(p *parser, t token) parserStateFun {
         p.tokenbuf = append(p.tokenbuf, t)
     default:
-        // TODO: Complain about unexpected tokens.
+        p.parseError("parsing piped include", "a shell command", t)
     }
     return parsePipeInclude
@@ -147,15 +145,25 @@ func parsePipeInclude(p *parser, t token) parserStateFun {
 func parseRedirInclude(p *parser, t token) parserStateFun {
     switch t.typ {
     case tokenNewline:
-        // TODO:
-        // Open the file, read its context, call parseInto recursively.
-        // Clear out p.tokenbuf
+        filename := ""
+        for i := range p.tokenbuf {
+            filename += p.tokenbuf[i].val
+        }
+        file, err := os.Open(filename)
+        if err != nil {
+            p.basicErrorAtToken(fmt.Sprintf("cannot open %s", filename), p.tokenbuf[0])
+        }
+        input, _ := ioutil.ReadAll(file)
+        parseInto(string(input), filename, p.rules)
+        p.clear()
+        return parseTopLevel
     case tokenWord:
-        // TODO:
+        p.tokenbuf = append(p.tokenbuf, t)
     default:
-        // TODO: Complain about unexpected tokens.
+        p.parseError("parsing include", "a file name", t)
     }
     return parseRedirInclude
@@ -182,7 +190,7 @@ func parseEqualsOrTarget(p *parser, t token) parserStateFun {
         return parseAttributesOrPrereqs
     default:
-        p.parseError("reading a a target or assignment",
+        p.parseError("reading a target or assignment",
             "'=', ':', or another target", t)
     }
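The parsing loop above (`for t := range tokens`) consumes tokens from a channel that the lexer fills. A minimal sketch of that producer/consumer arrangement, with a stand-in lexer and hypothetical types rather than the repository's:

    package main

    import "fmt"

    type token struct {
        typ int
        val string
    }

    // lex is a stand-in lexer: it emits one token per space-separated word,
    // then closes the channel so the consumer's range loop ends.
    func lex(input string, tokens chan<- token) {
        word := ""
        for _, c := range input {
            if c == ' ' {
                if word != "" {
                    tokens <- token{typ: 1, val: word}
                    word = ""
                }
                continue
            }
            word += string(c)
        }
        if word != "" {
            tokens <- token{typ: 1, val: word}
        }
        close(tokens)
    }

    func main() {
        tokens := make(chan token)
        go lex("all: main.go", tokens)
        for t := range tokens {
            fmt.Printf("parsed token %q\n", t.val)
        }
    }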


@@ -86,7 +86,7 @@ func dorecipe(target string, u *node, e *edge) bool {
             prereqs = append(prereqs, u.prereqs[i].v.name)
         }
     }
-    vars["prereqs"] = prereqs
+    vars["prereq"] = prereqs
     input := expandRecipeSigils(e.r.recipe, vars)
     sh := "sh"
@@ -113,8 +113,6 @@ func dorecipe(target string, u *node, e *edge) bool {
         true,
         false)
-    // TODO: update the timestamps of each target
     return success
 }