Fix a number of things.

This commit is contained in:
Daniel Jones 2013-03-09 20:54:13 -08:00
parent 6e0d8979be
commit 8cbbe9c53c
7 changed files with 105 additions and 61 deletions

View file

@ -1,14 +1,28 @@
# Mk
Mk is a reboot of the Plan 9 mk command, which itself is a replacement for make.
This tool is for anyone who loves make, but hates all its stupid bullshit.
Mk is a reboot of the Plan 9 mk command, which itself is [a successor to
make]($). This tool is for anyone who loves make, but hates all its stupid
bullshit.
# Installation
1. Install Go.
2. Run `go install github.com/dcjones/mk`
# Why Plan 9 mk is better than make
Plan 9 mk blows make out of the water. Yet tragically, few use or have even heard
of it. Put simply, mk takes make, keeps its simple direct syntax, but fixes
basically everything that's annoyed you over the years. To name a few things:
Way back in the 90s, some smart guys at Bell Labs got together and decided to
write a new operating system to replace Unix. The idea was to keep everything that
was great about Unix, but totally disregard backwards compatibility in a quest
for something better. The operating system they designed, Plan 9, had a lot of
terrific ideas, and though some were cherry picked, the OS as a whole never
really caught on.
Among the gems in Plan 9 was a rewrite of the venerable Unix make
command, in the form of mk. Simply put, mk is make, but with a large collection
of relatively minor improvements, adding up to something more consistent,
elegant, and powerful. To name a few specifics:
1. Recipes are delimited by any indentation, not tab characters in particular.
1. Phony targets are handled separately from file targets. Your mkfile won't
@ -29,29 +43,30 @@ basically everything that's annoyed you over the years. To name a few things:
1. Variables are expanded in recipes only if they are defined. That way you
usually don't have to escape `$`.
And much more! For more, read the original mk paper: ["Mk: a successor to
make"](#).
And much more!
# Improvements over Plan 9 mk
This mk stays mostly faithful to Plan 9, but makes a few (in my opinion)
improvements.
1. A clean, modern implementation in go, that doesn't depend on the whole plan
9 for userspace stack.
1. Use go regular expressions, which are perl-like. The original mk used plan9
1. A clean, modern implementation in Go, that doesn't depend on the whole Plan
9 stack.
1. Use Go regular expressions, which are perl-like. The original mk used plan9
regex, which few people know or care to learn.
1. Allow blank lines in recipes. A recipe is any indented block of text, and
continues until a non-indented character or the end of the file.
continues until a non-indented character or the end of the file. (Similar
to blocks in Python.)
1. Add an 'S' attribute to execute recipes with programs other than sh. This
way, you don't have to separate your six line python script into its own
file. Just stick it in the mkfile.
file. Just stick it directly in the mkfile.
1. Use sh syntax for command insertion (i.e. backticks) rather than rc shell
syntax.
1. Pretty colors.
# Current State
Totally non-functional. Check back later!
Functional, but with some bugs and some unimplemented minor features. Give it a
try and see what you think!

View file

@ -154,7 +154,9 @@ func expandSigil(input string, vars map[string][]string) ([]string, int) {
varvals, ok := vars[varname]
if ok {
return varvals, offset
}
} else {
return []string{"$" + input[:offset]}, offset
}
}
return []string{"$" + input}, len(input)

View file

@ -56,10 +56,10 @@ type node struct {
flags nodeFlag // bitwise combination of node flags
}
// Create a new node
func (g *graph) newnode(name string) *node {
u := &node{name: name}
info, err := os.Stat(name)
// Update a node's timestamp and 'exists' flag.
func (u *node) updateTimestamp() {
info, err := os.Stat(u.name)
if err == nil {
u.t = info.ModTime()
u.exists = true
@ -67,11 +67,18 @@ func (g *graph) newnode(name string) *node {
} else {
_, ok := err.(*os.PathError)
if ok {
u.t = time.Unix(0, 0)
u.exists = false
} else {
mkError(err.Error())
}
}
}
// newnode allocates a node for the given target name, initializes its
// timestamp/exists state from the filesystem via updateTimestamp, and
// registers the node in the graph's name → node map before returning it.
func (g *graph) newnode(name string) *node {
	n := &node{name: name}
	n.updateTimestamp()
	g.nodes[name] = n
	return n
}
@ -103,14 +110,10 @@ func buildgraph(rs *ruleSet, target string) *graph {
// keep track of how many times each rule is visited, to avoid cycles.
rulecnt := make([]int, len(rs.rules))
g.root = applyrules(rs, g, target, rulecnt)
println("cyclecheck")
g.cyclecheck(g.root)
g.root.flags |= nodeFlagProbable
println("vacuous")
g.vacuous(g.root)
println("ambiguous")
g.ambiguous(g.root)
println("done")
return g
}
@ -192,7 +195,6 @@ func applyrules(rs *ruleSet, g *graph, target string, rulecnt []int) *node {
}
rulecnt[k] += 1
fmt.Println(rulecnt)
if len(r.prereqs) == 0 {
e := u.newedge(nil, r)
e.stem = stem

43
lex.go
View file

@ -1,17 +1,24 @@
// TODO: Backquoted strings.
// TODO: Comments
package main
import (
"strings"
"unicode/utf8"
"fmt"
)
type tokenType int
const eof rune = '\000'
// Rune's that cannot be part of a bare (unquoted) string.
const nonBareRunes = " \t\n\r\\=:#'\""
// onlyWhitespace reports whether s consists entirely of whitespace
// characters (space, tab, carriage return, newline). The empty string
// counts as whitespace-only.
//
// Note: the previous implementation (strings.IndexAny(s, " \t\r\n") < 0)
// tested for the *absence* of whitespace, the opposite of what the name
// and its caller (suppressing empty recipe tokens) require.
func onlyWhitespace(s string) bool {
	// Trimming every whitespace rune leaves "" iff nothing else was present.
	return strings.Trim(s, " \t\r\n") == ""
}
const (
tokenError tokenType = iota
tokenNewline
@ -79,7 +86,9 @@ type lexer struct {
type lexerStateFun func(*lexer) lexerStateFun
func (l *lexer) lexerror(what string) {
l.errmsg = what
if l.errmsg == "" {
l.errmsg = what
}
l.emit(tokenError)
}
@ -173,6 +182,10 @@ func (l *lexer) acceptUntil(invalid string) {
for l.pos < len(l.input) && strings.IndexRune(invalid, l.peek()) < 0 {
l.next()
}
if l.peek() == eof {
l.lexerror(fmt.Sprintf("end of file encountered while looking for one of: %s", invalid))
}
}
// Skip characters from the valid string until the next is not.
@ -189,6 +202,10 @@ func (l *lexer) skipUntil(invalid string) {
for l.pos < len(l.input) && strings.IndexRune(invalid, l.peek()) < 0 {
l.skip()
}
if l.peek() == eof {
l.lexerror(fmt.Sprintf("end of file encountered while looking for one of: %s", invalid))
}
}
// Start a new lexer to lex the given input.
@ -252,8 +269,10 @@ func lexTopLevel(l *lexer) lexerStateFun {
return lexBackQuotedWord
}
// TODO: No! The lexer can get stuck in a loop this way.
// Check if the next character is a valid bare string character. If not, error.
if strings.IndexRune(nonBareRunes, c) >= 0 {
l.lexerror(fmt.Sprintf("expected a unquoted string, but found '%c'", c))
}
return lexBareWord
}
@ -287,12 +306,17 @@ func lexInclude(l *lexer) lexerStateFun {
func lexDoubleQuotedWord(l *lexer) lexerStateFun {
l.next() // '"'
for l.peek() != '"' {
for l.peek() != '"' && l.peek() != eof {
l.acceptUntil("\\\"")
if l.accept("\\") {
l.accept("\"")
}
}
if l.peek() == eof {
l.lexerror("end of file encountered while parsing a quoted string.")
}
l.next() // '"'
return lexBareWord
}
@ -320,13 +344,14 @@ func lexRecipe(l *lexer) lexerStateFun {
}
}
// TODO: don't emit if there is only whitespace in the recipe
l.emit(tokenRecipe)
if !onlyWhitespace(l.input[l.start:l.pos]) {
l.emit(tokenRecipe)
}
return lexTopLevel
}
func lexBareWord(l *lexer) lexerStateFun {
l.acceptUntil(" \t\n\r\\=:#'\"")
l.acceptUntil(nonBareRunes)
if l.peek() == '"' {
return lexDoubleQuotedWord
} else if l.peek() == '\'' {

10
mk.go
View file

@ -128,6 +128,7 @@ func mkNode(g *graph, u *node) {
for i := range prereqs {
prereqs[i].mutex.Lock()
// needs to be built?
u.updateTimestamp()
if !prereqs[i].exists || e.r.attributes.virtual || rebuildall || (u.exists && u.t.Before(prereqs[i].t)) {
switch prereqs[i].status {
case nodeStatusReady:
@ -143,12 +144,7 @@ func mkNode(g *graph, u *node) {
e.r.mutex.Unlock()
// wait until all the prereqs are built
//fmt.Printf("%s: %d\n", u.name, pending)
for pending > 0 {
//for i := range prereqs {
//fmt.Println(prereqs[i].name)
//}
s := <-prereqstat
pending--
if s == nodeStatusFailed {
@ -164,8 +160,6 @@ func mkNode(g *graph, u *node) {
}
finishSubproc()
}
//mkPrintSuccess("finished mking " + u.name)
}
func mkError(msg string) {
@ -177,7 +171,7 @@ func mkPrintError(msg string) {
if !nocolor {
os.Stderr.WriteString(ansiTermRed)
}
fmt.Fprintf(os.Stderr, "mk: %s\n", msg)
fmt.Fprintf(os.Stderr, "%s", msg)
if !nocolor {
os.Stderr.WriteString(ansiTermDefault)
}

View file

@ -5,9 +5,10 @@ package main
import (
"fmt"
"os"
"regexp"
"strings"
"io/ioutil"
"os"
)
type parser struct {
@ -19,10 +20,10 @@ type parser struct {
// Pretty errors.
func (p *parser) parseError(context string, expected string, found token) {
fmt.Fprintf(os.Stderr, "%s:%d: syntax error: ", p.name, found.line)
fmt.Fprintf(os.Stderr, "while %s, expected %s but found \"%s\".\n",
context, expected, found.String())
os.Exit(1)
mkPrintError(fmt.Sprintf("%s:%d: syntax error: ", p.name, found.line))
mkPrintError(fmt.Sprintf("while %s, expected %s but found '%s'.\n",
context, expected, found.String()))
mkError("")
}
// More basic errors.
@ -31,9 +32,7 @@ func (p *parser) basicErrorAtToken(what string, found token) {
}
func (p *parser) basicErrorAtLine(what string, line int) {
fmt.Fprintf(os.Stderr, "%s:%d: syntax error: %s\n",
p.name, line, what)
os.Exit(1)
mkError(fmt.Sprintf("%s:%d: syntax error: %s\n", p.name, line, what))
}
// Accept a token for use in the current statement being parsed.
@ -66,8 +65,7 @@ func parseInto(input string, name string, rules *ruleSet) {
state := parseTopLevel
for t := range tokens {
if t.typ == tokenError {
// TODO: fancier error messages
fmt.Fprintf(os.Stderr, "Error: %s", l.errmsg)
p.basicErrorAtLine(l.errmsg, t.line)
break
}
@ -137,7 +135,7 @@ func parsePipeInclude(p *parser, t token) parserStateFun {
p.tokenbuf = append(p.tokenbuf, t)
default:
// TODO: Complain about unexpected tokens.
p.parseError("parsing piped include", "a shell command", t)
}
return parsePipeInclude
@ -147,15 +145,25 @@ func parsePipeInclude(p *parser, t token) parserStateFun {
func parseRedirInclude(p *parser, t token) parserStateFun {
switch t.typ {
case tokenNewline:
// TODO:
// Open the file, read its context, call parseInto recursively.
// Clear out p.tokenbuf
filename := ""
for i := range p.tokenbuf {
filename += p.tokenbuf[i].val
}
file, err := os.Open(filename)
if err != nil {
p.basicErrorAtToken(fmt.Sprintf("cannot open %s", filename), p.tokenbuf[0])
}
input, _ := ioutil.ReadAll(file)
parseInto(string(input), filename, p.rules)
p.clear()
return parseTopLevel
case tokenWord:
// TODO:
p.tokenbuf = append(p.tokenbuf, t)
default:
// TODO: Complain about unexpected tokens.
p.parseError("parsing include", "a file name", t)
}
return parseRedirInclude
@ -182,7 +190,7 @@ func parseEqualsOrTarget(p *parser, t token) parserStateFun {
return parseAttributesOrPrereqs
default:
p.parseError("reading a a target or assignment",
p.parseError("reading a target or assignment",
"'=', ':', or another target", t)
}

View file

@ -86,7 +86,7 @@ func dorecipe(target string, u *node, e *edge) bool {
prereqs = append(prereqs, u.prereqs[i].v.name)
}
}
vars["prereqs"] = prereqs
vars["prereq"] = prereqs
input := expandRecipeSigils(e.r.recipe, vars)
sh := "sh"
@ -113,8 +113,6 @@ func dorecipe(target string, u *node, e *edge) bool {
true,
false)
// TODO: update the timestamps of each target
return success
}