Skip to content

Commit

Permalink
Merge pull request #5 from qiniu/develop
Browse files Browse the repository at this point in the history
#3 linux shell; #4 generator
  • Loading branch information
xushiwei committed Apr 19, 2016
2 parents 77b9f16 + 48ef778 commit a931811
Show file tree
Hide file tree
Showing 10 changed files with 912 additions and 15 deletions.
2 changes: 1 addition & 1 deletion tpl.v1/compiler_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ factor =
}

compiler := &Compiler{
Grammar: grammar,
Grammar: []byte(grammar),
Marker: marker,
}
m, err := compiler.Cl()
Expand Down
229 changes: 229 additions & 0 deletions tpl.v1/generator/static_parser_gen.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,229 @@
package generator

import (
	"fmt"
	"sort"
	"strings"

	"qiniupkg.com/text/tpl.v1"
)

// GenStaticCode parses the grammar described by source and generates the Go
// source text of a StaticCompiler type whose Cl() method rebuilds the same
// grammar statically (without parsing it again at runtime).
//
// It returns tpl.ErrNoDoc when the grammar defines no "doc" rule, or the
// match error when source is not a valid grammar.
func GenStaticCode(source string) (string, error) {
	// stk holds Go source fragments; each grammar action below combines or
	// rewrites the topmost fragments, mirroring the grammar's structure.
	var stk []string
	grammars := make(map[string]string) // rule name -> generated expression
	vars := make(map[string]string)     // forward-referenced vars -> generated assignment
	scanner := new(tpl.AutoKwScanner)

	// list: a % b  =>  tpl.List(a, b)
	list := func(tokens []tpl.Token, g tpl.Grammar) {
		n := len(stk)
		a, b := stk[n-2], stk[n-1]
		stk[n-2] = fmt.Sprintf("tpl.List(%s,%s)", a, b)
		stk = stk[:n-1]
	}
	// list0: a %= b  =>  tpl.List0(a, b)
	list0 := func(tokens []tpl.Token, g tpl.Grammar) {
		n := len(stk)
		a, b := stk[n-2], stk[n-1]
		stk[n-2] = fmt.Sprintf("tpl.List0(%s,%s)", a, b)
		stk = stk[:n-1]
	}

	// mark: a/ident  =>  cmplr.Marker(a, "ident")
	mark := func(tokens []tpl.Token, g tpl.Grammar) {
		n := len(stk)
		a := stk[n-1]
		stk[n-1] = fmt.Sprintf("cmplr.Marker(%s,\"%s\")", a, tokens[0].Literal)
	}
	// and: juxtaposition of m terms  =>  tpl.And(t1, ..., tm)
	and := func(tokens []tpl.Token, g tpl.Grammar) {
		m := g.Len()
		if m == 1 {
			return // single term: nothing to combine
		}
		n := len(stk)
		stk[n-m] = fmt.Sprintf("tpl.And(%s)", strings.Join(stk[n-m:], ","))
		stk = stk[:n-m+1]
	}
	// or: e1 | ... | em  =>  tpl.Or(e1, ..., em)
	or := func(tokens []tpl.Token, g tpl.Grammar) {
		m := g.Len()
		if m == 1 {
			return // single alternative: nothing to combine
		}
		n := len(stk)
		stk[n-m] = fmt.Sprintf("tpl.Or(%s)", strings.Join(stk[n-m:], ","))
		stk = stk[:n-m+1]
	}
	// assign: name = expr ;  — records the rule, resolving forward references
	// (vars) created by earlier uses of the name.
	assign := func(tokens []tpl.Token, g tpl.Grammar) {
		n := len(stk)
		a := stk[n-1]
		name := tokens[0].Literal
		if _, ok := vars[name]; ok {
			vars[name] = fmt.Sprintf("%s.Assign(%s)", name, a)
		} else if _, ok := grammars[name]; ok {
			panic("grammar already exists: " + name)
		} else {
			grammars[name] = a
		}
		stk = stk[:n-1]
	}
	// ident: a reference to a token kind (uppercase) or to a rule.
	ident := func(tokens []tpl.Token, g tpl.Grammar) {
		name := tokens[0].Literal
		ch := name[0]
		var s string
		if ch >= 'A' && ch <= 'Z' {
			// Uppercase identifiers name builtin token kinds (IDENT, INT, ...).
			// name starts with a letter here, so it can never begin with a
			// quote; the generated reference is always tpl.<NAME>.
			tok := scanner.Ltot(name)
			if tok == tpl.ILLEGAL {
				panic("illegal token: " + name)
			}
			s = fmt.Sprintf("tpl.Gr(tpl.%s)", name)
		} else {
			// Lowercase identifiers reference rules; unknown names become
			// forward references resolved later by assign.
			_, ok := grammars[name]
			if ok {
				s = name
			} else if s, ok = vars[name]; !ok {
				if name == "true" {
					vars[name] = "tpl.GrTrue"
					s = "tpl.GrTrue"
				} else {
					vars[name] = name
					s = name
				}
			}
		}
		stk = append(stk, s)
	}
	// gr: a character literal token, e.g. '+'  =>  tpl.Gr('+')
	gr := func(tokens []tpl.Token, g tpl.Grammar) {
		name := tokens[0].Literal
		tok := scanner.Ltot(name)
		if tok == tpl.ILLEGAL {
			panic("illegal token: " + name)
		}
		stk = append(stk, fmt.Sprintf("tpl.Gr(%s)", name))
	}
	// grString: a string literal  =>  tpl.Gr(tpl.STRING)
	grString := func(tokens []tpl.Token, g tpl.Grammar) {
		name := tokens[0].Literal
		tok := scanner.Ltot(name)
		if tok == tpl.ILLEGAL {
			panic("illegal token: " + name)
		}
		stk = append(stk, "tpl.Gr(tpl.STRING)")
	}
	// grTrue: the literal 1  =>  tpl.GrTrue (always-matching grammar)
	grTrue := func(tokens []tpl.Token, g tpl.Grammar) {
		if tokens[0].Literal != "1" {
			panic("illegal token: " + tokens[0].Literal)
		}
		stk = append(stk, "tpl.GrTrue")
	}

	// grNil: the '!' placeholder  =>  nil alternative
	grNil := func(tokens []tpl.Token, g tpl.Grammar) {
		stk = append(stk, "nil")
	}

	// repeat0: *a  =>  tpl.Repeat0(a)
	repeat0 := func(tokens []tpl.Token, g tpl.Grammar) {
		n := len(stk)
		a := stk[n-1]
		stk[n-1] = fmt.Sprintf("tpl.Repeat0(%s)", a)
	}

	// repeat1: +a  =>  tpl.Repeat1(a)
	repeat1 := func(tokens []tpl.Token, g tpl.Grammar) {
		n := len(stk)
		a := stk[n-1]
		// BUG FIX: was "tpl.tpl.Repeat1(%s)", which generated code that
		// does not compile.
		stk[n-1] = fmt.Sprintf("tpl.Repeat1(%s)", a)
	}

	// repeat01: ?a  =>  tpl.Repeat01(a)
	repeat01 := func(tokens []tpl.Token, g tpl.Grammar) {
		n := len(stk)
		a := stk[n-1]
		stk[n-1] = fmt.Sprintf("tpl.Repeat01(%s)", a)
	}

	// not: ~a  =>  tpl.Not(a)
	not := func(tokens []tpl.Token, g tpl.Grammar) {
		n := len(stk)
		a := stk[n-1]
		stk[n-1] = fmt.Sprintf("tpl.Not(%s)", a)
	}

	// peek: @a  =>  tpl.Peek(a)
	peek := func(tokens []tpl.Token, g tpl.Grammar) {
		n := len(stk)
		a := stk[n-1]
		stk[n-1] = fmt.Sprintf("tpl.Peek(%s)", a)
	}

	// The meta-grammar of grammar files themselves.
	factor := tpl.Var("factor")

	term := tpl.And(factor, tpl.Repeat0(tpl.Or(
		tpl.And(tpl.Gr('%'), tpl.Action(factor, list)),
		tpl.And(tpl.Gr(tpl.REM_ASSIGN), tpl.Action(factor, list0)),
		tpl.And(tpl.Gr('/'), tpl.Action(tpl.Gr(tpl.IDENT), mark)),
	)))

	expr := tpl.Action(tpl.Repeat1(tpl.Or(term, tpl.Action(tpl.Gr('!'), grNil))), and)

	grammar := tpl.Action(tpl.List(expr, tpl.Gr('|')), or)

	doc := tpl.Repeat1(
		tpl.Action(tpl.And(tpl.Gr(tpl.IDENT), tpl.Gr('='), grammar, tpl.Gr(';')), assign),
	)

	factor.Assign(tpl.Or(
		tpl.Action(tpl.Gr(tpl.IDENT), ident),
		tpl.Action(tpl.Gr(tpl.CHAR), gr),
		tpl.Action(tpl.Gr(tpl.STRING), grString),
		tpl.Action(tpl.Gr(tpl.INT), grTrue),
		tpl.And(tpl.Gr('*'), tpl.Action(factor, repeat0)),
		tpl.And(tpl.Gr('+'), tpl.Action(factor, repeat1)),
		tpl.And(tpl.Gr('?'), tpl.Action(factor, repeat01)),
		tpl.And(tpl.Gr('~'), tpl.Action(factor, not)),
		tpl.And(tpl.Gr('@'), tpl.Action(factor, peek)),
		tpl.And(tpl.Gr('('), grammar, tpl.Gr(')')),
	))

	m := &tpl.Matcher{
		Grammar:  doc,
		Scanner:  scanner,
		ScanMode: tpl.InsertSemis,
	}
	err := m.MatchExactly([]byte(source), "")
	if err != nil {
		return "", err
	}
	var (
		declarations string = "var (\n"
		assignments  string
	)
	// Emit in sorted key order so the generated source is deterministic
	// (map iteration order is random).
	varNames := make([]string, 0, len(vars))
	for k := range vars {
		varNames = append(varNames, k)
	}
	sort.Strings(varNames)
	for _, k := range varNames {
		declarations += fmt.Sprintf("%s = new(tpl.GrVar)\n", k)
		assignments += fmt.Sprintf("// GrVar %s\n%s\n", k, vars[k])
	}
	ruleNames := make([]string, 0, len(grammars))
	for k := range grammars {
		ruleNames = append(ruleNames, k)
	}
	sort.Strings(ruleNames)
	for _, k := range ruleNames {
		declarations += fmt.Sprintf("%s = new(tpl.GrNamed)\n", k)
		assignments += fmt.Sprintf("// GrNamed %s\n%s.Assign(\"%s\", %s)\n", k, k, k, grammars[k])
	}
	declarations += ")\n"
	if _, ok := grammars["doc"]; ok {
		output := fmt.Sprintf("\n"+
			"type StaticCompiler struct {\n"+
			" Grammar string\n"+
			" Marker func(g tpl.Grammar, mark string) tpl.Grammar\n"+
			" Init func()\n"+
			" Scanner tpl.Tokener\n"+
			" ScanMode tpl.ScanMode\n"+
			"}\n"+
			"func(cmplr *StaticCompiler)Cl()tpl.CompileRet{\n"+
			"%s"+
			"%s"+
			"ret := tpl.CompileRet{}\n"+
			"ret.Matcher = &tpl.Matcher{\n"+
			" Grammar: doc,\n"+
			" Scanner: cmplr.Scanner,\n"+
			" Init: cmplr.Init,\n"+
			" }\n"+
			"return ret}\n",
			declarations,
			assignments,
		)
		return output, nil
	}
	return "", tpl.ErrNoDoc
}
17 changes: 11 additions & 6 deletions tpl.v1/grammar.go
Original file line number Diff line number Diff line change
Expand Up @@ -630,29 +630,34 @@ func Var(name string) *GrVar {

// -----------------------------------------------------------------------------

type grNamed struct {
type GrNamed struct {
g Grammar
name string
}

func (p *grNamed) Len() int {
func (p *GrNamed) Len() int {

return -1
}

func (p *grNamed) Match(src []Token, ctx Context) (n int, err error) {
func (p *GrNamed) Match(src []Token, ctx Context) (n int, err error) {

return p.g.Match(src, ctx)
}

func (p *grNamed) Marshal(b []byte, t Tokener, lvlParent int) []byte {
func (p *GrNamed) Marshal(b []byte, t Tokener, lvlParent int) []byte {

return append(b, p.name...)
}

func Named(name string, g Grammar) Grammar {
func (p *GrNamed) Assign(name string, g Grammar) {
p.name = name
p.g = g
}

func Named(name string, g Grammar) *GrNamed {

return &grNamed{g, name}
return &GrNamed{g, name}
}

// -----------------------------------------------------------------------------
Expand Down
2 changes: 1 addition & 1 deletion tpl.v1/number/number.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import (
)

// -----------------------------------------------------------------------------

//go:generate tplgen -i -g Grammar -f static_interpreter.go
const Grammar = `
term = factor *('*' factor/mul | '/' factor/quo | '%' factor/mod)
Expand Down
45 changes: 41 additions & 4 deletions tpl.v1/scanner.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,15 @@ import (
"unicode/utf8"
)

// A Token is a lexical unit returned by Scan.
//
type Token struct {
Kind uint // token kind code (e.g. COMMENT, SEMICOLON — see the kind constants used by Scan)
Pos token.Pos // position of the token's first character
Literal string // literal text of the token, when it carries one
}

// An ErrorHandler may be provided to Scanner.Init. If a syntax error is
// A ScanErrorHandler may be provided to Scanner.Init. If a syntax error is
// encountered and a handler was installed, the handler is called with a
// position and an error message. The position points to the beginning of
// the offending token.
Expand Down Expand Up @@ -83,14 +85,17 @@ func (s *Scanner) next() {
}
}

// A ScanMode value is a set of flags (or 0).
// They control scanner behavior.
//
type ScanMode uint

const (
	// ScanComments means returning comments as COMMENT tokens
	ScanComments ScanMode = 1 << iota

	// InsertSemis means automatically insert semicolons
	InsertSemis
)

// Init prepares the scanner s to tokenize the text src by setting the
Expand Down Expand Up @@ -453,6 +458,21 @@ func (s *Scanner) scanRune() string {
return string(s.src[offs:s.offset])
}

// scanSharpComment consumes a '#'-to-end-of-line comment and returns its
// text including the leading '#', which has already been read.
func (s *Scanner) scanSharpComment() string {
	start := s.offset - 1 // step back to include the consumed '#'

	for s.ch != '\n' && s.ch >= 0 { // negative s.ch marks end of input
		s.next()
	}

	return string(s.src[start:s.offset])
}

func (s *Scanner) scanString() string {
// '"' opening already consumed
offs := s.offset - 1
Expand Down Expand Up @@ -707,6 +727,21 @@ scanAgain:
} else {
t.Kind = s.switch2(QUO, QUO_ASSIGN)
}
case '#':
if s.insertSemi {
s.ch = '#'
s.offset = s.file.Offset(t.Pos)
s.rdOffset = s.offset + 1
s.insertSemi = false
t.Kind, t.Literal = SEMICOLON, "\n"
return
}
comment := s.scanSharpComment()
if s.mode&ScanComments == 0 { // skip comment
goto scanAgain
}
t.Kind = COMMENT
t.Literal = comment
case '%':
t.Kind = s.switch2(REM, REM_ASSIGN)
case '^':
Expand Down Expand Up @@ -756,6 +791,8 @@ scanAgain:
return
}

// Source returns the scanning source.
//
func (s *Scanner) Source() TokenSource {

return TokenSource{s.file, s.src}
Expand Down
Loading

0 comments on commit a931811

Please sign in to comment.