Source File: lex.go
Package: text/template/parse
package parse
import (
	"fmt"
	"strings"
	"unicode"
	"unicode/utf8"
)
type item struct {
	typ  itemType // The type of this item.
	pos  Pos      // The starting position, in bytes, of this item in the input string.
	val  string   // The value of this item.
	line int      // The line number at the start of this item.
}
// String returns a readable representation of the item, used in error
// messages and debugging output.
func (i item) String() string {
	switch {
	case i.typ == itemEOF:
		return "EOF"
	case i.typ == itemError:
		return i.val
	case i.typ > itemKeyword:
		return fmt.Sprintf("<%s>", i.val)
	case len(i.val) > 10:
		return fmt.Sprintf("%.10q...", i.val)
	}
	return fmt.Sprintf("%q", i.val)
}
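For illustration, this is how items render through the method above (item and the itemType constants are unexported, so the snippet only compiles inside package parse; exampleItemString is a hypothetical name, not part of this file):

// exampleItemString shows the three rendering styles of item.String.
func exampleItemString() {
	fmt.Println(item{itemIf, 0, "if", 1})                     // <if>            (keywords are wrapped in angle brackets)
	fmt.Println(item{itemText, 0, "some quite long text", 1}) // "some quite"... (values longer than 10 runes are truncated)
	fmt.Println(item{itemEOF, 0, "", 1})                      // EOF
}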
// itemType identifies the type of lex items.
type itemType int

const (
	itemError        itemType = iota // error occurred; value is text of error
	itemBool                         // boolean constant
	itemChar                         // printable ASCII character; grab bag for comma etc.
	itemCharConstant                 // character constant
	itemComment                      // comment text
	itemComplex                      // complex constant (1+2i); imaginary is just a number
	itemAssign                       // equals ('=') introducing an assignment
	itemDeclare                      // colon-equals (':=') introducing a declaration
	itemEOF
	itemField      // alphanumeric identifier starting with '.'
	itemIdentifier // alphanumeric identifier not starting with '.'
	itemLeftDelim  // left action delimiter
	itemLeftParen  // '(' inside action
	itemNumber     // simple number, including imaginary
	itemPipe       // pipe symbol
	itemRawString  // raw quoted string (includes quotes)
	itemRightDelim // right action delimiter
	itemRightParen // ')' inside action
	itemSpace      // run of spaces separating arguments
	itemString     // quoted string (includes quotes)
	itemText       // plain text
	itemVariable   // variable starting with '$', such as '$' or '$hello'
	// Keywords appear after all the rest.
	itemKeyword  // used only to delimit the keywords
	itemBlock    // block keyword
	itemDot      // the cursor, spelled '.'
	itemDefine   // define keyword
	itemElse     // else keyword
	itemEnd      // end keyword
	itemIf       // if keyword
	itemNil      // the untyped nil constant, easiest to treat as a keyword
	itemRange    // range keyword
	itemTemplate // template keyword
	itemWith     // with keyword
)
var key = map[string]itemType{
	".":        itemDot,
	"block":    itemBlock,
	"define":   itemDefine,
	"else":     itemElse,
	"end":      itemEnd,
	"if":       itemIf,
	"range":    itemRange,
	"nil":      itemNil,
	"template": itemTemplate,
	"with":     itemWith,
}
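The map doubles as a keyword test: every keyword's itemType is declared after itemKeyword, and a failed lookup yields the zero itemType (itemError), so lexIdentifier below can classify a word with a single comparison. A minimal sketch of that trick (isKeyword is a hypothetical helper, not part of this file):

// isKeyword reports whether word is one of the template keywords in key.
// A map miss returns itemError (the zero value), which sorts before itemKeyword.
func isKeyword(word string) bool {
	return key[word] > itemKeyword
}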
const eof = -1
const (
	spaceChars    = " \t\r\n"  // These are the space characters defined by Go itself.
	trimMarker    = '-'        // Attached to left/right delimiter, trims trailing spaces from preceding/following text.
	trimMarkerLen = Pos(1 + 1) // marker plus space before or after
)
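These constants implement the "{{- " and " -}}" trim markers documented for text/template: the marker must be attached to the delimiter and separated from the action by a space. A self-contained example of the observable behavior, using only the public API:

package main

import (
	"os"
	"text/template"
)

func main() {
	// "{{- " trims the spaces before the action and " -}}" trims the spaces
	// after it, so nothing separates "a", the printed 1, and "b".
	t := template.Must(template.New("trim").Parse("a  {{- print 1 -}}  b"))
	t.Execute(os.Stdout, nil) // prints: a1b
}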
// lexer holds the state of the scanner.
type lexer struct {
	name        string    // the name of the input; used only for error reports
	input       string    // the string being scanned
	leftDelim   string    // start of action
	rightDelim  string    // end of action
	emitComment bool      // emit itemComment tokens
	pos         Pos       // current position in the input
	start       Pos       // start position of this item
	width       Pos       // width of last rune read from input
	items       chan item // channel of scanned items
	parenDepth  int       // nesting depth of ( ) exprs
	line        int       // 1+number of newlines seen
	startLine   int       // start line of this item
}
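The state functions in the rest of this file rely on a number of small lexer methods (next, backup, peek, emit, ignore, accept, acceptRun, errorf, atRightDelim) and on package-level helpers such as isSpace, isAlphaNumeric, hasLeftTrimMarker, hasRightTrimMarker, leftTrimLength and rightTrimLength, none of which appear in this listing. The following is a minimal sketch of three of them, consistent with the channel-based struct above; the exact upstream bodies may differ.

// next returns the next rune in the input (or eof) and keeps the line count up to date.
func (l *lexer) next() rune {
	if int(l.pos) >= len(l.input) {
		l.width = 0
		return eof
	}
	r, w := utf8.DecodeRuneInString(l.input[l.pos:])
	l.width = Pos(w)
	l.pos += l.width
	if r == '\n' {
		l.line++
	}
	return r
}

// emit passes the pending token to the parser over the items channel and
// resets the start position for the next token.
func (l *lexer) emit(t itemType) {
	l.items <- item{t, l.start, l.input[l.start:l.pos], l.startLine}
	l.start = l.pos
	l.startLine = l.line
}

// atRightDelim reports whether the input is at a right delimiter, possibly
// preceded by a trim marker (" -}}").
func (l *lexer) atRightDelim() (delim, trimSpaces bool) {
	if hasRightTrimMarker(l.input[l.pos:]) && strings.HasPrefix(l.input[l.pos+trimMarkerLen:], l.rightDelim) {
		return true, true
	}
	if strings.HasPrefix(l.input[l.pos:], l.rightDelim) {
		return true, false
	}
	return false, false
}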
// stateFn represents the state of the scanner as a function that returns the next state.
type stateFn func(*lexer) stateFn

// run drives the state machine: starting at lexText, each state function
// returns the next one until a state returns nil, then the items channel is closed.
func (l *lexer) run() {
	for state := lexText; state != nil; {
		state = state(l)
	}
	close(l.items)
}

const (
	leftDelim    = "{{"
	rightDelim   = "}}"
	leftComment  = "/*"
	rightComment = "*/"
)
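These are only the default delimiters; the lexer's leftDelim and rightDelim fields come from the template's configuration. A short, self-contained example of changing them through the public text/template API (shown for illustration; it exercises this lexer only indirectly):

package main

import (
	"os"
	"text/template"
)

func main() {
	// Delims replaces the default "{{" and "}}"; the lexer then scans for the
	// custom markers instead.
	t := template.Must(template.New("d").Delims("<<", ">>").Parse("value: <<.>>\n"))
	t.Execute(os.Stdout, 42) // prints: value: 42
}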
// lexText scans until an opening action delimiter, "{{".
func lexText(l *lexer) stateFn {
	l.width = 0
	if x := strings.Index(l.input[l.pos:], l.leftDelim); x >= 0 {
		ldn := Pos(len(l.leftDelim))
		l.pos += Pos(x)
		trimLength := Pos(0)
		if hasLeftTrimMarker(l.input[l.pos+ldn:]) {
			trimLength = rightTrimLength(l.input[l.start:l.pos])
		}
		l.pos -= trimLength
		if l.pos > l.start {
			l.line += strings.Count(l.input[l.start:l.pos], "\n")
			l.emit(itemText)
		}
		l.pos += trimLength
		l.ignore()
		return lexLeftDelim
	}
	// No more actions: emit any trailing text, then EOF.
	l.pos = Pos(len(l.input))
	if l.pos > l.start {
		l.line += strings.Count(l.input[l.start:l.pos], "\n")
		l.emit(itemText)
	}
	l.emit(itemEOF)
	return nil
}
// lexLeftDelim scans the left delimiter, which is known to be present,
// possibly with a trim marker.
func lexLeftDelim(l *lexer) stateFn {
	l.pos += Pos(len(l.leftDelim))
	trimSpace := hasLeftTrimMarker(l.input[l.pos:])
	afterMarker := Pos(0)
	if trimSpace {
		afterMarker = trimMarkerLen
	}
	if strings.HasPrefix(l.input[l.pos+afterMarker:], leftComment) {
		l.pos += afterMarker
		l.ignore()
		return lexComment
	}
	l.emit(itemLeftDelim)
	l.pos += afterMarker
	l.ignore()
	l.parenDepth = 0
	return lexInsideAction
}
// lexComment scans a comment. The left comment marker is known to be present.
func lexComment(l *lexer) stateFn {
	l.pos += Pos(len(leftComment))
	i := strings.Index(l.input[l.pos:], rightComment)
	if i < 0 {
		return l.errorf("unclosed comment")
	}
	l.pos += Pos(i + len(rightComment))
	delim, trimSpace := l.atRightDelim()
	if !delim {
		return l.errorf("comment ends before closing delimiter")
	}
	if l.emitComment {
		l.emit(itemComment)
	}
	if trimSpace {
		l.pos += trimMarkerLen
	}
	l.pos += Pos(len(l.rightDelim))
	if trimSpace {
		l.pos += leftTrimLength(l.input[l.pos:])
	}
	l.ignore()
	return lexText
}
// lexRightDelim scans the right delimiter, which is known to be present,
// possibly with a trim marker.
func lexRightDelim(l *lexer) stateFn {
	trimSpace := hasRightTrimMarker(l.input[l.pos:])
	if trimSpace {
		l.pos += trimMarkerLen
		l.ignore()
	}
	l.pos += Pos(len(l.rightDelim))
	l.emit(itemRightDelim)
	if trimSpace {
		l.pos += leftTrimLength(l.input[l.pos:])
		l.ignore()
	}
	return lexText
}
// lexInsideAction scans the elements inside action delimiters.
// Either number, quoted string, or identifier.
// Spaces separate arguments; runs of spaces turn into itemSpace.
// Pipe symbols separate and are emitted.
func lexInsideAction(l *lexer) stateFn {
	delim, _ := l.atRightDelim()
	if delim {
		if l.parenDepth == 0 {
			return lexRightDelim
		}
		return l.errorf("unclosed left paren")
	}
	switch r := l.next(); {
	case r == eof:
		return l.errorf("unclosed action")
	case isSpace(r):
		l.backup() // Put space back in case we have " -}}".
		return lexSpace
	case r == '=':
		l.emit(itemAssign)
	case r == ':':
		if l.next() != '=' {
			return l.errorf("expected :=")
		}
		l.emit(itemDeclare)
	case r == '|':
		l.emit(itemPipe)
	case r == '"':
		return lexQuote
	case r == '`':
		return lexRawQuote
	case r == '$':
		return lexVariable
	case r == '\'':
		return lexChar
	case r == '.':
		// Special look-ahead for ".field" so we don't break l.backup().
		if l.pos < Pos(len(l.input)) {
			r := l.input[l.pos]
			if r < '0' || '9' < r {
				return lexField
			}
		}
		fallthrough // '.' can start a number.
	case r == '+' || r == '-' || ('0' <= r && r <= '9'):
		l.backup()
		return lexNumber
	case isAlphaNumeric(r):
		l.backup()
		return lexIdentifier
	case r == '(':
		l.emit(itemLeftParen)
		l.parenDepth++
	case r == ')':
		l.emit(itemRightParen)
		l.parenDepth--
		if l.parenDepth < 0 {
			return l.errorf("unexpected right paren %#U", r)
		}
	case r <= unicode.MaxASCII && unicode.IsPrint(r):
		l.emit(itemChar)
	default:
		return l.errorf("unrecognized character in action: %#U", r)
	}
	return lexInsideAction
}
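As a worked illustration of the dispatch above, lexing the input `Hi {{.Name | printf "%q"}}!` produces roughly the following item stream (a sketch; positions and line numbers are omitted):

// itemText       "Hi "
// itemLeftDelim  "{{"
// itemField      ".Name"
// itemSpace      " "
// itemPipe       "|"
// itemSpace      " "
// itemIdentifier "printf"
// itemSpace      " "
// itemString     "\"%q\""
// itemRightDelim "}}"
// itemText       "!"
// itemEOF        ""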
// lexSpace scans a run of space characters. The first space has been seen
// but backed up. Take care: a trim-marked right delimiter, " -}}", also
// starts with a space.
func lexSpace(l *lexer) stateFn {
	var numSpaces int
	for isSpace(l.peek()) {
		l.next()
		numSpaces++
	}
	// Be careful about a trim-marked closing delimiter, which has a minus after a space.
	if hasRightTrimMarker(l.input[l.pos-1:]) && strings.HasPrefix(l.input[l.pos-1+trimMarkerLen:], l.rightDelim) {
		l.backup() // Before the space.
		if numSpaces == 1 {
			return lexRightDelim // On the delim, so go right to that.
		}
	}
	l.emit(itemSpace)
	return lexInsideAction
}
// lexIdentifier scans an alphanumeric word: a keyword, boolean, field, or identifier.
func lexIdentifier(l *lexer) stateFn {
Loop:
	for {
		switch r := l.next(); {
		case isAlphaNumeric(r):
			// absorb.
		default:
			l.backup()
			word := l.input[l.start:l.pos]
			if !l.atTerminator() {
				return l.errorf("bad character %#U", r)
			}
			switch {
			case key[word] > itemKeyword:
				l.emit(key[word])
			case word[0] == '.':
				l.emit(itemField)
			case word == "true", word == "false":
				l.emit(itemBool)
			default:
				l.emit(itemIdentifier)
			}
			break Loop
		}
	}
	return lexInsideAction
}
// lexField scans a field: .Alphanumeric.
// The . has been scanned.
func lexField(l *lexer) stateFn {
	return lexFieldOrVariable(l, itemField)
}
// lexVariable scans a variable: $Alphanumeric.
// The $ has been scanned.
func lexVariable(l *lexer) stateFn {
	if l.atTerminator() { // Nothing interesting follows -> "$".
		l.emit(itemVariable)
		return lexInsideAction
	}
	return lexFieldOrVariable(l, itemVariable)
}
// lexFieldOrVariable scans a field or variable: [.$]Alphanumeric.
// The . or $ has been scanned.
func lexFieldOrVariable(l *lexer, typ itemType) stateFn {
	if l.atTerminator() { // Nothing interesting follows -> "." or "$".
		if typ == itemVariable {
			l.emit(itemVariable)
		} else {
			l.emit(itemDot)
		}
		return lexInsideAction
	}
	var r rune
	for {
		r = l.next()
		if !isAlphaNumeric(r) {
			l.backup()
			break
		}
	}
	if !l.atTerminator() {
		return l.errorf("bad character %#U", r)
	}
	l.emit(typ)
	return lexInsideAction
}
// atTerminator reports whether the input is at a valid termination character
// for an identifier: a space or end of line, EOF, certain punctuation, or the
// start of the right delimiter.
func (l *lexer) atTerminator() bool {
	r := l.peek()
	if isSpace(r) || isEndOfLine(r) {
		return true
	}
	switch r {
	case eof, '.', ',', '|', ':', ')', '(':
		return true
	}
	// Does r start the right delimiter?
	if rd, _ := utf8.DecodeRuneInString(l.rightDelim); rd == r {
		return true
	}
	return false
}
// lexChar scans a character constant. The initial quote is already scanned.
// Syntax checking is done by the parser.
func lexChar(l *lexer) stateFn {
Loop:
	for {
		switch l.next() {
		case '\\':
			if r := l.next(); r != eof && r != '\n' {
				break
			}
			fallthrough
		case eof, '\n':
			return l.errorf("unterminated character constant")
		case '\'':
			break Loop
		}
	}
	l.emit(itemCharConstant)
	return lexInsideAction
}
// lexNumber scans a number: decimal, octal, hex, float, or imaginary. This
// isn't a perfect number scanner; when it's wrong the input is invalid and
// the parser (via strconv) will notice.
func lexNumber(l *lexer) stateFn {
	if !l.scanNumber() {
		return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
	}
	if sign := l.peek(); sign == '+' || sign == '-' {
		// Complex: 1+2i. No spaces, must end in 'i'.
		if !l.scanNumber() || l.input[l.pos-1] != 'i' {
			return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
		}
		l.emit(itemComplex)
	} else {
		l.emit(itemNumber)
	}
	return lexInsideAction
}
:= "0123456789_"
if .accept("xX") {
= "0123456789abcdefABCDEF_"
} else if .accept("oO") {
= "01234567_"
} else if .accept("bB") {
= "01_"
}
}
.acceptRun()
if .accept(".") {
.acceptRun()
}
if len() == 10+1 && .accept("eE") {
.accept("+-")
.acceptRun("0123456789_")
}
if len() == 16+6+1 && .accept("pP") {
.accept("+-")
.acceptRun("0123456789_")
if isAlphaNumeric(.peek()) {
.next()
return false
}
return true
}
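For reference, the acceptance rules above admit (at least) the following literal forms; anything that slips through incorrectly is rejected later by the parser via strconv:

// Decimal:    7, 1_000
// Hex:        0x1f
// Octal:      0o17
// Binary:     0b1010
// Float:      3.14, 1e3, 6.02e23   (the 'e' exponent is accepted only for decimal digits)
// Hex float:  0x1p-2               (the 'p' exponent is accepted only after 0x/0X)
// Imaginary:  2i, 1e3i             (lexNumber combines two scans into itemComplex, e.g. 1+2i)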
// lexQuote scans a quoted string.
func lexQuote(l *lexer) stateFn {
Loop:
	for {
		switch l.next() {
		case '\\':
			if r := l.next(); r != eof && r != '\n' {
				break
			}
			fallthrough
		case eof, '\n':
			return l.errorf("unterminated quoted string")
		case '"':
			break Loop
		}
	}
	l.emit(itemString)
	return lexInsideAction
}
// lexRawQuote scans a raw quoted string.
func lexRawQuote(l *lexer) stateFn {
Loop:
	for {
		switch l.next() {
		case eof:
			return l.errorf("unterminated raw quoted string")
		case '`':
			break Loop
		}
	}
	l.emit(itemRawString)
	return lexInsideAction
}
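When a state function calls errorf, the resulting itemError reaches the parser and surfaces as an ordinary parse error. A small end-to-end example via the public API (the exact error wording may vary between Go versions):

package main

import (
	"fmt"
	"text/template"
)

func main() {
	// The backquoted string is never closed, so lexRawQuote reports
	// "unterminated raw quoted string" and Parse returns an error.
	_, err := template.New("bad").Parse("{{`oops}}")
	fmt.Println(err)
}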