Source File: lexer.go
Belonging Package: github.com/jmespath/go-jmespath
package jmespath

import (
	"bytes"
	"encoding/json"
	"fmt"
	"strconv"
	"strings"
	"unicode/utf8"
)

type token struct {
	tokenType tokType
	value     string
	position  int
	length    int
}

type tokType int

const eof = -1

// Lexer contains the state used while tokenizing a JMESPath expression.
type Lexer struct {
	expression string       // The expression provided by the user.
	currentPos int          // The current position in the string.
	lastWidth  int          // The width of the most recently read rune.
	buf        bytes.Buffer // Internal buffer used for building up values.
}
// SyntaxError is the error returned whenever a lexing or parsing error occurs.
type SyntaxError struct {
	msg        string // Error message displayed to user
	Expression string // Expression that generated a SyntaxError
	Offset     int    // The location in the string where the error occurred
}

func (e SyntaxError) Error() string {
	return "SyntaxError: " + e.msg
}

// HighlightLocation shows where the syntax error occurred by placing a "^"
// on a line below the expression at the offending offset.
func (e SyntaxError) HighlightLocation() string {
	return e.Expression + "\n" + strings.Repeat(" ", e.Offset) + "^"
}
const (
tUnknown tokType = iota
tStar
tDot
tFilter
tFlatten
tLparen
tRparen
tLbracket
tRbracket
tLbrace
tRbrace
tOr
tPipe
tNumber
tUnquotedIdentifier
tQuotedIdentifier
tComma
tColon
tLT
tLTE
tGT
tGTE
tEQ
tNE
tJSONLiteral
tStringLiteral
tCurrent
tExpref
tAnd
tNot
tEOF
)
var basicTokens = map[rune]tokType{
'.': tDot,
'*': tStar,
',': tComma,
':': tColon,
'{': tLbrace,
'}': tRbrace,
']': tRbracket, // tLbracket not included because it could be "[]"
'(': tLparen,
')': tRparen,
'@': tCurrent,
}
// Bit mask for [a-zA-Z_], shifted down 64 bits so it fits in a single uint64.
// Shift the rune down by 64 before checking it against identifierStartBits.
const identifierStartBits uint64 = 576460745995190270

// Bit mask for [0-9a-zA-Z_]; 128 bits split across two uint64s.
var identifierTrailingBits = [2]uint64{287948901175001088, 576460745995190270}

var whiteSpace = map[rune]bool{
	' ': true, '\t': true, '\n': true, '\r': true,
}
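// The decimal literal above is opaque. A minimal sketch (illustration only,
// not part of lexer.go) of how a mask like identifierStartBits can be
// derived: set bit (c-64) for every rune c in [A-Za-z_], all of which fall
// in the range 64..127.
//
//	func buildStartMask() uint64 {
//		var mask uint64
//		for _, c := range "ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz" {
//			mask |= 1 << (uint64(c) - 64)
//		}
//		return mask // 576460745995190270
//	}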
// NewLexer creates a new JMESPath lexer.
func NewLexer() *Lexer {
	lexer := Lexer{}
	return &lexer
}
// next returns the next rune in the expression, or eof when the end of the
// input has been reached.
func (lexer *Lexer) next() rune {
	if lexer.currentPos >= len(lexer.expression) {
		lexer.lastWidth = 0
		return eof
	}
	r, w := utf8.DecodeRuneInString(lexer.expression[lexer.currentPos:])
	lexer.lastWidth = w
	lexer.currentPos += w
	return r
}

// back steps back over the most recently read rune. It should only be
// called once per call to next.
func (lexer *Lexer) back() {
	lexer.currentPos -= lexer.lastWidth
}

// peek returns the next rune without consuming it.
func (lexer *Lexer) peek() rune {
	t := lexer.next()
	lexer.back()
	return t
}
// tokenize takes an expression and returns the corresponding tokens.
func (lexer *Lexer) tokenize(expression string) ([]token, error) {
	var tokens []token
	lexer.expression = expression
	lexer.currentPos = 0
	lexer.lastWidth = 0
loop:
	for {
		r := lexer.next()
		if identifierStartBits&(1<<(uint64(r)-64)) > 0 {
			t := lexer.consumeUnquotedIdentifier()
			tokens = append(tokens, t)
		} else if val, ok := basicTokens[r]; ok {
			// Basic single-char token.
			t := token{
				tokenType: val,
				value:     string(r),
				position:  lexer.currentPos - lexer.lastWidth,
				length:    1,
			}
			tokens = append(tokens, t)
		} else if r == '-' || (r >= '0' && r <= '9') {
			t := lexer.consumeNumber()
			tokens = append(tokens, t)
		} else if r == '[' {
			t := lexer.consumeLBracket()
			tokens = append(tokens, t)
		} else if r == '"' {
			t, err := lexer.consumeQuotedIdentifier()
			if err != nil {
				return tokens, err
			}
			tokens = append(tokens, t)
		} else if r == '\'' {
			t, err := lexer.consumeRawStringLiteral()
			if err != nil {
				return tokens, err
			}
			tokens = append(tokens, t)
		} else if r == '`' {
			t, err := lexer.consumeLiteral()
			if err != nil {
				return tokens, err
			}
			tokens = append(tokens, t)
		} else if r == '|' {
			t := lexer.matchOrElse(r, '|', tOr, tPipe)
			tokens = append(tokens, t)
		} else if r == '<' {
			t := lexer.matchOrElse(r, '=', tLTE, tLT)
			tokens = append(tokens, t)
		} else if r == '>' {
			t := lexer.matchOrElse(r, '=', tGTE, tGT)
			tokens = append(tokens, t)
		} else if r == '!' {
			t := lexer.matchOrElse(r, '=', tNE, tNot)
			tokens = append(tokens, t)
		} else if r == '=' {
			t := lexer.matchOrElse(r, '=', tEQ, tUnknown)
			tokens = append(tokens, t)
		} else if r == '&' {
			t := lexer.matchOrElse(r, '&', tAnd, tExpref)
			tokens = append(tokens, t)
		} else if r == eof {
			break loop
		} else if _, ok := whiteSpace[r]; ok {
			// Ignore whitespace.
		} else {
			return tokens, lexer.syntaxError(fmt.Sprintf("Unknown char: %s", strconv.QuoteRuneToASCII(r)))
		}
	}
	tokens = append(tokens, token{tEOF, "", len(lexer.expression), 0})
	return tokens, nil
}
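// A minimal usage sketch (illustration only, not part of lexer.go): tokenize
// is unexported, so code like this would have to live inside this package.
//
//	lexer := NewLexer()
//	tokens, err := lexer.tokenize("foo.bar[0] | baz")
//	if err != nil {
//		// For a SyntaxError, HighlightLocation() draws a "^" under the
//		// offending position in the expression.
//	}
//	_ = tokens // on success the slice always ends with a tEOF token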
return "", SyntaxError{
msg: "Unclosed delimiter: " + string(),
Expression: .expression,
Offset: len(.expression),
}
}
return .expression[ : .currentPos-.lastWidth], nil
}
// consumeLiteral reads a backtick-delimited JSON literal.
func (lexer *Lexer) consumeLiteral() (token, error) {
	start := lexer.currentPos
	value, err := lexer.consumeUntil('`')
	if err != nil {
		return token{}, err
	}
	value = strings.Replace(value, "\\`", "`", -1)
	return token{
		tokenType: tJSONLiteral,
		value:     value,
		position:  start,
		length:    len(value),
	}, nil
}
// consumeRawStringLiteral reads a single-quoted raw string literal,
// unescaping any embedded \' sequences.
func (lexer *Lexer) consumeRawStringLiteral() (token, error) {
	start := lexer.currentPos
	currentIndex := start
	current := lexer.next()
	for current != '\'' && lexer.peek() != eof {
		if current == '\\' && lexer.peek() == '\'' {
			chunk := lexer.expression[currentIndex : lexer.currentPos-1]
			lexer.buf.WriteString(chunk)
			lexer.buf.WriteString("'")
			lexer.next()
			currentIndex = lexer.currentPos
		}
		current = lexer.next()
	}
	if lexer.lastWidth == 0 {
		// We hit EOF without ever reaching the closing delimiter.
		return token{}, SyntaxError{
			msg:        "Unclosed delimiter: '",
			Expression: lexer.expression,
			Offset:     len(lexer.expression),
		}
	}
	if currentIndex < lexer.currentPos {
		lexer.buf.WriteString(lexer.expression[currentIndex : lexer.currentPos-1])
	}
	value := lexer.buf.String()
	// Reset the buffer so it can be reused.
	lexer.buf.Reset()
	return token{
		tokenType: tStringLiteral,
		value:     value,
		position:  start,
		length:    len(value),
	}, nil
}
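// Illustration (not part of lexer.go): a raw string literal keeps its
// contents verbatim except that an escaped quote is unescaped, e.g. the
// expression 'it\'s' yields a tStringLiteral token whose value is "it's".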
func (lexer *Lexer) syntaxError(msg string) SyntaxError {
	return SyntaxError{
		msg:        msg,
		Expression: lexer.expression,
		Offset:     lexer.currentPos - 1,
	}
}
// matchOrElse checks for a two-char token; otherwise it falls back to the
// single-char token type. Used whenever a two-char token overlaps a
// single-char token, e.g. "==" vs "=".
func (lexer *Lexer) matchOrElse(first rune, second rune, matchedType tokType, singleCharType tokType) token {
	start := lexer.currentPos - lexer.lastWidth
	nextRune := lexer.next()
	var t token
	if nextRune == second {
		t = token{
			tokenType: matchedType,
			value:     string(first) + string(second),
			position:  start,
			length:    2,
		}
	} else {
		t = token{
			tokenType: singleCharType,
			value:     string(first),
			position:  start,
			length:    1,
		}
		lexer.back()
	}
	return t
}

// consumeLBracket disambiguates the three bracket forms:
//  1. a filter expression "[?"
//  2. the flatten operator "[]"
//  3. a bare left bracket "["
func (lexer *Lexer) consumeLBracket() token {
	start := lexer.currentPos - lexer.lastWidth
	nextRune := lexer.next()
	var t token
	if nextRune == '?' {
		t = token{
			tokenType: tFilter,
			value:     "[?",
			position:  start,
			length:    2,
		}
	} else if nextRune == ']' {
		t = token{
			tokenType: tFlatten,
			value:     "[]",
			position:  start,
			length:    2,
		}
	} else {
		t = token{
			tokenType: tLbracket,
			value:     "[",
			position:  start,
			length:    1,
		}
		lexer.back()
	}
	return t
}
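// Illustration (not part of lexer.go): how the three bracket forms tokenize,
// given the behaviour above:
//
//	"a[0]"  -> tUnquotedIdentifier("a"), tLbracket, tNumber("0"), tRbracket, tEOF
//	"a[]"   -> tUnquotedIdentifier("a"), tFlatten, tEOF
//	"a[?b]" -> tUnquotedIdentifier("a"), tFilter, tUnquotedIdentifier("b"), tRbracket, tEOF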
// consumeQuotedIdentifier reads a double-quoted identifier and decodes it
// as a JSON string.
func (lexer *Lexer) consumeQuotedIdentifier() (token, error) {
	start := lexer.currentPos
	value, err := lexer.consumeUntil('"')
	if err != nil {
		return token{}, err
	}
	var decoded string
	asJSON := []byte("\"" + value + "\"")
	if err := json.Unmarshal(asJSON, &decoded); err != nil {
		return token{}, err
	}
	return token{
		tokenType: tQuotedIdentifier,
		value:     decoded,
		position:  start - 1,
		length:    len(decoded),
	}, nil
}
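// Illustration (not part of lexer.go): because the quoted identifier is
// decoded with json.Unmarshal, "foo bar" becomes tQuotedIdentifier("foo bar")
// and escapes such as "\u00e9" decode to their runes.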
func (lexer *Lexer) consumeUnquotedIdentifier() token {
	// Consume runes until we reach the end of an unquoted identifier.
	start := lexer.currentPos - lexer.lastWidth
	for {
		r := lexer.next()
		if r < 0 || r > 128 || identifierTrailingBits[uint64(r)/64]&(1<<(uint64(r)%64)) == 0 {
			lexer.back()
			break
		}
	}
	value := lexer.expression[start:lexer.currentPos]
	return token{
		tokenType: tUnquotedIdentifier,
		value:     value,
		position:  start,
		length:    lexer.currentPos - start,
	}
}
func (lexer *Lexer) consumeNumber() token {
	// Consume runes until we reach something that's not a digit.
	start := lexer.currentPos - lexer.lastWidth
	for {
		r := lexer.next()
		if r < '0' || r > '9' {
			lexer.back()
			break
		}
	}
	value := lexer.expression[start:lexer.currentPos]
	return token{
		tokenType: tNumber,
		value:     value,
		position:  start,
		length:    lexer.currentPos - start,
	}
}
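// Illustration (not part of lexer.go): tokenize calls consumeNumber for a
// leading '-' as well as for digits, and start points at the sign, so the
// sign is kept in the token, e.g. "[-1]" -> tLbracket, tNumber("-1"), tRbracket.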