type github.com/jmespath/go-jmespath.token
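token is unexported; a rough sketch of its declaration (lexer.go#L12), with field names inferred from the positional literal at lexer.go#L214 (tEOF, "", position, length), so the exact names are assumptions:

	type token struct {
		tokenType tokType // kind of token, e.g. tEOF
		value     string  // raw text of the token
		position  int     // byte offset into the expression
		length    int     // length of the matched text
	}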

33 uses

	github.com/jmespath/go-jmespath (current package)
		lexer.go#L12: type token struct {
		lexer.go#L111: func (t token) String() string {
		lexer.go#L144: func (lexer *Lexer) tokenize(expression string) ([]token, error) {
		lexer.go#L145: 	var tokens []token
		lexer.go#L157: 			t := token{
		lexer.go#L214: 	tokens = append(tokens, token{tEOF, "", len(lexer.expression), 0})
		lexer.go#L244: func (lexer *Lexer) consumeLiteral() (token, error) {
		lexer.go#L248: 		return token{}, err
		lexer.go#L251: 	return token{
		lexer.go#L259: func (lexer *Lexer) consumeRawStringLiteral() (token, error) {
		lexer.go#L276: 		return token{}, SyntaxError{
		lexer.go#L288: 	return token{
		lexer.go#L307: func (lexer *Lexer) matchOrElse(first rune, second rune, matchedType tokType, singleCharType tokType) token {
		lexer.go#L310: 	var t token
		lexer.go#L312: 		t = token{
		lexer.go#L320: 		t = token{
		lexer.go#L330: func (lexer *Lexer) consumeLBracket() token {
		lexer.go#L337: 	var t token
		lexer.go#L339: 		t = token{
		lexer.go#L346: 		t = token{
		lexer.go#L353: 		t = token{
		lexer.go#L364: func (lexer *Lexer) consumeQuotedIdentifier() (token, error) {
		lexer.go#L368: 		return token{}, err
		lexer.go#L373: 		return token{}, err
		lexer.go#L375: 	return token{
		lexer.go#L383: func (lexer *Lexer) consumeUnquotedIdentifier() token {
		lexer.go#L395: 	return token{
		lexer.go#L403: func (lexer *Lexer) consumeNumber() token {
		lexer.go#L414: 	return token{
		parser.go#L114: 	tokens     []token
		parser.go#L317: func (p *Parser) nud(token token) (ASTNode, error) {
		parser.go#L569: func (p *Parser) lookaheadToken(number int) token {
		parser.go#L597: func (p *Parser) syntaxErrorToken(msg string, t token) SyntaxError {
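Because token and the lexer's tokenize method are unexported, the sites above are only reached indirectly through the package's exported entry points. A minimal sketch using the exported Search function (the expression and input data here are purely illustrative):

	package main

	import (
		"encoding/json"
		"fmt"

		"github.com/jmespath/go-jmespath"
	)

	func main() {
		var data interface{}
		// Search parses the expression, which internally tokenizes it
		// into []token before the parser builds the AST.
		_ = json.Unmarshal([]byte(`{"foo": {"bar": "baz"}}`), &data)
		result, err := jmespath.Search("foo.bar", data)
		if err != nil {
			// A malformed expression would surface here as a SyntaxError
			// (see syntaxErrorToken, parser.go#L597).
			fmt.Println("syntax error:", err)
			return
		}
		fmt.Println(result) // baz
	}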