// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package json

import (
	"bytes"
	"errors"
	"io"
)
// A Decoder reads and decodes JSON values from an input stream.
type Decoder struct {
	r       io.Reader
	buf     []byte
	d       decodeState
	scanp   int   // start of unread data in buf
	scanned int64 // amount of data already scanned
	scan    scanner
	err     error

	tokenState int
	tokenStack []int
}
// NewDecoder returns a new decoder that reads from r.
//
// The decoder introduces its own buffering and may
// read data from r beyond the JSON values requested.
func NewDecoder(r io.Reader) *Decoder {
	return &Decoder{r: r}
}
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
// Number instead of as a float64.
func (dec *Decoder) UseNumber() { dec.d.useNumber = true }
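
// Illustrative usage sketch (the input literal and variable names are
// hypothetical): without UseNumber, 3 decodes as float64(3); with it, the
// number decodes as Number("3").
//
//	dec := NewDecoder(strings.NewReader(`{"id": 3}`))
//	dec.UseNumber()
//	var v map[string]interface{}
//	if err := dec.Decode(&v); err == nil {
//		n := v["id"].(Number) // Number("3"); see n.Int64, n.Float64
//		_ = n
//	}
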
// DisallowUnknownFields causes the Decoder to return an error when the destination
// is a struct and the input contains object keys which do not match any
// non-ignored, exported fields in the destination.
func (dec *Decoder) DisallowUnknownFields() { dec.d.disallowUnknownFields = true }
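
// Illustrative sketch (hypothetical input and struct type): with
// DisallowUnknownFields set, an object key with no matching struct field
// causes Decode to fail instead of being silently dropped.
//
//	type T struct{ A int }
//	dec := NewDecoder(strings.NewReader(`{"A": 1, "B": 2}`))
//	dec.DisallowUnknownFields()
//	var t T
//	err := dec.Decode(&t) // err reports the unknown field "B"
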
// Decode reads the next JSON-encoded value from its
// input and stores it in the value pointed to by v.
//
// See the documentation for Unmarshal for details about
// the conversion of JSON into a Go value.
func (dec *Decoder) Decode(v interface{}) error {
	if dec.err != nil {
		return dec.err
	}

	if err := dec.tokenPrepareForDecode(); err != nil {
		return err
	}

	if !dec.tokenValueAllowed() {
		return &SyntaxError{msg: "not at beginning of value", Offset: dec.InputOffset()}
	}
	// Read whole value into buffer.
	n, err := dec.readValue()
	if err != nil {
		return err
	}
	dec.d.init(dec.buf[dec.scanp : dec.scanp+n])
	dec.scanp += n
	// Don't save err from unmarshal into dec.err:
	// the connection is still usable since we read a complete JSON
	// object from it before the error happened.
	err = dec.d.unmarshal(v)

	// fixup token streaming state
	dec.tokenValueEnd()

	return err
}
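
// Illustrative sketch of a streaming decode loop (r and the Message type are
// hypothetical): Decode is called repeatedly until the underlying reader is
// drained.
//
//	dec := NewDecoder(r)
//	for {
//		var m Message
//		if err := dec.Decode(&m); err == io.EOF {
//			break
//		} else if err != nil {
//			log.Fatal(err)
//		}
//	}
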
// Buffered returns a reader of the data remaining in the Decoder's
// buffer. The reader is valid until the next call to Decode.
func (dec *Decoder) Buffered() io.Reader {
	return bytes.NewReader(dec.buf[dec.scanp:])
}
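
// Illustrative sketch (hypothetical input): Buffered exposes bytes that the
// Decoder has already read from r but not yet consumed, such as input
// following the first value.
//
//	dec := NewDecoder(strings.NewReader(`{"a": 1} trailing`))
//	var v map[string]int
//	dec.Decode(&v)
//	rest := new(bytes.Buffer)
//	rest.ReadFrom(dec.Buffered()) // rest holds roughly " trailing"
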
// readValue reads a JSON value into dec.buf.
// It returns the length of the encoding.
func (dec *Decoder) readValue() (int, error) {
	dec.scan.reset()

	scanp := dec.scanp
	var err error
Input:
	// help the compiler see that scanp is never negative, so it can remove
	// some bounds checks below.
	for scanp >= 0 {

		// Look in the buffer for a new value.
		for ; scanp < len(dec.buf); scanp++ {
			c := dec.buf[scanp]
			dec.scan.bytes++
			switch dec.scan.step(&dec.scan, c) {
			case scanEnd:
				// scanEnd is delayed one byte so we decrement
				// the scanner bytes count by 1 to ensure that
				// this value is correct in the next call of Decode.
				dec.scan.bytes--
				break Input
			case scanEndObject, scanEndArray:
				// scanEnd is delayed one byte.
				// We might block trying to get that byte from src,
				// so instead invent a space byte.
				if stateEndValue(&dec.scan, ' ') == scanEnd {
					scanp++
					break Input
				}
			case scanError:
				dec.err = dec.scan.err
				return 0, dec.scan.err
			}
		}
		// Did the last read have an error?
		// Delayed until now to allow buffer scan.
		if err != nil {
			if err == io.EOF {
				if dec.scan.step(&dec.scan, ' ') == scanEnd {
					break Input
				}
				if nonSpace(dec.buf) {
					err = io.ErrUnexpectedEOF
				}
			}
			dec.err = err
			return 0, err
		}

		n := scanp - dec.scanp
		err = dec.refill()
		scanp = dec.scanp + n
	}
	return scanp - dec.scanp, nil
}

func (dec *Decoder) refill() error {
	// Make room to read more into the buffer.
	// First slide down data already consumed.
	if dec.scanp > 0 {
		dec.scanned += int64(dec.scanp)
		n := copy(dec.buf, dec.buf[dec.scanp:])
		dec.buf = dec.buf[:n]
		dec.scanp = 0
	}
	// Grow buffer if not large enough.
	const minRead = 512
	if cap(dec.buf)-len(dec.buf) < minRead {
		newBuf := make([]byte, len(dec.buf), 2*cap(dec.buf)+minRead)
		copy(newBuf, dec.buf)
		dec.buf = newBuf
	}
	// Read. Delay error for next iteration (after scan).
	n, err := dec.r.Read(dec.buf[len(dec.buf):cap(dec.buf)])
	dec.buf = dec.buf[0 : len(dec.buf)+n]

	return err
}

func nonSpace(b []byte) bool {
	for _, c := range b {
		if !isSpace(c) {
			return true
		}
	}
	return false
}
// An Encoder writes JSON values to an output stream.
type Encoder struct {
	w          io.Writer
	err        error
	escapeHTML bool

	indentBuf    *bytes.Buffer
	indentPrefix string
	indentValue  string
}

// NewEncoder returns a new encoder that writes to w.
func NewEncoder(w io.Writer) *Encoder {
	return &Encoder{w: w, escapeHTML: true}
}
// Encode writes the JSON encoding of v to the stream,
// followed by a newline character.
//
// See the documentation for Marshal for details about the
// conversion of Go values to JSON.
func (enc *Encoder) Encode(v interface{}) error {
	if enc.err != nil {
		return enc.err
	}
	e := newEncodeState()
	err := e.marshal(v, encOpts{escapeHTML: enc.escapeHTML})
	if err != nil {
		return err
	}
	// Terminate each value with a newline.
	// This makes the output look a little nicer
	// when debugging, and some kind of space
	// is required if the encoded value was a number,
	// so that the reader knows there aren't more
	// digits coming.
	e.WriteByte('\n')

	b := e.Bytes()
	if enc.indentPrefix != "" || enc.indentValue != "" {
		if enc.indentBuf == nil {
			enc.indentBuf = new(bytes.Buffer)
		}
		enc.indentBuf.Reset()
		err = Indent(enc.indentBuf, b, enc.indentPrefix, enc.indentValue)
		if err != nil {
			return err
		}
		b = enc.indentBuf.Bytes()
	}
	if _, err = enc.w.Write(b); err != nil {
		enc.err = err
	}
	encodeStatePool.Put(e)
	return err
}
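
// Illustrative sketch (os.Stdout and the value are arbitrary choices): each
// Encode call writes one JSON value followed by a newline.
//
//	enc := NewEncoder(os.Stdout)
//	if err := enc.Encode(map[string]int{"a": 1}); err != nil {
//		log.Fatal(err)
//	}
//	// output line: {"a":1}
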
// SetIndent instructs the encoder to format each subsequent encoded
// value as if indented by the package-level function Indent(dst, src, prefix, indent).
// Calling SetIndent("", "") disables indentation.
func (enc *Encoder) SetIndent(prefix, indent string) {
	enc.indentPrefix = prefix
	enc.indentValue = indent
}
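
// Illustrative sketch: after SetIndent("", "  "), subsequent values are
// written in the same multi-line form produced by the package-level Indent.
//
//	enc := NewEncoder(os.Stdout)
//	enc.SetIndent("", "  ")
//	enc.Encode(map[string]int{"a": 1})
//	// output:
//	// {
//	//   "a": 1
//	// }
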
// SetEscapeHTML specifies whether problematic HTML characters
// should be escaped inside JSON quoted strings.
// The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e
// to avoid certain safety problems that can arise when embedding JSON in HTML.
//
// In non-HTML settings where the escaping interferes with the readability
// of the output, SetEscapeHTML(false) disables this behavior.
func (enc *Encoder) SetEscapeHTML(on bool) {
	enc.escapeHTML = on
}
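
// Illustrative sketch: disabling HTML escaping keeps <, >, and & literal in
// the output, which is usually preferable outside HTML contexts.
//
//	enc := NewEncoder(os.Stdout)
//	enc.SetEscapeHTML(false)
//	enc.Encode("<b>")
//	// output: "<b>" (with escaping it would be "\u003cb\u003e")
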
// RawMessage is a raw encoded JSON value.
// It implements Marshaler and Unmarshaler and can
// be used to delay JSON decoding or precompute a JSON encoding.
type RawMessage []byte

// MarshalJSON returns m as the JSON encoding of m.
func (m RawMessage) MarshalJSON() ([]byte, error) {
	if m == nil {
		return []byte("null"), nil
	}
	return m, nil
}
// UnmarshalJSON sets *m to a copy of data.
func (m *RawMessage) UnmarshalJSON(data []byte) error {
	if m == nil {
		return errors.New("json.RawMessage: UnmarshalJSON on nil pointer")
	}
	*m = append((*m)[0:0], data...)
	return nil
}

var _ Marshaler = (*RawMessage)(nil)
var _ Unmarshaler = (*RawMessage)(nil)
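
// Illustrative sketch (the envelope format and input variable are
// hypothetical): RawMessage defers decoding of a field until its type is
// known from another field.
//
//	type Envelope struct {
//		Kind string
//		Data RawMessage
//	}
//	var env Envelope
//	if err := Unmarshal(input, &env); err == nil && env.Kind == "point" {
//		var p struct{ X, Y int }
//		err = Unmarshal(env.Data, &p)
//	}
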
// A Token holds a value of one of these types:
//
//	Delim, for the four JSON delimiters [ ] { }
//	bool, for JSON booleans
//	float64, for JSON numbers
//	Number, for JSON numbers
//	string, for JSON string literals
//	nil, for JSON null
type Token interface{}

const (
	tokenTopValue = iota
	tokenArrayStart
	tokenArrayValue
	tokenArrayComma
	tokenObjectStart
	tokenObjectKey
	tokenObjectColon
	tokenObjectValue
	tokenObjectComma
)

// advance tokenstate from a separator state to a value state
func (dec *Decoder) tokenPrepareForDecode() error {
	// Note: Not calling peek before switch, to avoid
	// putting peek into the standard Decode path.
	// peek is only called when using the Token API.
	switch dec.tokenState {
	case tokenArrayComma:
		c, err := dec.peek()
		if err != nil {
			return err
		}
		if c != ',' {
			return &SyntaxError{"expected comma after array element", dec.InputOffset()}
		}
		dec.scanp++
		dec.tokenState = tokenArrayValue
	case tokenObjectColon:
		c, err := dec.peek()
		if err != nil {
			return err
		}
		if c != ':' {
			return &SyntaxError{"expected colon after object key", dec.InputOffset()}
		}
		dec.scanp++
		dec.tokenState = tokenObjectValue
	}
	return nil
}

func (dec *Decoder) tokenValueAllowed() bool {
	switch dec.tokenState {
	case tokenTopValue, tokenArrayStart, tokenArrayValue, tokenObjectValue:
		return true
	}
	return false
}

func (dec *Decoder) tokenValueEnd() {
	switch dec.tokenState {
	case tokenArrayStart, tokenArrayValue:
		dec.tokenState = tokenArrayComma
	case tokenObjectValue:
		dec.tokenState = tokenObjectComma
	}
}
// A Delim is a JSON array or object delimiter, one of [ ] { or }.
type Delim rune

func (d Delim) String() string {
	return string(d)
}
// Token returns the next JSON token in the input stream.
// At the end of the input stream, Token returns nil, io.EOF.
//
// Token guarantees that the delimiters [ ] { } it returns are
// properly nested and matched: if Token encounters an unexpected
// delimiter in the input, it will return an error.
//
// The input stream consists of basic JSON values—bool, string,
// number, and null—along with delimiters [ ] { } of type Delim
// to mark the start and end of arrays and objects.
// Commas and colons are elided.
func (dec *Decoder) Token() (Token, error) {
	for {
		c, err := dec.peek()
		if err != nil {
			return nil, err
		}
		switch c {
		case '[':
			if !dec.tokenValueAllowed() {
				return dec.tokenError(c)
			}
			dec.scanp++
			dec.tokenStack = append(dec.tokenStack, dec.tokenState)
			dec.tokenState = tokenArrayStart
			return Delim('['), nil

		case ']':
			if dec.tokenState != tokenArrayStart && dec.tokenState != tokenArrayComma {
				return dec.tokenError(c)
			}
			dec.scanp++
			dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
			dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
			dec.tokenValueEnd()
			return Delim(']'), nil

		case '{':
			if !dec.tokenValueAllowed() {
				return dec.tokenError(c)
			}
			dec.scanp++
			dec.tokenStack = append(dec.tokenStack, dec.tokenState)
			dec.tokenState = tokenObjectStart
			return Delim('{'), nil

		case '}':
			if dec.tokenState != tokenObjectStart && dec.tokenState != tokenObjectComma {
				return dec.tokenError(c)
			}
			dec.scanp++
			dec.tokenState = dec.tokenStack[len(dec.tokenStack)-1]
			dec.tokenStack = dec.tokenStack[:len(dec.tokenStack)-1]
			dec.tokenValueEnd()
			return Delim('}'), nil

		case ':':
			if dec.tokenState != tokenObjectColon {
				return dec.tokenError(c)
			}
			dec.scanp++
			dec.tokenState = tokenObjectValue
			continue

		case ',':
			if dec.tokenState == tokenArrayComma {
				dec.scanp++
				dec.tokenState = tokenArrayValue
				continue
			}
			if dec.tokenState == tokenObjectComma {
				dec.scanp++
				dec.tokenState = tokenObjectKey
				continue
			}
			return dec.tokenError(c)

		case '"':
			if dec.tokenState == tokenObjectStart || dec.tokenState == tokenObjectKey {
				var x string
				old := dec.tokenState
				dec.tokenState = tokenTopValue
				err := dec.Decode(&x)
				dec.tokenState = old
				if err != nil {
					return nil, err
				}
				dec.tokenState = tokenObjectColon
				return x, nil
			}
			fallthrough

		default:
			if !dec.tokenValueAllowed() {
				return dec.tokenError(c)
			}
			var x interface{}
			if err := dec.Decode(&x); err != nil {
				return nil, err
			}
			return x, nil
		}
	}
}
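
// Illustrative sketch (r is a hypothetical reader of a JSON document): a
// Token loop visits every delimiter and basic value in order.
//
//	dec := NewDecoder(r)
//	for {
//		tok, err := dec.Token()
//		if err == io.EOF {
//			break
//		}
//		if err != nil {
//			log.Fatal(err)
//		}
//		fmt.Printf("%T: %v\n", tok, tok)
//	}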

func (dec *Decoder) tokenError(c byte) (Token, error) {
	var context string
	switch dec.tokenState {
	case tokenTopValue:
		context = " looking for beginning of value"
	case tokenArrayStart, tokenArrayValue, tokenObjectValue:
		context = " looking for beginning of value"
	case tokenArrayComma:
		context = " after array element"
	case tokenObjectKey:
		context = " looking for beginning of object key string"
	case tokenObjectColon:
		context = " after object key"
	case tokenObjectComma:
		context = " after object key:value pair"
	}
	return nil, &SyntaxError{"invalid character " + quoteChar(c) + context, dec.InputOffset()}
}
// More reports whether there is another element in the
// current array or object being parsed.
func (dec *Decoder) More() bool {
	c, err := dec.peek()
	return err == nil && c != ']' && c != '}'
}
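
// Illustrative sketch (r is a hypothetical reader of a JSON array): More is
// typically paired with Token to consume the surrounding delimiters while
// each element is decoded individually.
//
//	dec := NewDecoder(r)
//	dec.Token() // opening [
//	for dec.More() {
//		var elem interface{}
//		dec.Decode(&elem)
//	}
//	dec.Token() // closing ]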

func (dec *Decoder) peek() (byte, error) {
	var err error
	for {
		for i := dec.scanp; i < len(dec.buf); i++ {
			c := dec.buf[i]
			if isSpace(c) {
				continue
			}
			dec.scanp = i
			return c, nil
		}
		// buffer has been scanned, now report any error
		if err != nil {
			return 0, err
		}
		err = dec.refill()
	}
}
// InputOffset returns the input stream byte offset of the current decoder position.
// The offset gives the location of the end of the most recently returned token
// and the beginning of the next token.
func (dec *Decoder) InputOffset() int64 {
	return dec.scanned + int64(dec.scanp)
}
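
// Illustrative sketch (r is a hypothetical reader): InputOffset reports how
// far into the input the decoder has advanced, e.g. for progress reporting.
//
//	dec := NewDecoder(r)
//	tok, _ := dec.Token()
//	fmt.Printf("token %v ends at byte %d\n", tok, dec.InputOffset())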