Source File: parserc.go
Belonging Package: gopkg.in/yaml.v2
package yaml
import (
    "bytes"
)
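// peek_token returns the next token in the queue, asking the scanner to
// fetch more tokens if none is available. It returns nil when fetching fails.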
func peek_token(parser *yaml_parser_t) *yaml_token_t {
    if parser.token_available || yaml_parser_fetch_more_tokens(parser) {
        return &parser.tokens[parser.tokens_head]
    }
    return nil
}
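// skip_token removes the next token from the queue; it must be called after
// a successful peek_token.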
func skip_token(parser *yaml_parser_t) {
    parser.token_available = false
    parser.tokens_parsed++
    parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN
    parser.tokens_head++
}
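// yaml_parser_parse produces the next event. The event object is erased first;
// after the stream end has been produced, or once an error is recorded, no
// further events are generated. A typical caller loop (hypothetical sketch,
// not part of this file):
//
//	var event yaml_event_t
//	for {
//	    if !yaml_parser_parse(&parser, &event) {
//	        // parser.problem / parser.problem_mark describe the error
//	        break
//	    }
//	    if event.typ == yaml_STREAM_END_EVENT {
//	        break
//	    }
//	    // ... consume the event ...
//	}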
func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool {
    // Erase the event object.
    *event = yaml_event_t{}
    // No events after the end of the stream or error.
    if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE {
        return true
    }
    // Generate the next event.
    return yaml_parser_state_machine(parser, event)
}
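// yaml_parser_set_parser_error records a parser error with its problem mark
// and returns false so callers can bail out directly.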
func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool {
    parser.error = yaml_PARSER_ERROR
    parser.problem = problem
    parser.problem_mark = problem_mark
    return false
}
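// yaml_parser_set_parser_error_context records a parser error together with
// the surrounding context (e.g. "while parsing a block mapping") and its mark.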
func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool {
    parser.error = yaml_PARSER_ERROR
    parser.context = context
    parser.context_mark = context_mark
    parser.problem = problem
    parser.problem_mark = problem_mark
    return false
}
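// yaml_parser_state_machine dispatches to the handler for the parser's
// current state.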
func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool {
    switch parser.state {
    case yaml_PARSE_STREAM_START_STATE:
        return yaml_parser_parse_stream_start(parser, event)
    case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
        return yaml_parser_parse_document_start(parser, event, true)
    case yaml_PARSE_DOCUMENT_START_STATE:
        return yaml_parser_parse_document_start(parser, event, false)
    case yaml_PARSE_DOCUMENT_CONTENT_STATE:
        return yaml_parser_parse_document_content(parser, event)
    case yaml_PARSE_DOCUMENT_END_STATE:
        return yaml_parser_parse_document_end(parser, event)
    case yaml_PARSE_BLOCK_NODE_STATE:
        return yaml_parser_parse_node(parser, event, true, false)
    case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
        return yaml_parser_parse_node(parser, event, true, true)
    case yaml_PARSE_FLOW_NODE_STATE:
        return yaml_parser_parse_node(parser, event, false, false)
    case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
        return yaml_parser_parse_block_sequence_entry(parser, event, true)
    case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
        return yaml_parser_parse_block_sequence_entry(parser, event, false)
    case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
        return yaml_parser_parse_indentless_sequence_entry(parser, event)
    case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
        return yaml_parser_parse_block_mapping_key(parser, event, true)
    case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
        return yaml_parser_parse_block_mapping_key(parser, event, false)
    case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
        return yaml_parser_parse_block_mapping_value(parser, event)
    case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
        return yaml_parser_parse_flow_sequence_entry(parser, event, true)
    case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
        return yaml_parser_parse_flow_sequence_entry(parser, event, false)
    case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
        return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event)
    case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
        return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event)
    case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
        return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event)
    case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
        return yaml_parser_parse_flow_mapping_key(parser, event, true)
    case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
        return yaml_parser_parse_flow_mapping_key(parser, event, false)
    case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
        return yaml_parser_parse_flow_mapping_value(parser, event, false)
    case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
        return yaml_parser_parse_flow_mapping_value(parser, event, true)
    default:
        panic("invalid parser state")
    }
}
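// yaml_parser_parse_stream_start handles the STREAM-START token and emits the
// corresponding stream-start event, carrying the detected encoding.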
func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool {
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if token.typ != yaml_STREAM_START_TOKEN {
        return yaml_parser_set_parser_error(parser, "did not find expected <stream-start>", token.start_mark)
    }
    parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE
    *event = yaml_event_t{
        typ:        yaml_STREAM_START_EVENT,
        start_mark: token.start_mark,
        end_mark:   token.end_mark,
        encoding:   token.encoding,
    }
    skip_token(parser)
    return true
}
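// yaml_parser_parse_document_start emits a DOCUMENT-START event for either an
// implicit document (no directives and no '---') or an explicit one, or a
// STREAM-END event once the input is exhausted.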
func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool {
    token := peek_token(parser)
    if token == nil {
        return false
    }
    // Parse extra document end indicators.
    if !implicit {
        for token.typ == yaml_DOCUMENT_END_TOKEN {
            skip_token(parser)
            token = peek_token(parser)
            if token == nil {
                return false
            }
        }
    }
    if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN &&
        token.typ != yaml_TAG_DIRECTIVE_TOKEN &&
        token.typ != yaml_DOCUMENT_START_TOKEN &&
        token.typ != yaml_STREAM_END_TOKEN {
        // Parse an implicit document.
        if !yaml_parser_process_directives(parser, nil, nil) {
            return false
        }
        parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
        parser.state = yaml_PARSE_BLOCK_NODE_STATE
        *event = yaml_event_t{
            typ:        yaml_DOCUMENT_START_EVENT,
            start_mark: token.start_mark,
            end_mark:   token.end_mark,
        }
    } else if token.typ != yaml_STREAM_END_TOKEN {
        // Parse an explicit document.
        var version_directive *yaml_version_directive_t
        var tag_directives []yaml_tag_directive_t
        start_mark := token.start_mark
        if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) {
            return false
        }
        token = peek_token(parser)
        if token == nil {
            return false
        }
        if token.typ != yaml_DOCUMENT_START_TOKEN {
            yaml_parser_set_parser_error(parser,
                "did not find expected <document start>", token.start_mark)
            return false
        }
        parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
        parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE
        end_mark := token.end_mark
        *event = yaml_event_t{
            typ:               yaml_DOCUMENT_START_EVENT,
            start_mark:        start_mark,
            end_mark:          end_mark,
            version_directive: version_directive,
            tag_directives:    tag_directives,
            implicit:          false,
        }
        skip_token(parser)
    } else {
        // Parse the stream end.
        parser.state = yaml_PARSE_END_STATE
        *event = yaml_event_t{
            typ:        yaml_STREAM_END_EVENT,
            start_mark: token.start_mark,
            end_mark:   token.end_mark,
        }
        skip_token(parser)
    }
    return true
}
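// yaml_parser_parse_document_content parses the root node of a document,
// producing an empty scalar when the document has no content.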
func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool {
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if token.typ == yaml_VERSION_DIRECTIVE_TOKEN ||
        token.typ == yaml_TAG_DIRECTIVE_TOKEN ||
        token.typ == yaml_DOCUMENT_START_TOKEN ||
        token.typ == yaml_DOCUMENT_END_TOKEN ||
        token.typ == yaml_STREAM_END_TOKEN {
        parser.state = parser.states[len(parser.states)-1]
        parser.states = parser.states[:len(parser.states)-1]
        return yaml_parser_process_empty_scalar(parser, event,
            token.start_mark)
    }
    return yaml_parser_parse_node(parser, event, true, false)
}
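// yaml_parser_parse_document_end emits a DOCUMENT-END event; the event is
// implicit unless an explicit '...' end indicator was consumed. The tag
// directives are reset for the next document.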
func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool {
    token := peek_token(parser)
    if token == nil {
        return false
    }
    start_mark := token.start_mark
    end_mark := token.start_mark
    implicit := true
    if token.typ == yaml_DOCUMENT_END_TOKEN {
        end_mark = token.end_mark
        skip_token(parser)
        implicit = false
    }
    parser.tag_directives = parser.tag_directives[:0]
    parser.state = yaml_PARSE_DOCUMENT_START_STATE
    *event = yaml_event_t{
        typ:        yaml_DOCUMENT_END_EVENT,
        start_mark: start_mark,
        end_mark:   end_mark,
        implicit:   implicit,
    }
    return true
}
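// yaml_parser_parse_node parses an alias, an anchored/tagged scalar, or the
// start of a block or flow collection, resolving tag handles against the
// active %TAG directives.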
func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool {
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if token.typ == yaml_ALIAS_TOKEN {
        parser.state = parser.states[len(parser.states)-1]
        parser.states = parser.states[:len(parser.states)-1]
        *event = yaml_event_t{
            typ:        yaml_ALIAS_EVENT,
            start_mark: token.start_mark,
            end_mark:   token.end_mark,
            anchor:     token.value,
        }
        skip_token(parser)
        return true
    }
    start_mark := token.start_mark
    end_mark := token.start_mark
    var tag_token bool
    var tag_handle, tag_suffix, anchor []byte
    var tag_mark yaml_mark_t
    if token.typ == yaml_ANCHOR_TOKEN {
        anchor = token.value
        start_mark = token.start_mark
        end_mark = token.end_mark
        skip_token(parser)
        token = peek_token(parser)
        if token == nil {
            return false
        }
        if token.typ == yaml_TAG_TOKEN {
            tag_token = true
            tag_handle = token.value
            tag_suffix = token.suffix
            tag_mark = token.start_mark
            end_mark = token.end_mark
            skip_token(parser)
            token = peek_token(parser)
            if token == nil {
                return false
            }
        }
    } else if token.typ == yaml_TAG_TOKEN {
        tag_token = true
        tag_handle = token.value
        tag_suffix = token.suffix
        start_mark = token.start_mark
        tag_mark = token.start_mark
        end_mark = token.end_mark
        skip_token(parser)
        token = peek_token(parser)
        if token == nil {
            return false
        }
        if token.typ == yaml_ANCHOR_TOKEN {
            anchor = token.value
            end_mark = token.end_mark
            skip_token(parser)
            token = peek_token(parser)
            if token == nil {
                return false
            }
        }
    }
    var tag []byte
    if tag_token {
        if len(tag_handle) == 0 {
            tag = tag_suffix
            tag_suffix = nil
        } else {
            for i := range parser.tag_directives {
                if bytes.Equal(parser.tag_directives[i].handle, tag_handle) {
                    tag = append([]byte(nil), parser.tag_directives[i].prefix...)
                    tag = append(tag, tag_suffix...)
                    break
                }
            }
            if len(tag) == 0 {
                yaml_parser_set_parser_error_context(parser,
                    "while parsing a node", start_mark,
                    "found undefined tag handle", tag_mark)
                return false
            }
        }
    }
    implicit := len(tag) == 0
    if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN {
        end_mark = token.end_mark
        parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
        *event = yaml_event_t{
            typ:        yaml_SEQUENCE_START_EVENT,
            start_mark: start_mark,
            end_mark:   end_mark,
            anchor:     anchor,
            tag:        tag,
            implicit:   implicit,
            style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
        }
        return true
    }
    if token.typ == yaml_SCALAR_TOKEN {
        var plain_implicit, quoted_implicit bool
        end_mark = token.end_mark
        if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') {
            plain_implicit = true
        } else if len(tag) == 0 {
            quoted_implicit = true
        }
        parser.state = parser.states[len(parser.states)-1]
        parser.states = parser.states[:len(parser.states)-1]
        *event = yaml_event_t{
            typ:             yaml_SCALAR_EVENT,
            start_mark:      start_mark,
            end_mark:        end_mark,
            anchor:          anchor,
            tag:             tag,
            value:           token.value,
            implicit:        plain_implicit,
            quoted_implicit: quoted_implicit,
            style:           yaml_style_t(token.style),
        }
        skip_token(parser)
        return true
    }
    if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN {
        end_mark = token.end_mark
        parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE
        *event = yaml_event_t{
            typ:        yaml_SEQUENCE_START_EVENT,
            start_mark: start_mark,
            end_mark:   end_mark,
            anchor:     anchor,
            tag:        tag,
            implicit:   implicit,
            style:      yaml_style_t(yaml_FLOW_SEQUENCE_STYLE),
        }
        return true
    }
    if token.typ == yaml_FLOW_MAPPING_START_TOKEN {
        end_mark = token.end_mark
        parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE
        *event = yaml_event_t{
            typ:        yaml_MAPPING_START_EVENT,
            start_mark: start_mark,
            end_mark:   end_mark,
            anchor:     anchor,
            tag:        tag,
            implicit:   implicit,
            style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
        }
        return true
    }
    if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN {
        end_mark = token.end_mark
        parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE
        *event = yaml_event_t{
            typ:        yaml_SEQUENCE_START_EVENT,
            start_mark: start_mark,
            end_mark:   end_mark,
            anchor:     anchor,
            tag:        tag,
            implicit:   implicit,
            style:      yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE),
        }
        return true
    }
    if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN {
        end_mark = token.end_mark
        parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE
        *event = yaml_event_t{
            typ:        yaml_MAPPING_START_EVENT,
            start_mark: start_mark,
            end_mark:   end_mark,
            anchor:     anchor,
            tag:        tag,
            implicit:   implicit,
            style:      yaml_style_t(yaml_BLOCK_MAPPING_STYLE),
        }
        return true
    }
    if len(anchor) > 0 || len(tag) > 0 {
        parser.state = parser.states[len(parser.states)-1]
        parser.states = parser.states[:len(parser.states)-1]
        *event = yaml_event_t{
            typ:             yaml_SCALAR_EVENT,
            start_mark:      start_mark,
            end_mark:        end_mark,
            anchor:          anchor,
            tag:             tag,
            implicit:        implicit,
            quoted_implicit: false,
            style:           yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
        }
        return true
    }
    context := "while parsing a flow node"
    if block {
        context = "while parsing a block node"
    }
    yaml_parser_set_parser_error_context(parser, context, start_mark,
        "did not find expected node content", token.start_mark)
    return false
}
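// yaml_parser_parse_block_sequence_entry parses one '-' entry of a block
// sequence, or emits SEQUENCE-END when the block ends.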
func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
    if first {
        token := peek_token(parser)
        parser.marks = append(parser.marks, token.start_mark)
        skip_token(parser)
    }
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if token.typ == yaml_BLOCK_ENTRY_TOKEN {
        mark := token.end_mark
        skip_token(parser)
        token = peek_token(parser)
        if token == nil {
            return false
        }
        if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN {
            parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE)
            return yaml_parser_parse_node(parser, event, true, false)
        } else {
            parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE
            return yaml_parser_process_empty_scalar(parser, event, mark)
        }
    }
    if token.typ == yaml_BLOCK_END_TOKEN {
        parser.state = parser.states[len(parser.states)-1]
        parser.states = parser.states[:len(parser.states)-1]
        parser.marks = parser.marks[:len(parser.marks)-1]
        *event = yaml_event_t{
            typ:        yaml_SEQUENCE_END_EVENT,
            start_mark: token.start_mark,
            end_mark:   token.end_mark,
        }
        skip_token(parser)
        return true
    }
    context_mark := parser.marks[len(parser.marks)-1]
    parser.marks = parser.marks[:len(parser.marks)-1]
    return yaml_parser_set_parser_error_context(parser,
        "while parsing a block collection", context_mark,
        "did not find expected '-' indicator", token.start_mark)
}
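// yaml_parser_parse_indentless_sequence_entry parses an entry of an
// indentless sequence (a '-' sequence nested directly under a mapping key).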
func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool {
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if token.typ == yaml_BLOCK_ENTRY_TOKEN {
        mark := token.end_mark
        skip_token(parser)
        token = peek_token(parser)
        if token == nil {
            return false
        }
        if token.typ != yaml_BLOCK_ENTRY_TOKEN &&
            token.typ != yaml_KEY_TOKEN &&
            token.typ != yaml_VALUE_TOKEN &&
            token.typ != yaml_BLOCK_END_TOKEN {
            parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE)
            return yaml_parser_parse_node(parser, event, true, false)
        }
        parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE
        return yaml_parser_process_empty_scalar(parser, event, mark)
    }
    parser.state = parser.states[len(parser.states)-1]
    parser.states = parser.states[:len(parser.states)-1]
    *event = yaml_event_t{
        typ:        yaml_SEQUENCE_END_EVENT,
        start_mark: token.start_mark,
        end_mark:   token.start_mark, // [Go] Shouldn't this be token.end_mark?
    }
    return true
}
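// yaml_parser_parse_block_mapping_key parses a key of a block mapping, or
// emits MAPPING-END when the block ends.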
func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
    if first {
        token := peek_token(parser)
        parser.marks = append(parser.marks, token.start_mark)
        skip_token(parser)
    }
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if token.typ == yaml_KEY_TOKEN {
        mark := token.end_mark
        skip_token(parser)
        token = peek_token(parser)
        if token == nil {
            return false
        }
        if token.typ != yaml_KEY_TOKEN &&
            token.typ != yaml_VALUE_TOKEN &&
            token.typ != yaml_BLOCK_END_TOKEN {
            parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE)
            return yaml_parser_parse_node(parser, event, true, true)
        } else {
            parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE
            return yaml_parser_process_empty_scalar(parser, event, mark)
        }
    } else if token.typ == yaml_BLOCK_END_TOKEN {
        parser.state = parser.states[len(parser.states)-1]
        parser.states = parser.states[:len(parser.states)-1]
        parser.marks = parser.marks[:len(parser.marks)-1]
        *event = yaml_event_t{
            typ:        yaml_MAPPING_END_EVENT,
            start_mark: token.start_mark,
            end_mark:   token.end_mark,
        }
        skip_token(parser)
        return true
    }
    context_mark := parser.marks[len(parser.marks)-1]
    parser.marks = parser.marks[:len(parser.marks)-1]
    return yaml_parser_set_parser_error_context(parser,
        "while parsing a block mapping", context_mark,
        "did not find expected key", token.start_mark)
}
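// yaml_parser_parse_block_mapping_value parses the value that follows a block
// mapping key, producing an empty scalar when the value is missing.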
func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if token.typ == yaml_VALUE_TOKEN {
        mark := token.end_mark
        skip_token(parser)
        token = peek_token(parser)
        if token == nil {
            return false
        }
        if token.typ != yaml_KEY_TOKEN &&
            token.typ != yaml_VALUE_TOKEN &&
            token.typ != yaml_BLOCK_END_TOKEN {
            parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE)
            return yaml_parser_parse_node(parser, event, true, true)
        }
        parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
        return yaml_parser_process_empty_scalar(parser, event, mark)
    }
    parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE
    return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}
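// yaml_parser_parse_flow_sequence_entry parses one entry of a flow sequence
// '[...]', including the compact single-pair mapping form such as '[key: value]'.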
func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
    if first {
        token := peek_token(parser)
        parser.marks = append(parser.marks, token.start_mark)
        skip_token(parser)
    }
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
        if !first {
            if token.typ == yaml_FLOW_ENTRY_TOKEN {
                skip_token(parser)
                token = peek_token(parser)
                if token == nil {
                    return false
                }
            } else {
                context_mark := parser.marks[len(parser.marks)-1]
                parser.marks = parser.marks[:len(parser.marks)-1]
                return yaml_parser_set_parser_error_context(parser,
                    "while parsing a flow sequence", context_mark,
                    "did not find expected ',' or ']'", token.start_mark)
            }
        }
        if token.typ == yaml_KEY_TOKEN {
            parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE
            *event = yaml_event_t{
                typ:        yaml_MAPPING_START_EVENT,
                start_mark: token.start_mark,
                end_mark:   token.end_mark,
                implicit:   true,
                style:      yaml_style_t(yaml_FLOW_MAPPING_STYLE),
            }
            skip_token(parser)
            return true
        } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
            parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE)
            return yaml_parser_parse_node(parser, event, false, false)
        }
    }
    parser.state = parser.states[len(parser.states)-1]
    parser.states = parser.states[:len(parser.states)-1]
    parser.marks = parser.marks[:len(parser.marks)-1]
    *event = yaml_event_t{
        typ:        yaml_SEQUENCE_END_EVENT,
        start_mark: token.start_mark,
        end_mark:   token.end_mark,
    }
    skip_token(parser)
    return true
}
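// yaml_parser_parse_flow_sequence_entry_mapping_key parses the key of a
// single-pair mapping nested inside a flow sequence.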
func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool {
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if token.typ != yaml_VALUE_TOKEN &&
        token.typ != yaml_FLOW_ENTRY_TOKEN &&
        token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
        parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE)
        return yaml_parser_parse_node(parser, event, false, false)
    }
    mark := token.end_mark
    skip_token(parser)
    parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE
    return yaml_parser_process_empty_scalar(parser, event, mark)
}
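// yaml_parser_parse_flow_sequence_entry_mapping_value parses the value of a
// single-pair mapping nested inside a flow sequence.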
func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool {
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if token.typ == yaml_VALUE_TOKEN {
        skip_token(parser)
        token := peek_token(parser)
        if token == nil {
            return false
        }
        if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN {
            parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE)
            return yaml_parser_parse_node(parser, event, false, false)
        }
    }
    parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE
    return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}
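// yaml_parser_parse_flow_sequence_entry_mapping_end emits the MAPPING-END
// event that closes a single-pair mapping inside a flow sequence.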
func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool {
    token := peek_token(parser)
    if token == nil {
        return false
    }
    parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE
    *event = yaml_event_t{
        typ:        yaml_MAPPING_END_EVENT,
        start_mark: token.start_mark,
        end_mark:   token.start_mark, // [Go] Shouldn't this be end_mark?
    }
    return true
}
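// yaml_parser_parse_flow_mapping_key parses a key of a flow mapping '{...}',
// or emits MAPPING-END when the closing '}' is reached.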
func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
    if first {
        token := peek_token(parser)
        parser.marks = append(parser.marks, token.start_mark)
        skip_token(parser)
    }
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
        if !first {
            if token.typ == yaml_FLOW_ENTRY_TOKEN {
                skip_token(parser)
                token = peek_token(parser)
                if token == nil {
                    return false
                }
            } else {
                context_mark := parser.marks[len(parser.marks)-1]
                parser.marks = parser.marks[:len(parser.marks)-1]
                return yaml_parser_set_parser_error_context(parser,
                    "while parsing a flow mapping", context_mark,
                    "did not find expected ',' or '}'", token.start_mark)
            }
        }
        if token.typ == yaml_KEY_TOKEN {
            skip_token(parser)
            token = peek_token(parser)
            if token == nil {
                return false
            }
            if token.typ != yaml_VALUE_TOKEN &&
                token.typ != yaml_FLOW_ENTRY_TOKEN &&
                token.typ != yaml_FLOW_MAPPING_END_TOKEN {
                parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE)
                return yaml_parser_parse_node(parser, event, false, false)
            } else {
                parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE
                return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
            }
        } else if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
            parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE)
            return yaml_parser_parse_node(parser, event, false, false)
        }
    }
    parser.state = parser.states[len(parser.states)-1]
    parser.states = parser.states[:len(parser.states)-1]
    parser.marks = parser.marks[:len(parser.marks)-1]
    *event = yaml_event_t{
        typ:        yaml_MAPPING_END_EVENT,
        start_mark: token.start_mark,
        end_mark:   token.end_mark,
    }
    skip_token(parser)
    return true
}
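// yaml_parser_parse_flow_mapping_value parses the value of a flow mapping
// entry; when called with empty set to true it emits an empty scalar without
// consuming any tokens.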
func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool {
    token := peek_token(parser)
    if token == nil {
        return false
    }
    if empty {
        parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
        return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
    }
    if token.typ == yaml_VALUE_TOKEN {
        skip_token(parser)
        token = peek_token(parser)
        if token == nil {
            return false
        }
        if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN {
            parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE)
            return yaml_parser_parse_node(parser, event, false, false)
        }
    }
    parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
    return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
}
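// yaml_parser_process_empty_scalar generates an empty plain scalar event at
// the given mark, used wherever a node is expected but missing.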
func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool {
    *event = yaml_event_t{
        typ:        yaml_SCALAR_EVENT,
        start_mark: mark,
        end_mark:   mark,
        value:      nil, // Empty
        implicit:   true,
        style:      yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
    }
    return true
}
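// The tag handles that are always available: '!' and the '!!' shorthand for
// the tag:yaml.org,2002: namespace.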
var default_tag_directives = []yaml_tag_directive_t{
    {[]byte("!"), []byte("!")},
    {[]byte("!!"), []byte("tag:yaml.org,2002:")},
}
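// yaml_parser_process_directives parses the %YAML and %TAG directives at the
// start of a document, rejecting duplicate or incompatible directives, and
// installs the default tag handles.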
func yaml_parser_process_directives(parser *yaml_parser_t,
    version_directive_ref **yaml_version_directive_t,
    tag_directives_ref *[]yaml_tag_directive_t) bool {

    var version_directive *yaml_version_directive_t
    var tag_directives []yaml_tag_directive_t

    token := peek_token(parser)
    if token == nil {
        return false
    }
    for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN {
        if token.typ == yaml_VERSION_DIRECTIVE_TOKEN {
            if version_directive != nil {
                yaml_parser_set_parser_error(parser,
                    "found duplicate %YAML directive", token.start_mark)
                return false
            }
            if token.major != 1 || token.minor != 1 {
                yaml_parser_set_parser_error(parser,
                    "found incompatible YAML document", token.start_mark)
                return false
            }
            version_directive = &yaml_version_directive_t{
                major: token.major,
                minor: token.minor,
            }
        } else if token.typ == yaml_TAG_DIRECTIVE_TOKEN {
            value := yaml_tag_directive_t{
                handle: token.value,
                prefix: token.prefix,
            }
            if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) {
                return false
            }
            tag_directives = append(tag_directives, value)
        }
        skip_token(parser)
        token = peek_token(parser)
        if token == nil {
            return false
        }
    }
    for i := range default_tag_directives {
        if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) {
            return false
        }
    }
    if version_directive_ref != nil {
        *version_directive_ref = version_directive
    }
    if tag_directives_ref != nil {
        *tag_directives_ref = tag_directives
    }
    return true
}
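// yaml_parser_append_tag_directive registers a tag directive on the parser,
// reporting duplicates unless allow_duplicates is set (as it is for the
// default handles).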
func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool {
    for i := range parser.tag_directives {
        if bytes.Equal(value.handle, parser.tag_directives[i].handle) {
            if allow_duplicates {
                return true
            }
            return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark)
        }
    }
    value_copy := yaml_tag_directive_t{
        handle: make([]byte, len(value.handle)),
        prefix: make([]byte, len(value.prefix)),
    }
    copy(value_copy.handle, value.handle)
    copy(value_copy.prefix, value.prefix)
    parser.tag_directives = append(parser.tag_directives, value_copy)
    return true
}