		// Check indentation level.
		if parser.flow_level == 0 && parser.mark.column < indent {
			break
		}
	}

	// Create a token.
	*token = yaml_token_t{
		typ:        yaml_SCALAR_TOKEN,
		start_mark: start_mark,
		end_mark:   end_mark,
		value:      s,
		style:      yaml_PLAIN_SCALAR_STYLE,
	}

	// Note that we change the 'simple_key_allowed' flag.
	// A plain scalar that ended on leading blanks/breaks means the next
	// token starts on a fresh line, where a simple key is permitted again.
	if leading_blanks {
		parser.simple_key_allowed = true
	}
	return true
}

// yaml_parser_scan_line_comment scans an inline comment — a '#' comment that
// follows other content on the same line as the token at token_mark — and, if
// one is found, appends it to parser.comments with its text in the 'line'
// field. It returns false only when a buffer refill
// (yaml_parser_update_buffer) fails inside the consuming loop; all other
// outcomes, including "no comment present", return true.
func yaml_parser_scan_line_comment(parser *yaml_parser_t, token_mark yaml_mark_t) bool {
	// If any line breaks were already consumed, we are no longer on the
	// token's line, so there can be no inline comment to attach.
	if parser.newlines > 0 {
		return true
	}

	var start_mark yaml_mark_t
	var text []byte

	// Peek ahead (bounded to 512 bytes) over blanks, looking for a '#'.
	for peek := 0; peek < 512; peek++ {
		if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
			break
		}
		if is_blank(parser.buffer, parser.buffer_pos+peek) {
			continue
		}
		if parser.buffer[parser.buffer_pos+peek] == '#' {
			// 'seen' is the absolute index of the '#'; bytes before it are
			// skipped, bytes from it onward are read into 'text'.
			seen := parser.mark.index + peek
			for {
				if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
					return false
				}
				if is_breakz(parser.buffer, parser.buffer_pos) {
					// Stop at the end of the comment line; if the break comes
					// before the '#' was reached, consume it and keep going.
					if parser.mark.index >= seen {
						break
					}
					if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
						return false
					}
					skip_line(parser)
				} else if parser.mark.index >= seen {
					// Inside the comment proper: record where it starts and
					// accumulate its bytes.
					if len(text) == 0 {
						start_mark = parser.mark
					}
					text = read(parser, text)
				} else {
					// Still before the '#': advance without recording.
					skip(parser)
				}
			}
		}
		// First non-blank byte was not '#' (or the comment was consumed):
		// nothing further to scan on this line.
		break
	}
	if len(text) > 0 {
		parser.comments = append(parser.comments, yaml_comment_t{
			token_mark: token_mark,
			start_mark: start_mark,
			line:       text,
		})
	}
	return true
}

// yaml_parser_scan_comments scans full-line comments starting at scan_mark
// and records them in parser.comments, deciding for each run of comment
// lines whether it is a "foot" of the preceding token or a "head" of the
// upcoming one. It returns false only when a buffer refill fails; otherwise
// true.
func yaml_parser_scan_comments(parser *yaml_parser_t, scan_mark yaml_mark_t) bool {
	token := parser.tokens[len(parser.tokens)-1]

	// A flow entry (',') is positional punctuation; attribute the comment to
	// the token before it instead.
	if token.typ == yaml_FLOW_ENTRY_TOKEN && len(parser.tokens) > 1 {
		token = parser.tokens[len(parser.tokens)-2]
	}

	var token_mark = token.start_mark
	var start_mark yaml_mark_t
	var next_indent = parser.indent
	if next_indent < 0 {
		next_indent = 0
	}

	// recent_empty: the previous scanned line was empty.
	// first_empty: no content line has intervened since the prior token.
	var recent_empty = false
	var first_empty = parser.newlines <= 1

	var line = parser.mark.line
	var column = parser.mark.column

	var text []byte

	// The foot line is the place where a comment must start to
	// still be considered as a foot of the prior content.
	// If there's some content in the currently parsed line, then
	// the foot is the line below it.
	var foot_line = -1
	if scan_mark.line > 0 {
		foot_line = parser.mark.line - parser.newlines + 1
		if parser.newlines == 0 && parser.mark.column > 1 {
			foot_line++
		}
	}

	var peek = 0
	for ; peek < 512; peek++ {
		if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) {
			break
		}
		column++
		if is_blank(parser.buffer, parser.buffer_pos+peek) {
			continue
		}
		c := parser.buffer[parser.buffer_pos+peek]
		// A closing ']' or '}' inside a flow collection also terminates the
		// comment run, and forces any accumulated text to be a footer.
		var close_flow = parser.flow_level > 0 && (c == ']' || c == '}')
		if close_flow || is_breakz(parser.buffer, parser.buffer_pos+peek) {
			// Got line break or terminator.
			if close_flow || !recent_empty {
				if close_flow || first_empty && (start_mark.line == foot_line && token.typ != yaml_VALUE_TOKEN || start_mark.column-1 < next_indent) {
					// This is the first empty line and there were no empty lines before,
					// so this initial part of the comment is a foot of the prior token
					// instead of being a head for the following one. Split it up.
					// Alternatively, this might also be the last comment inside a flow
					// scope, so it must be a footer.
					if len(text) > 0 {
						if start_mark.column-1 < next_indent {
							// If dedented it's unrelated to the prior token.
							token_mark = start_mark
						}
						parser.comments = append(parser.comments, yaml_comment_t{
							scan_mark:  scan_mark,
							token_mark: token_mark,
							start_mark: start_mark,
							end_mark:   yaml_mark_t{parser.mark.index + peek, line, column},
							foot:       text,
						})
						// Subsequent comments belong to a fresh scan scope.
						scan_mark = yaml_mark_t{parser.mark.index + peek, line, column}
						token_mark = scan_mark
						text = nil
					}
				} else {
					// Blank line inside a comment run: keep it as a blank
					// line in the accumulated text (unless at end of input).
					if len(text) > 0 && parser.buffer[parser.buffer_pos+peek] != 0 {
						text = append(text, '\n')
					}
				}
			}
			// A terminator (NUL or close_flow's non-break byte) ends the scan;
			// an ordinary break moves on to the next line.
			if !is_break(parser.buffer, parser.buffer_pos+peek) {
				break
			}
			first_empty = false
			recent_empty = true
			column = 0
			line++
			continue
		}

		if len(text) > 0 && (close_flow || column-1 < next_indent && column != start_mark.column) {
			// The comment at the different indentation is a foot of the
			// preceding data rather than a head of the upcoming one.
			parser.comments = append(parser.comments, yaml_comment_t{
				scan_mark:  scan_mark,
				token_mark: token_mark,
				start_mark: start_mark,
				end_mark:   yaml_mark_t{parser.mark.index + peek, line, column},
				foot:       text,
			})
			scan_mark = yaml_mark_t{parser.mark.index + peek, line, column}
			token_mark = scan_mark
			text = nil
		}

		// First non-blank byte that is not a '#' means real content follows:
		// the comment run is over.
		if parser.buffer[parser.buffer_pos+peek] != '#' {
			break
		}

		if len(text) == 0 {
			start_mark = yaml_mark_t{parser.mark.index + peek, line, column}
		} else {
			text = append(text, '\n')
		}

		recent_empty = false

		// Consume until after the consumed comment line.
		seen := parser.mark.index + peek
		for {
			if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
				return false
			}
			if is_breakz(parser.buffer, parser.buffer_pos) {
				if parser.mark.index >= seen {
					break
				}
				if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
					return false
				}
				skip_line(parser)
			} else if parser.mark.index >= seen {
				text = read(parser, text)
			} else {
				skip(parser)
			}
		}

		// The buffer position advanced past the comment line; restart the
		// peek window at the new current position.
		peek = 0
		column = 0
		line = parser.mark.line
		next_indent = parser.indent
		if next_indent < 0 {
			next_indent = 0
		}
	}

	// Whatever text remains is a head comment for the upcoming token.
	if len(text) > 0 {
		parser.comments = append(parser.comments, yaml_comment_t{
			scan_mark:  scan_mark,
			token_mark: start_mark,
			start_mark: start_mark,
			end_mark:   yaml_mark_t{parser.mark.index + peek - 1, line, column},
			head:       text,
		})
	}
	return true
}