chore: migrate to gitea
Some checks failed:
golangci-lint / lint (push): successful in 1m33s
Test / test (push): failing after 2m16s

commit f81c902ca6 (parent 79d9f55fdc), 2026-01-27 00:12:32 +01:00
3170 changed files with 1216494 additions and 1586 deletions

vendor/github.com/goccy/go-yaml/parser/color.go (generated, vendored, new file, +28)
@@ -0,0 +1,28 @@
package parser
import "fmt"
const (
colorFgHiBlack int = iota + 90
colorFgHiRed
colorFgHiGreen
colorFgHiYellow
colorFgHiBlue
colorFgHiMagenta
colorFgHiCyan
)
var colorTable = []int{
colorFgHiRed,
colorFgHiGreen,
colorFgHiYellow,
colorFgHiBlue,
colorFgHiMagenta,
colorFgHiCyan,
}
func colorize(idx int, content string) string {
colorIdx := idx % len(colorTable)
color := colorTable[colorIdx]
return fmt.Sprintf("\x1b[1;%dm", color) + content + "\x1b[22;0m"
}
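The escape codes above are the standard ANSI bright-foreground sequences (90-96). A minimal standalone sketch of the same colorization idea; colorizeDemo and palette are illustrative names, not part of go-yaml:

package main

import "fmt"

// bright ANSI foreground codes, the same 90-96 range used by colorTable above.
var palette = []int{91, 92, 93, 94, 95, 96}

// colorizeDemo is a hypothetical stand-in for the unexported colorize helper.
func colorizeDemo(idx int, s string) string {
	code := palette[idx%len(palette)]
	// "\x1b[1;<code>m" enables bold plus the color; "\x1b[0m" resets all attributes.
	return fmt.Sprintf("\x1b[1;%dm%s\x1b[0m", code, s)
}

func main() {
	for i, word := range []string{"red", "green", "yellow", "blue"} {
		fmt.Println(colorizeDemo(i, word))
	}
}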

vendor/github.com/goccy/go-yaml/parser/context.go (generated, vendored, new file, +187)
@@ -0,0 +1,187 @@
package parser
import (
"fmt"
"strings"
"github.com/goccy/go-yaml/token"
)
// context holds the parser state while parsing.
type context struct {
tokenRef *tokenRef
path string
isFlow bool
}
type tokenRef struct {
tokens []*Token
size int
idx int
}
var pathSpecialChars = []string{
"$", "*", ".", "[", "]",
}
func containsPathSpecialChar(path string) bool {
for _, char := range pathSpecialChars {
if strings.Contains(path, char) {
return true
}
}
return false
}
func normalizePath(path string) string {
if containsPathSpecialChar(path) {
return fmt.Sprintf("'%s'", path)
}
return path
}
func (c *context) currentToken() *Token {
if c.tokenRef.idx >= c.tokenRef.size {
return nil
}
return c.tokenRef.tokens[c.tokenRef.idx]
}
func (c *context) isComment() bool {
return c.currentToken().Type() == token.CommentType
}
func (c *context) nextToken() *Token {
if c.tokenRef.idx+1 >= c.tokenRef.size {
return nil
}
return c.tokenRef.tokens[c.tokenRef.idx+1]
}
func (c *context) nextNotCommentToken() *Token {
for i := c.tokenRef.idx + 1; i < c.tokenRef.size; i++ {
tk := c.tokenRef.tokens[i]
if tk.Type() == token.CommentType {
continue
}
return tk
}
return nil
}
func (c *context) isTokenNotFound() bool {
return c.currentToken() == nil
}
func (c *context) withGroup(g *TokenGroup) *context {
ctx := *c
ctx.tokenRef = &tokenRef{
tokens: g.Tokens,
size: len(g.Tokens),
}
return &ctx
}
func (c *context) withChild(path string) *context {
ctx := *c
ctx.path = c.path + "." + normalizePath(path)
return &ctx
}
func (c *context) withIndex(idx uint) *context {
ctx := *c
ctx.path = c.path + "[" + fmt.Sprint(idx) + "]"
return &ctx
}
func (c *context) withFlow(isFlow bool) *context {
ctx := *c
ctx.isFlow = isFlow
return &ctx
}
func newContext() *context {
return &context{
path: "$",
}
}
func (c *context) goNext() {
ref := c.tokenRef
if ref.size <= ref.idx+1 {
ref.idx = ref.size
} else {
ref.idx++
}
}
func (c *context) next() bool {
return c.tokenRef.idx < c.tokenRef.size
}
func (c *context) insertNullToken(tk *Token) *Token {
nullToken := c.createImplicitNullToken(tk)
c.insertToken(nullToken)
c.goNext()
return nullToken
}
func (c *context) addNullValueToken(tk *Token) *Token {
nullToken := c.createImplicitNullToken(tk)
rawTk := nullToken.RawToken()
// add space for map or sequence value.
rawTk.Position.Column++
c.addToken(nullToken)
c.goNext()
return nullToken
}
func (c *context) createImplicitNullToken(base *Token) *Token {
pos := *(base.RawToken().Position)
pos.Column++
tk := token.New("null", " null", &pos)
tk.Type = token.ImplicitNullType
return &Token{Token: tk}
}
func (c *context) insertToken(tk *Token) {
ref := c.tokenRef
idx := ref.idx
if ref.size < idx {
return
}
if ref.size == idx {
curToken := ref.tokens[ref.size-1]
tk.RawToken().Next = curToken.RawToken()
curToken.RawToken().Prev = tk.RawToken()
ref.tokens = append(ref.tokens, tk)
ref.size = len(ref.tokens)
return
}
curToken := ref.tokens[idx]
tk.RawToken().Next = curToken.RawToken()
curToken.RawToken().Prev = tk.RawToken()
ref.tokens = append(ref.tokens[:idx+1], ref.tokens[idx:]...)
ref.tokens[idx] = tk
ref.size = len(ref.tokens)
}
func (c *context) addToken(tk *Token) {
ref := c.tokenRef
lastTk := ref.tokens[ref.size-1]
if lastTk.Group != nil {
lastTk = lastTk.Group.Last()
}
lastTk.RawToken().Next = tk.RawToken()
tk.RawToken().Prev = lastTk.RawToken()
ref.tokens = append(ref.tokens, tk)
ref.size = len(ref.tokens)
}
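withChild and withIndex extend the node path in YAMLPath form, quoting keys that contain special characters. A standalone sketch of that path bookkeeping; normalizeKey below mirrors normalizePath and is illustrative only:

package main

import (
	"fmt"
	"strings"
)

// normalizeKey mirrors normalizePath above: keys containing YAMLPath special
// characters are wrapped in single quotes.
func normalizeKey(key string) string {
	if strings.ContainsAny(key, "$*.[]") {
		return fmt.Sprintf("'%s'", key)
	}
	return key
}

func main() {
	// paths grow the way withChild and withIndex build them: start at "$",
	// append ".<key>" for map children and "[<idx>]" for sequence elements.
	path := "$"
	path += "." + normalizeKey("spec")
	path += "[" + fmt.Sprint(0) + "]"
	path += "." + normalizeKey("foo.bar") // contains '.', so it is quoted
	fmt.Println(path)                     // $.spec[0].'foo.bar'
}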

vendor/github.com/goccy/go-yaml/parser/node.go (generated, vendored, new file, +257)
@@ -0,0 +1,257 @@
package parser
import (
"fmt"
"github.com/goccy/go-yaml/ast"
"github.com/goccy/go-yaml/internal/errors"
"github.com/goccy/go-yaml/token"
)
func newMappingNode(ctx *context, tk *Token, isFlow bool, values ...*ast.MappingValueNode) (*ast.MappingNode, error) {
node := ast.Mapping(tk.RawToken(), isFlow, values...)
node.SetPath(ctx.path)
return node, nil
}
func newMappingValueNode(ctx *context, colonTk, entryTk *Token, key ast.MapKeyNode, value ast.Node) (*ast.MappingValueNode, error) {
node := ast.MappingValue(colonTk.RawToken(), key, value)
node.SetPath(ctx.path)
node.CollectEntry = entryTk.RawToken()
if key.GetToken().Position.Line == value.GetToken().Position.Line {
// the comment originally belonged to the key, but now that a null value has been added, it must be attached to the value instead.
if err := setLineComment(ctx, value, colonTk); err != nil {
return nil, err
}
// set line comment by colonTk or entryTk.
if err := setLineComment(ctx, value, entryTk); err != nil {
return nil, err
}
} else {
if err := setLineComment(ctx, key, colonTk); err != nil {
return nil, err
}
// set line comment by colonTk or entryTk.
if err := setLineComment(ctx, key, entryTk); err != nil {
return nil, err
}
}
return node, nil
}
func newMappingKeyNode(ctx *context, tk *Token) (*ast.MappingKeyNode, error) {
node := ast.MappingKey(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newAnchorNode(ctx *context, tk *Token) (*ast.AnchorNode, error) {
node := ast.Anchor(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newAliasNode(ctx *context, tk *Token) (*ast.AliasNode, error) {
node := ast.Alias(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newDirectiveNode(ctx *context, tk *Token) (*ast.DirectiveNode, error) {
node := ast.Directive(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newMergeKeyNode(ctx *context, tk *Token) (*ast.MergeKeyNode, error) {
node := ast.MergeKey(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newNullNode(ctx *context, tk *Token) (*ast.NullNode, error) {
node := ast.Null(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newBoolNode(ctx *context, tk *Token) (*ast.BoolNode, error) {
node := ast.Bool(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newIntegerNode(ctx *context, tk *Token) (*ast.IntegerNode, error) {
node := ast.Integer(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newFloatNode(ctx *context, tk *Token) (*ast.FloatNode, error) {
node := ast.Float(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newInfinityNode(ctx *context, tk *Token) (*ast.InfinityNode, error) {
node := ast.Infinity(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newNanNode(ctx *context, tk *Token) (*ast.NanNode, error) {
node := ast.Nan(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newStringNode(ctx *context, tk *Token) (*ast.StringNode, error) {
node := ast.String(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newLiteralNode(ctx *context, tk *Token) (*ast.LiteralNode, error) {
node := ast.Literal(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newTagNode(ctx *context, tk *Token) (*ast.TagNode, error) {
node := ast.Tag(tk.RawToken())
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newSequenceNode(ctx *context, tk *Token, isFlow bool) (*ast.SequenceNode, error) {
node := ast.Sequence(tk.RawToken(), isFlow)
node.SetPath(ctx.path)
if err := setLineComment(ctx, node, tk); err != nil {
return nil, err
}
return node, nil
}
func newTagDefaultScalarValueNode(ctx *context, tag *token.Token) (ast.ScalarNode, error) {
pos := *(tag.Position)
pos.Column++
var (
tk *Token
node ast.ScalarNode
)
switch token.ReservedTagKeyword(tag.Value) {
case token.IntegerTag:
tk = &Token{Token: token.New("0", "0", &pos)}
n, err := newIntegerNode(ctx, tk)
if err != nil {
return nil, err
}
node = n
case token.FloatTag:
tk = &Token{Token: token.New("0", "0", &pos)}
n, err := newFloatNode(ctx, tk)
if err != nil {
return nil, err
}
node = n
case token.StringTag, token.BinaryTag, token.TimestampTag:
tk = &Token{Token: token.New("", "", &pos)}
n, err := newStringNode(ctx, tk)
if err != nil {
return nil, err
}
node = n
case token.BooleanTag:
tk = &Token{Token: token.New("false", "false", &pos)}
n, err := newBoolNode(ctx, tk)
if err != nil {
return nil, err
}
node = n
case token.NullTag:
tk = &Token{Token: token.New("null", "null", &pos)}
n, err := newNullNode(ctx, tk)
if err != nil {
return nil, err
}
node = n
default:
return nil, errors.ErrSyntax(fmt.Sprintf("cannot assign default value for %q tag", tag.Value), tag)
}
ctx.insertToken(tk)
ctx.goNext()
return node, nil
}
func setLineComment(ctx *context, node ast.Node, tk *Token) error {
if tk == nil || tk.LineComment == nil {
return nil
}
comment := ast.CommentGroup([]*token.Token{tk.LineComment})
comment.SetPath(ctx.path)
if err := node.SetComment(comment); err != nil {
return err
}
return nil
}
func setHeadComment(cm *ast.CommentGroupNode, value ast.Node) error {
if cm == nil {
return nil
}
switch n := value.(type) {
case *ast.MappingNode:
if len(n.Values) != 0 && value.GetComment() == nil {
cm.SetPath(n.Values[0].GetPath())
return n.Values[0].SetComment(cm)
}
case *ast.MappingValueNode:
cm.SetPath(n.GetPath())
return n.SetComment(cm)
}
cm.SetPath(value.GetPath())
return value.SetComment(cm)
}
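All of the constructors above run setLineComment so that a trailing comment on the same line is attached to the node it describes. A hedged usage sketch, assuming the package's exported ParseBytes entry point and the ParseComments mode:

package main

import (
	"fmt"

	"github.com/goccy/go-yaml/parser"
)

func main() {
	src := []byte("a: 1 # kept as a line comment on the value\nb: null\n")
	// ParseComments keeps comment tokens so the node constructors can attach them.
	file, err := parser.ParseBytes(src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	// String() re-renders the document, including the attached comment.
	fmt.Println(file.String())
}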

vendor/github.com/goccy/go-yaml/parser/option.go (generated, vendored, new file, +12)
@@ -0,0 +1,12 @@
package parser
// Option represents a parser option.
type Option func(p *parser)
// AllowDuplicateMapKey allows the use of keys with the same name in the same map; by default this is not permitted.
func AllowDuplicateMapKey() Option {
return func(p *parser) {
p.allowDuplicateMapKey = true
}
}
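A hedged example of passing the option through the exported ParseBytes entry point; by default the duplicate key below is rejected:

package main

import (
	"fmt"

	"github.com/goccy/go-yaml/parser"
)

func main() {
	src := []byte("a: 1\na: 2\n")

	// default mode: duplicate map keys are reported as an error.
	if _, err := parser.ParseBytes(src, 0); err != nil {
		fmt.Println("default:", err)
	}

	// with AllowDuplicateMapKey the document parses and both entries appear in the AST.
	file, err := parser.ParseBytes(src, 0, parser.AllowDuplicateMapKey())
	if err != nil {
		panic(err)
	}
	fmt.Println(file.String())
}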

vendor/github.com/goccy/go-yaml/parser/parser.go (generated, vendored, new file, +1330)
File diff suppressed because it is too large.
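The suppressed parser.go carries the main token-to-AST conversion. A hedged sketch of driving it, assuming the exported lexer.Tokenize and parser.Parse functions:

package main

import (
	"fmt"

	"github.com/goccy/go-yaml/lexer"
	"github.com/goccy/go-yaml/parser"
)

func main() {
	// Parse consumes pre-lexed tokens; ParseBytes wraps lexing and parsing in one call.
	tokens := lexer.Tokenize("name: gitea\nitems:\n  - 1\n  - 2\n")
	file, err := parser.Parse(tokens, 0)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(file.Docs)) // one document
	fmt.Println(file.String())  // re-rendered YAML
}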

vendor/github.com/goccy/go-yaml/parser/token.go (generated, vendored, new file, +746)
@@ -0,0 +1,746 @@
package parser
import (
"fmt"
"os"
"strings"
"github.com/goccy/go-yaml/internal/errors"
"github.com/goccy/go-yaml/token"
)
type TokenGroupType int
const (
TokenGroupNone TokenGroupType = iota
TokenGroupDirective
TokenGroupDirectiveName
TokenGroupDocument
TokenGroupDocumentBody
TokenGroupAnchor
TokenGroupAnchorName
TokenGroupAlias
TokenGroupLiteral
TokenGroupFolded
TokenGroupScalarTag
TokenGroupMapKey
TokenGroupMapKeyValue
)
func (t TokenGroupType) String() string {
switch t {
case TokenGroupNone:
return "none"
case TokenGroupDirective:
return "directive"
case TokenGroupDirectiveName:
return "directive_name"
case TokenGroupDocument:
return "document"
case TokenGroupDocumentBody:
return "document_body"
case TokenGroupAnchor:
return "anchor"
case TokenGroupAnchorName:
return "anchor_name"
case TokenGroupAlias:
return "alias"
case TokenGroupLiteral:
return "literal"
case TokenGroupFolded:
return "folded"
case TokenGroupScalarTag:
return "scalar_tag"
case TokenGroupMapKey:
return "map_key"
case TokenGroupMapKeyValue:
return "map_key_value"
}
return "none"
}
type Token struct {
Token *token.Token
Group *TokenGroup
LineComment *token.Token
}
func (t *Token) RawToken() *token.Token {
if t == nil {
return nil
}
if t.Token != nil {
return t.Token
}
return t.Group.RawToken()
}
func (t *Token) Type() token.Type {
if t == nil {
return 0
}
if t.Token != nil {
return t.Token.Type
}
return t.Group.TokenType()
}
func (t *Token) GroupType() TokenGroupType {
if t == nil {
return TokenGroupNone
}
if t.Token != nil {
return TokenGroupNone
}
return t.Group.Type
}
func (t *Token) Line() int {
if t == nil {
return 0
}
if t.Token != nil {
return t.Token.Position.Line
}
return t.Group.Line()
}
func (t *Token) Column() int {
if t == nil {
return 0
}
if t.Token != nil {
return t.Token.Position.Column
}
return t.Group.Column()
}
func (t *Token) SetGroupType(typ TokenGroupType) {
if t.Group == nil {
return
}
t.Group.Type = typ
}
func (t *Token) Dump() {
ctx := new(groupTokenRenderContext)
if t.Token != nil {
fmt.Fprint(os.Stdout, t.Token.Value)
return
}
t.Group.dump(ctx)
fmt.Fprintf(os.Stdout, "\n")
}
func (t *Token) dump(ctx *groupTokenRenderContext) {
if t.Token != nil {
fmt.Fprint(os.Stdout, t.Token.Value)
return
}
t.Group.dump(ctx)
}
type groupTokenRenderContext struct {
num int
}
type TokenGroup struct {
Type TokenGroupType
Tokens []*Token
}
func (g *TokenGroup) First() *Token {
if len(g.Tokens) == 0 {
return nil
}
return g.Tokens[0]
}
func (g *TokenGroup) Last() *Token {
if len(g.Tokens) == 0 {
return nil
}
return g.Tokens[len(g.Tokens)-1]
}
func (g *TokenGroup) dump(ctx *groupTokenRenderContext) {
num := ctx.num
fmt.Fprint(os.Stdout, colorize(num, "("))
ctx.num++
for _, tk := range g.Tokens {
tk.dump(ctx)
}
fmt.Fprint(os.Stdout, colorize(num, ")"))
}
func (g *TokenGroup) RawToken() *token.Token {
if len(g.Tokens) == 0 {
return nil
}
return g.Tokens[0].RawToken()
}
func (g *TokenGroup) Line() int {
if len(g.Tokens) == 0 {
return 0
}
return g.Tokens[0].Line()
}
func (g *TokenGroup) Column() int {
if len(g.Tokens) == 0 {
return 0
}
return g.Tokens[0].Column()
}
func (g *TokenGroup) TokenType() token.Type {
if len(g.Tokens) == 0 {
return 0
}
return g.Tokens[0].Type()
}
func CreateGroupedTokens(tokens token.Tokens) ([]*Token, error) {
var err error
tks := newTokens(tokens)
tks = createLineCommentTokenGroups(tks)
tks, err = createLiteralAndFoldedTokenGroups(tks)
if err != nil {
return nil, err
}
tks, err = createAnchorAndAliasTokenGroups(tks)
if err != nil {
return nil, err
}
tks, err = createScalarTagTokenGroups(tks)
if err != nil {
return nil, err
}
tks, err = createAnchorWithScalarTagTokenGroups(tks)
if err != nil {
return nil, err
}
tks, err = createMapKeyTokenGroups(tks)
if err != nil {
return nil, err
}
tks = createMapKeyValueTokenGroups(tks)
tks, err = createDirectiveTokenGroups(tks)
if err != nil {
return nil, err
}
tks, err = createDocumentTokens(tks)
if err != nil {
return nil, err
}
return tks, nil
}
func newTokens(tks token.Tokens) []*Token {
ret := make([]*Token, 0, len(tks))
for _, tk := range tks {
ret = append(ret, &Token{Token: tk})
}
return ret
}
func createLineCommentTokenGroups(tokens []*Token) []*Token {
ret := make([]*Token, 0, len(tokens))
for i := 0; i < len(tokens); i++ {
tk := tokens[i]
switch tk.Type() {
case token.CommentType:
if i > 0 && tokens[i-1].Line() == tk.Line() {
tokens[i-1].LineComment = tk.RawToken()
} else {
ret = append(ret, tk)
}
default:
ret = append(ret, tk)
}
}
return ret
}
func createLiteralAndFoldedTokenGroups(tokens []*Token) ([]*Token, error) {
ret := make([]*Token, 0, len(tokens))
for i := 0; i < len(tokens); i++ {
tk := tokens[i]
switch tk.Type() {
case token.LiteralType:
tks := []*Token{tk}
if i+1 < len(tokens) {
tks = append(tks, tokens[i+1])
}
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupLiteral,
Tokens: tks,
},
})
i++
case token.FoldedType:
tks := []*Token{tk}
if i+1 < len(tokens) {
tks = append(tks, tokens[i+1])
}
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupFolded,
Tokens: tks,
},
})
i++
default:
ret = append(ret, tk)
}
}
return ret, nil
}
func createAnchorAndAliasTokenGroups(tokens []*Token) ([]*Token, error) {
ret := make([]*Token, 0, len(tokens))
for i := 0; i < len(tokens); i++ {
tk := tokens[i]
switch tk.Type() {
case token.AnchorType:
if i+1 >= len(tokens) {
return nil, errors.ErrSyntax("undefined anchor name", tk.RawToken())
}
if i+2 >= len(tokens) {
return nil, errors.ErrSyntax("undefined anchor value", tk.RawToken())
}
anchorName := &Token{
Group: &TokenGroup{
Type: TokenGroupAnchorName,
Tokens: []*Token{tk, tokens[i+1]},
},
}
valueTk := tokens[i+2]
if tk.Line() == valueTk.Line() && valueTk.Type() == token.SequenceEntryType {
return nil, errors.ErrSyntax("sequence entries are not allowed after anchor on the same line", valueTk.RawToken())
}
if tk.Line() == valueTk.Line() && isScalarType(valueTk) {
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupAnchor,
Tokens: []*Token{anchorName, valueTk},
},
})
i++
} else {
ret = append(ret, anchorName)
}
i++
case token.AliasType:
if i+1 == len(tokens) {
return nil, errors.ErrSyntax("undefined alias name", tk.RawToken())
}
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupAlias,
Tokens: []*Token{tk, tokens[i+1]},
},
})
i++
default:
ret = append(ret, tk)
}
}
return ret, nil
}
func createScalarTagTokenGroups(tokens []*Token) ([]*Token, error) {
ret := make([]*Token, 0, len(tokens))
for i := 0; i < len(tokens); i++ {
tk := tokens[i]
if tk.Type() != token.TagType {
ret = append(ret, tk)
continue
}
tag := tk.RawToken()
if strings.HasPrefix(tag.Value, "!!") {
// secondary tag.
switch token.ReservedTagKeyword(tag.Value) {
case token.IntegerTag, token.FloatTag, token.StringTag, token.BinaryTag, token.TimestampTag, token.BooleanTag, token.NullTag:
if len(tokens) <= i+1 {
ret = append(ret, tk)
continue
}
if tk.Line() != tokens[i+1].Line() {
ret = append(ret, tk)
continue
}
if tokens[i+1].GroupType() == TokenGroupAnchorName {
ret = append(ret, tk)
continue
}
if isScalarType(tokens[i+1]) {
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupScalarTag,
Tokens: []*Token{tk, tokens[i+1]},
},
})
i++
} else {
ret = append(ret, tk)
}
case token.MergeTag:
if len(tokens) <= i+1 {
ret = append(ret, tk)
continue
}
if tk.Line() != tokens[i+1].Line() {
ret = append(ret, tk)
continue
}
if tokens[i+1].GroupType() == TokenGroupAnchorName {
ret = append(ret, tk)
continue
}
if tokens[i+1].Type() != token.MergeKeyType {
return nil, errors.ErrSyntax("could not find merge key", tokens[i+1].RawToken())
}
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupScalarTag,
Tokens: []*Token{tk, tokens[i+1]},
},
})
i++
default:
ret = append(ret, tk)
}
} else {
if len(tokens) <= i+1 {
ret = append(ret, tk)
continue
}
if tk.Line() != tokens[i+1].Line() {
ret = append(ret, tk)
continue
}
if tokens[i+1].GroupType() == TokenGroupAnchorName {
ret = append(ret, tk)
continue
}
if isFlowType(tokens[i+1]) {
ret = append(ret, tk)
continue
}
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupScalarTag,
Tokens: []*Token{tk, tokens[i+1]},
},
})
i++
}
}
return ret, nil
}
func createAnchorWithScalarTagTokenGroups(tokens []*Token) ([]*Token, error) {
ret := make([]*Token, 0, len(tokens))
for i := 0; i < len(tokens); i++ {
tk := tokens[i]
switch tk.GroupType() {
case TokenGroupAnchorName:
if i+1 >= len(tokens) {
return nil, errors.ErrSyntax("undefined anchor value", tk.RawToken())
}
valueTk := tokens[i+1]
if tk.Line() == valueTk.Line() && valueTk.GroupType() == TokenGroupScalarTag {
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupAnchor,
Tokens: []*Token{tk, tokens[i+1]},
},
})
i++
} else {
ret = append(ret, tk)
}
default:
ret = append(ret, tk)
}
}
return ret, nil
}
func createMapKeyTokenGroups(tokens []*Token) ([]*Token, error) {
tks, err := createMapKeyByMappingKey(tokens)
if err != nil {
return nil, err
}
return createMapKeyByMappingValue(tks)
}
func createMapKeyByMappingKey(tokens []*Token) ([]*Token, error) {
ret := make([]*Token, 0, len(tokens))
for i := 0; i < len(tokens); i++ {
tk := tokens[i]
switch tk.Type() {
case token.MappingKeyType:
if i+1 >= len(tokens) {
return nil, errors.ErrSyntax("undefined map key", tk.RawToken())
}
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupMapKey,
Tokens: []*Token{tk, tokens[i+1]},
},
})
i++
default:
ret = append(ret, tk)
}
}
return ret, nil
}
func createMapKeyByMappingValue(tokens []*Token) ([]*Token, error) {
ret := make([]*Token, 0, len(tokens))
for i := 0; i < len(tokens); i++ {
tk := tokens[i]
switch tk.Type() {
case token.MappingValueType:
if i == 0 {
return nil, errors.ErrSyntax("unexpected key name", tk.RawToken())
}
mapKeyTk := tokens[i-1]
if isNotMapKeyType(mapKeyTk) {
return nil, errors.ErrSyntax("found an invalid key for this map", tokens[i].RawToken())
}
newTk := &Token{Token: mapKeyTk.Token, Group: mapKeyTk.Group}
mapKeyTk.Token = nil
mapKeyTk.Group = &TokenGroup{
Type: TokenGroupMapKey,
Tokens: []*Token{newTk, tk},
}
default:
ret = append(ret, tk)
}
}
return ret, nil
}
func createMapKeyValueTokenGroups(tokens []*Token) []*Token {
ret := make([]*Token, 0, len(tokens))
for i := 0; i < len(tokens); i++ {
tk := tokens[i]
switch tk.GroupType() {
case TokenGroupMapKey:
if len(tokens) <= i+1 {
ret = append(ret, tk)
continue
}
valueTk := tokens[i+1]
if tk.Line() != valueTk.Line() {
ret = append(ret, tk)
continue
}
if valueTk.GroupType() == TokenGroupAnchorName {
ret = append(ret, tk)
continue
}
if valueTk.Type() == token.TagType && valueTk.GroupType() != TokenGroupScalarTag {
ret = append(ret, tk)
continue
}
if isScalarType(valueTk) || valueTk.Type() == token.TagType {
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupMapKeyValue,
Tokens: []*Token{tk, valueTk},
},
})
i++
} else {
ret = append(ret, tk)
continue
}
default:
ret = append(ret, tk)
}
}
return ret
}
func createDirectiveTokenGroups(tokens []*Token) ([]*Token, error) {
ret := make([]*Token, 0, len(tokens))
for i := 0; i < len(tokens); i++ {
tk := tokens[i]
switch tk.Type() {
case token.DirectiveType:
if i+1 >= len(tokens) {
return nil, errors.ErrSyntax("undefined directive value", tk.RawToken())
}
directiveName := &Token{
Group: &TokenGroup{
Type: TokenGroupDirectiveName,
Tokens: []*Token{tk, tokens[i+1]},
},
}
i++
var valueTks []*Token
for j := i + 1; j < len(tokens); j++ {
if tokens[j].Line() != tk.Line() {
break
}
valueTks = append(valueTks, tokens[j])
i++
}
if i+1 >= len(tokens) || tokens[i+1].Type() != token.DocumentHeaderType {
return nil, errors.ErrSyntax("unexpected directive value. document not started", tk.RawToken())
}
if len(valueTks) != 0 {
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupDirective,
Tokens: append([]*Token{directiveName}, valueTks...),
},
})
} else {
ret = append(ret, directiveName)
}
default:
ret = append(ret, tk)
}
}
return ret, nil
}
func createDocumentTokens(tokens []*Token) ([]*Token, error) {
var ret []*Token
for i := 0; i < len(tokens); i++ {
tk := tokens[i]
switch tk.Type() {
case token.DocumentHeaderType:
if i != 0 {
ret = append(ret, &Token{
Group: &TokenGroup{Tokens: tokens[:i]},
})
}
if i+1 == len(tokens) {
// if the current token is the last one, append a DocumentHeader-only group and return.
return append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupDocument,
Tokens: []*Token{tk},
},
}), nil
}
if tokens[i+1].Type() == token.DocumentHeaderType {
return append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupDocument,
Tokens: []*Token{tk},
},
}), nil
}
if tokens[i].Line() == tokens[i+1].Line() {
switch tokens[i+1].GroupType() {
case TokenGroupMapKey, TokenGroupMapKeyValue:
return nil, errors.ErrSyntax("value cannot be placed after document separator", tokens[i+1].RawToken())
}
switch tokens[i+1].Type() {
case token.SequenceEntryType:
return nil, errors.ErrSyntax("value cannot be placed after document separator", tokens[i+1].RawToken())
}
}
tks, err := createDocumentTokens(tokens[i+1:])
if err != nil {
return nil, err
}
if len(tks) != 0 {
tks[0].SetGroupType(TokenGroupDocument)
tks[0].Group.Tokens = append([]*Token{tk}, tks[0].Group.Tokens...)
return append(ret, tks...), nil
}
return append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupDocument,
Tokens: []*Token{tk},
},
}), nil
case token.DocumentEndType:
if i != 0 {
ret = append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupDocument,
Tokens: tokens[0 : i+1],
},
})
}
if i+1 == len(tokens) {
return ret, nil
}
if isScalarType(tokens[i+1]) {
return nil, errors.ErrSyntax("unexpected end content", tokens[i+1].RawToken())
}
tks, err := createDocumentTokens(tokens[i+1:])
if err != nil {
return nil, err
}
return append(ret, tks...), nil
}
}
return append(ret, &Token{
Group: &TokenGroup{
Type: TokenGroupDocument,
Tokens: tokens,
},
}), nil
}
func isScalarType(tk *Token) bool {
switch tk.GroupType() {
case TokenGroupMapKey, TokenGroupMapKeyValue:
return false
}
typ := tk.Type()
return typ == token.AnchorType ||
typ == token.AliasType ||
typ == token.LiteralType ||
typ == token.FoldedType ||
typ == token.NullType ||
typ == token.ImplicitNullType ||
typ == token.BoolType ||
typ == token.IntegerType ||
typ == token.BinaryIntegerType ||
typ == token.OctetIntegerType ||
typ == token.HexIntegerType ||
typ == token.FloatType ||
typ == token.InfinityType ||
typ == token.NanType ||
typ == token.StringType ||
typ == token.SingleQuoteType ||
typ == token.DoubleQuoteType
}
func isNotMapKeyType(tk *Token) bool {
typ := tk.Type()
return typ == token.DirectiveType ||
typ == token.DocumentHeaderType ||
typ == token.DocumentEndType ||
typ == token.CollectEntryType ||
typ == token.MappingStartType ||
typ == token.MappingValueType ||
typ == token.MappingEndType ||
typ == token.SequenceStartType ||
typ == token.SequenceEntryType ||
typ == token.SequenceEndType
}
func isFlowType(tk *Token) bool {
typ := tk.Type()
return typ == token.MappingStartType ||
typ == token.MappingEndType ||
typ == token.SequenceStartType ||
typ == token.SequenceEntryType
}
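CreateGroupedTokens is the exported entry point for the grouping passes above. A hedged sketch of calling it directly, assuming lexer.Tokenize for producing the raw token stream:

package main

import (
	"fmt"

	"github.com/goccy/go-yaml/lexer"
	"github.com/goccy/go-yaml/parser"
)

func main() {
	tokens := lexer.Tokenize("a: &anc 1\nb: *anc\n")
	grouped, err := parser.CreateGroupedTokens(tokens)
	if err != nil {
		panic(err)
	}
	// after createDocumentTokens, each top-level entry is a document group.
	for _, tk := range grouped {
		fmt.Println(tk.GroupType(), tk.Line(), tk.Column())
	}
}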