Keep the order of `on` when parsing workflow (#46)
Keep the order of `on` when parsing workflow, and fix the occasional unit test failure of `actions`, like https://gitea.com/gitea/act/actions/runs/68

Co-authored-by: Jason Song <i@wolfogre.com>
Reviewed-on: https://gitea.com/gitea/act/pulls/46
Reviewed-by: Lunny Xiao <xiaolunwen@gmail.com>
Reviewed-by: Jason Song <i@wolfogre.com>
Co-authored-by: sillyguodong <gedong_1994@163.com>
Co-committed-by: sillyguodong <gedong_1994@163.com>
This commit is contained in:
parent 0c1f2edb99
commit 518d8c96f3
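Why the ordering matters: the old MappingNode path decoded `on` into a `map[string]interface{}`, and ranging over a Go map is intentionally randomized, so the parsed events could come back in a different order on every run, which is what made the `actions` run flaky. The following standalone sketch (not part of this commit; the sample YAML and program are made up for illustration) contrasts that with reading keys straight from `yaml.Node.Content`, which preserves document order:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	src := []byte("push: ~\npull_request: ~\nschedule: ~\n")

	// Old approach: decode into a map; key order is not stable across runs.
	var asMap map[string]interface{}
	if err := yaml.Unmarshal(src, &asMap); err != nil {
		panic(err)
	}
	for k := range asMap {
		fmt.Println("map:", k) // order varies between runs
	}

	// New approach: keep the yaml.Node and walk Content, which alternates
	// key, value, key, value ... in the order they were written.
	var doc yaml.Node
	if err := yaml.Unmarshal(src, &doc); err != nil {
		panic(err)
	}
	mapping := doc.Content[0] // the document node wraps the top-level mapping
	for i := 0; i < len(mapping.Content); i += 2 {
		fmt.Println("node:", mapping.Content[i].Value) // push, pull_request, schedule
	}
}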
@@ -25,35 +25,21 @@ func (w *SingleWorkflow) Job() (string, *Job) {
 }
 
 func (w *SingleWorkflow) jobs() ([]string, []*Job, error) {
-	var ids []string
-	var jobs []*Job
-	expectKey := true
-	for _, item := range w.RawJobs.Content {
-		if expectKey {
-			if item.Kind != yaml.ScalarNode {
-				return nil, nil, fmt.Errorf("invalid job id: %v", item.Value)
-			}
-			ids = append(ids, item.Value)
-			expectKey = false
-		} else {
-			job := &Job{}
-			if err := item.Decode(job); err != nil {
-				return nil, nil, fmt.Errorf("yaml.Unmarshal: %w", err)
-			}
-			steps := make([]*Step, 0, len(job.Steps))
-			for _, s := range job.Steps {
-				if s != nil {
-					steps = append(steps, s)
-				}
-			}
-			job.Steps = steps
-			jobs = append(jobs, job)
-			expectKey = true
-		}
-	}
-	if len(ids) != len(jobs) {
-		return nil, nil, fmt.Errorf("invalid jobs: %v", w.RawJobs.Value)
-	}
+	ids, jobs, err := parseMappingNode[*Job](&w.RawJobs)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	for _, job := range jobs {
+		steps := make([]*Step, 0, len(job.Steps))
+		for _, s := range job.Steps {
+			if s != nil {
+				steps = append(steps, s)
+			}
+		}
+		job.Steps = steps
+	}
 	return ids, jobs, nil
 }
 
@@ -232,13 +218,13 @@ func ParseRawOn(rawOn *yaml.Node) ([]*Event, error) {
 		}
 		return res, nil
 	case yaml.MappingNode:
-		var val map[string]interface{}
-		err := rawOn.Decode(&val)
+		events, triggers, err := parseMappingNode[interface{}](rawOn)
 		if err != nil {
 			return nil, err
 		}
-		res := make([]*Event, 0, len(val))
-		for k, v := range val {
+		res := make([]*Event, 0, len(events))
+		for i, k := range events {
+			v := triggers[i]
 			if v == nil {
 				res = append(res, &Event{
 					Name: k,
@@ -312,3 +298,36 @@ func ParseRawOn(rawOn *yaml.Node) ([]*Event, error) {
 		return nil, fmt.Errorf("unknown on type: %v", rawOn.Kind)
 	}
 }
+
+// parseMappingNode parse a mapping node and preserve order.
+func parseMappingNode[T any](node *yaml.Node) ([]string, []T, error) {
+	if node.Kind != yaml.MappingNode {
+		return nil, nil, fmt.Errorf("input node is not a mapping node")
+	}
+
+	var scalars []string
+	var datas []T
+	expectKey := true
+	for _, item := range node.Content {
+		if expectKey {
+			if item.Kind != yaml.ScalarNode {
+				return nil, nil, fmt.Errorf("not a valid scalar node: %v", item.Value)
+			}
+			scalars = append(scalars, item.Value)
+			expectKey = false
+		} else {
+			var val T
+			if err := item.Decode(&val); err != nil {
+				return nil, nil, err
+			}
+			datas = append(datas, val)
+			expectKey = true
+		}
+	}
+
+	if len(scalars) != len(datas) {
+		return nil, nil, fmt.Errorf("invalid definition of on: %v", node.Value)
+	}
+
+	return scalars, datas, nil
+}
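For reference, a minimal sketch of how the new unexported helper could be exercised directly from inside the same package (the function name and YAML literal here are made up; `yaml.Unmarshal` into a `yaml.Node` yields a document node whose first child is the top-level mapping):

func exampleParseMappingNode() {
	var doc yaml.Node
	if err := yaml.Unmarshal([]byte("push: ~\npull_request: ~\nschedule: ~\n"), &doc); err != nil {
		panic(err)
	}
	keys, vals, err := parseMappingNode[interface{}](doc.Content[0])
	if err != nil {
		panic(err)
	}
	fmt.Println(keys)      // [push pull_request schedule], document order is preserved
	fmt.Println(len(vals)) // 3; each value decoded as interface{} (nil here)
}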
@@ -220,3 +220,87 @@ func TestSingleWorkflow_SetJob(t *testing.T) {
 			assert.Equal(t, string(want), builder.String())
 		})
 	}
 }
+
+func TestParseMappingNode(t *testing.T) {
+	tests := []struct {
+		input   string
+		scalars []string
+		datas   []interface{}
+	}{
+		{
+			input:   "on:\n  push:\n    branches:\n      - master",
+			scalars: []string{"push"},
+			datas: []interface {
+			}{
+				map[string]interface{}{
+					"branches": []interface{}{"master"},
+				},
+			},
+		},
+		{
+			input:   "on:\n  branch_protection_rule:\n    types: [created, deleted]",
+			scalars: []string{"branch_protection_rule"},
+			datas: []interface{}{
+				map[string]interface{}{
+					"types": []interface{}{"created", "deleted"},
+				},
+			},
+		},
+		{
+			input:   "on:\n  project:\n    types: [created, deleted]\n  milestone:\n    types: [opened, deleted]",
+			scalars: []string{"project", "milestone"},
+			datas: []interface{}{
+				map[string]interface{}{
+					"types": []interface{}{"created", "deleted"},
+				},
+				map[string]interface{}{
+					"types": []interface{}{"opened", "deleted"},
+				},
+			},
+		},
+		{
+			input:   "on:\n  pull_request:\n    types:\n      - opened\n    branches:\n      - 'releases/**'",
+			scalars: []string{"pull_request"},
+			datas: []interface{}{
+				map[string]interface{}{
+					"types":    []interface{}{"opened"},
+					"branches": []interface{}{"releases/**"},
+				},
+			},
+		},
+		{
+			input:   "on:\n  push:\n    branches:\n      - main\n  pull_request:\n    types:\n      - opened\n    branches:\n      - '**'",
+			scalars: []string{"push", "pull_request"},
+			datas: []interface{}{
+				map[string]interface{}{
+					"branches": []interface{}{"main"},
+				},
+				map[string]interface{}{
+					"types":    []interface{}{"opened"},
+					"branches": []interface{}{"**"},
+				},
+			},
+		},
+		{
+			input:   "on:\n  schedule:\n    - cron: '20 6 * * *'",
+			scalars: []string{"schedule"},
+			datas: []interface{}{
+				[]interface{}{map[string]interface{}{
+					"cron": "20 6 * * *",
+				}},
+			},
+		},
+	}
+
+	for _, test := range tests {
+		t.Run(test.input, func(t *testing.T) {
+			workflow, err := model.ReadWorkflow(strings.NewReader(test.input))
+			assert.NoError(t, err)
+
+			scalars, datas, err := parseMappingNode[interface{}](&workflow.RawOn)
+			assert.NoError(t, err)
+			assert.EqualValues(t, test.scalars, scalars, fmt.Sprintf("%#v", scalars))
+			assert.EqualValues(t, test.datas, datas, fmt.Sprintf("%#v", datas))
+		})
+	}
+}
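One could also check the ordering end to end through `ParseRawOn`; the sketch below is not part of the commit and assumes the same test package and imports (`model`, `strings`, `assert`, `testing`) as the test added above, with a made-up test name and workflow:

func TestParseRawOnKeepsOrder(t *testing.T) {
	workflow, err := model.ReadWorkflow(strings.NewReader("on:\n  push:\n  pull_request:\n"))
	assert.NoError(t, err)

	events, err := ParseRawOn(&workflow.RawOn)
	assert.NoError(t, err)

	names := make([]string, 0, len(events))
	for _, event := range events {
		names = append(names, event.Name)
	}
	// With parseMappingNode the events follow the YAML document order.
	assert.Equal(t, []string{"push", "pull_request"}, names)
}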