diff --git a/api/build/auto_cancel.go b/api/build/auto_cancel.go index d1ba091fe..7f8a77f81 100644 --- a/api/build/auto_cancel.go +++ b/api/build/auto_cancel.go @@ -15,10 +15,10 @@ import ( "github.com/sirupsen/logrus" "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/server/database" "github.com/go-vela/server/internal/token" "github.com/go-vela/types/constants" - "github.com/go-vela/types/pipeline" ) // AutoCancel is a helper function that checks to see if any pending or running diff --git a/api/build/auto_cancel_test.go b/api/build/auto_cancel_test.go index 15420b04d..d3ad75ae7 100644 --- a/api/build/auto_cancel_test.go +++ b/api/build/auto_cancel_test.go @@ -6,8 +6,8 @@ import ( "testing" "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/types/constants" - "github.com/go-vela/types/pipeline" ) func Test_isCancelable(t *testing.T) { diff --git a/api/build/compile_publish.go b/api/build/compile_publish.go index 63db6c895..05111de4a 100644 --- a/api/build/compile_publish.go +++ b/api/build/compile_publish.go @@ -15,14 +15,13 @@ import ( "github.com/go-vela/server/api/types" "github.com/go-vela/server/compiler" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/server/database" "github.com/go-vela/server/internal" "github.com/go-vela/server/queue" "github.com/go-vela/server/queue/models" "github.com/go-vela/server/scm" "github.com/go-vela/types/constants" - "github.com/go-vela/types/library" - "github.com/go-vela/types/pipeline" ) // CompileAndPublishConfig is a struct that contains information for the CompileAndPublish function. 
@@ -181,7 +180,7 @@ func CompileAndPublish( // variable to store executable pipeline p *pipeline.Build // variable to store pipeline configuration - pipeline *library.Pipeline + pipeline *types.Pipeline // variable to store the pipeline type for the repository pipelineType = r.GetPipelineType() // variable to store updated repository record @@ -257,7 +256,7 @@ func CompileAndPublish( repo.SetPipelineType(pipeline.GetType()) } - var compiled *library.Pipeline + var compiled *types.Pipeline // parse and compile the pipeline configuration file p, compiled, err = compiler. Duplicate(). @@ -311,7 +310,7 @@ func CompileAndPublish( // check if the pipeline did not already exist in the database if pipeline == nil { pipeline = compiled - pipeline.SetRepoID(repo.GetID()) + pipeline.SetRepo(repo) pipeline.SetCommit(b.GetCommit()) pipeline.SetRef(b.GetRef()) diff --git a/api/build/executable.go b/api/build/executable.go index 19f66c3a6..c0324a4de 100644 --- a/api/build/executable.go +++ b/api/build/executable.go @@ -12,12 +12,12 @@ import ( "github.com/sirupsen/logrus" "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/server/database" "github.com/go-vela/server/router/middleware/build" "github.com/go-vela/server/router/middleware/repo" "github.com/go-vela/server/util" "github.com/go-vela/types/library" - "github.com/go-vela/types/pipeline" ) // swagger:operation GET /api/v1/repos/{org}/{repo}/builds/{build}/executable builds GetBuildExecutable diff --git a/api/build/graph.go b/api/build/graph.go index 7c612bdf6..086c9261d 100644 --- a/api/build/graph.go +++ b/api/build/graph.go @@ -12,6 +12,7 @@ import ( "github.com/sirupsen/logrus" "github.com/go-vela/server/compiler" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/server/database" "github.com/go-vela/server/internal" "github.com/go-vela/server/router/middleware/build" @@ -21,7 +22,6 @@ import ( "github.com/go-vela/server/util" 
"github.com/go-vela/types/constants" "github.com/go-vela/types/library" - "github.com/go-vela/types/pipeline" ) // Graph contains nodes, and relationships between nodes, or edges. diff --git a/api/build/plan.go b/api/build/plan.go index fcdcd2dff..dc6f26c18 100644 --- a/api/build/plan.go +++ b/api/build/plan.go @@ -12,9 +12,9 @@ import ( "github.com/go-vela/server/api/service" "github.com/go-vela/server/api/step" "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/server/database" "github.com/go-vela/server/scm" - "github.com/go-vela/types/pipeline" ) // PlanBuild is a helper function to plan the build for diff --git a/api/build/skip.go b/api/build/skip.go index 8e8965c57..4c65d908f 100644 --- a/api/build/skip.go +++ b/api/build/skip.go @@ -3,7 +3,7 @@ package build import ( - "github.com/go-vela/types/pipeline" + "github.com/go-vela/server/compiler/types/pipeline" ) // SkipEmptyBuild checks if the build should be skipped due to it diff --git a/api/build/skip_test.go b/api/build/skip_test.go index f55163227..518baafa2 100644 --- a/api/build/skip_test.go +++ b/api/build/skip_test.go @@ -5,7 +5,7 @@ package build import ( "testing" - "github.com/go-vela/types/pipeline" + "github.com/go-vela/server/compiler/types/pipeline" ) func Test_SkipEmptyBuild(t *testing.T) { diff --git a/api/pipeline/compile.go b/api/pipeline/compile.go index 5d39427b8..f21f0a811 100644 --- a/api/pipeline/compile.go +++ b/api/pipeline/compile.go @@ -11,12 +11,12 @@ import ( "github.com/sirupsen/logrus" "github.com/go-vela/server/compiler" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/server/internal" pMiddleware "github.com/go-vela/server/router/middleware/pipeline" "github.com/go-vela/server/router/middleware/repo" "github.com/go-vela/server/router/middleware/user" "github.com/go-vela/server/util" - "github.com/go-vela/types/pipeline" ) // swagger:operation POST 
/api/v1/pipelines/{org}/{repo}/{pipeline}/compile pipelines CompilePipeline diff --git a/api/pipeline/compile_test.go b/api/pipeline/compile_test.go index e1ebd89d4..05a3de45d 100644 --- a/api/pipeline/compile_test.go +++ b/api/pipeline/compile_test.go @@ -11,7 +11,7 @@ import ( "github.com/gin-gonic/gin" "github.com/google/go-cmp/cmp" - "github.com/go-vela/types/pipeline" + "github.com/go-vela/server/compiler/types/pipeline" ) // TestPrepareRuleData tests the prepareRuleData function. diff --git a/api/pipeline/create.go b/api/pipeline/create.go index 2dc5d5067..a37b5aacd 100644 --- a/api/pipeline/create.go +++ b/api/pipeline/create.go @@ -9,10 +9,10 @@ import ( "github.com/gin-gonic/gin" "github.com/sirupsen/logrus" + "github.com/go-vela/server/api/types" "github.com/go-vela/server/database" "github.com/go-vela/server/router/middleware/repo" "github.com/go-vela/server/util" - "github.com/go-vela/types/library" ) // swagger:operation POST /api/v1/pipelines/{org}/{repo} pipelines CreatePipeline @@ -75,7 +75,7 @@ func CreatePipeline(c *gin.Context) { l.Debugf("creating new pipeline for repo %s", r.GetFullName()) // capture body from API request - input := new(library.Pipeline) + input := new(types.Pipeline) err := c.Bind(input) if err != nil { @@ -87,7 +87,7 @@ func CreatePipeline(c *gin.Context) { } // update fields in pipeline object - input.SetRepoID(r.GetID()) + input.SetRepo(r) // send API call to create the pipeline p, err := database.FromContext(c).CreatePipeline(ctx, input) diff --git a/api/pipeline/template.go b/api/pipeline/template.go index cd3c50b62..8703bd9c9 100644 --- a/api/pipeline/template.go +++ b/api/pipeline/template.go @@ -12,6 +12,7 @@ import ( "github.com/go-vela/server/compiler" "github.com/go-vela/server/compiler/registry/github" + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/server/internal" "github.com/go-vela/server/router/middleware/org" "github.com/go-vela/server/router/middleware/pipeline" @@ -20,7 +21,6 @@ import 
( "github.com/go-vela/server/scm" "github.com/go-vela/server/util" "github.com/go-vela/types/library" - "github.com/go-vela/types/yaml" ) // swagger:operation GET /api/v1/pipelines/{org}/{repo}/{pipeline}/templates pipelines GetTemplates diff --git a/api/pipeline/update.go b/api/pipeline/update.go index babd29494..b7f338525 100644 --- a/api/pipeline/update.go +++ b/api/pipeline/update.go @@ -9,11 +9,11 @@ import ( "github.com/gin-gonic/gin" "github.com/sirupsen/logrus" + "github.com/go-vela/server/api/types" "github.com/go-vela/server/database" "github.com/go-vela/server/router/middleware/pipeline" "github.com/go-vela/server/router/middleware/repo" "github.com/go-vela/server/util" - "github.com/go-vela/types/library" ) // swagger:operation PUT /api/v1/pipelines/{org}/{repo}/{pipeline} pipelines UpdatePipeline @@ -83,7 +83,7 @@ func UpdatePipeline(c *gin.Context) { l.Debugf("updating pipeline %s", entry) // capture body from API request - input := new(library.Pipeline) + input := new(types.Pipeline) err := c.Bind(input) if err != nil { diff --git a/api/service/plan.go b/api/service/plan.go index bc8408ddb..6efa6b2a5 100644 --- a/api/service/plan.go +++ b/api/service/plan.go @@ -10,10 +10,10 @@ import ( "github.com/sirupsen/logrus" "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/server/database" "github.com/go-vela/types/constants" "github.com/go-vela/types/library" - "github.com/go-vela/types/pipeline" ) // PlanServices is a helper function to plan all services diff --git a/api/step/plan.go b/api/step/plan.go index cfb55652e..5bbc1ac84 100644 --- a/api/step/plan.go +++ b/api/step/plan.go @@ -10,11 +10,11 @@ import ( "github.com/sirupsen/logrus" "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/server/database" "github.com/go-vela/server/scm" "github.com/go-vela/types/constants" "github.com/go-vela/types/library" - 
"github.com/go-vela/types/pipeline" ) // PlanSteps is a helper function to plan all steps diff --git a/api/types/build.go b/api/types/build.go index 10ccc31e2..f9ba8ea49 100644 --- a/api/types/build.go +++ b/api/types/build.go @@ -7,8 +7,8 @@ import ( "strings" "time" + "github.com/go-vela/server/compiler/types/raw" "github.com/go-vela/types/constants" - "github.com/go-vela/types/raw" ) // Build is the API types representation of a build for a pipeline. diff --git a/api/types/build_test.go b/api/types/build_test.go index dd6d70483..ffb0656f1 100644 --- a/api/types/build_test.go +++ b/api/types/build_test.go @@ -10,7 +10,7 @@ import ( "github.com/google/go-cmp/cmp" - "github.com/go-vela/types/raw" + "github.com/go-vela/server/compiler/types/raw" ) func TestTypes_Build_Duration(t *testing.T) { diff --git a/api/types/deployment.go b/api/types/deployment.go index 915fdd39c..ee1eb89da 100644 --- a/api/types/deployment.go +++ b/api/types/deployment.go @@ -5,7 +5,7 @@ package types import ( "fmt" - "github.com/go-vela/types/raw" + "github.com/go-vela/server/compiler/types/raw" ) // Deployment is the API representation of a deployment. diff --git a/api/types/executor.go b/api/types/executor.go index 45bbfce2e..7ef41e8c1 100644 --- a/api/types/executor.go +++ b/api/types/executor.go @@ -6,7 +6,7 @@ import ( "fmt" "strings" - "github.com/go-vela/types/pipeline" + "github.com/go-vela/server/compiler/types/pipeline" ) // Executor is the API representation of an executor for a worker. 
diff --git a/api/types/executor_test.go b/api/types/executor_test.go index 5c0e3011f..9b2b5d055 100644 --- a/api/types/executor_test.go +++ b/api/types/executor_test.go @@ -8,7 +8,7 @@ import ( "strings" "testing" - "github.com/go-vela/types/pipeline" + "github.com/go-vela/server/compiler/types/pipeline" ) func TestTypes_Executor_Getters(t *testing.T) { diff --git a/api/types/pipeline.go b/api/types/pipeline.go new file mode 100644 index 000000000..f626e5c6e --- /dev/null +++ b/api/types/pipeline.go @@ -0,0 +1,456 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "fmt" +) + +// Pipeline is the API representation of a Pipeline. +// +// swagger:model Pipeline +type Pipeline struct { + ID *int64 `json:"id,omitempty"` + Repo *Repo `json:"repo,omitempty"` + Commit *string `json:"commit,omitempty"` + Flavor *string `json:"flavor,omitempty"` + Platform *string `json:"platform,omitempty"` + Ref *string `json:"ref,omitempty"` + Type *string `json:"type,omitempty"` + Version *string `json:"version,omitempty"` + ExternalSecrets *bool `json:"external_secrets,omitempty"` + InternalSecrets *bool `json:"internal_secrets,omitempty"` + Services *bool `json:"services,omitempty"` + Stages *bool `json:"stages,omitempty"` + Steps *bool `json:"steps,omitempty"` + Templates *bool `json:"templates,omitempty"` + // swagger:strfmt base64 + Data *[]byte `json:"data,omitempty"` +} + +// GetID returns the ID field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetID() int64 { + // return zero value if Pipeline type or ID field is nil + if p == nil || p.ID == nil { + return 0 + } + + return *p.ID +} + +// GetRepo returns the Repo field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. 
+func (p *Pipeline) GetRepo() *Repo { + // return zero value if Pipeline type or Repo field is nil + if p == nil || p.Repo == nil { + return new(Repo) + } + + return p.Repo +} + +// GetCommit returns the Commit field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetCommit() string { + // return zero value if Pipeline type or Commit field is nil + if p == nil || p.Commit == nil { + return "" + } + + return *p.Commit +} + +// GetFlavor returns the Flavor field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetFlavor() string { + // return zero value if Pipeline type or Flavor field is nil + if p == nil || p.Flavor == nil { + return "" + } + + return *p.Flavor +} + +// GetPlatform returns the Platform field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetPlatform() string { + // return zero value if Pipeline type or Platform field is nil + if p == nil || p.Platform == nil { + return "" + } + + return *p.Platform +} + +// GetRef returns the Ref field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetRef() string { + // return zero value if Pipeline type or Ref field is nil + if p == nil || p.Ref == nil { + return "" + } + + return *p.Ref +} + +// GetType returns the Type field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetType() string { + // return zero value if Pipeline type or Type field is nil + if p == nil || p.Type == nil { + return "" + } + + return *p.Type +} + +// GetVersion returns the Version field. 
+// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetVersion() string { + // return zero value if Pipeline type or Version field is nil + if p == nil || p.Version == nil { + return "" + } + + return *p.Version +} + +// GetExternalSecrets returns the ExternalSecrets field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetExternalSecrets() bool { + // return zero value if Pipeline type or ExternalSecrets field is nil + if p == nil || p.ExternalSecrets == nil { + return false + } + + return *p.ExternalSecrets +} + +// GetInternalSecrets returns the InternalSecrets field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetInternalSecrets() bool { + // return zero value if Pipeline type or InternalSecrets field is nil + if p == nil || p.InternalSecrets == nil { + return false + } + + return *p.InternalSecrets +} + +// GetServices returns the Services field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetServices() bool { + // return zero value if Pipeline type or Services field is nil + if p == nil || p.Services == nil { + return false + } + + return *p.Services +} + +// GetStages returns the Stages field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetStages() bool { + // return zero value if Pipeline type or Stages field is nil + if p == nil || p.Stages == nil { + return false + } + + return *p.Stages +} + +// GetSteps returns the Steps field. 
+// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetSteps() bool { + // return zero value if Pipeline type or Steps field is nil + if p == nil || p.Steps == nil { + return false + } + + return *p.Steps +} + +// GetTemplates returns the Templates field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetTemplates() bool { + // return zero value if Pipeline type or Templates field is nil + if p == nil || p.Templates == nil { + return false + } + + return *p.Templates +} + +// GetData returns the Data field. +// +// When the provided Pipeline type is nil, or the field within +// the type is nil, it returns the zero value for the field. +func (p *Pipeline) GetData() []byte { + // return zero value if Pipeline type or Data field is nil + if p == nil || p.Data == nil { + return []byte{} + } + + return *p.Data +} + +// SetID sets the ID field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetID(v int64) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.ID = &v +} + +// SetRepo sets the Repo field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetRepo(v *Repo) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Repo = v +} + +// SetCommit sets the Commit field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetCommit(v string) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Commit = &v +} + +// SetFlavor sets the Flavor field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. 
+func (p *Pipeline) SetFlavor(v string) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Flavor = &v +} + +// SetPlatform sets the Platform field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetPlatform(v string) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Platform = &v +} + +// SetRef sets the Ref field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetRef(v string) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Ref = &v +} + +// SetType sets the Type field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetType(v string) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Type = &v +} + +// SetVersion sets the Version field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetVersion(v string) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Version = &v +} + +// SetExternalSecrets sets the ExternalSecrets field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetExternalSecrets(v bool) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.ExternalSecrets = &v +} + +// SetInternalSecrets sets the InternalSecrets field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetInternalSecrets(v bool) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.InternalSecrets = &v +} + +// SetServices sets the Services field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. 
+func (p *Pipeline) SetServices(v bool) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Services = &v +} + +// SetStages sets the Stages field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetStages(v bool) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Stages = &v +} + +// SetSteps sets the Steps field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetSteps(v bool) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Steps = &v +} + +// SetTemplates sets the Templates field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetTemplates(v bool) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Templates = &v +} + +// SetData sets the Data field. +// +// When the provided Pipeline type is nil, it +// will set nothing and immediately return. +func (p *Pipeline) SetData(v []byte) { + // return if Pipeline type is nil + if p == nil { + return + } + + p.Data = &v +} + +// String implements the Stringer interface for the Pipeline type. 
+func (p *Pipeline) String() string { + return fmt.Sprintf(`{ + Commit: %s, + Data: %s, + Flavor: %s, + ID: %d, + Platform: %s, + Ref: %s, + Repo: %v, + ExternalSecrets: %t, + InternalSecrets: %t, + Services: %t, + Stages: %t, + Steps: %t, + Templates: %t, + Type: %s, + Version: %s, +}`, + p.GetCommit(), + p.GetData(), + p.GetFlavor(), + p.GetID(), + p.GetPlatform(), + p.GetRef(), + p.GetRepo(), + p.GetExternalSecrets(), + p.GetInternalSecrets(), + p.GetServices(), + p.GetStages(), + p.GetSteps(), + p.GetTemplates(), + p.GetType(), + p.GetVersion(), + ) +} diff --git a/api/types/pipeline_test.go b/api/types/pipeline_test.go new file mode 100644 index 000000000..a06d299ca --- /dev/null +++ b/api/types/pipeline_test.go @@ -0,0 +1,280 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "fmt" + "reflect" + "testing" +) + +func TestLibrary_Pipeline_Getters(t *testing.T) { + // setup tests + tests := []struct { + pipeline *Pipeline + want *Pipeline + }{ + { + pipeline: testPipeline(), + want: testPipeline(), + }, + { + pipeline: new(Pipeline), + want: new(Pipeline), + }, + } + + // run tests + for _, test := range tests { + if test.pipeline.GetID() != test.want.GetID() { + t.Errorf("GetID is %v, want %v", test.pipeline.GetID(), test.want.GetID()) + } + + if !reflect.DeepEqual(test.pipeline.GetRepo(), test.want.GetRepo()) { + t.Errorf("GetRepoID is %v, want %v", test.pipeline.GetRepo(), test.want.GetRepo()) + } + + if test.pipeline.GetCommit() != test.want.GetCommit() { + t.Errorf("GetCommit is %v, want %v", test.pipeline.GetCommit(), test.want.GetCommit()) + } + + if test.pipeline.GetFlavor() != test.want.GetFlavor() { + t.Errorf("GetFlavor is %v, want %v", test.pipeline.GetFlavor(), test.want.GetFlavor()) + } + + if test.pipeline.GetPlatform() != test.want.GetPlatform() { + t.Errorf("GetPlatform is %v, want %v", test.pipeline.GetPlatform(), test.want.GetPlatform()) + } + + if test.pipeline.GetRef() != test.want.GetRef() { + t.Errorf("GetRef is %v, 
want %v", test.pipeline.GetRef(), test.want.GetRef()) + } + + if test.pipeline.GetType() != test.want.GetType() { + t.Errorf("GetType is %v, want %v", test.pipeline.GetType(), test.want.GetType()) + } + + if test.pipeline.GetVersion() != test.want.GetVersion() { + t.Errorf("GetVersion is %v, want %v", test.pipeline.GetVersion(), test.want.GetVersion()) + } + + if test.pipeline.GetExternalSecrets() != test.want.GetExternalSecrets() { + t.Errorf("GetExternalSecrets is %v, want %v", test.pipeline.GetExternalSecrets(), test.want.GetExternalSecrets()) + } + + if test.pipeline.GetInternalSecrets() != test.want.GetInternalSecrets() { + t.Errorf("GetInternalSecrets is %v, want %v", test.pipeline.GetInternalSecrets(), test.want.GetInternalSecrets()) + } + + if test.pipeline.GetServices() != test.want.GetServices() { + t.Errorf("GetServices is %v, want %v", test.pipeline.GetServices(), test.want.GetServices()) + } + + if test.pipeline.GetStages() != test.want.GetStages() { + t.Errorf("GetStages is %v, want %v", test.pipeline.GetStages(), test.want.GetStages()) + } + + if test.pipeline.GetSteps() != test.want.GetSteps() { + t.Errorf("GetSteps is %v, want %v", test.pipeline.GetSteps(), test.want.GetSteps()) + } + + if test.pipeline.GetTemplates() != test.want.GetTemplates() { + t.Errorf("GetTemplates is %v, want %v", test.pipeline.GetTemplates(), test.want.GetTemplates()) + } + + if !reflect.DeepEqual(test.pipeline.GetData(), test.want.GetData()) { + t.Errorf("GetData is %v, want %v", test.pipeline.GetData(), test.want.GetData()) + } + } +} + +func TestLibrary_Pipeline_Setters(t *testing.T) { + // setup types + var p *Pipeline + + // setup tests + tests := []struct { + pipeline *Pipeline + want *Pipeline + }{ + { + pipeline: testPipeline(), + want: testPipeline(), + }, + { + pipeline: p, + want: new(Pipeline), + }, + } + + // run tests + for _, test := range tests { + test.pipeline.SetID(test.want.GetID()) + test.pipeline.SetRepo(test.want.GetRepo()) + 
test.pipeline.SetCommit(test.want.GetCommit()) + test.pipeline.SetFlavor(test.want.GetFlavor()) + test.pipeline.SetPlatform(test.want.GetPlatform()) + test.pipeline.SetRef(test.want.GetRef()) + test.pipeline.SetType(test.want.GetType()) + test.pipeline.SetVersion(test.want.GetVersion()) + test.pipeline.SetExternalSecrets(test.want.GetExternalSecrets()) + test.pipeline.SetInternalSecrets(test.want.GetInternalSecrets()) + test.pipeline.SetServices(test.want.GetServices()) + test.pipeline.SetStages(test.want.GetStages()) + test.pipeline.SetSteps(test.want.GetSteps()) + test.pipeline.SetTemplates(test.want.GetTemplates()) + test.pipeline.SetData(test.want.GetData()) + + if test.pipeline.GetID() != test.want.GetID() { + t.Errorf("SetID is %v, want %v", test.pipeline.GetID(), test.want.GetID()) + } + + if !reflect.DeepEqual(test.pipeline.GetRepo(), test.want.GetRepo()) { + t.Errorf("SetRepoID is %v, want %v", test.pipeline.GetRepo(), test.want.GetRepo()) + } + + if test.pipeline.GetCommit() != test.want.GetCommit() { + t.Errorf("SetCommit is %v, want %v", test.pipeline.GetCommit(), test.want.GetCommit()) + } + + if test.pipeline.GetFlavor() != test.want.GetFlavor() { + t.Errorf("SetFlavor is %v, want %v", test.pipeline.GetFlavor(), test.want.GetFlavor()) + } + + if test.pipeline.GetPlatform() != test.want.GetPlatform() { + t.Errorf("SetPlatform is %v, want %v", test.pipeline.GetPlatform(), test.want.GetPlatform()) + } + + if test.pipeline.GetRef() != test.want.GetRef() { + t.Errorf("SetRef is %v, want %v", test.pipeline.GetRef(), test.want.GetRef()) + } + + if test.pipeline.GetType() != test.want.GetType() { + t.Errorf("SetType is %v, want %v", test.pipeline.GetType(), test.want.GetType()) + } + + if test.pipeline.GetVersion() != test.want.GetVersion() { + t.Errorf("SetVersion is %v, want %v", test.pipeline.GetVersion(), test.want.GetVersion()) + } + + if test.pipeline.GetExternalSecrets() != test.want.GetExternalSecrets() { + t.Errorf("SetExternalSecrets is %v, want 
%v", test.pipeline.GetExternalSecrets(), test.want.GetExternalSecrets()) + } + + if test.pipeline.GetInternalSecrets() != test.want.GetInternalSecrets() { + t.Errorf("SetInternalSecrets is %v, want %v", test.pipeline.GetInternalSecrets(), test.want.GetInternalSecrets()) + } + + if test.pipeline.GetServices() != test.want.GetServices() { + t.Errorf("SetServices is %v, want %v", test.pipeline.GetServices(), test.want.GetServices()) + } + + if test.pipeline.GetStages() != test.want.GetStages() { + t.Errorf("SetStages is %v, want %v", test.pipeline.GetStages(), test.want.GetStages()) + } + + if test.pipeline.GetSteps() != test.want.GetSteps() { + t.Errorf("SetSteps is %v, want %v", test.pipeline.GetSteps(), test.want.GetSteps()) + } + + if test.pipeline.GetTemplates() != test.want.GetTemplates() { + t.Errorf("SetTemplates is %v, want %v", test.pipeline.GetTemplates(), test.want.GetTemplates()) + } + + if !reflect.DeepEqual(test.pipeline.GetData(), test.want.GetData()) { + t.Errorf("SetData is %v, want %v", test.pipeline.GetData(), test.want.GetData()) + } + } +} + +func TestLibrary_Pipeline_String(t *testing.T) { + // setup types + p := testPipeline() + + want := fmt.Sprintf(`{ + Commit: %s, + Data: %s, + Flavor: %s, + ID: %d, + Platform: %s, + Ref: %s, + Repo: %v, + ExternalSecrets: %t, + InternalSecrets: %t, + Services: %t, + Stages: %t, + Steps: %t, + Templates: %t, + Type: %s, + Version: %s, +}`, + p.GetCommit(), + p.GetData(), + p.GetFlavor(), + p.GetID(), + p.GetPlatform(), + p.GetRef(), + p.GetRepo(), + p.GetExternalSecrets(), + p.GetInternalSecrets(), + p.GetServices(), + p.GetStages(), + p.GetSteps(), + p.GetTemplates(), + p.GetType(), + p.GetVersion(), + ) + + // run test + got := p.String() + + if !reflect.DeepEqual(got, want) { + t.Errorf("String is %v, want %v", got, want) + } +} + +// testPipeline is a test helper function to create a Pipeline +// type with all fields set to a fake value. 
+func testPipeline() *Pipeline { + p := new(Pipeline) + + p.SetID(1) + p.SetRepo(testRepo()) + p.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") + p.SetFlavor("large") + p.SetPlatform("docker") + p.SetRef("refs/heads/main") + p.SetType("yaml") + p.SetVersion("1") + p.SetExternalSecrets(false) + p.SetInternalSecrets(false) + p.SetServices(true) + p.SetStages(false) + p.SetSteps(true) + p.SetTemplates(false) + p.SetData(testPipelineData()) + + return p +} + +// testPipelineData is a test helper function to create the +// content for the Data field for the Pipeline type. +func testPipelineData() []byte { + return []byte(` +version: 1 + +worker: + flavor: large + platform: docker + +services: + - name: redis + image: redis + +steps: + - name: ping + image: redis + commands: + - redis-cli -h redis ping +`) +} diff --git a/compiler/engine.go b/compiler/engine.go index 931cef4ad..0f09c7ecb 100644 --- a/compiler/engine.go +++ b/compiler/engine.go @@ -7,11 +7,10 @@ import ( api "github.com/go-vela/server/api/types" "github.com/go-vela/server/api/types/settings" + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/server/internal" - "github.com/go-vela/types/library" - "github.com/go-vela/types/pipeline" - "github.com/go-vela/types/raw" - "github.com/go-vela/types/yaml" ) // Engine represents an interface for converting a yaml @@ -22,12 +21,12 @@ type Engine interface { // Compile defines a function that produces an executable // representation of a pipeline from an object. This calls // Parse internally to convert the object to a yaml configuration. - Compile(context.Context, interface{}) (*pipeline.Build, *library.Pipeline, error) + Compile(context.Context, interface{}) (*pipeline.Build, *api.Pipeline, error) // CompileLite defines a function that produces an light executable // representation of a pipeline from an object. 
This calls // Parse internally to convert the object to a yaml configuration. - CompileLite(context.Context, interface{}, *pipeline.RuleData, bool) (*yaml.Build, *library.Pipeline, error) + CompileLite(context.Context, interface{}, *pipeline.RuleData, bool) (*yaml.Build, *api.Pipeline, error) // Duplicate defines a function that // creates a clone of the Engine. diff --git a/compiler/native/clone.go b/compiler/native/clone.go index 85016cfc6..da1aacdf3 100644 --- a/compiler/native/clone.go +++ b/compiler/native/clone.go @@ -3,8 +3,8 @@ package native import ( + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/types/constants" - "github.com/go-vela/types/yaml" ) const ( diff --git a/compiler/native/clone_test.go b/compiler/native/clone_test.go index 1fc5b2813..6bd92697c 100644 --- a/compiler/native/clone_test.go +++ b/compiler/native/clone_test.go @@ -9,7 +9,7 @@ import ( "github.com/urfave/cli/v2" - "github.com/go-vela/types/yaml" + "github.com/go-vela/server/compiler/types/yaml" ) const defaultCloneImage = "target/vela-git-slim:latest" diff --git a/compiler/native/compile.go b/compiler/native/compile.go index 16c6ff7c9..088dbca15 100644 --- a/compiler/native/compile.go +++ b/compiler/native/compile.go @@ -17,11 +17,10 @@ import ( "github.com/hashicorp/go-retryablehttp" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/types/constants" - "github.com/go-vela/types/library" - "github.com/go-vela/types/pipeline" - "github.com/go-vela/types/raw" - "github.com/go-vela/types/yaml" ) // ModifyRequest contains the payload passed to the modification endpoint. @@ -39,14 +38,14 @@ type ModifyResponse struct { } // Compile produces an executable pipeline from a yaml configuration. 
-func (c *client) Compile(ctx context.Context, v interface{}) (*pipeline.Build, *library.Pipeline, error) { +func (c *client) Compile(ctx context.Context, v interface{}) (*pipeline.Build, *api.Pipeline, error) { p, data, err := c.Parse(v, c.repo.GetPipelineType(), new(yaml.Template)) if err != nil { return nil, nil, err } - // create the library pipeline object from the yaml configuration - _pipeline := p.ToPipelineLibrary() + // create the API pipeline object from the yaml configuration + _pipeline := p.ToPipelineAPI() _pipeline.SetData(data) _pipeline.SetType(c.repo.GetPipelineType()) @@ -103,14 +102,14 @@ func (c *client) Compile(ctx context.Context, v interface{}) (*pipeline.Build, * } // CompileLite produces a partial of an executable pipeline from a yaml configuration. -func (c *client) CompileLite(ctx context.Context, v interface{}, ruleData *pipeline.RuleData, substitute bool) (*yaml.Build, *library.Pipeline, error) { +func (c *client) CompileLite(ctx context.Context, v interface{}, ruleData *pipeline.RuleData, substitute bool) (*yaml.Build, *api.Pipeline, error) { p, data, err := c.Parse(v, c.repo.GetPipelineType(), new(yaml.Template)) if err != nil { return nil, nil, err } // create the library pipeline object from the yaml configuration - _pipeline := p.ToPipelineLibrary() + _pipeline := p.ToPipelineAPI() _pipeline.SetData(data) _pipeline.SetType(c.repo.GetPipelineType()) @@ -304,7 +303,7 @@ func (c *client) compileInline(ctx context.Context, p *yaml.Build, depth int) (* } // compileSteps executes the workflow for converting a YAML pipeline into an executable struct. 
-func (c *client) compileSteps(ctx context.Context, p *yaml.Build, _pipeline *library.Pipeline, tmpls map[string]*yaml.Template, r *pipeline.RuleData) (*pipeline.Build, *library.Pipeline, error) { +func (c *client) compileSteps(ctx context.Context, p *yaml.Build, _pipeline *api.Pipeline, tmpls map[string]*yaml.Template, r *pipeline.RuleData) (*pipeline.Build, *api.Pipeline, error) { var err error // check if the pipeline disabled the clone @@ -399,7 +398,7 @@ func (c *client) compileSteps(ctx context.Context, p *yaml.Build, _pipeline *lib } // compileStages executes the workflow for converting a YAML pipeline into an executable struct. -func (c *client) compileStages(ctx context.Context, p *yaml.Build, _pipeline *library.Pipeline, tmpls map[string]*yaml.Template, r *pipeline.RuleData) (*pipeline.Build, *library.Pipeline, error) { +func (c *client) compileStages(ctx context.Context, p *yaml.Build, _pipeline *api.Pipeline, tmpls map[string]*yaml.Template, r *pipeline.RuleData) (*pipeline.Build, *api.Pipeline, error) { var err error // check if the pipeline disabled the clone diff --git a/compiler/native/compile_test.go b/compiler/native/compile_test.go index 3670cae07..c575a3494 100644 --- a/compiler/native/compile_test.go +++ b/compiler/native/compile_test.go @@ -20,11 +20,11 @@ import ( "github.com/urfave/cli/v2" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/server/internal" "github.com/go-vela/types/constants" - "github.com/go-vela/types/pipeline" - "github.com/go-vela/types/raw" - "github.com/go-vela/types/yaml" ) func TestNative_Compile_StagesPipeline(t *testing.T) { diff --git a/compiler/native/environment.go b/compiler/native/environment.go index 653984100..d9a82ed55 100644 --- a/compiler/native/environment.go +++ b/compiler/native/environment.go @@ -8,11 +8,11 @@ import ( "strings" api 
"github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/server/internal" "github.com/go-vela/types/constants" "github.com/go-vela/types/library" - "github.com/go-vela/types/raw" - "github.com/go-vela/types/yaml" ) // EnvironmentStages injects environment variables diff --git a/compiler/native/environment_test.go b/compiler/native/environment_test.go index 5f00c6088..fae1f4552 100644 --- a/compiler/native/environment_test.go +++ b/compiler/native/environment_test.go @@ -12,9 +12,9 @@ import ( "github.com/urfave/cli/v2" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/server/internal" - "github.com/go-vela/types/raw" - "github.com/go-vela/types/yaml" ) func TestNative_EnvironmentStages(t *testing.T) { diff --git a/compiler/native/expand.go b/compiler/native/expand.go index 1c805ca93..a971b8efd 100644 --- a/compiler/native/expand.go +++ b/compiler/native/expand.go @@ -13,10 +13,10 @@ import ( "github.com/go-vela/server/compiler/registry" "github.com/go-vela/server/compiler/template/native" "github.com/go-vela/server/compiler/template/starlark" + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/types/constants" - "github.com/go-vela/types/pipeline" - "github.com/go-vela/types/raw" - "github.com/go-vela/types/yaml" ) // ExpandStages injects the template for each diff --git a/compiler/native/expand_test.go b/compiler/native/expand_test.go index 08ae0a165..8702f9c8b 100644 --- a/compiler/native/expand_test.go +++ b/compiler/native/expand_test.go @@ -15,9 +15,9 @@ import ( "github.com/urfave/cli/v2" api "github.com/go-vela/server/api/types" - "github.com/go-vela/types/pipeline" - "github.com/go-vela/types/raw" - "github.com/go-vela/types/yaml" 
+ "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/server/compiler/types/yaml" ) func TestNative_ExpandStages(t *testing.T) { diff --git a/compiler/native/initialize.go b/compiler/native/initialize.go index e54b10ff1..21c9e3b16 100644 --- a/compiler/native/initialize.go +++ b/compiler/native/initialize.go @@ -3,8 +3,8 @@ package native import ( + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/types/constants" - "github.com/go-vela/types/yaml" ) const ( diff --git a/compiler/native/initialize_test.go b/compiler/native/initialize_test.go index ea196216a..26c170581 100644 --- a/compiler/native/initialize_test.go +++ b/compiler/native/initialize_test.go @@ -9,7 +9,7 @@ import ( "github.com/urfave/cli/v2" - "github.com/go-vela/types/yaml" + "github.com/go-vela/server/compiler/types/yaml" ) func TestNative_InitStage(t *testing.T) { diff --git a/compiler/native/parse.go b/compiler/native/parse.go index fb96b5428..47924ac0a 100644 --- a/compiler/native/parse.go +++ b/compiler/native/parse.go @@ -11,9 +11,9 @@ import ( "github.com/go-vela/server/compiler/template/native" "github.com/go-vela/server/compiler/template/starlark" + typesRaw "github.com/go-vela/server/compiler/types/raw" + types "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/types/constants" - typesRaw "github.com/go-vela/types/raw" - types "github.com/go-vela/types/yaml" ) // ParseRaw converts an object to a string. 
diff --git a/compiler/native/parse_test.go b/compiler/native/parse_test.go index a47da0459..91b13c9cb 100644 --- a/compiler/native/parse_test.go +++ b/compiler/native/parse_test.go @@ -14,9 +14,9 @@ import ( "github.com/urfave/cli/v2" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/types/constants" - "github.com/go-vela/types/raw" - "github.com/go-vela/types/yaml" ) func TestNative_Parse_Metadata_Bytes(t *testing.T) { diff --git a/compiler/native/script.go b/compiler/native/script.go index cd3463a30..03aa093bb 100644 --- a/compiler/native/script.go +++ b/compiler/native/script.go @@ -8,7 +8,7 @@ import ( "fmt" "strings" - "github.com/go-vela/types/yaml" + "github.com/go-vela/server/compiler/types/yaml" ) // ScriptStages injects the script for each step in every stage in a yaml configuration. diff --git a/compiler/native/script_test.go b/compiler/native/script_test.go index 0f03e6865..e12e6a4b8 100644 --- a/compiler/native/script_test.go +++ b/compiler/native/script_test.go @@ -10,7 +10,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/urfave/cli/v2" - "github.com/go-vela/types/yaml" + "github.com/go-vela/server/compiler/types/yaml" ) func TestNative_ScriptStages(t *testing.T) { diff --git a/compiler/native/substitute.go b/compiler/native/substitute.go index 9bbc717cd..a675e92d5 100644 --- a/compiler/native/substitute.go +++ b/compiler/native/substitute.go @@ -9,7 +9,7 @@ import ( "github.com/buildkite/yaml" "github.com/drone/envsubst" - types "github.com/go-vela/types/yaml" + types "github.com/go-vela/server/compiler/types/yaml" ) // SubstituteStages replaces every declared environment diff --git a/compiler/native/substitute_test.go b/compiler/native/substitute_test.go index 0be8eac03..e8db1565c 100644 --- a/compiler/native/substitute_test.go +++ b/compiler/native/substitute_test.go @@ -9,7 +9,7 @@ import ( "github.com/google/go-cmp/cmp" 
"github.com/urfave/cli/v2" - "github.com/go-vela/types/yaml" + "github.com/go-vela/server/compiler/types/yaml" ) func Test_client_SubstituteStages(t *testing.T) { diff --git a/compiler/native/transform.go b/compiler/native/transform.go index 1165e6a40..2f54c41a3 100644 --- a/compiler/native/transform.go +++ b/compiler/native/transform.go @@ -5,8 +5,8 @@ package native import ( "fmt" - "github.com/go-vela/types/pipeline" - "github.com/go-vela/types/yaml" + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/yaml" ) const ( diff --git a/compiler/native/transform_test.go b/compiler/native/transform_test.go index bcd318103..14da2b332 100644 --- a/compiler/native/transform_test.go +++ b/compiler/native/transform_test.go @@ -9,9 +9,9 @@ import ( "github.com/urfave/cli/v2" + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/server/internal" - "github.com/go-vela/types/pipeline" - "github.com/go-vela/types/yaml" ) func TestNative_TransformStages(t *testing.T) { diff --git a/compiler/native/validate.go b/compiler/native/validate.go index b220b34a7..cabe3f602 100644 --- a/compiler/native/validate.go +++ b/compiler/native/validate.go @@ -7,8 +7,8 @@ import ( "github.com/hashicorp/go-multierror" + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/types/constants" - "github.com/go-vela/types/yaml" ) // Validate verifies the yaml configuration is valid. 
diff --git a/compiler/native/validate_test.go b/compiler/native/validate_test.go index 226c75aa9..4d3876f0a 100644 --- a/compiler/native/validate_test.go +++ b/compiler/native/validate_test.go @@ -9,8 +9,8 @@ import ( "github.com/urfave/cli/v2" - "github.com/go-vela/types/raw" - "github.com/go-vela/types/yaml" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/server/compiler/types/yaml" ) func TestNative_Validate_NoVersion(t *testing.T) { diff --git a/compiler/template/native/convert.go b/compiler/template/native/convert.go index ee45ef809..4a75a5793 100644 --- a/compiler/template/native/convert.go +++ b/compiler/template/native/convert.go @@ -7,7 +7,7 @@ import ( "github.com/buildkite/yaml" - "github.com/go-vela/types/raw" + "github.com/go-vela/server/compiler/types/raw" ) // convertPlatformVars takes the platform injected variables diff --git a/compiler/template/native/convert_test.go b/compiler/template/native/convert_test.go index 4ab988f03..457aec5dc 100644 --- a/compiler/template/native/convert_test.go +++ b/compiler/template/native/convert_test.go @@ -6,7 +6,7 @@ import ( "reflect" "testing" - "github.com/go-vela/types/raw" + "github.com/go-vela/server/compiler/types/raw" ) func Test_convertPlatformVars(t *testing.T) { diff --git a/compiler/template/native/render.go b/compiler/template/native/render.go index a31f642b6..528188e0e 100644 --- a/compiler/template/native/render.go +++ b/compiler/template/native/render.go @@ -10,8 +10,8 @@ import ( "github.com/Masterminds/sprig/v3" "github.com/buildkite/yaml" - "github.com/go-vela/types/raw" - types "github.com/go-vela/types/yaml" + "github.com/go-vela/server/compiler/types/raw" + types "github.com/go-vela/server/compiler/types/yaml" ) // Render combines the template with the step in the yaml pipeline. 
diff --git a/compiler/template/native/render_test.go b/compiler/template/native/render_test.go index f4c92160d..3b4daacb8 100644 --- a/compiler/template/native/render_test.go +++ b/compiler/template/native/render_test.go @@ -9,8 +9,8 @@ import ( goyaml "github.com/buildkite/yaml" "github.com/google/go-cmp/cmp" - "github.com/go-vela/types/raw" - "github.com/go-vela/types/yaml" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/server/compiler/types/yaml" ) func TestNative_Render(t *testing.T) { diff --git a/compiler/template/starlark/convert.go b/compiler/template/starlark/convert.go index d571cd0ef..8a519db4f 100644 --- a/compiler/template/starlark/convert.go +++ b/compiler/template/starlark/convert.go @@ -7,7 +7,7 @@ import ( "go.starlark.net/starlark" - "github.com/go-vela/types/raw" + "github.com/go-vela/server/compiler/types/raw" ) // convertTemplateVars takes template variables and converts diff --git a/compiler/template/starlark/convert_test.go b/compiler/template/starlark/convert_test.go index 3e868a4ba..64cd069d3 100644 --- a/compiler/template/starlark/convert_test.go +++ b/compiler/template/starlark/convert_test.go @@ -8,7 +8,7 @@ import ( "go.starlark.net/starlark" - "github.com/go-vela/types/raw" + "github.com/go-vela/server/compiler/types/raw" ) func TestStarlark_Render_convertTemplateVars(t *testing.T) { diff --git a/compiler/template/starlark/render.go b/compiler/template/starlark/render.go index 93b37cd78..325e49c53 100644 --- a/compiler/template/starlark/render.go +++ b/compiler/template/starlark/render.go @@ -11,8 +11,8 @@ import ( "go.starlark.net/starlark" "go.starlark.net/starlarkstruct" - "github.com/go-vela/types/raw" - types "github.com/go-vela/types/yaml" + "github.com/go-vela/server/compiler/types/raw" + types "github.com/go-vela/server/compiler/types/yaml" ) var ( diff --git a/compiler/template/starlark/render_test.go b/compiler/template/starlark/render_test.go index a730f1088..2f34e88f0 100644 --- 
a/compiler/template/starlark/render_test.go +++ b/compiler/template/starlark/render_test.go @@ -9,8 +9,8 @@ import ( goyaml "github.com/buildkite/yaml" "github.com/google/go-cmp/cmp" - "github.com/go-vela/types/raw" - "github.com/go-vela/types/yaml" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/server/compiler/types/yaml" ) func TestStarlark_Render(t *testing.T) { diff --git a/compiler/template/template.go b/compiler/template/template.go index 6ace9c8c2..a16cc4057 100644 --- a/compiler/template/template.go +++ b/compiler/template/template.go @@ -2,7 +2,7 @@ package template -import "github.com/go-vela/types/yaml" +import "github.com/go-vela/server/compiler/types/yaml" // Engine represents the interface for Vela integrating // with the different supported template engines. diff --git a/compiler/types/pipeline/build.go b/compiler/types/pipeline/build.go new file mode 100644 index 000000000..681269d5d --- /dev/null +++ b/compiler/types/pipeline/build.go @@ -0,0 +1,163 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "fmt" + "strings" + "unicode/utf8" + + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/types/constants" +) + +// Build is the pipeline representation of a build for a pipeline. 
+// +// swagger:model PipelineBuild +type Build struct { + ID string `json:"id,omitempty" yaml:"id,omitempty"` + Version string `json:"version,omitempty" yaml:"version,omitempty"` + Metadata Metadata `json:"metadata,omitempty" yaml:"metadata,omitempty"` + Environment raw.StringSliceMap `json:"environment,omitempty" yaml:"environment,omitempty"` + Worker Worker `json:"worker,omitempty" yaml:"worker,omitempty"` + Secrets SecretSlice `json:"secrets,omitempty" yaml:"secrets,omitempty"` + Services ContainerSlice `json:"services,omitempty" yaml:"services,omitempty"` + Stages StageSlice `json:"stages,omitempty" yaml:"stages,omitempty"` + Steps ContainerSlice `json:"steps,omitempty" yaml:"steps,omitempty"` +} + +// Purge removes the steps, in every stage, that contain a ruleset +// that do not match the provided ruledata. If all steps from a +// stage are removed, then the entire stage is removed from the +// pipeline. If no stages are provided in the pipeline, then the +// function will remove the steps that have a ruleset that do not +// match the provided ruledata. If both stages and steps are +// provided, then an empty pipeline is returned. 
+func (b *Build) Purge(r *RuleData) (*Build, error) { + // return an empty pipeline if both stages and steps are provided + if len(b.Stages) > 0 && len(b.Steps) > 0 { + return nil, fmt.Errorf("cannot have both stages and steps at the top level of pipeline") + } + + // purge stages pipeline if stages are provided + if len(b.Stages) > 0 { + pStages, err := b.Stages.Purge(r) + if err != nil { + return nil, err + } + + b.Stages = *pStages + } + + // purge steps pipeline if steps are provided + if len(b.Steps) > 0 { + pSteps, err := b.Steps.Purge(r) + if err != nil { + return nil, err + } + + b.Steps = *pSteps + } + + // purge services in pipeline if services are provided + if len(b.Services) > 0 { + pServices, err := b.Services.Purge(r) + if err != nil { + return nil, err + } + + b.Services = *pServices + } + + // purge secrets in pipeline if secrets are provided + if len(b.Secrets) > 0 { + pSecrets, err := b.Secrets.Purge(r) + if err != nil { + return nil, err + } + + b.Secrets = *pSecrets + } + + // return the purged pipeline + return b, nil +} + +// Sanitize cleans the fields for every step in each stage so they +// can be safely executed on the worker. If no stages are provided +// in the pipeline, then the function will sanitize the fields for +// every step in the pipeline. The fields are sanitized based off +// of the provided runtime driver which is setup on every worker. 
+// Currently, this function supports the following runtimes: +// +// - Docker +// - Kubernetes +func (b *Build) Sanitize(driver string) *Build { + // return an empty pipeline if both stages and steps are provided + if len(b.Stages) > 0 && len(b.Steps) > 0 { + return nil + } + + // sanitize stages pipeline if they are provided + if len(b.Stages) > 0 { + b.Stages = *b.Stages.Sanitize(driver) + } + + // sanitize steps pipeline if they are provided + if len(b.Steps) > 0 { + b.Steps = *b.Steps.Sanitize(driver) + } + + // sanitize services pipeline if they are provided + if len(b.Services) > 0 { + b.Services = *b.Services.Sanitize(driver) + } + + // sanitize secret plugins pipeline if they are provided + for i, secret := range b.Secrets { + if secret.Origin.Empty() { + continue + } + + b.Secrets[i].Origin = secret.Origin.Sanitize(driver) + } + + switch driver { + // sanitize pipeline for Docker + case constants.DriverDocker: + if strings.Contains(b.ID, " ") { + b.ID = strings.ReplaceAll(b.ID, " ", "-") + } + // sanitize pipeline for Kubernetes + case constants.DriverKubernetes: + if strings.Contains(b.ID, " ") { + b.ID = strings.ReplaceAll(b.ID, " ", "-") + } + + if strings.Contains(b.ID, "_") { + b.ID = strings.ReplaceAll(b.ID, "_", "-") + } + + if strings.Contains(b.ID, ".") { + b.ID = strings.ReplaceAll(b.ID, ".", "-") + } + + // Kubernetes requires DNS compatible names (lowercase, <= 63 chars) + b.ID = strings.ToLower(b.ID) + + const dnsMaxLength = 63 + if utf8.RuneCountInString(b.ID) > dnsMaxLength { + const randomSuffixLength = 6 + + rs := []rune(b.ID) + b.ID = fmt.Sprintf( + "%s-%s", + string(rs[:dnsMaxLength-1-randomSuffixLength]), + dnsSafeRandomString(randomSuffixLength), + ) + } + } + + // return the sanitized pipeline + return b +} diff --git a/compiler/types/pipeline/build_test.go b/compiler/types/pipeline/build_test.go new file mode 100644 index 000000000..a693a89be --- /dev/null +++ b/compiler/types/pipeline/build_test.go @@ -0,0 +1,388 @@ +//
SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "reflect" + "testing" + + "github.com/go-vela/types/constants" +) + +func TestPipeline_Build_Purge(t *testing.T) { + // setup types + stages := testBuildStages() + stages.Stages = stages.Stages[:len(stages.Stages)-1] + + steps := testBuildSteps() + steps.Steps = steps.Steps[:len(steps.Steps)-1] + + // setup tests + tests := []struct { + pipeline *Build + want *Build + wantErr bool + }{ + { + pipeline: testBuildStages(), + want: stages, + }, + { + pipeline: testBuildSteps(), + want: steps, + }, + { + pipeline: new(Build), + want: new(Build), + }, + { + pipeline: &Build{ + Stages: StageSlice{ + { + Name: "init", + Steps: ContainerSlice{ + { + ID: "github octocat._1_init_init", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "#init", + Name: "init", + Number: 1, + Pull: "always", + }, + }, + }, + }, + Steps: ContainerSlice{ + { + ID: "step_github octocat._1_init", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "#init", + Name: "init", + Number: 1, + Pull: "always", + }, + }, + }, + want: nil, + wantErr: true, + }, + { + pipeline: &Build{ + Steps: ContainerSlice{ + { + ID: "step_github octocat._1_init", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "#init", + Name: "init", + Number: 1, + Pull: "always", + }, + { + ID: "step_github octocat._1_bad_regexp", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "alpine", + Name: "bad_regexp", + Number: 2, + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push"}, Branch: []string{"*-dev"}}, + Operator: "and", + Matcher: "regexp", + }, + }, + }, + }, + want: nil, + wantErr: true, + }, + } + + // run tests + for _, test := range tests { + r := &RuleData{ + Branch: "main", + Event: "pull_request", + Path: []string{}, + Repo: "foo/bar", + Tag: "refs/heads/main", + 
} + + got, err := test.pipeline.Purge(r) + + if test.wantErr && err == nil { + t.Errorf("Purge should have returned an error, got: %v", got) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("Purge is %v, want %v", got, test.want) + } + } +} + +func TestPipeline_Build_Sanitize(t *testing.T) { + // setup types + stages := testBuildStages() + stages.ID = "github-Octocat._1" + stages.Services[0].ID = "service_github-octocat._1_postgres" + stages.Stages[0].Steps[0].ID = "github-octocat._1_init_init" + stages.Stages[1].Steps[0].ID = "github-octocat._1_clone_clone" + stages.Stages[2].Steps[0].ID = "github-octocat._1_echo_echo" + stages.Secrets[0].Origin.ID = "secret_github-octocat._1_vault" + + kubeStages := testBuildStages() + kubeStages.ID = "github-octocat--1" + kubeStages.Services[0].ID = "service-github-octocat--1-postgres" + kubeStages.Stages[0].Steps[0].ID = "github-octocat--1-init-init" + kubeStages.Stages[1].Steps[0].ID = "github-octocat--1-clone-clone" + kubeStages.Stages[2].Steps[0].ID = "github-octocat--1-echo-echo" + kubeStages.Secrets[0].Origin.ID = "secret-github-octocat--1-vault" + + steps := testBuildSteps() + steps.ID = "github-octocat._1" + steps.Services[0].ID = "service_github-octocat._1_postgres" + steps.Steps[0].ID = "step_github-octocat._1_init" + steps.Steps[1].ID = "step_github-octocat._1_clone" + steps.Steps[2].ID = "step_github-octocat._1_echo" + steps.Secrets[0].Origin.ID = "secret_github-octocat._1_vault" + + kubeSteps := testBuildSteps() + kubeSteps.ID = "github-octocat--1" + kubeSteps.Services[0].ID = "service-github-octocat--1-postgres" + kubeSteps.Steps[0].ID = "step-github-octocat--1-init" + kubeSteps.Steps[1].ID = "step-github-octocat--1-clone" + kubeSteps.Steps[2].ID = "step-github-octocat--1-echo" + kubeSteps.Secrets[0].Origin.ID = "secret-github-octocat--1-vault" + + // setup tests + tests := []struct { + driver string + pipeline *Build + want *Build + }{ + { + driver: constants.DriverDocker, + pipeline: testBuildStages(), 
+ want: stages, + }, + { + driver: constants.DriverKubernetes, + pipeline: testBuildStages(), + want: kubeStages, + }, + { + driver: constants.DriverDocker, + pipeline: testBuildSteps(), + want: steps, + }, + { + driver: constants.DriverKubernetes, + pipeline: testBuildSteps(), + want: kubeSteps, + }, + { + driver: constants.DriverDocker, + pipeline: new(Build), + want: new(Build), + }, + { + driver: constants.DriverKubernetes, + pipeline: new(Build), + want: new(Build), + }, + { + driver: constants.DriverDocker, + pipeline: &Build{ + Stages: StageSlice{ + { + Name: "init", + Steps: ContainerSlice{ + { + ID: "github octocat._1_init_init", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "#init", + Name: "init", + Number: 1, + Pull: "always", + }, + }, + }, + }, + Steps: ContainerSlice{ + { + ID: "step_github octocat._1_init", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "#init", + Name: "init", + Number: 1, + Pull: "always", + }, + }, + }, + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := test.pipeline.Sanitize(test.driver) + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("Sanitize is %v, want %v", got, test.want) + } + } +} + +func testBuildStages() *Build { + return &Build{ + Version: "1", + ID: "github Octocat._1", + Environment: map[string]string{"HELLO": "Hello, Global Message"}, + Services: ContainerSlice{ + { + ID: "service_github octocat._1_postgres", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "postgres:12-alpine", + Name: "postgres", + Number: 1, + Ports: []string{"5432:5432"}, + }, + }, + Stages: StageSlice{ + { + Name: "init", + Steps: ContainerSlice{ + { + ID: "github octocat._1_init_init", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "#init", + Name: "init", + Number: 1, + Pull: "always", + }, + }, + }, + { + Name: "clone", + 
Needs: []string{"init"}, + Steps: ContainerSlice{ + { + ID: "github octocat._1_clone_clone", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "target/vela-git:v0.3.0", + Name: "clone", + Number: 2, + Pull: "always", + }, + }, + }, + { + Name: "echo", + Needs: []string{"clone"}, + Steps: ContainerSlice{ + { + ID: "github octocat._1_echo_echo", + Commands: []string{"echo hello"}, + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "alpine:latest", + Name: "echo", + Number: 3, + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push"}}, + Operator: "and", + }, + }, + }, + }, + }, + Secrets: SecretSlice{ + { + Name: "foobar", + Origin: &Container{ + ID: "secret_github octocat._1_vault", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "vault:latest", + Name: "vault", + Number: 1, + }, + }, + }, + } +} + +func testBuildSteps() *Build { + return &Build{ + Version: "1", + ID: "github octocat._1", + Environment: map[string]string{"HELLO": "Hello, Global Message"}, + Services: ContainerSlice{ + { + ID: "service_github octocat._1_postgres", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "postgres:12-alpine", + Name: "postgres", + Number: 1, + Ports: []string{"5432:5432"}, + }, + }, + Steps: ContainerSlice{ + { + ID: "step_github octocat._1_init", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "#init", + Name: "init", + Number: 1, + Pull: "always", + }, + { + ID: "step_github octocat._1_clone", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "target/vela-git:v0.3.0", + Name: "clone", + Number: 2, + Pull: "always", + }, + { + ID: "step_github octocat._1_echo", + Commands: []string{"echo hello"}, + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: 
"alpine:latest", + Name: "echo", + Number: 3, + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push"}}, + Operator: "and", + }, + }, + }, + Secrets: SecretSlice{ + { + Name: "foobar", + Origin: &Container{ + ID: "secret_github octocat._1_vault", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "vault:latest", + Name: "vault", + Number: 1, + }, + }, + }, + } +} diff --git a/compiler/types/pipeline/container.go b/compiler/types/pipeline/container.go new file mode 100644 index 000000000..85a96d8b8 --- /dev/null +++ b/compiler/types/pipeline/container.go @@ -0,0 +1,401 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "math/rand" + "reflect" + "strings" + "unicode/utf8" + + "github.com/drone/envsubst" + + "github.com/go-vela/types/constants" +) + +type ( + // ContainerSlice is the pipeline representation + // of the Containers block for a pipeline. + // + // swagger:model PipelineContainerSlice + // + // swagger:model PipelineContainerSlice + ContainerSlice []*Container + + // Container is the pipeline representation + // of a Container in a pipeline. 
+ // + // swagger:model PipelineContainer + Container struct { + ID string `json:"id,omitempty" yaml:"id,omitempty"` + Commands []string `json:"commands,omitempty" yaml:"commands,omitempty"` + Detach bool `json:"detach,omitempty" yaml:"detach,omitempty"` + Directory string `json:"directory,omitempty" yaml:"directory,omitempty"` + Entrypoint []string `json:"entrypoint,omitempty" yaml:"entrypoint,omitempty"` + Environment map[string]string `json:"environment,omitempty" yaml:"environment,omitempty"` + ExitCode int `json:"exit_code,omitempty" yaml:"exit_code,omitempty"` + Image string `json:"image,omitempty" yaml:"image,omitempty"` + Name string `json:"name,omitempty" yaml:"name,omitempty"` + Needs []string `json:"needs,omitempty" yaml:"needs,omitempty"` + Networks []string `json:"networks,omitempty" yaml:"networks,omitempty"` + Number int `json:"number,omitempty" yaml:"number,omitempty"` + Ports []string `json:"ports,omitempty" yaml:"ports,omitempty"` + Privileged bool `json:"privileged,omitempty" yaml:"privileged,omitempty"` + Pull string `json:"pull,omitempty" yaml:"pull,omitempty"` + Ruleset Ruleset `json:"ruleset,omitempty" yaml:"ruleset,omitempty"` + Secrets StepSecretSlice `json:"secrets,omitempty" yaml:"secrets,omitempty"` + Ulimits UlimitSlice `json:"ulimits,omitempty" yaml:"ulimits,omitempty"` + Volumes VolumeSlice `json:"volumes,omitempty" yaml:"volumes,omitempty"` + User string `json:"user,omitempty" yaml:"user,omitempty"` + ReportAs string `json:"report_as,omitempty" yaml:"report_as,omitempty"` + IDRequest string `json:"id_request,omitempty" yaml:"id_request,omitempty"` + } +) + +// Purge removes the Containers that have a ruleset +// that do not match the provided ruledata. 
+func (c *ContainerSlice) Purge(r *RuleData) (*ContainerSlice, error) { + counter := 1 + containers := new(ContainerSlice) + + // iterate through each Container in the pipeline + for _, container := range *c { + // verify ruleset matches + match, err := container.Ruleset.Match(r) + if err != nil { + return nil, fmt.Errorf("unable to process ruleset for step %s: %w", container.Name, err) + } + + if match { + // overwrite the Container number with the Container counter + container.Number = counter + + // increment Container counter + counter = counter + 1 + + // append the Container to the new slice of Containers + *containers = append(*containers, container) + } + } + + // return the new slice of Containers + return containers, nil +} + +// Sanitize cleans the fields for every step in the pipeline so they +// can be safely executed on the worker. The fields are sanitized +// based off of the provided runtime driver which is setup on every +// worker. Currently, this function supports the following runtimes: +// +// - Docker +// - Kubernetes +func (c *ContainerSlice) Sanitize(driver string) *ContainerSlice { + containers := new(ContainerSlice) + + // iterate through each Container in the pipeline + for _, container := range *c { + // sanitize container + cont := container.Sanitize(driver) + + // append the Container to the new slice of Containers + *containers = append(*containers, cont) + } + + return containers +} + +// Empty returns true if the provided container is empty. 
+func (c *Container) Empty() bool { + // return true if the container is nil + if c == nil { + return true + } + + // return true if every container field is empty + if len(c.ID) == 0 && + len(c.Commands) == 0 && + !c.Detach && + len(c.Directory) == 0 && + len(c.Entrypoint) == 0 && + len(c.Environment) == 0 && + c.ExitCode == 0 && + len(c.Image) == 0 && + len(c.Name) == 0 && + len(c.Needs) == 0 && + len(c.Networks) == 0 && + c.Number == 0 && + len(c.Ports) == 0 && + !c.Privileged && + len(c.Pull) == 0 && + reflect.DeepEqual(c.Ruleset, Ruleset{}) && + len(c.Secrets) == 0 && + len(c.Ulimits) == 0 && + len(c.Volumes) == 0 && + len(c.User) == 0 && + len(c.ReportAs) == 0 && + len(c.IDRequest) == 0 { + return true + } + + // return false if any of the ruletype is provided + return false +} + +// Execute returns true when the provided ruledata matches +// the conditions when we should be running the container on the worker. +func (c *Container) Execute(r *RuleData) (bool, error) { + // return false if the container is nil + if c == nil { + return false, nil + } + + // Skip evaluating path, comment, and label in ruleset, + // as the worker lacks necessary rule data. + // + // The compiler determines whether a container will run based on + // these rules. 
+ c.Ruleset.If.Path = []string{} + c.Ruleset.Unless.Path = []string{} + + c.Ruleset.If.Comment = []string{} + c.Ruleset.Unless.Comment = []string{} + + c.Ruleset.If.Label = []string{} + c.Ruleset.Unless.Label = []string{} + + c.Ruleset.If.Instance = []string{} + c.Ruleset.Unless.Instance = []string{} + + // check if the build is in a running state + if strings.EqualFold(r.Status, constants.StatusRunning) { + // treat the ruleset status as success + r.Status = constants.StatusSuccess + + // return if the container ruleset matches the conditions + return c.Ruleset.Match(r) + } + + // assume you will execute the container + execute := true + + // capture the build status out of the ruleset + status := r.Status + + // check if the build status is successful + if !strings.EqualFold(status, constants.StatusSuccess) { + // disregard the need to run the container + execute = false + + match, err := c.Ruleset.Match(r) + if err != nil { + return false, err + } + + // check if you need to run a status failure ruleset + + if ((!(c.Ruleset.If.Empty() && c.Ruleset.Unless.Empty()) && + !(c.Ruleset.If.NoStatus() && c.Ruleset.Unless.NoStatus())) || c.Ruleset.If.Parallel) && + match { + // approve the need to run the container + execute = true + } + } + + r.Status = constants.StatusFailure + + match, err := c.Ruleset.Match(r) + if err != nil { + return false, err + } + + // check if you need to skip a status failure ruleset + if strings.EqualFold(status, constants.StatusSuccess) && + !(c.Ruleset.If.NoStatus() && c.Ruleset.Unless.NoStatus()) && + !(c.Ruleset.If.Empty() && c.Ruleset.Unless.Empty()) && match { + r.Status = constants.StatusSuccess + + match, err = c.Ruleset.Match(r) + if err != nil { + return false, err + } + + if !match { + // disregard the need to run the container + execute = false + } + } + + return execute, nil +} + +// MergeEnv takes a list of environment variables and attempts +// to set them in the container environment. 
If the environment +// variable already exists in the container, then this will +// overwrite the existing environment variable. +func (c *Container) MergeEnv(environment map[string]string) error { + // check if the container is empty + if c.Empty() { + // TODO: evaluate if we should error here + // + // immediately return and do nothing + // + // treated as a no-op + return nil + } + + // check if the environment provided is empty + if environment == nil { + return fmt.Errorf("empty environment provided for container %s", c.ID) + } + + // iterate through all environment variables provided + for key, value := range environment { + // set or update the container environment variable + c.Environment[key] = value + } + + return nil +} + +// Sanitize cleans the fields for every step in the pipeline so they +// can be safely executed on the worker. The fields are sanitized +// based off of the provided runtime driver which is setup on every +// worker. Currently, this function supports the following runtimes: +// +// - Docker +// - Kubernetes +func (c *Container) Sanitize(driver string) *Container { + container := c + + switch driver { + // sanitize container for Docker + case constants.DriverDocker: + if strings.Contains(c.ID, " ") { + c.ID = strings.ReplaceAll(c.ID, " ", "-") + } + + if strings.Contains(c.ID, "/") { + c.ID = strings.ReplaceAll(c.ID, "/", "-") + } + + return container + // sanitize container for Kubernetes + case constants.DriverKubernetes: + if strings.Contains(c.ID, " ") { + container.ID = strings.ReplaceAll(c.ID, " ", "-") + } + + if strings.Contains(c.ID, "_") { + container.ID = strings.ReplaceAll(c.ID, "_", "-") + } + + if strings.Contains(c.ID, ".") { + container.ID = strings.ReplaceAll(c.ID, ".", "-") + } + + if strings.Contains(c.ID, "/") { + c.ID = strings.ReplaceAll(c.ID, "/", "-") + } + + // Kubernetes requires DNS compatible names (lowercase, <= 63 chars) + container.ID = strings.ToLower(c.ID) + + const dnsMaxLength = 63 + if 
utf8.RuneCountInString(c.ID) > dnsMaxLength { + const randomSuffixLength = 6 + + rs := []rune(c.ID) + container.ID = fmt.Sprintf( + "%s-%s", + string(rs[:dnsMaxLength-1-randomSuffixLength]), + dnsSafeRandomString(randomSuffixLength), + ) + } + + return container + // unrecognized driver + default: + // TODO: add a log message indicating how we got here + return nil + } +} + +// Substitute replaces every reference (${VAR} or $${VAR}) to an +// environment variable in the container configuration with the +// corresponding value for that environment variable. +func (c *Container) Substitute() error { + // check if container or container environment are nil + if c == nil || c.Environment == nil { + return errors.New("empty container environment provided") + } + + // marshal container configuration + body, err := json.Marshal(c) + if err != nil { + return err + } + + // create substitute function + subFunc := func(name string) string { + // capture the environment variable value + value := c.Environment[name] + + // check for a new line in the value + if strings.Contains(value, "\n") { + // safely escape the environment variable + value = fmt.Sprintf("%q", value) + } + + return value + } + + // substitute the environment variables + // + // https://pkg.go.dev/github.com/drone/envsubst?tab=doc#Eval + ctn, err := envsubst.Eval(string(body), subFunc) + if err != nil { + return err + } + + // unmarshal container configuration + err = json.Unmarshal([]byte(ctn), c) + if err != nil { + // create a new buffer for encoded JSON + // + // will be thrown away after encoding + b := new(bytes.Buffer) + + // create new JSON encoder attached to buffer + enc := json.NewEncoder(b) + + // JSON encode container output + // + // buffer is thrown away + err = enc.Encode(c) + if err != nil { + return err + } + } + + return nil +} + +// dnsSafeRandomString creates a lowercase alphanumeric string of length n. +// Some kubernetes IDs must be dns-safe, so the character set and length is limited. 
+// If an ID is too long, use this to generate a random suffix for a truncated ID. +func dnsSafeRandomString(n int) string { + // this function is based on randomString in database/build_test.go + var letter = []rune("abcdefghijklmnopqrstuvwxyz0123456789") + + b := make([]rune, n) + for i := range b { + //nolint:gosec // this is not about security. Just a random string. + b[i] = letter[rand.Intn(len(letter))] + } + + return string(b) +} diff --git a/compiler/types/pipeline/container_test.go b/compiler/types/pipeline/container_test.go new file mode 100644 index 000000000..a7ae573c2 --- /dev/null +++ b/compiler/types/pipeline/container_test.go @@ -0,0 +1,1039 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "reflect" + "testing" + + "github.com/go-vela/types/constants" +) + +func TestPipeline_ContainerSlice_Purge(t *testing.T) { + // setup types + containers := testContainers() + *containers = (*containers)[:len(*containers)-1] + + // setup tests + tests := []struct { + containers *ContainerSlice + want *ContainerSlice + }{ + { + containers: testContainers(), + want: containers, + }, + { + containers: new(ContainerSlice), + want: new(ContainerSlice), + }, + } + + // run tests + for _, test := range tests { + r := &RuleData{ + Branch: "main", + Event: "pull_request", + Path: []string{}, + Repo: "foo/bar", + Tag: "refs/heads/main", + } + + got, _ := test.containers.Purge(r) + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("Purge is %v, want %v", got, test.want) + } + } +} + +func TestPipeline_ContainerSlice_Sanitize(t *testing.T) { + // setup types + containers := testContainers() + (*containers)[0].ID = "step_github-octocat._1_init" + (*containers)[1].ID = "step_github-octocat._1_clone" + (*containers)[2].ID = "step_github-octocat._1_echo" + + kubeContainers := testContainers() + (*kubeContainers)[0].ID = "step-github-octocat--1-init" + (*kubeContainers)[1].ID = "step-github-octocat--1-clone" + (*kubeContainers)[2].ID = 
"step-github-octocat--1-echo" + + // setup tests + tests := []struct { + driver string + containers *ContainerSlice + want *ContainerSlice + }{ + { + driver: constants.DriverDocker, + containers: testContainers(), + want: containers, + }, + { + driver: constants.DriverKubernetes, + containers: testContainers(), + want: kubeContainers, + }, + { + driver: constants.DriverDocker, + containers: new(ContainerSlice), + want: new(ContainerSlice), + }, + { + driver: constants.DriverKubernetes, + containers: new(ContainerSlice), + want: new(ContainerSlice), + }, + { + driver: "foo", + containers: new(ContainerSlice), + want: new(ContainerSlice), + }, + } + + // run tests + for _, test := range tests { + got := test.containers.Sanitize(test.driver) + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("Sanitize is %v, want %v", got, test.want) + } + } +} + +func TestPipeline_Container_Empty(t *testing.T) { + // setup tests + tests := []struct { + container *Container + want bool + }{ + { + container: &Container{}, + want: true, + }, + { + container: nil, + want: true, + }, + { + container: &Container{ID: "foo"}, + want: false, + }, + } + + // run tests + for _, test := range tests { + got := test.container.Empty() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("Empty is %v, want %v", got, test.want) + } + } +} + +func TestPipeline_Container_Execute(t *testing.T) { + // setup types + containers := testContainers() + *containers = (*containers)[:len(*containers)-1] + + // setup tests + tests := []struct { + container *Container + ruleData *RuleData + want bool + }{ + { // empty/nil container + container: nil, + ruleData: nil, + want: false, + }, + { // empty container ruleset with build running + container: &Container{ + Name: "empty-running", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "running", + Path: []string{"README.md"}, + }, + want: true, + }, + 
{ // empty container ruleset with build success + container: &Container{ + Name: "empty-success", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "success", + Path: []string{"src/*, *.gradle"}, + }, + want: true, + }, + { // empty container ruleset with build failure + container: &Container{ + Name: "empty-failure", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "failure", + Comment: "LGTM", + }, + want: false, + }, + { // status success container with build running + container: &Container{ + Name: "status-running", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "running", + Comment: "CI Run", + }, + want: true, + }, + { // status success container with build success + container: &Container{ + Name: "status-success", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "success", + }, + want: true, + }, + { // status success container with build failure + container: &Container{ + Name: "status-failure", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "failure", + }, + want: false, + }, + { // status/failure success container with build running + container: &Container{ + Name: 
"status/failure-running", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Status: []string{constants.StatusSuccess, constants.StatusFailure}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "running", + }, + want: true, + }, + { // status/failure success container with build success + container: &Container{ + Name: "status/failure-success", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Status: []string{constants.StatusSuccess, constants.StatusFailure}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "success", + }, + want: true, + }, + { // status/failure success container with build failure + container: &Container{ + Name: "status/failure-failure", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Status: []string{constants.StatusSuccess, constants.StatusFailure}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "failure", + }, + want: true, + }, + { // no status container with build running + container: &Container{ + Name: "branch/event/no-status-running", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Event: []string{constants.EventPush}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "running", + }, + want: true, + }, + { // no status container with build failure + container: &Container{ + Name: "branch/event/no-status-failure", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Event: []string{constants.EventPush}, + }, + Operator: "and", + }, + 
}, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "failure", + }, + want: false, + }, + { // branch/event/path container with build running + container: &Container{ + Name: "branch/event/path-running", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Event: []string{constants.EventPush}, + Path: []string{"README.md"}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "running", + }, + want: true, + }, + { // branch/event/path container with build success + container: &Container{ + Name: "branch/event/path-success", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Event: []string{constants.EventPush}, + Path: []string{"README.md"}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "success", + }, + want: true, + }, + { // branch/event/path container with build failure + container: &Container{ + Name: "branch/event/path-failure", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Event: []string{constants.EventPush}, + Path: []string{"README.md"}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "failure", + }, + want: false, + }, + { // branch/event/comment container with build running + container: &Container{ + Name: "branch/event/comment-running", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"develop"}, + Event: []string{constants.EventComment}, + Comment: []string{"run vela"}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "develop", + Event: "comment", + Repo: "foo/bar", + 
Status: "running", + }, + want: true, + }, + { // branch/event/comment container with build success + container: &Container{ + Name: "branch/event/comment-success", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"develop"}, + Event: []string{constants.EventComment}, + Comment: []string{"run vela"}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "develop", + Event: "comment", + Repo: "foo/bar", + Status: "success", + }, + want: true, + }, + { // branch/event/comment container with build failure + container: &Container{ + Name: "branch/event/comment-failure", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"develop"}, + Event: []string{constants.EventComment}, + Comment: []string{"run vela"}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "develop", + Event: "comment", + Repo: "foo/bar", + Status: "failure", + }, + want: false, + }, + { // branch/event/status container with build running + container: &Container{ + Name: "branch/event/status-running", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Event: []string{constants.EventPush}, + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "running", + }, + want: true, + }, + { // branch/event/status container with build success + container: &Container{ + Name: "branch/event/status-success", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Event: []string{constants.EventPush}, + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "success", + }, + 
want: true, + }, + { // branch/event/status container with build failure + container: &Container{ + Name: "branch/event/status-failure", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Event: []string{constants.EventPush}, + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "failure", + }, + want: false, + }, + { // branch/event/status container with or operator with build failure + container: &Container{ + Name: "branch/event/status-failure-or", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Event: []string{constants.EventPush}, + Status: []string{constants.StatusSuccess}, + }, + Operator: "or", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "failure", + }, + want: true, + }, + { // tag/event/status container with build running + container: &Container{ + Name: "tag/event/status-running", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Tag: []string{"v*"}, + Event: []string{constants.EventTag}, + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "tag", + Repo: "foo/bar", + Status: "running", + Tag: "v0.1.0", + }, + want: true, + }, + { // tag/event/status container with build success + container: &Container{ + Name: "tag/event/status-success", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Tag: []string{"v*"}, + Event: []string{constants.EventTag}, + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "tag", + Repo: "foo/bar", + Status: "success", + Tag: "v0.1.0", + }, + want: 
true, + }, + { // tag/event/status container with build failure + container: &Container{ + Name: "tag/event/status-failure", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Tag: []string{"v*"}, + Event: []string{constants.EventTag}, + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "tag", + Repo: "foo/bar", + Status: "failure", + Tag: "v0.1.0", + }, + want: false, + }, + { // status unless success container with build running + container: &Container{ + Name: "unless/status-running", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + Unless: Rules{ + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "running", + }, + want: false, + }, + { // status unless success container with build success + container: &Container{ + Name: "unless/status-success", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + Unless: Rules{ + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "success", + }, + want: false, + }, + { // status unless success container with build failure + container: &Container{ + Name: "unless/status-failure", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + Unless: Rules{ + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "failure", + }, + want: true, + }, + { // status unless success container with build success + container: &Container{ + Name: "status unless", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + Unless: Rules{ + 
Branch: []string{"main"}, + Event: []string{constants.EventPush}, + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "push", + Repo: "foo/bar", + Status: "success", + }, + want: false, + }, + { // status unless success container with build failure + container: &Container{ + Name: "status unless", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + Unless: Rules{ + Branch: []string{"dev"}, + Event: []string{constants.EventPush}, + Status: []string{constants.StatusSuccess}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "main", + Event: "pull_request", + Repo: "foo/bar", + Status: "failure", + }, + want: true, + }, + { // pull request labeled success container with build success + container: &Container{ + Name: "pull-request-labeled", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"fix/1234"}, + Event: []string{constants.EventPull + constants.ActionLabeled}, + Label: []string{"enhancement", "documentation"}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "fix/1234", + Event: "pull_request:labeled", + Repo: "foo/bar", + Status: "success", + }, + want: true, + }, + { // pull request unlabeled success container with build success + container: &Container{ + Name: "pull-request-unlabeled", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + If: Rules{ + Event: []string{constants.EventPull + constants.ActionUnlabeled}, + Label: []string{"enhancement"}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "fix/1234", + Event: "pull_request:unlabeled", + Repo: "foo/bar", + Status: "success", + }, + want: true, + }, + { // pull request labeled unless ruleset, success container with build success + container: &Container{ + Name: "pull-request-labeled", + Image: "alpine:latest", + Commands: 
[]string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + Unless: Rules{ + Branch: []string{"fix/1234"}, + Event: []string{constants.EventPull + constants.ActionLabeled}, + Label: []string{"enhancement", "documentation"}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "fix/1234", + Event: "pull_request:labeled", + Repo: "foo/bar", + Status: "success", + }, + want: true, + }, + { // pull request unlabeled unless ruleset, success container with build success + container: &Container{ + Name: "pull-request-unlabeled", + Image: "alpine:latest", + Commands: []string{"echo \"Hey Vela\""}, + Ruleset: Ruleset{ + Unless: Rules{ + Event: []string{constants.EventPull + constants.ActionUnlabeled}, + Label: []string{"enhancement"}, + }, + Operator: "and", + }, + }, + ruleData: &RuleData{ + Branch: "fix/1234", + Event: "pull_request:unlabeled", + Repo: "foo/bar", + Status: "success", + }, + want: true, + }, + } + + // run tests + for _, test := range tests { + got, _ := test.container.Execute(test.ruleData) + + if got != test.want { + t.Errorf("Container Execute %s is %v, want %v", test.container.Name, got, test.want) + } + } +} + +func TestPipeline_Container_MergeEnv(t *testing.T) { + // setup tests + tests := []struct { + container *Container + environment map[string]string + failure bool + }{ + { + container: &Container{ + ID: "step_github_octocat_1_init", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "#init", + Name: "init", + Number: 1, + Pull: "always", + }, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + container: &Container{}, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + container: nil, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + container: &Container{ + ID: "step_github_octocat_1_init", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "#init", + Name: "init", + Number: 1, 
+ Pull: "always", + }, + environment: nil, + failure: true, + }, + } + + // run tests + for _, test := range tests { + err := test.container.MergeEnv(test.environment) + + if test.failure { + if err == nil { + t.Errorf("MergeEnv should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("MergeEnv returned err: %v", err) + } + } +} + +func TestPipeline_Container_Substitute(t *testing.T) { + // setup tests + tests := []struct { + container *Container + want *Container + failure bool + }{ + { + container: &Container{ + ID: "step_github_octocat_1_echo", + Commands: []string{"echo ${FOO}", "echo $${BAR}"}, + Environment: map[string]string{"FOO": "baz", "BAR": "baz"}, + Image: "alpine:latest", + Name: "echo", + Number: 1, + Pull: "always", + }, + want: &Container{ + ID: "step_github_octocat_1_echo", + Commands: []string{"echo baz", "echo ${BAR}"}, + Environment: map[string]string{"FOO": "baz", "BAR": "baz"}, + Image: "alpine:latest", + Name: "echo", + Number: 1, + Pull: "always", + }, + failure: false, + }, + { + container: &Container{ + ID: "step_github_octocat_1_echo", + Commands: []string{"echo ${FOO}", "echo ${BAR}"}, + Environment: map[string]string{ + "FOO": "1\n2\n", + "BAR": "`~!@#$%^&*()-_=+[{]}\\|;:',<.>/?", + }, + Image: "alpine:latest", + Name: "echo", + Number: 1, + Pull: "always", + }, + want: &Container{ + ID: "step_github_octocat_1_echo", + Commands: []string{"echo ${FOO}", "echo ${BAR}"}, + Environment: map[string]string{ + "FOO": "1\n2\n", + "BAR": "`~!@#$%^&*()-_=+[{]}\\|;:',<.>/?", + }, + Image: "alpine:latest", + Name: "echo", + Number: 1, + Pull: "always", + }, + failure: false, + }, + { + container: nil, + want: nil, + failure: true, + }, + { + container: new(Container), + want: new(Container), + failure: true, + }, + } + + // run tests + for _, test := range tests { + err := test.container.Substitute() + + if test.failure { + if err == nil { + t.Errorf("Substitute should have returned err") + } + + continue + } + + if err != 
nil { + t.Errorf("Substitute returned err: %v", err) + } + + if !reflect.DeepEqual(test.container, test.want) { + t.Errorf("Substitute is %v, want %v", test.container, test.want) + } + } +} + +func testContainers() *ContainerSlice { + return &ContainerSlice{ + { + ID: "step_github octocat._1_init", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "#init", + Name: "init", + Number: 1, + Pull: "always", + }, + { + ID: "step_github octocat._1_clone", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "target/vela-git:v0.3.0", + Name: "clone", + Number: 2, + Pull: "always", + IDRequest: "yes", + }, + { + ID: "step_github/octocat._1_echo", + Commands: []string{"echo hello"}, + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "alpine:latest", + Name: "echo", + Number: 3, + Pull: "always", + ReportAs: "echo-step", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push"}}, + Operator: "and", + }, + }, + } +} diff --git a/compiler/types/pipeline/context.go b/compiler/types/pipeline/context.go new file mode 100644 index 000000000..7eae49b16 --- /dev/null +++ b/compiler/types/pipeline/context.go @@ -0,0 +1,117 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "context" +) + +// contextKey defines the key type for +// storing pipeline types in a context. +type contextKey int + +const ( + // buildKey defines the key type for + // storing a Build type in a context. + buildKey contextKey = iota + + // secretKey defines the key type for + // storing a Secret type in a context. + secretKey + + // stageKey defines the key type for + // storing a Stage type in a context. + stageKey + + // containerKey defines the key type for + // storing a Step type in a context. + containerKey +) + +// BuildFromContext retrieves the Build type from the context. 
+func BuildFromContext(c context.Context) *Build { + // get build value from context + v := c.Value(buildKey) + if v == nil { + return nil + } + + // cast build value to expected Build type + b, ok := v.(*Build) + if !ok { + return nil + } + + return b +} + +// BuildWithContext inserts the Build type to the context. +func BuildWithContext(c context.Context, b *Build) context.Context { + return context.WithValue(c, buildKey, b) +} + +// SecretFromContext retrieves the Secret type from the context. +func SecretFromContext(c context.Context) *Secret { + // get secret value from context + v := c.Value(secretKey) + if v == nil { + return nil + } + + // cast secret value to expected Secret type + s, ok := v.(*Secret) + if !ok { + return nil + } + + return s +} + +// SecretWithContext inserts the Secret type to the context. +func SecretWithContext(c context.Context, s *Secret) context.Context { + return context.WithValue(c, secretKey, s) +} + +// StageFromContext retrieves the Stage type from the context. +func StageFromContext(c context.Context) *Stage { + // get stage value from context + v := c.Value(stageKey) + if v == nil { + return nil + } + + // cast stage value to expected Stage type + s, ok := v.(*Stage) + if !ok { + return nil + } + + return s +} + +// StageWithContext inserts the Stage type to the context. +func StageWithContext(c context.Context, s *Stage) context.Context { + return context.WithValue(c, stageKey, s) +} + +// ContainerFromContext retrieves the container type from the context. +func ContainerFromContext(c context.Context) *Container { + // get container value from context + v := c.Value(containerKey) + if v == nil { + return nil + } + + // cast step value to expected Container type + s, ok := v.(*Container) + if !ok { + return nil + } + + return s +} + +// ContainerWithContext inserts the Container type to the context. 
+func ContainerWithContext(c context.Context, s *Container) context.Context { + return context.WithValue(c, containerKey, s) +} diff --git a/compiler/types/pipeline/context_test.go b/compiler/types/pipeline/context_test.go new file mode 100644 index 000000000..899c7b920 --- /dev/null +++ b/compiler/types/pipeline/context_test.go @@ -0,0 +1,200 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "context" + "testing" +) + +func TestPipeline_BuildFromContext(t *testing.T) { + // setup types + b := &Build{ID: "1"} + + // setup tests + tests := []struct { + ctx context.Context + want *Build + }{ + { + ctx: context.WithValue(context.Background(), buildKey, b), + want: b, + }, + { + ctx: context.Background(), + want: nil, + }, + { + ctx: context.WithValue(context.Background(), buildKey, "foo"), + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := BuildFromContext(test.ctx) + + if got != test.want { + t.Errorf("BuildFromContext is %v, want %v", got, test.want) + } + } +} + +func TestPipeline_BuildWithContext(t *testing.T) { + // setup types + want := &Build{ID: "1"} + + // setup context + ctx := BuildWithContext(context.Background(), want) + + // run test + got := ctx.Value(buildKey) + + if got != want { + t.Errorf("BuildWithContext is %v, want %v", got, want) + } +} + +func TestPipeline_SecretFromContext(t *testing.T) { + // setup types + s := &Secret{Name: "foo"} + + // setup tests + tests := []struct { + ctx context.Context + want *Secret + }{ + { + ctx: context.WithValue(context.Background(), secretKey, s), + want: s, + }, + { + ctx: context.Background(), + want: nil, + }, + { + ctx: context.WithValue(context.Background(), secretKey, "foo"), + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := SecretFromContext(test.ctx) + + if got != test.want { + t.Errorf("SecretFromContext is %v, want %v", got, test.want) + } + } +} + +func TestPipeline_SecretWithContext(t *testing.T) { + // setup types + 
want := &Secret{Name: "foo"} + + // setup context + ctx := SecretWithContext(context.Background(), want) + + // run test + got := ctx.Value(secretKey) + + if got != want { + t.Errorf("SecretWithContext is %v, want %v", got, want) + } +} + +func TestPipeline_StageFromContext(t *testing.T) { + // setup types + s := &Stage{Name: "foo"} + + // setup tests + tests := []struct { + ctx context.Context + want *Stage + }{ + { + ctx: context.WithValue(context.Background(), stageKey, s), + want: s, + }, + { + ctx: context.Background(), + want: nil, + }, + { + ctx: context.WithValue(context.Background(), stageKey, "foo"), + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := StageFromContext(test.ctx) + + if got != test.want { + t.Errorf("StageFromContext is %v, want %v", got, test.want) + } + } +} + +func TestPipeline_StageWithContext(t *testing.T) { + // setup types + want := &Stage{Name: "foo"} + + // setup context + ctx := StageWithContext(context.Background(), want) + + // run test + got := ctx.Value(stageKey) + + if got != want { + t.Errorf("StageWithContext is %v, want %v", got, want) + } +} + +func TestPipeline_ContainerFromContext(t *testing.T) { + // setup types + c := &Container{Name: "foo"} + + // setup tests + tests := []struct { + ctx context.Context + want *Container + }{ + { + ctx: context.WithValue(context.Background(), containerKey, c), + want: c, + }, + { + ctx: context.Background(), + want: nil, + }, + { + ctx: context.WithValue(context.Background(), containerKey, "foo"), + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := ContainerFromContext(test.ctx) + + if got != test.want { + t.Errorf("ContainerFromContext is %v, want %v", got, test.want) + } + } +} + +func TestPipeline_ContainerWithContext(t *testing.T) { + // setup types + want := &Container{ID: "1"} + + // setup context + ctx := ContainerWithContext(context.Background(), want) + + // run test + got := ctx.Value(containerKey) + + if got != want { + 
t.Errorf("ContainerWithContext is %v, want %v", got, want) + } +} diff --git a/compiler/types/pipeline/doc.go b/compiler/types/pipeline/doc.go new file mode 100644 index 000000000..4ef7e35a0 --- /dev/null +++ b/compiler/types/pipeline/doc.go @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: Apache-2.0 + +// Package pipeline provides the defined pipeline types for Vela. +// +// Usage: +// +// import "github.com/go-vela/server/compiler/types/pipeline" +package pipeline diff --git a/compiler/types/pipeline/metadata.go b/compiler/types/pipeline/metadata.go new file mode 100644 index 000000000..c4f5fdf87 --- /dev/null +++ b/compiler/types/pipeline/metadata.go @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +// Metadata is the pipeline representation of the metadata block for a pipeline. +// +// swagger:model PipelineMetadata +type Metadata struct { + Template bool `json:"template,omitempty" yaml:"template,omitempty"` + Clone bool `json:"clone,omitempty" yaml:"clone,omitempty"` + Environment []string `json:"environment,omitempty" yaml:"environment,omitempty"` + AutoCancel *CancelOptions `json:"auto_cancel,omitempty" yaml:"auto_cancel,omitempty"` +} + +// CancelOptions is the pipeline representation of the auto_cancel block for a pipeline. +type CancelOptions struct { + Running bool `yaml:"running,omitempty" json:"running,omitempty"` + Pending bool `yaml:"pending,omitempty" json:"pending,omitempty"` + DefaultBranch bool `yaml:"default_branch,omitempty" json:"default_branch,omitempty"` +} diff --git a/compiler/types/pipeline/port.go b/compiler/types/pipeline/port.go new file mode 100644 index 000000000..6ce572b52 --- /dev/null +++ b/compiler/types/pipeline/port.go @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +type ( + // PortSlice is the pipeline representation + // of the ports for a step in a pipeline. + PortSlice []*Port + + // Port is the pipeline representation + // of a port for a step in a pipeline. 
+ // + // swagger:model PipelinePort + Port struct { + Port int `json:"port,omitempty" yaml:"port,omitempty"` + Host int `json:"host,omitempty" yaml:"host,omitempty"` + Protocol string `json:"protocol,omitempty" yaml:"protocol,omitempty"` + } +) diff --git a/compiler/types/pipeline/ruleset.go b/compiler/types/pipeline/ruleset.go new file mode 100644 index 000000000..53126e6c5 --- /dev/null +++ b/compiler/types/pipeline/ruleset.go @@ -0,0 +1,281 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "fmt" + "path/filepath" + "regexp" + "strings" + + "github.com/go-vela/types/constants" +) + +type ( + // Ruleset is the pipeline representation of + // a ruleset block for a step in a pipeline. + // + // swagger:model PipelineRuleset + Ruleset struct { + If Rules `json:"if,omitempty" yaml:"if,omitempty"` + Unless Rules `json:"unless,omitempty" yaml:"unless,omitempty"` + Matcher string `json:"matcher,omitempty" yaml:"matcher,omitempty"` + Operator string `json:"operator,omitempty" yaml:"operator,omitempty"` + Continue bool `json:"continue,omitempty" yaml:"continue,omitempty"` + } + + // Rules is the pipeline representation of the ruletypes + // from a ruleset block for a step in a pipeline. 
+ // + // swagger:model PipelineRules + Rules struct { + Branch Ruletype `json:"branch,omitempty" yaml:"branch,omitempty"` + Comment Ruletype `json:"comment,omitempty" yaml:"comment,omitempty"` + Event Ruletype `json:"event,omitempty" yaml:"event,omitempty"` + Path Ruletype `json:"path,omitempty" yaml:"path,omitempty"` + Repo Ruletype `json:"repo,omitempty" yaml:"repo,omitempty"` + Status Ruletype `json:"status,omitempty" yaml:"status,omitempty"` + Tag Ruletype `json:"tag,omitempty" yaml:"tag,omitempty"` + Target Ruletype `json:"target,omitempty" yaml:"target,omitempty"` + Label Ruletype `json:"label,omitempty" yaml:"label,omitempty"` + Instance Ruletype `json:"instance,omitempty" yaml:"instance,omitempty"` + Parallel bool `json:"-" yaml:"-"` + } + + // Ruletype is the pipeline representation of an element + // for a ruleset block for a step in a pipeline. + // + // swagger:model PipelineRuletype + Ruletype []string + + // RuleData is the data to check our ruleset + // against for a step in a pipeline. + RuleData struct { + Branch string `json:"branch,omitempty" yaml:"branch,omitempty"` + Comment string `json:"comment,omitempty" yaml:"comment,omitempty"` + Event string `json:"event,omitempty" yaml:"event,omitempty"` + Path []string `json:"path,omitempty" yaml:"path,omitempty"` + Repo string `json:"repo,omitempty" yaml:"repo,omitempty"` + Status string `json:"status,omitempty" yaml:"status,omitempty"` + Tag string `json:"tag,omitempty" yaml:"tag,omitempty"` + Target string `json:"target,omitempty" yaml:"target,omitempty"` + Label []string `json:"label,omitempty" yaml:"label,omitempty"` + Instance string `json:"instance,omitempty" yaml:"instance,omitempty"` + Parallel bool `json:"-" yaml:"-"` + } +) + +// Match returns true when the provided ruledata matches +// the if rules and does not match any of the unless rules. +// When the provided if rules are empty, the function returns +// true. 
When both the provided if and unless rules are empty, +// the function also returns true. +func (r *Ruleset) Match(from *RuleData) (bool, error) { + // return true when the if and unless rules are empty + if r.If.Empty() && r.Unless.Empty() { + return true, nil + } + + // return false when the unless rules are not empty and match + if !r.Unless.Empty() { + match, err := r.Unless.Match(from, r.Matcher, r.Operator) + if err != nil { + return false, err + } + + if match { + return false, nil + } + } + + // return true when the if rules are empty + if r.If.Empty() { + return true, nil + } + + // return true when the if rules match + match, err := r.If.Match(from, r.Matcher, r.Operator) + + return match, err +} + +// NoStatus returns true if the status field is empty. +func (r *Rules) NoStatus() bool { + // return true if every ruletype is empty + return len(r.Status) == 0 +} + +// Empty returns true if the provided ruletypes are empty. +func (r *Rules) Empty() bool { + // return true if every ruletype is empty + if len(r.Branch) == 0 && + len(r.Comment) == 0 && + len(r.Event) == 0 && + len(r.Path) == 0 && + len(r.Repo) == 0 && + len(r.Status) == 0 && + len(r.Tag) == 0 && + len(r.Target) == 0 && + len(r.Label) == 0 && + len(r.Instance) == 0 { + return true + } + + // return false if any of the ruletype is provided + return false +} + +// Match returns true for the `or` operator when one of the +// ruletypes from the rules match the provided ruledata. +// Match returns true for the `and` operator when all of the +// ruletypes from the rules match the provided ruledata. For +// both operators, when none of the ruletypes from the rules +// match the provided ruledata, the function returns false. 
+func (r *Rules) Match(from *RuleData, matcher, op string) (bool, error) { + status := true + + var err error + + if len(from.Status) != 0 { + status, err = r.Status.MatchSingle(from.Status, matcher, op) + if err != nil { + return false, err + } + } + + matchBranch, err := r.Branch.MatchSingle(from.Branch, matcher, op) + if err != nil { + return false, err + } + + matchComment, err := r.Comment.MatchSingle(from.Comment, matcher, op) + if err != nil { + return false, err + } + + matchEvent, err := r.Event.MatchSingle(from.Event, matcher, op) + if err != nil { + return false, err + } + + matchPath, err := r.Path.MatchMultiple(from.Path, matcher, op) + if err != nil { + return false, err + } + + matchRepo, err := r.Repo.MatchSingle(from.Repo, matcher, op) + if err != nil { + return false, err + } + + matchTag, err := r.Tag.MatchSingle(from.Tag, matcher, op) + if err != nil { + return false, err + } + + matchTarget, err := r.Target.MatchSingle(from.Target, matcher, op) + if err != nil { + return false, err + } + + matchLabel, err := r.Label.MatchMultiple(from.Label, matcher, op) + if err != nil { + return false, err + } + + matchInstance, err := r.Instance.MatchSingle(from.Instance, matcher, op) + if err != nil { + return false, err + } + + switch op { + case constants.OperatorOr: + return (matchBranch || matchComment || matchEvent || matchPath || matchRepo || matchTag || matchTarget || matchLabel || matchInstance || status), nil + default: + return (matchBranch && matchComment && matchEvent && matchPath && matchRepo && matchTag && matchTarget && matchLabel && matchInstance && status), nil + } +} + +// MatchSingle returns true when the provided ruletype +// matches the provided ruledata. When the provided +// ruletype is empty, the function returns true for +// the `and` operator and false for the `or` operator. 
+func (r *Ruletype) MatchSingle(data, matcher, logic string) (bool, error) { + // return true for `and`, false for `or` if an empty ruletype is provided + if len(*r) == 0 { + return strings.EqualFold(logic, constants.OperatorAnd), nil + } + + // iterate through each pattern in the ruletype + for _, pattern := range *r { + match, err := match(data, matcher, pattern) + if err != nil { + return false, err + } + + if match { + return true, nil + } + } + + // return false if no match is found + return false, nil +} + +// MatchMultiple returns true when the provided ruletype +// matches the provided ruledata. When the provided +// ruletype is empty, the function returns true for +// the `and` operator and false for the `or` operator. +func (r *Ruletype) MatchMultiple(data []string, matcher, logic string) (bool, error) { + // return true for `and`, false for `or` if an empty ruletype is provided + if len(*r) == 0 { + return strings.EqualFold(logic, constants.OperatorAnd), nil + } + + // iterate through each pattern in the ruletype + for _, pattern := range *r { + for _, value := range data { + match, err := match(value, matcher, pattern) + if err != nil { + return false, err + } + + if match { + return true, nil + } + } + } + + // return false if no match is found + return false, nil +} + +// match is a helper function that compares data against a pattern +// and returns true if the data matches the pattern, depending on +// matcher specified. 
+func match(data, matcher, pattern string) (bool, error) { + // handle the pattern based off the matcher provided + switch matcher { + case constants.MatcherRegex, "regex": + regExpPattern, err := regexp.Compile(pattern) + if err != nil { + return false, fmt.Errorf("error in regex pattern %s: %w", pattern, err) + } + + // return true if the regexp pattern matches the ruledata + if regExpPattern.MatchString(data) { + return true, nil + } + case constants.MatcherFilepath: + fallthrough + default: + // return true if the pattern matches the ruledata + ok, _ := filepath.Match(pattern, data) + if ok { + return true, nil + } + } + + // return false if no match is found + return false, nil +} diff --git a/compiler/types/pipeline/ruleset_test.go b/compiler/types/pipeline/ruleset_test.go new file mode 100644 index 000000000..26a23823e --- /dev/null +++ b/compiler/types/pipeline/ruleset_test.go @@ -0,0 +1,678 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "testing" + + "github.com/go-vela/types/constants" +) + +func TestPipeline_Ruleset_Match(t *testing.T) { + // setup types + tests := []struct { + ruleset *Ruleset + data *RuleData + want bool + wantErr bool + }{ + // Empty + {ruleset: &Ruleset{}, data: &RuleData{Branch: "main"}, want: true}, + // If with and operator + { + ruleset: &Ruleset{If: Rules{Branch: []string{"main"}}, Operator: "and"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + { + ruleset: &Ruleset{If: Rules{Branch: []string{"main"}}, Operator: "and"}, + data: &RuleData{Branch: "dev", Comment: "rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: false, + }, + { + ruleset: &Ruleset{If: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "and"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "push", Repo: 
"octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + { + ruleset: &Ruleset{If: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "and"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "pull_request", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: false, + }, + { + ruleset: &Ruleset{If: Rules{Path: []string{"foo.txt", "/foo/bar.txt"}}, Operator: "and"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "pull_request", Path: []string{}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: false, + }, + { + ruleset: &Ruleset{If: Rules{Comment: []string{"rerun"}}, Operator: "and"}, + data: &RuleData{Branch: "dev", Comment: "rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + { + ruleset: &Ruleset{If: Rules{Comment: []string{"rerun"}}, Operator: "and"}, + data: &RuleData{Branch: "dev", Comment: "ok to test", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: false, + }, + { + ruleset: &Ruleset{If: Rules{Event: []string{"deployment"}, Target: []string{"production"}}, Operator: "and"}, + data: &RuleData{Branch: "dev", Comment: "", Event: "deployment", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: "production"}, + want: true, + }, + { + ruleset: &Ruleset{If: Rules{Event: []string{"deployment"}, Target: []string{"production"}}, Operator: "and"}, + data: &RuleData{Branch: "dev", Comment: "", Event: "deployment", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: "stage"}, + want: false, + }, + { + ruleset: &Ruleset{If: Rules{Event: []string{"schedule"}, Target: []string{"weekly"}}, Operator: "and"}, + data: &RuleData{Branch: "dev", Comment: "", Event: "schedule", Repo: "octocat/hello-world", Status: "pending", 
Tag: "refs/heads/main", Target: "weekly"}, + want: true, + }, + { + ruleset: &Ruleset{If: Rules{Event: []string{"schedule"}, Target: []string{"weekly"}}, Operator: "and"}, + data: &RuleData{Branch: "dev", Comment: "", Event: "schedule", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: "nightly"}, + want: false, + }, + { + ruleset: &Ruleset{If: Rules{Status: []string{"success", "failure"}}, Operator: "and"}, + data: &RuleData{Branch: "dev", Comment: "ok to test", Event: "push", Repo: "octocat/hello-world", Status: "failure", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + // If with or operator + { + ruleset: &Ruleset{If: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "or"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + { + ruleset: &Ruleset{If: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "or"}, + data: &RuleData{Branch: "dev", Comment: "rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + { + ruleset: &Ruleset{If: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "or"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "pull_request", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + { + ruleset: &Ruleset{If: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "or"}, + data: &RuleData{Branch: "dev", Comment: "rerun", Event: "pull_request", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: false, + }, + { + ruleset: &Ruleset{If: Rules{Path: []string{"foo.txt", "/foo/bar.txt"}}, Operator: "or"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "pull_request", Path: []string{}, Repo: "octocat/hello-world", Status: "pending", Tag: 
"refs/heads/main", Target: ""}, + want: false, + }, + // Unless with and operator + { + ruleset: &Ruleset{Unless: Rules{Branch: []string{"main"}}, Operator: "and"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: false, + }, + { + ruleset: &Ruleset{Unless: Rules{Branch: []string{"main"}}, Operator: "and"}, + data: &RuleData{Branch: "dev", Comment: "rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + { + ruleset: &Ruleset{Unless: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "and"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: false, + }, + { + ruleset: &Ruleset{Unless: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "and"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "pull_request", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + { + ruleset: &Ruleset{Unless: Rules{Path: []string{"foo.txt", "/foo/bar.txt"}}, Operator: "and"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "pull_request", Path: []string{}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + // Unless with or operator + { + ruleset: &Ruleset{Unless: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "or"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: false, + }, + { + ruleset: &Ruleset{Unless: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "or"}, + data: &RuleData{Branch: "dev", Comment: "rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: 
"refs/heads/main", Target: ""}, + want: false, + }, + { + ruleset: &Ruleset{Unless: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "or"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "pull_request", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: false, + }, + { + ruleset: &Ruleset{Unless: Rules{Branch: []string{"main"}, Event: []string{"push"}}, Operator: "or"}, + data: &RuleData{Branch: "dev", Comment: "rerun", Event: "pull_request", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + { + ruleset: &Ruleset{Unless: Rules{Path: []string{"foo.txt", "/foo/bar.txt"}}, Operator: "or"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "pull_request", Path: []string{}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: true, + }, + // Advanced Rulesets + { + ruleset: &Ruleset{ + If: Rules{ + Event: []string{"push", "pull_request"}, + Tag: []string{"release/*"}, + }, + Operator: "or", + }, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "tag", Repo: "octocat/hello-world", Status: "pending", Tag: "release/*", Target: ""}, + want: true, + }, + { + ruleset: &Ruleset{ + If: Rules{ + Event: []string{"push", "pull_request"}, + Tag: []string{"release/*"}, + }, + Operator: "or", + }, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "tag", Repo: "octocat/hello-world", Status: "pending", Tag: "release/*", Target: ""}, + want: true, + }, + { + ruleset: &Ruleset{ + If: Rules{ + Event: []string{"push", "pull_request"}, + Tag: []string{"release/*"}, + }, + Operator: "or", + }, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "tag", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + want: false, + }, + // Bad regexp + { + ruleset: &Ruleset{If: Rules{Branch: []string{"*-dev"}}, Matcher: "regexp"}, + data: &RuleData{Branch: "main", Comment: 
"rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + wantErr: true, + }, + { + ruleset: &Ruleset{Unless: Rules{Branch: []string{"*-dev"}, Event: []string{"push"}}, Operator: "or", Matcher: "regexp"}, + data: &RuleData{Branch: "main", Comment: "rerun", Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + wantErr: true, + }, + } + + // run test + for _, test := range tests { + got, err := test.ruleset.Match(test.data) + if err != nil { + if !test.wantErr { + t.Errorf("Ruleset Match for %s operator returned err: %s", test.ruleset.Operator, err) + } + } else { + if test.wantErr { + t.Errorf("Ruleset Match should have returned an error") + } + } + + if got != test.want { + t.Errorf("Ruleset Match for %s operator is %v, want %v", test.ruleset.Operator, got, test.want) + } + } +} + +func TestPipeline_Rules_NoStatus(t *testing.T) { + // setup types + r := Rules{} + + // run test + got := r.NoStatus() + + if !got { + t.Errorf("Rule NoStatus is %v, want true", got) + } +} + +func TestPipeline_Rules_Empty(t *testing.T) { + // setup types + r := Rules{} + + // run test + got := r.Empty() + + if !got { + t.Errorf("Rule IsEmpty is %v, want true", got) + } +} + +func TestPipeline_Rules_Empty_Invalid(t *testing.T) { + // setup types + r := Rules{Branch: []string{"main"}} + + // run test + got := r.Empty() + + if got { + t.Errorf("Rule IsEmpty is %v, want false", got) + } +} + +func TestPipeline_Rules_Match_Regex_Tag(t *testing.T) { + // setup types + tests := []struct { + rules *Rules + data *RuleData + operator string + want bool + }{ + { + rules: &Rules{Event: []string{"tag"}, Tag: []string{"refs/tags/20.*"}}, + data: &RuleData{Branch: "main", Event: "tag", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/tags/20.4.42.167", Target: ""}, + operator: "and", + want: true, + }, + { + rules: &Rules{Event: []string{"tag"}, Tag:
[]string{"[0-9][0-9].[0-9].[0-9][0-9].[0-9][0-9][0-9]"}}, + data: &RuleData{Branch: "main", Event: "tag", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/tags/20.4.42.167", Target: ""}, + operator: "and", + want: true, + }, + { + rules: &Rules{Event: []string{"tag"}, Tag: []string{"[0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+"}}, + data: &RuleData{Branch: "main", Event: "tag", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/tags/20.4.42.167", Target: ""}, + operator: "and", + want: true, + }, + { + rules: &Rules{Event: []string{"tag"}, Tag: []string{"^refs/tags/(\\d+\\.)+\\d+$"}}, + data: &RuleData{Branch: "main", Event: "tag", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/tags/20.4.42.167", Target: ""}, + operator: "and", + want: true, + }, + { + rules: &Rules{Event: []string{"tag"}, Tag: []string{"^refs/tags/(\\d+\\.)+\\d+"}}, + data: &RuleData{Branch: "main", Event: "tag", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/tags/2.4.42.165-prod", Target: ""}, + operator: "and", + want: true, + }, + { + rules: &Rules{Event: []string{"tag"}, Tag: []string{"^refs/tags/(\\d+\\.)+\\d+$"}}, + data: &RuleData{Branch: "main", Event: "tag", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/tags/2.4.42.165-prod", Target: ""}, + operator: "and", + want: false, + }, + } + + // run test + for _, test := range tests { + got, _ := test.rules.Match(test.data, "regexp", test.operator) + + if got != test.want { + t.Errorf("Rules Match for %s operator is %v, want %v", test.operator, got, test.want) + } + } +} + +func TestPipeline_Rules_Match(t *testing.T) { + // setup types + tests := []struct { + rules *Rules + data *RuleData + operator string + want bool + }{ + // Empty + { + rules: &Rules{}, + data: &RuleData{Branch: "main", Event: "push", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "and", + want: true, + }, + { + rules: &Rules{}, + data: 
&RuleData{Branch: "main", Event: "push", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "or", + want: false, + }, + // and operator + { + rules: &Rules{Branch: []string{"main"}}, + data: &RuleData{Branch: "main", Event: "push", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "and", + want: true, + }, + { + rules: &Rules{Branch: []string{"main"}}, + data: &RuleData{Branch: "dev", Event: "push", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "and", + want: false, + }, + { + rules: &Rules{Branch: []string{"main"}, Event: []string{"push"}}, + data: &RuleData{Branch: "main", Event: "push", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "and", + want: true, + }, + { + rules: &Rules{Branch: []string{"main"}, Event: []string{"push"}}, + data: &RuleData{Branch: "main", Event: "pull_request", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "and", + want: false, + }, + { + rules: &Rules{Path: []string{"foob.txt"}}, + data: &RuleData{Branch: "main", Event: "pull_request", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "and", + want: false, + }, + { + rules: &Rules{Status: []string{"success", "failure"}}, + data: &RuleData{Branch: "main", Event: "pull_request", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Tag: "refs/heads/main", Target: ""}, + operator: "and", + want: true, + }, + { + rules: &Rules{Event: []string{"tag"}, Tag: []string{"refs/tags/[0-9].*-prod"}}, + data: &RuleData{Branch: "main", Event: 
"tag", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/tags/2.4.42.167-prod", Target: ""}, + operator: "and", + want: true, + }, + { + rules: &Rules{Event: []string{"tag"}, Tag: []string{"path/to/thing/*/*"}}, + data: &RuleData{Branch: "main", Event: "tag", Repo: "octocat/hello-world", Status: "pending", Tag: "path/to/thing/stage/1.0.2-rc", Target: ""}, + operator: "and", + want: true, + }, + // or operator + { + rules: &Rules{Branch: []string{"main"}, Event: []string{"push"}}, + data: &RuleData{Branch: "main", Event: "push", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "or", + want: true, + }, + { + rules: &Rules{Branch: []string{"main"}, Event: []string{"push"}}, + data: &RuleData{Branch: "dev", Event: "push", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "or", + want: true, + }, + { + rules: &Rules{Branch: []string{"main"}, Event: []string{"push"}}, + data: &RuleData{Branch: "main", Event: "pull_request", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "or", + want: true, + }, + { + rules: &Rules{Branch: []string{"main"}, Event: []string{"push"}}, + data: &RuleData{Branch: "dev", Event: "pull_request", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "or", + want: false, + }, + { + rules: &Rules{Path: []string{"foob.txt"}}, + data: &RuleData{Branch: "dev", Event: "pull_request", Path: []string{"foo.txt", "/foo/bar.txt"}, Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "or", + want: false, + }, + // Advanced Rulesets + { + rules: &Rules{Event: []string{"push", "pull_request"}, Tag: []string{"release/*"}}, + data: &RuleData{Branch: "main", 
Event: "push", Repo: "octocat/hello-world", Status: "pending", Tag: "release/*", Target: ""}, + operator: "or", + want: true, + }, + { + rules: &Rules{Event: []string{"push", "pull_request"}, Tag: []string{"release/*"}}, + data: &RuleData{Branch: "main", Event: "tag", Repo: "octocat/hello-world", Status: "pending", Tag: "refs/heads/main", Target: ""}, + operator: "or", + want: false, + }, + { + rules: &Rules{Event: []string{"pull_request:labeled"}, Label: []string{"enhancement", "documentation"}}, + data: &RuleData{Branch: "main", Event: "pull_request:labeled", Repo: "octocat/hello-world", Status: "pending", Label: []string{"documentation"}}, + operator: "and", + want: true, + }, + { + rules: &Rules{Event: []string{"pull_request:labeled"}, Label: []string{"enhancement", "documentation"}}, + data: &RuleData{Branch: "main", Event: "pull_request:labeled", Repo: "octocat/hello-world", Status: "pending", Label: []string{"support"}}, + operator: "and", + want: false, + }, + { + rules: &Rules{Event: []string{"pull_request:unlabeled"}, Label: []string{"enhancement", "documentation"}}, + data: &RuleData{Branch: "main", Event: "pull_request:unlabeled", Repo: "octocat/hello-world", Status: "pending", Label: []string{"documentation"}}, + operator: "and", + want: true, + }, + { + rules: &Rules{Event: []string{"pull_request:unlabeled"}, Label: []string{"enhancement"}}, + data: &RuleData{Branch: "main", Event: "pull_request:unlabeled", Repo: "octocat/hello-world", Status: "pending", Label: []string{"documentation"}}, + operator: "and", + want: false, + }, + { + rules: &Rules{Event: []string{"push"}, Label: []string{"enhancement", "documentation"}}, + data: &RuleData{Branch: "main", Event: "push", Repo: "octocat/hello-world", Status: "pending", Label: []string{"documentation"}}, + operator: "and", + want: true, + }, + { + rules: &Rules{Event: []string{"push"}, Label: []string{"enhancement"}}, + data: &RuleData{Branch: "main", Event: "push", Repo: "octocat/hello-world", Status: 
"pending", Label: []string{"documentation"}}, + operator: "and", + want: false, + }, + { + rules: &Rules{Event: []string{"push"}, Label: []string{"enhancement"}}, + data: &RuleData{Branch: "main", Event: "push", Repo: "octocat/hello-world", Status: "pending", Label: []string{"documentation"}}, + operator: "or", + want: true, + }, + { + rules: &Rules{Event: []string{"push"}, Instance: []string{"http://localhost:8080"}}, + data: &RuleData{Branch: "main", Event: "push", Repo: "octocat/hello-world", Status: "pending", Instance: "http://localhost:5432"}, + operator: "and", + want: false, + }, + { + rules: &Rules{Event: []string{"push"}, Instance: []string{"http://localhost:8080"}}, + data: &RuleData{Branch: "main", Event: "push", Repo: "octocat/hello-world", Status: "pending", Instance: "http://localhost:8080"}, + operator: "and", + want: true, + }, + } + + // run test + for _, test := range tests { + got, _ := test.rules.Match(test.data, "filepath", test.operator) + + if got != test.want { + t.Errorf("Rules Match for %s operator is %v, want %v", test.operator, got, test.want) + } + } +} + +func TestPipeline_Ruletype_MatchAnd(t *testing.T) { + // setup types + tests := []struct { + matcher string + rule Ruletype + pattern string + want bool + }{ + // Empty with filepath matcher + {matcher: "filepath", rule: []string{}, pattern: "main", want: true}, + {matcher: "filepath", rule: []string{}, pattern: "push", want: true}, + {matcher: "filepath", rule: []string{}, pattern: "foo/bar", want: true}, + {matcher: "filepath", rule: []string{}, pattern: "success", want: true}, + {matcher: "filepath", rule: []string{}, pattern: "release/*", want: true}, + // Branch with filepath matcher + {matcher: "filepath", rule: []string{"main"}, pattern: "main", want: true}, + {matcher: "filepath", rule: []string{"main"}, pattern: "dev", want: false}, + // Comment with filepath matcher + {matcher: "filepath", rule: []string{"ok to test"}, pattern: "ok to test", want: true}, + {matcher: 
"filepath", rule: []string{"ok to test"}, pattern: "rerun", want: false}, + // Event with filepath matcher + {matcher: "filepath", rule: []string{"push"}, pattern: "push", want: true}, + {matcher: "filepath", rule: []string{"push"}, pattern: "pull_request", want: false}, + // Repo with filepath matcher + {matcher: "filepath", rule: []string{"foo/bar"}, pattern: "foo/bar", want: true}, + {matcher: "filepath", rule: []string{"foo/bar"}, pattern: "test/foobar", want: false}, + // Status with filepath matcher + {matcher: "filepath", rule: []string{"success"}, pattern: "success", want: true}, + {matcher: "filepath", rule: []string{"success"}, pattern: "failure", want: false}, + // Tag with filepath matcher + {matcher: "filepath", rule: []string{"release/*"}, pattern: "release/*", want: true}, + {matcher: "filepath", rule: []string{"release/*"}, pattern: "stage/*", want: false}, + {matcher: "filepath", rule: []string{"release/*"}, pattern: "release/111.2.3-rc", want: true}, + {matcher: "filepath", rule: []string{"release/**"}, pattern: "release/1.2.3-rc-hold", want: true}, + {matcher: "filepath", rule: []string{"release/*"}, pattern: "release/stage/1.2.3-rc", want: false}, + {matcher: "filepath", rule: []string{"release/*/*"}, pattern: "release/stage/1.2.3-rc", want: true}, + {matcher: "filepath", rule: []string{"release/stage/*"}, pattern: "release/stage/1.2.3-rc", want: true}, + {matcher: "filepath", rule: []string{"release/prod/*"}, pattern: "release/stage/1.2.3-rc", want: false}, + {matcher: "filepath", rule: []string{"release/*"}, pattern: "release/1.2.3-rc", want: true}, + {matcher: "filepath", rule: []string{"release/*"}, pattern: "release/1.2.3", want: true}, + // Target with filepath matcher + {matcher: "filepath", rule: []string{"production"}, pattern: "production", want: true}, + {matcher: "filepath", rule: []string{"stage"}, pattern: "production", want: false}, + // Label with filepath matcher + {matcher: "filepath", rule: []string{"enhancement", 
"documentation"}, pattern: "documentation", want: true}, + {matcher: "filepath", rule: []string{"enhancement", "documentation"}, pattern: "question", want: false}, + // Empty with regex matcher + {matcher: "regexp", rule: []string{}, pattern: "main", want: true}, + {matcher: "regexp", rule: []string{}, pattern: "push", want: true}, + {matcher: "regexp", rule: []string{}, pattern: "foo/bar", want: true}, + {matcher: "regexp", rule: []string{}, pattern: "success", want: true}, + {matcher: "regexp", rule: []string{}, pattern: "release/*", want: true}, + // Branch with regex matcher + {matcher: "regexp", rule: []string{"main"}, pattern: "main", want: true}, + {matcher: "regexp", rule: []string{"main"}, pattern: "dev", want: false}, + // Comment with regex matcher + {matcher: "regexp", rule: []string{"ok to test"}, pattern: "ok to test", want: true}, + {matcher: "regexp", rule: []string{"ok to test"}, pattern: "rerun", want: false}, + // Event with regex matcher + {matcher: "regexp", rule: []string{"push"}, pattern: "push", want: true}, + {matcher: "regexp", rule: []string{"push"}, pattern: "pull_request", want: false}, + // Repo with regex matcher + {matcher: "regexp", rule: []string{"foo/bar"}, pattern: "foo/bar", want: true}, + {matcher: "regexp", rule: []string{"foo/bar"}, pattern: "test/foobar", want: false}, + // Status with regex matcher + {matcher: "regexp", rule: []string{"success"}, pattern: "success", want: true}, + {matcher: "regexp", rule: []string{"success"}, pattern: "failure", want: false}, + // Tag with regex matcher + {matcher: "regexp", rule: []string{"release/*"}, pattern: "release/*", want: true}, + {matcher: "regexp", rule: []string{"release/*"}, pattern: "stage/*", want: false}, + {matcher: "regex", rule: []string{"release/[0-9]+.*-rc$"}, pattern: "release/111.2.3-rc", want: true}, + {matcher: "regex", rule: []string{"release/[0-9]+.*-rc$"}, pattern: "release/1.2.3-rc-hold", want: false}, + {matcher: "regexp", rule: []string{"release/*"}, pattern: 
"release/stage/1.2.3-rc", want: true}, + {matcher: "regexp", rule: []string{"release/*/*"}, pattern: "release/stage/1.2.3-rc", want: true}, + {matcher: "regex", rule: []string{"release/stage/*"}, pattern: "release/stage/1.2.3-rc", want: true}, + {matcher: "regex", rule: []string{"release/prod/*"}, pattern: "release/stage/1.2.3-rc", want: false}, + {matcher: "regexp", rule: []string{"release/[0-9]+.[0-9]+.[0-9]+$"}, pattern: "release/1.2.3-rc", want: false}, + {matcher: "regexp", rule: []string{"release/[0-9]+.[0-9]+.[0-9]+$"}, pattern: "release/1.2.3", want: true}, + // Target with regex matcher + {matcher: "regexp", rule: []string{"production"}, pattern: "production", want: true}, + {matcher: "regexp", rule: []string{"stage"}, pattern: "production", want: false}, + // Label with regexp matcher + {matcher: "regexp", rule: []string{"enhancement", "documentation"}, pattern: "documentation", want: true}, + {matcher: "regexp", rule: []string{"enhancement", "documentation"}, pattern: "question", want: false}, + // Instance with regexp matcher + {matcher: "regexp", rule: []string{"http://localhost:8080", "http://localhost:1234"}, pattern: "http://localhost:5432", want: false}, + {matcher: "regexp", rule: []string{"http://localhost:8080", "http://localhost:1234"}, pattern: "http://localhost:8080", want: true}, + } + + // run test + for _, test := range tests { + got, _ := test.rule.MatchSingle(test.pattern, test.matcher, constants.OperatorAnd) + + if got != test.want { + t.Errorf("MatchAnd for %s matcher is %v, want %v", test.matcher, got, test.want) + } + } +} + +func TestPipeline_Ruletype_MatchOr(t *testing.T) { + // setup types + tests := []struct { + matcher string + rule Ruletype + pattern string + want bool + }{ + // Empty with filepath matcher + {matcher: "filepath", rule: []string{}, pattern: "main", want: false}, + {matcher: "filepath", rule: []string{}, pattern: "push", want: false}, + {matcher: "filepath", rule: []string{}, pattern: "foo/bar", want: false}, + 
{matcher: "filepath", rule: []string{}, pattern: "success", want: false}, + {matcher: "filepath", rule: []string{}, pattern: "release/*", want: false}, + // Branch with filepath matcher + {matcher: "filepath", rule: []string{"main"}, pattern: "main", want: true}, + {matcher: "filepath", rule: []string{"main"}, pattern: "dev", want: false}, + // Comment with filepath matcher + {matcher: "filepath", rule: []string{"ok to test"}, pattern: "ok to test", want: true}, + {matcher: "filepath", rule: []string{"ok to test"}, pattern: "rerun", want: false}, + // Event with filepath matcher + {matcher: "filepath", rule: []string{"push"}, pattern: "push", want: true}, + {matcher: "filepath", rule: []string{"push"}, pattern: "pull_request", want: false}, + // Repo with filepath matcher + {matcher: "filepath", rule: []string{"foo/bar"}, pattern: "foo/bar", want: true}, + {matcher: "filepath", rule: []string{"foo/bar"}, pattern: "test/foobar", want: false}, + // Status with filepath matcher + {matcher: "filepath", rule: []string{"success"}, pattern: "success", want: true}, + {matcher: "filepath", rule: []string{"success"}, pattern: "failure", want: false}, + // Tag with filepath matcher + {matcher: "filepath", rule: []string{"release/*"}, pattern: "release/*", want: true}, + {matcher: "filepath", rule: []string{"release/*"}, pattern: "stage/*", want: false}, + // Target with filepath matcher + {matcher: "filepath", rule: []string{"production"}, pattern: "production", want: true}, + {matcher: "filepath", rule: []string{"stage"}, pattern: "production", want: false}, + // Label with filepath matcher + {matcher: "filepath", rule: []string{"enhancement", "documentation"}, pattern: "documentation", want: true}, + {matcher: "filepath", rule: []string{"enhancement", "documentation"}, pattern: "question", want: false}, + // Empty with regexp matcher + {matcher: "regexp", rule: []string{}, pattern: "main", want: false}, + {matcher: "regexp", rule: []string{}, pattern: "push", want: false}, 
+ {matcher: "regexp", rule: []string{}, pattern: "foo/bar", want: false}, + {matcher: "regexp", rule: []string{}, pattern: "success", want: false}, + {matcher: "regexp", rule: []string{}, pattern: "release/*", want: false}, + // Branch with regexp matcher + {matcher: "regexp", rule: []string{"main"}, pattern: "main", want: true}, + {matcher: "regexp", rule: []string{"main"}, pattern: "dev", want: false}, + // Comment with regexp matcher + {matcher: "regexp", rule: []string{"ok to test"}, pattern: "ok to test", want: true}, + {matcher: "regexp", rule: []string{"ok to test"}, pattern: "rerun", want: false}, + // Event with regexp matcher + {matcher: "regexp", rule: []string{"push"}, pattern: "push", want: true}, + {matcher: "regexp", rule: []string{"push"}, pattern: "pull_request", want: false}, + // Repo with regexp matcher + {matcher: "regexp", rule: []string{"foo/bar"}, pattern: "foo/bar", want: true}, + {matcher: "regexp", rule: []string{"foo/bar"}, pattern: "test/foobar", want: false}, + // Status with regexp matcher + {matcher: "regexp", rule: []string{"success"}, pattern: "success", want: true}, + {matcher: "regexp", rule: []string{"success"}, pattern: "failure", want: false}, + // Tag with regexp matcher + {matcher: "regexp", rule: []string{"release/*"}, pattern: "release/*", want: true}, + {matcher: "regexp", rule: []string{"release/*"}, pattern: "stage/*", want: false}, + // Target with regexp matcher + {matcher: "regexp", rule: []string{"production"}, pattern: "production", want: true}, + {matcher: "regexp", rule: []string{"stage"}, pattern: "production", want: false}, + // Label with regexp matcher + {matcher: "regexp", rule: []string{"enhancement", "documentation"}, pattern: "documentation", want: true}, + {matcher: "regexp", rule: []string{"enhancement", "documentation"}, pattern: "question", want: false}, + // Instance with regexp matcher + {matcher: "regexp", rule: []string{"http://localhost:8080", "http://localhost:1234"}, pattern: 
"http://localhost:5432", want: false}, + {matcher: "regexp", rule: []string{"http://localhost:8080", "http://localhost:1234"}, pattern: "http://localhost:8080", want: true}, + } + + // run test + for _, test := range tests { + got, _ := test.rule.MatchSingle(test.pattern, test.matcher, constants.OperatorOr) + + if got != test.want { + t.Errorf("MatchOr for %s matcher is %v, want %v", test.matcher, got, test.want) + } + } +} diff --git a/compiler/types/pipeline/secret.go b/compiler/types/pipeline/secret.go new file mode 100644 index 000000000..bbc5e2602 --- /dev/null +++ b/compiler/types/pipeline/secret.go @@ -0,0 +1,239 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "errors" + "fmt" + "strings" + + "github.com/go-vela/types/constants" +) + +type ( + // SecretSlice is the pipeline representation + // of the secrets block for a pipeline. + // + // swagger:model PipelineSecretSlice + SecretSlice []*Secret + + // Secret is the pipeline representation of a + // secret from the secrets block for a pipeline. + // + // swagger:model PipelineSecret + Secret struct { + Name string `json:"name,omitempty" yaml:"name,omitempty"` + Value string `json:"value,omitempty" yaml:"value,omitempty"` + Key string `json:"key,omitempty" yaml:"key,omitempty"` + Engine string `json:"engine,omitempty" yaml:"engine,omitempty"` + Type string `json:"type,omitempty" yaml:"type,omitempty"` + Origin *Container `json:"origin,omitempty" yaml:"origin,omitempty"` + Pull string `json:"pull,omitempty" yaml:"pull,omitempty"` + } + + // StepSecretSlice is the pipeline representation + // of the secrets block for a step in a pipeline. + // + // swagger:model PipelineStepSecretSlice + StepSecretSlice []*StepSecret + + // StepSecret is the pipeline representation of a secret + // from a secrets block for a step in a pipeline. 
+ // + // swagger:model PipelineStepSecret + StepSecret struct { + Source string `json:"source,omitempty" yaml:"source,omitempty"` + Target string `json:"target,omitempty" yaml:"target,omitempty"` + } +) + +var ( + // ErrInvalidEngine defines the error type when the + // SecretEngine provided to the client is unsupported. + ErrInvalidEngine = errors.New("invalid secret engine") + // ErrInvalidOrg defines the error type when the + // org in key does not equal the name of the organization. + ErrInvalidOrg = errors.New("invalid organization in key") + // ErrInvalidRepo defines the error type when the + // repo in key does not equal the name of the repository. + ErrInvalidRepo = errors.New("invalid repository in key") + // ErrInvalidShared defines the error type when the + // org in key does not equal the name of the team. + ErrInvalidShared = errors.New("invalid team in key") + // ErrInvalidPath defines the error type when the + // path provided for a type (org, repo, shared) is invalid. + ErrInvalidPath = errors.New("invalid secret path") + // ErrInvalidName defines the error type when the name + // contains restricted characters or is empty. + ErrInvalidName = errors.New("invalid secret name") +) + +// Purge removes the secrets that have a ruleset +// that do not match the provided ruledata. 
+func (s *SecretSlice) Purge(r *RuleData) (*SecretSlice, error) { + counter := 1 + secrets := new(SecretSlice) + + // iterate through each Secret in the pipeline + for _, secret := range *s { + if secret.Origin.Empty() { + // append the secret to the new slice of secrets + *secrets = append(*secrets, secret) + + continue + } + + match, err := secret.Origin.Ruleset.Match(r) + if err != nil { + return nil, fmt.Errorf("unable to process ruleset for secret %s: %w", secret.Name, err) + } + + // verify ruleset matches + if match { + // overwrite the Container number with the Container counter + secret.Origin.Number = counter + + // increment Container counter + counter = counter + 1 + + // append the secret to the new slice of secrets + *secrets = append(*secrets, secret) + } + } + + return secrets, nil +} + +// ParseOrg returns the parts (org, key) of the secret path +// when the secret is valid for a given organization. +func (s *Secret) ParseOrg(org string) (string, string, error) { + path := s.Key + + // check if the secret is not a native or vault type + if !strings.EqualFold(s.Engine, constants.DriverNative) && + !strings.EqualFold(s.Engine, constants.DriverVault) { + return "", "", fmt.Errorf("%w: %s", ErrInvalidEngine, s.Engine) + } + + // check if a path was provided + if !strings.Contains(path, "/") { + return "", "", fmt.Errorf("%w: %s ", ErrInvalidPath, path) + } + + // split the full path into parts + parts := strings.SplitN(path, "/", 2) + + // secret is invalid + if len(parts) != 2 { + return "", "", fmt.Errorf("%w: %s ", ErrInvalidPath, path) + } + + // check if the org provided matches what we expect + if !strings.EqualFold(parts[0], org) { + return "", "", fmt.Errorf("%w: %s ", ErrInvalidOrg, parts[0]) + } + + // check if path segments empty + if len(parts[1]) == 0 { + return "", "", fmt.Errorf("%w: %s ", ErrInvalidPath, path) + } + + // secret names can't be empty. 
+ if len(s.Name) == 0 { + return "", "", fmt.Errorf("%w: %s missing name", ErrInvalidName, s.Key) + } + + // environmental variables can't contain certain restricted characters. + if strings.ContainsAny(s.Name, constants.SecretRestrictedCharacters) { + return "", "", fmt.Errorf("%w (contains restricted characters): %s ", ErrInvalidName, s.Name) + } + + return parts[0], parts[1], nil +} + +// ParseRepo returns the parts (org, repo, key) of the secret path +// when the secret is valid for a given organization and repository. +func (s *Secret) ParseRepo(org, repo string) (string, string, string, error) { + path := s.Key + + // check if the secret is not a native or vault type + if !strings.EqualFold(s.Engine, constants.DriverNative) && !strings.EqualFold(s.Engine, constants.DriverVault) { + return "", "", "", fmt.Errorf("%w: %s", ErrInvalidEngine, s.Engine) + } + + // split the full path into parts + parts := strings.SplitN(path, "/", 3) + + // secret is invalid + if len(parts) != 3 { + return "", "", "", fmt.Errorf("%w: %s ", ErrInvalidPath, path) + } + + // check if the org provided matches what we expect + if !strings.EqualFold(parts[0], org) { + return "", "", "", fmt.Errorf("%w: %s ", ErrInvalidOrg, parts[0]) + } + + // check if the repo provided matches what we expect + if !strings.EqualFold(parts[1], repo) { + return "", "", "", fmt.Errorf("%w: %s ", ErrInvalidRepo, parts[1]) + } + + // check if path segments empty + if len(parts[2]) == 0 { + return "", "", "", fmt.Errorf("%w: %s ", ErrInvalidPath, path) + } + + // secret names can't be empty. + if len(s.Name) == 0 { + return "", "", "", fmt.Errorf("%w: %s missing name", ErrInvalidName, s.Key) + } + + // environmental variables can't contain certain restricted characters. 
+ if strings.ContainsAny(s.Name, constants.SecretRestrictedCharacters) { + return "", "", "", fmt.Errorf("%w (contains restricted characters): %s ", ErrInvalidName, s.Name) + } + + return parts[0], parts[1], parts[2], nil +} + +// ParseShared returns the parts (org, team, key) of the secret path +// when the secret is valid for a given organization and team. +func (s *Secret) ParseShared() (string, string, string, error) { + path := s.Key + + // check if the secret is not a native or vault type + if !strings.EqualFold(s.Engine, constants.DriverNative) && !strings.EqualFold(s.Engine, constants.DriverVault) { + return "", "", "", fmt.Errorf("%w: %s", ErrInvalidEngine, s.Engine) + } + + // check if a path was provided + if !strings.Contains(path, "/") { + return "", "", "", fmt.Errorf("%w: %s ", ErrInvalidPath, path) + } + + // split the full path into parts + parts := strings.SplitN(path, "/", 3) + + // secret is invalid + if len(parts) != 3 { + return "", "", "", fmt.Errorf("%w: %s ", ErrInvalidPath, path) + } + + // check if path segments empty + if len(parts[1]) == 0 || len(parts[2]) == 0 { + return "", "", "", fmt.Errorf("%w: %s ", ErrInvalidPath, path) + } + + // secret names can't be empty. + if len(s.Name) == 0 { + return "", "", "", fmt.Errorf("%w: %s missing name", ErrInvalidName, s.Key) + } + + // environmental variables can't contain certain restricted characters. 
+ if strings.ContainsAny(s.Name, constants.SecretRestrictedCharacters) { + return "", "", "", fmt.Errorf("%w (contains restricted characters): %s ", ErrInvalidName, s.Name) + } + + return parts[0], parts[1], parts[2], nil +} diff --git a/compiler/types/pipeline/secret_test.go b/compiler/types/pipeline/secret_test.go new file mode 100644 index 000000000..bf5bfbc4d --- /dev/null +++ b/compiler/types/pipeline/secret_test.go @@ -0,0 +1,592 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "errors" + "reflect" + "strings" + "testing" +) + +func TestPipeline_SecretSlice_Purge(t *testing.T) { + // setup types + secrets := testSecrets() + *secrets = (*secrets)[:len(*secrets)-1] + + // setup tests + tests := []struct { + secrets *SecretSlice + want *SecretSlice + }{ + { + secrets: testSecrets(), + want: secrets, + }, + { + secrets: new(SecretSlice), + want: new(SecretSlice), + }, + } + + // run tests + for _, test := range tests { + r := &RuleData{ + Branch: "main", + Event: "push", + Path: []string{}, + Repo: "foo/bar", + Tag: "refs/heads/main", + } + + got, _ := test.secrets.Purge(r) + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("Purge is %v, want %v", got, test.want) + } + } +} + +func TestPipeline_Secret_ParseOrg_success(t *testing.T) { + // setup tests + tests := []struct { + secret *Secret + org string + }{ + { // success with good data + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/foo", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + }, + { // success with multilevel & special characters + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/👋/🧪/🔑", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + }, + } + + // run tests + for _, test := range tests { + org, key, err := test.secret.ParseOrg(test.org) + if err != nil { + t.Errorf("ParseOrg had an error occur: %+v", err) + } + + p := strings.SplitN(test.secret.Key, "/", 2) + + if 
!strings.EqualFold(org, p[0]) { + t.Errorf("org is %s want %s", org, p[0]) + } + + if !strings.EqualFold(key, p[1]) { + t.Errorf("key is %s want %s", key, p[1]) + } + } +} + +func TestPipeline_Secret_ParseOrg_failure(t *testing.T) { + // setup tests + tests := []struct { + secret *Secret + org string + wantErr error + }{ + { // failure with bad org + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/foo", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + org: "wrongorg", + wantErr: ErrInvalidOrg, + }, + { // failure with bad key + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidPath, + }, + { // failure with bad key + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidPath, + }, + { // failure with missing name + secret: &Secret{ + Value: "bar", + Key: "octocat/foo/bar", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidName, + }, + { // failure with bad name + secret: &Secret{ + Name: "This is a null char \u0000", + Value: "bar", + Key: "octocat/foo/bar", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidName, + }, + { // failure with bad engine + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/foo", + Engine: "invalid", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidEngine, + }, + } + + // run tests + for _, test := range tests { + _, _, err := test.secret.ParseOrg(test.org) + if test.wantErr != nil && err != nil && !errors.Is(err, test.wantErr) { + t.Errorf("ParseOrg should have failed with error '%s' but got '%s'", test.wantErr, err) + } + + if err == nil { + t.Errorf("ParseOrg should have failed") + } + } +} + +func 
TestPipeline_Secret_ParseRepo_success(t *testing.T) { + // setup tests + tests := []struct { + secret *Secret + org string + repo string + }{ + { // success with explicit + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/helloworld/foo", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + org: "octocat", + repo: "helloworld", + }, + { // success with multilevel & special characters + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/👋/🧪/🔑", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + org: "octocat", + repo: "👋", + }, + } + + // run tests + for _, test := range tests { + org, repo, key, err := test.secret.ParseRepo(test.org, test.repo) + if err != nil { + t.Errorf("ParseRepo had an error occur: %+v", err) + } + + // checks for explicit only + if strings.Contains(test.secret.Key, "/") { + p := strings.SplitN(test.secret.Key, "/", 3) + + if !strings.EqualFold(org, p[0]) { + t.Errorf("org is %s want %s", org, p[0]) + } + + if !strings.EqualFold(repo, p[1]) { + t.Errorf("repo is %s want %s", key, p[1]) + } + + if !strings.EqualFold(key, p[2]) { + t.Errorf("key is %s want %s", key, p[2]) + } + } + } +} + +func TestPipeline_Secret_ParseRepo_failure(t *testing.T) { + // setup tests + tests := []struct { + secret *Secret + org string + repo string + wantErr error + }{ + { // failure with bad org + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/helloworld/foo", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + org: "wrongorg", + repo: "helloworld", + wantErr: ErrInvalidOrg, + }, + { // failure with bad repo + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/helloworld/foo", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + org: "octocat", + repo: "badrepo", + wantErr: ErrInvalidRepo, + }, + { // failure with bad key + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + 
org: "octocat", + wantErr: ErrInvalidPath, + }, + { // failure with bad key + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/helloworld", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + repo: "helloworld", + org: "octocat", + wantErr: ErrInvalidPath, + }, + { // failure with bad key + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/helloworld/", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + repo: "helloworld", + org: "octocat", + wantErr: ErrInvalidPath, + }, + { // failure with missing name + secret: &Secret{ + Value: "bar", + Key: "octocat/helloworld/foo/bar", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + org: "octocat", + repo: "helloworld", + wantErr: ErrInvalidName, + }, + { // failure with bad name + secret: &Secret{ + Name: "SOME=PASSWORD", + Value: "bar", + Key: "octocat/helloworld/foo/bar", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + org: "octocat", + repo: "helloworld", + wantErr: ErrInvalidName, + }, + { // failure with bad engine + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat", + Engine: "invalid", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidEngine, + }, + { // failure with deprecated implicit syntax + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "foo", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + org: "octocat", + repo: "helloworld", + wantErr: ErrInvalidPath, + }, + } + + // run tests + for _, test := range tests { + _, _, _, err := test.secret.ParseRepo(test.org, test.repo) + if test.wantErr != nil && err != nil && !errors.Is(err, test.wantErr) { + t.Errorf("ParseRepo should have failed with error '%s' but got '%s'", test.wantErr, err) + } + + if err == nil { + t.Errorf("ParseRepo should have failed") + } + } +} + +func TestPipeline_Secret_ParseShared_success(t *testing.T) { + // setup tests + tests := []struct { + secret *Secret + org string + }{ + { // 
success with good data + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/helloworld/foo", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + org: "octocat", + }, + { // success with multilevel & special characters + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/👋/🧪/🔑", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + org: "octocat", + }, + } + + // run tests + for _, test := range tests { + org, team, key, err := test.secret.ParseShared() + if err != nil { + t.Errorf("ParseShared had an error occur: %+v", err) + } + + p := strings.SplitN(test.secret.Key, "/", 3) + + if !strings.EqualFold(org, p[0]) { + t.Errorf("org is %s want %s", org, p[0]) + } + + if !strings.EqualFold(team, p[1]) { + t.Errorf("repo is %s want %s", key, p[1]) + } + + if !strings.EqualFold(key, p[2]) { + t.Errorf("key is %s want %s", key, p[2]) + } + } +} + +func TestPipeline_Secret_ParseShared_failure(t *testing.T) { + // setup tests + tests := []struct { + secret *Secret + org string + wantErr error + }{ + { // failure with bad key + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidPath, + }, + { // failure with bad engine + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat", + Engine: "invalid", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidEngine, + }, + { // failure with bad path + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/foo", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidPath, + }, + { // failure with bad path + secret: &Secret{ + Name: "foo", + Value: "bar", + Key: "octocat/foo/", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidPath, + }, + { // failure with missing name + secret: &Secret{ + Value: "bar", + Key: 
"octocat/foo/bar", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidName, + }, + { // failure with bad name + secret: &Secret{ + Name: "=", + Value: "bar", + Key: "octocat/foo/bar", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + org: "octocat", + wantErr: ErrInvalidName, + }, + } + + // run tests + for _, test := range tests { + _, _, _, err := test.secret.ParseShared() + if test.wantErr != nil && err != nil && !errors.Is(err, test.wantErr) { + t.Errorf("ParseShared should have failed with error '%s' but got '%s'", test.wantErr, err) + } + + if err == nil { + t.Errorf("ParseShared should have failed") + } + } +} + +func testSecrets() *SecretSlice { + return &SecretSlice{ + { + Engine: "native", + Key: "github/octocat/foobar", + Name: "foobar", + Type: "repo", + Origin: &Container{}, + Pull: "build_start", + }, + { + Engine: "native", + Key: "github/foobar", + Name: "foobar", + Type: "org", + Origin: &Container{}, + Pull: "build_start", + }, + { + Engine: "native", + Key: "github/octokitties/foobar", + Name: "foobar", + Type: "shared", + Origin: &Container{}, + Pull: "build_start", + }, + { + Name: "", + Origin: &Container{ + ID: "secret_github octocat._1_vault", + Directory: "/vela/src/foo//", + Environment: map[string]string{"FOO": "bar"}, + Image: "vault:latest", + Name: "vault", + Number: 1, + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push"}}, + Operator: "and", + }, + }, + }, + { + Origin: &Container{ + ID: "secret_github octocat._2_vault", + Directory: "/vela/src/foo//", + Environment: map[string]string{"FOO": "bar"}, + Image: "vault:latest", + Name: "vault", + Number: 2, + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"pull_request"}}, + Operator: "and", + }, + }, + }, + } +} diff --git a/compiler/types/pipeline/stage.go b/compiler/types/pipeline/stage.go new file mode 100644 index 000000000..2325f340f --- /dev/null +++ 
b/compiler/types/pipeline/stage.go @@ -0,0 +1,162 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "fmt" + + "github.com/go-vela/types/constants" +) + +type ( + // StageSlice is the pipeline representation + // of the stages block for a pipeline. + // + // swagger:model PipelineStageSlice + StageSlice []*Stage + + // Stage is the pipeline representation + // of a stage in a pipeline. + // + // swagger:model PipelineStage + Stage struct { + Done chan error `json:"-" yaml:"-"` + Environment map[string]string `json:"environment,omitempty" yaml:"environment,omitempty"` + Name string `json:"name,omitempty" yaml:"name,omitempty"` + Needs []string `json:"needs,omitempty" yaml:"needs,omitempty"` + Independent bool `json:"independent,omitempty" yaml:"independent,omitempty"` + Steps ContainerSlice `json:"steps,omitempty" yaml:"steps,omitempty"` + } +) + +// Purge removes the steps, from the stages, that have +// a ruleset that do not match the provided ruledata. +// If all steps from a stage are removed, then the +// entire stage is removed from the pipeline. 
+func (s *StageSlice) Purge(r *RuleData) (*StageSlice, error) { + counter := 1 + stages := new(StageSlice) + + // iterate through each stage for the pipeline + for _, stage := range *s { + containers := new(ContainerSlice) + + // iterate through each step for the stage in the pipeline + for _, step := range stage.Steps { + match, err := step.Ruleset.Match(r) + if err != nil { + return nil, fmt.Errorf("unable to process ruleset for step %s: %w", step.Name, err) + } + + // verify ruleset matches + if match { + // overwrite the step number with the step counter + step.Number = counter + + // increment step counter + counter = counter + 1 + + // append the step to the new slice of containers + *containers = append(*containers, step) + } + } + + // no steps for the stage so we continue processing to the next stage + if len(*containers) == 0 { + continue + } + + // overwrite the steps for the stage with the new slice of steps + stage.Steps = *containers + + // append the stage to the new slice of stages + *stages = append(*stages, stage) + } + + // return the new slice of stages + return stages, nil +} + +// Sanitize cleans the fields for every step in each stage so they +// can be safely executed on the worker. The fields are sanitized +// based off of the provided runtime driver which is setup on every +// worker. Currently, this function supports the following runtimes: +// +// - Docker +// - Kubernetes +func (s *StageSlice) Sanitize(driver string) *StageSlice { + stages := new(StageSlice) + + switch driver { + // sanitize container for Docker + case constants.DriverDocker: + for _, stage := range *s { + stage.Steps.Sanitize(driver) + + *stages = append(*stages, stage) + } + + return stages + // sanitize container for Kubernetes + case constants.DriverKubernetes: + for _, stage := range *s { + stage.Steps.Sanitize(driver) + + *stages = append(*stages, stage) + } + + return stages + // unrecognized driver + default: + // log here? 
+ return nil + } +} + +// Empty returns true if the provided stage is empty. +func (s *Stage) Empty() bool { + // return true if the stage is nil + if s == nil { + return true + } + + // return true if every stage field is empty + if len(s.Name) == 0 && + len(s.Needs) == 0 && + len(s.Steps) == 0 && + len(s.Environment) == 0 { + return true + } + + // return false if any of the stage fields are not empty + return false +} + +// MergeEnv takes a list of environment variables and attempts +// to set them in the stage environment. If the environment +// variable already exists in the stage, then this will +// overwrite the existing environment variable. +func (s *Stage) MergeEnv(environment map[string]string) error { + // check if the stage is empty + if s.Empty() { + // TODO: evaluate if we should error here + // + // immediately return and do nothing + // + // treated as a no-op + return nil + } + + // check if the environment provided is empty + if environment == nil { + return fmt.Errorf("empty environment provided for stage %s", s.Name) + } + + // iterate through all environment variables provided + for key, value := range environment { + // set or update the stage environment variable + s.Environment[key] = value + } + + return nil +} diff --git a/compiler/types/pipeline/stage_test.go b/compiler/types/pipeline/stage_test.go new file mode 100644 index 000000000..3ae29b10f --- /dev/null +++ b/compiler/types/pipeline/stage_test.go @@ -0,0 +1,213 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import ( + "reflect" + "testing" + + "github.com/go-vela/types/constants" +) + +func TestPipeline_StageSlice_Purge(t *testing.T) { + // setup types + stages := testStages() + *stages = (*stages)[:len(*stages)-1] + + // setup tests + tests := []struct { + stages *StageSlice + want *StageSlice + }{ + { + stages: testStages(), + want: stages, + }, + { + stages: new(StageSlice), + want: new(StageSlice), + }, + } + + // run tests + for _, test := range tests { + r 
:= &RuleData{ + Branch: "main", + Event: "pull_request", + Path: []string{}, + Repo: "foo/bar", + Tag: "refs/heads/main", + } + + got, _ := test.stages.Purge(r) + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("Purge is %v, want %v", got, test.want) + } + } +} + +func TestPipeline_StageSlice_Sanitize(t *testing.T) { + // setup types + stages := testStages() + (*stages)[0].Steps[0].ID = "github-octocat._1_init_init" + (*stages)[1].Steps[0].ID = "github-octocat._1_clone_clone" + (*stages)[2].Steps[0].ID = "github-octocat._1_echo_echo" + + kubeStages := testStages() + (*kubeStages)[0].Steps[0].ID = "github-octocat--1-init-init" + (*kubeStages)[1].Steps[0].ID = "github-octocat--1-clone-clone" + (*kubeStages)[2].Steps[0].ID = "github-octocat--1-echo-echo" + + // setup tests + tests := []struct { + driver string + stages *StageSlice + want *StageSlice + }{ + { + driver: constants.DriverDocker, + stages: testStages(), + want: stages, + }, + { + driver: constants.DriverKubernetes, + stages: testStages(), + want: kubeStages, + }, + { + driver: constants.DriverDocker, + stages: new(StageSlice), + want: new(StageSlice), + }, + { + driver: constants.DriverKubernetes, + stages: new(StageSlice), + want: new(StageSlice), + }, + { + driver: "foo", + stages: new(StageSlice), + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := test.stages.Sanitize(test.driver) + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("Sanitize is %v, want %v", got, test.want) + } + } +} + +func TestPipeline_Stage_MergeEnv(t *testing.T) { + // setup tests + tests := []struct { + stage *Stage + environment map[string]string + failure bool + }{ + { + stage: &Stage{ + Name: "testStage", + Environment: map[string]string{"FOO": "bar"}, + }, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + stage: &Stage{}, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + stage: nil, + environment: map[string]string{"BAR": "baz"}, + 
failure: false, + }, + { + stage: &Stage{ + Environment: map[string]string{"FOO": "bar"}, + Name: "testStage", + }, + environment: nil, + failure: true, + }, + } + + // run tests + for _, test := range tests { + err := test.stage.MergeEnv(test.environment) + + if test.failure { + if err == nil { + t.Errorf("MergeEnv should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("MergeEnv returned err: %v", err) + } + } +} + +func testStages() *StageSlice { + return &StageSlice{ + { + Name: "init", + Environment: map[string]string{"FOO": "bar"}, + Steps: ContainerSlice{ + { + ID: "github octocat._1_init_init", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "#init", + Name: "init", + Number: 1, + Pull: "always", + }, + }, + }, + { + Name: "clone", + Needs: []string{"init"}, + Environment: map[string]string{"FOO": "bar"}, + Steps: ContainerSlice{ + { + ID: "github octocat._1_clone_clone", + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "target/vela-git:v0.3.0", + Name: "clone", + Number: 2, + Pull: "always", + }, + }, + }, + { + Name: "echo", + Needs: []string{"clone"}, + Environment: map[string]string{"FOO": "bar"}, + Steps: ContainerSlice{ + { + ID: "github octocat._1_echo_echo", + Commands: []string{"echo hello"}, + Directory: "/home/github/octocat", + Environment: map[string]string{"FOO": "bar"}, + Image: "alpine:latest", + Name: "echo", + Number: 3, + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push"}}, + Operator: "and", + }, + }, + }, + }, + } +} diff --git a/compiler/types/pipeline/ulimit.go b/compiler/types/pipeline/ulimit.go new file mode 100644 index 000000000..a32b6c90a --- /dev/null +++ b/compiler/types/pipeline/ulimit.go @@ -0,0 +1,21 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +type ( + // UlimitSlice is the pipeline representation of + // the ulimits block for a step in a pipeline. 
+ // + // swagger:model PipelineUlimitSlice + UlimitSlice []*Ulimit + + // Ulimit is the pipeline representation of a ulimit + // from the ulimits block for a step in a pipeline. + // + // swagger:model PipelineUlimit + Ulimit struct { + Name string `json:"name,omitempty" yaml:"name,omitempty"` + Soft int64 `json:"soft,omitempty" yaml:"soft,omitempty"` + Hard int64 `json:"hard,omitempty" yaml:"hard,omitempty"` + } +) diff --git a/compiler/types/pipeline/volume.go b/compiler/types/pipeline/volume.go new file mode 100644 index 000000000..c99342f5e --- /dev/null +++ b/compiler/types/pipeline/volume.go @@ -0,0 +1,21 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +type ( + // VolumeSlice is the pipeline representation of + // the volumes block for a step in a pipeline. + // + // swagger:model PipelineVolumeSlice + VolumeSlice []*Volume + + // Volume is the pipeline representation of a volume + // from a volumes block for a step in a pipeline. + // + // swagger:model PipelineVolume + Volume struct { + Source string `json:"source,omitempty" yaml:"source,omitempty"` + Destination string `json:"destination,omitempty" yaml:"destination,omitempty"` + AccessMode string `json:"access_mode,omitempty" yaml:"access_mode,omitempty"` + } +) diff --git a/compiler/types/pipeline/worker.go b/compiler/types/pipeline/worker.go new file mode 100644 index 000000000..1c667bfc6 --- /dev/null +++ b/compiler/types/pipeline/worker.go @@ -0,0 +1,23 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +// Worker is the yaml representation of the worker block for a pipeline. +// +// swagger:model PipelineWorker +type Worker struct { + Flavor string `json:"flavor,omitempty" yaml:"flavor,omitempty"` + Platform string `json:"platform,omitempty" yaml:"platform,omitempty"` +} + +// Empty returns true if the provided worker is empty. 
+func (w *Worker) Empty() bool { + // return true if every worker field is empty + if len(w.Flavor) == 0 && + len(w.Platform) == 0 { + return true + } + + // return false if any of the worker fields are provided + return false +} diff --git a/compiler/types/pipeline/worker_test.go b/compiler/types/pipeline/worker_test.go new file mode 100644 index 000000000..a026d4aa5 --- /dev/null +++ b/compiler/types/pipeline/worker_test.go @@ -0,0 +1,31 @@ +// SPDX-License-Identifier: Apache-2.0 + +package pipeline + +import "testing" + +func TestPipeline_Worker_Empty(t *testing.T) { + // setup tests + tests := []struct { + worker *Worker + want bool + }{ + { + worker: &Worker{Flavor: "foo"}, + want: false, + }, + { + worker: new(Worker), + want: true, + }, + } + + // run tests + for _, test := range tests { + got := test.worker.Empty() + + if got != test.want { + t.Errorf("Empty is %v, want %t", got, test.want) + } + } +} diff --git a/compiler/types/raw/doc.go b/compiler/types/raw/doc.go new file mode 100644 index 000000000..7f531532e --- /dev/null +++ b/compiler/types/raw/doc.go @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: Apache-2.0 + +// Package raw provides the defined raw types for Vela. +// +// Usage: +// +// import "github.com/go-vela/server/compiler/types/raw" +package raw diff --git a/compiler/types/raw/map.go b/compiler/types/raw/map.go new file mode 100644 index 000000000..ed8fff427 --- /dev/null +++ b/compiler/types/raw/map.go @@ -0,0 +1,140 @@ +// SPDX-License-Identifier: Apache-2.0 + +package raw + +import ( + "database/sql/driver" + "encoding/json" + "errors" + "strings" +) + +// StringSliceMap represents an array of strings or a map of strings. +type StringSliceMap map[string]string + +// Value returns the map in JSON format. +func (s StringSliceMap) Value() (driver.Value, error) { + value, err := json.Marshal(s) + if err != nil { + return nil, err + } + + return string(value), nil +} + +// Scan decodes the JSON string into map[string]string. 
+func (s *StringSliceMap) Scan(value interface{}) error { + b, ok := value.(string) + if !ok { + return errors.New("type assertion to string failed") + } + + return json.Unmarshal([]byte(b), &s) +} + +// UnmarshalJSON implements the Unmarshaler interface for the StringSlice type. +func (s *StringSliceMap) UnmarshalJSON(b []byte) error { + // return nil if no input is provided + if len(b) == 0 { + return nil + } + + // target map we want to return + targetMap := map[string]string{} + + // json slice we try unmarshalling to + jsonSlice := StringSlice{} + + // attempt to unmarshal as a string slice type + err := json.Unmarshal(b, &jsonSlice) + if err == nil { + // iterate through each element in the json slice + for _, v := range jsonSlice { + // split each slice element into key/value pairs + kvPair := strings.SplitN(v, "=", 2) + + if len(kvPair) != 2 { + return errors.New("unable to unmarshal into StringSliceMap") + } + + // append each key/value pair to our target map + targetMap[kvPair[0]] = kvPair[1] + } + + // overwrite existing StringSliceMap + *s = targetMap + + return nil + } + + // json map we try unmarshalling to + jsonMap := map[string]string{} + + // attempt to unmarshal as map of strings + err = json.Unmarshal(b, &jsonMap) + if err == nil { + // iterate through each item in the json map + for k, v := range jsonMap { + // append each key/value pair to our target map + targetMap[k] = v + } + + // overwrite existing StringSliceMap + *s = targetMap + + return nil + } + + return errors.New("unable to unmarshal into StringSliceMap") +} + +// UnmarshalYAML implements the Unmarshaler interface for the StringSliceMap type. 
+func (s *StringSliceMap) UnmarshalYAML(unmarshal func(interface{}) error) error { + // target map we want to return + targetMap := map[string]string{} + + // yaml slice we try unmarshalling to + yamlSlice := StringSlice{} + + // attempt to unmarshal as a string slice type + err := unmarshal(&yamlSlice) + if err == nil { + // iterate through each element in the yaml slice + for _, v := range yamlSlice { + // split each slice element into key/value pairs + kvPair := strings.SplitN(v, "=", 2) + + if len(kvPair) != 2 { + return errors.New("unable to unmarshal into StringSliceMap") + } + + // append each key/value pair to our target map + targetMap[kvPair[0]] = kvPair[1] + } + + // overwrite existing StringSliceMap + *s = targetMap + + return nil + } + + // yaml map we try unmarshalling to + yamlMap := map[string]string{} + + // attempt to unmarshal as map of strings + err = unmarshal(&yamlMap) + if err == nil { + // iterate through each item in the yaml map + for k, v := range yamlMap { + // append each key/value pair to our target map + targetMap[k] = v + } + + // overwrite existing StringSliceMap + *s = targetMap + + return nil + } + + return errors.New("unable to unmarshal into StringSliceMap") +} diff --git a/compiler/types/raw/map_test.go b/compiler/types/raw/map_test.go new file mode 100644 index 000000000..0494c8ed4 --- /dev/null +++ b/compiler/types/raw/map_test.go @@ -0,0 +1,197 @@ +// SPDX-License-Identifier: Apache-2.0 + +package raw + +import ( + "database/sql/driver" + "os" + "reflect" + "testing" + + "github.com/buildkite/yaml" +) + +func TestRaw_StringSliceMap_UnmarshalJSON(t *testing.T) { + // setup tests + tests := []struct { + failure bool + file string + want *StringSliceMap + }{ + { + failure: false, + file: "testdata/string_map.json", + want: &StringSliceMap{"foo": "bar"}, + }, + { + failure: false, + file: "testdata/slice_map.json", + want: &StringSliceMap{"foo": "bar"}, + }, + { + failure: false, + file: "testdata/map.json", + want: 
&StringSliceMap{"foo": "bar"}, + }, + { + failure: false, + file: "", + want: new(StringSliceMap), + }, + { + failure: true, + file: "testdata/invalid.json", + want: nil, + }, + { + failure: true, + file: "testdata/invalid_2.json", + want: nil, + }, + } + + // run tests + for _, test := range tests { + var ( + err error + + b = []byte{} + got = new(StringSliceMap) + ) + + if len(test.file) > 0 { + b, err = os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read %s file: %v", test.file, err) + } + } + + err = got.UnmarshalJSON(b) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalJSON should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalJSON returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalJSON is %v, want %v", got, test.want) + } + } +} + +func TestRaw_StringSliceMap_UnmarshalYAML(t *testing.T) { + // setup tests + tests := []struct { + failure bool + file string + want *StringSliceMap + }{ + { + failure: false, + file: "testdata/string_map.yml", + want: &StringSliceMap{"foo": "bar"}, + }, + { + failure: false, + file: "testdata/slice_map.yml", + want: &StringSliceMap{"foo": "bar"}, + }, + { + failure: false, + file: "testdata/map.yml", + want: &StringSliceMap{"foo": "bar"}, + }, + { + failure: true, + file: "testdata/invalid.yml", + want: nil, + }, + { + failure: true, + file: "testdata/invalid_2.yml", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(StringSliceMap) + + b, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read %s file: %v", test.file, err) + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} + +func 
TestStringSliceMap_Value(t *testing.T) { + tests := []struct { + name string + s StringSliceMap + want driver.Value + wantErr bool + }{ + {"valid", StringSliceMap{"foo": "test1"}, "{\"foo\":\"test1\"}", false}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.s.Value() + if (err != nil) != tt.wantErr { + t.Errorf("StringSliceMap.Value() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("StringSliceMap.Value() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestStringSliceMap_Scan(t *testing.T) { + type args struct { + value interface{} + } + + tests := []struct { + name string + s *StringSliceMap + args args + wantErr bool + }{ + {"valid", &StringSliceMap{"foo": "test1"}, args{value: "{\"foo\":\"test1\"}"}, false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := tt.s.Scan(tt.args.value); (err != nil) != tt.wantErr { + t.Errorf("StringSliceMap.Scan() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/compiler/types/raw/slice.go b/compiler/types/raw/slice.go new file mode 100644 index 000000000..11746363e --- /dev/null +++ b/compiler/types/raw/slice.go @@ -0,0 +1,73 @@ +// SPDX-License-Identifier: Apache-2.0 + +package raw + +import ( + "encoding/json" + "errors" +) + +// StringSlice represents a string or an array of strings. +type StringSlice []string + +// UnmarshalJSON implements the Unmarshaler interface for the StringSlice type. 
+func (s *StringSlice) UnmarshalJSON(b []byte) error { + // return nil if no input is provided + if len(b) == 0 { + return nil + } + + // json string we try unmarshalling to + jsonString := "" + + // attempt to unmarshal as a string type + err := json.Unmarshal(b, &jsonString) + if err == nil { + // overwrite existing StringSlice + *s = []string{jsonString} + + return nil + } + + // json slice we try unmarshalling to + jsonSlice := []string{} + + // attempt to unmarshal as a string slice type + err = json.Unmarshal(b, &jsonSlice) + if err == nil { + // overwrite existing StringSlice + *s = jsonSlice + + return nil + } + + return errors.New("unable to unmarshal into StringSlice") +} + +// UnmarshalYAML implements the Unmarshaler interface for the StringSlice type. +func (s *StringSlice) UnmarshalYAML(unmarshal func(interface{}) error) error { + // yaml string we try unmarshalling to + yamlString := "" + + // attempt to unmarshal as a string type + err := unmarshal(&yamlString) + if err == nil { + // overwrite existing StringSlice + *s = []string{yamlString} + + return nil + } + + yamlSlice := []string{} + + // attempt to unmarshal as a string slice type + err = unmarshal(&yamlSlice) + if err == nil { + // overwrite existing StringSlice + *s = yamlSlice + + return nil + } + + return errors.New("unable to unmarshal into StringSlice") +} diff --git a/compiler/types/raw/slice_test.go b/compiler/types/raw/slice_test.go new file mode 100644 index 000000000..7a32bd3d7 --- /dev/null +++ b/compiler/types/raw/slice_test.go @@ -0,0 +1,129 @@ +// SPDX-License-Identifier: Apache-2.0 + +package raw + +import ( + "os" + "reflect" + "testing" + + "github.com/buildkite/yaml" +) + +func TestRaw_StringSlice_UnmarshalJSON(t *testing.T) { + // setup tests + tests := []struct { + failure bool + file string + want *StringSlice + }{ + { + failure: false, + file: "testdata/string.json", + want: &StringSlice{"foo"}, + }, + { + failure: false, + file: "testdata/slice.json", + want: 
&StringSlice{"foo", "bar"}, + }, + { + failure: false, + file: "", + want: new(StringSlice), + }, + { + failure: true, + file: "testdata/invalid.json", + want: nil, + }, + } + + // run tests + for _, test := range tests { + var ( + err error + + b = []byte{} + got = new(StringSlice) + ) + + if len(test.file) > 0 { + b, err = os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read %s file: %v", test.file, err) + } + } + + err = got.UnmarshalJSON(b) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalJSON should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalJSON returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalJSON is %v, want %v", got, test.want) + } + } +} + +func TestRaw_StringSlice_UnmarshalYAML(t *testing.T) { + // setup tests + tests := []struct { + failure bool + file string + want *StringSlice + }{ + { + failure: false, + file: "testdata/string.yml", + want: &StringSlice{"foo"}, + }, + { + failure: false, + file: "testdata/slice.yml", + want: &StringSlice{"foo", "bar"}, + }, + { + failure: true, + file: "testdata/invalid.yml", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(StringSlice) + + b, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read %s file: %v", test.file, err) + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} diff --git a/compiler/types/raw/testdata/invalid.json b/compiler/types/raw/testdata/invalid.json new file mode 100644 index 000000000..f6a6c4bc8 --- /dev/null +++ b/compiler/types/raw/testdata/invalid.json @@ -0,0 +1,5 @@ +{ + "foo": { + "bar": "foobar" + } +} diff --git 
a/compiler/types/raw/testdata/invalid.yml b/compiler/types/raw/testdata/invalid.yml new file mode 100644 index 000000000..9d39d0b90 --- /dev/null +++ b/compiler/types/raw/testdata/invalid.yml @@ -0,0 +1,3 @@ +--- +foo: + bar: foobar diff --git a/compiler/types/raw/testdata/invalid_2.json b/compiler/types/raw/testdata/invalid_2.json new file mode 100644 index 000000000..7cb7dc52a --- /dev/null +++ b/compiler/types/raw/testdata/invalid_2.json @@ -0,0 +1 @@ +"foo: bar" diff --git a/compiler/types/raw/testdata/invalid_2.yml b/compiler/types/raw/testdata/invalid_2.yml new file mode 100644 index 000000000..af9d98dc1 --- /dev/null +++ b/compiler/types/raw/testdata/invalid_2.yml @@ -0,0 +1,2 @@ +--- +'foo: bar' diff --git a/compiler/types/raw/testdata/map.json b/compiler/types/raw/testdata/map.json new file mode 100644 index 000000000..c8c4105eb --- /dev/null +++ b/compiler/types/raw/testdata/map.json @@ -0,0 +1,3 @@ +{ + "foo": "bar" +} diff --git a/compiler/types/raw/testdata/map.yml b/compiler/types/raw/testdata/map.yml new file mode 100644 index 000000000..23809fe06 --- /dev/null +++ b/compiler/types/raw/testdata/map.yml @@ -0,0 +1,2 @@ +--- +foo: bar diff --git a/compiler/types/raw/testdata/slice.json b/compiler/types/raw/testdata/slice.json new file mode 100644 index 000000000..191f94f53 --- /dev/null +++ b/compiler/types/raw/testdata/slice.json @@ -0,0 +1 @@ +[ "foo", "bar" ] diff --git a/compiler/types/raw/testdata/slice.yml b/compiler/types/raw/testdata/slice.yml new file mode 100644 index 000000000..ffccd736f --- /dev/null +++ b/compiler/types/raw/testdata/slice.yml @@ -0,0 +1,2 @@ +--- +[ foo, bar ] diff --git a/compiler/types/raw/testdata/slice_map.json b/compiler/types/raw/testdata/slice_map.json new file mode 100644 index 000000000..215aeb76a --- /dev/null +++ b/compiler/types/raw/testdata/slice_map.json @@ -0,0 +1 @@ +[ "foo=bar" ] diff --git a/compiler/types/raw/testdata/slice_map.yml b/compiler/types/raw/testdata/slice_map.yml new file mode 100644 index 
000000000..63f68f1c4 --- /dev/null +++ b/compiler/types/raw/testdata/slice_map.yml @@ -0,0 +1,2 @@ +--- +[ foo=bar ] diff --git a/compiler/types/raw/testdata/string.json b/compiler/types/raw/testdata/string.json new file mode 100644 index 000000000..810c96eee --- /dev/null +++ b/compiler/types/raw/testdata/string.json @@ -0,0 +1 @@ +"foo" diff --git a/compiler/types/raw/testdata/string.yml b/compiler/types/raw/testdata/string.yml new file mode 100644 index 000000000..f690dad50 --- /dev/null +++ b/compiler/types/raw/testdata/string.yml @@ -0,0 +1,2 @@ +--- +foo diff --git a/compiler/types/raw/testdata/string_map.json b/compiler/types/raw/testdata/string_map.json new file mode 100644 index 000000000..67dd41c47 --- /dev/null +++ b/compiler/types/raw/testdata/string_map.json @@ -0,0 +1 @@ +"foo=bar" diff --git a/compiler/types/raw/testdata/string_map.yml b/compiler/types/raw/testdata/string_map.yml new file mode 100644 index 000000000..6ac9c9add --- /dev/null +++ b/compiler/types/raw/testdata/string_map.yml @@ -0,0 +1,2 @@ +--- +foo=bar diff --git a/compiler/types/yaml/build.go b/compiler/types/yaml/build.go new file mode 100644 index 000000000..b2d918945 --- /dev/null +++ b/compiler/types/yaml/build.go @@ -0,0 +1,101 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/raw" +) + +// Build is the yaml representation of a build for a pipeline. 
+type Build struct { + Version string `yaml:"version,omitempty" json:"version,omitempty" jsonschema:"required,minLength=1,description=Provide syntax version used to evaluate the pipeline.\nReference: https://go-vela.github.io/docs/reference/yaml/version/"` + Metadata Metadata `yaml:"metadata,omitempty" json:"metadata,omitempty" jsonschema:"description=Pass extra information.\nReference: https://go-vela.github.io/docs/reference/yaml/metadata/"` + Environment raw.StringSliceMap `yaml:"environment,omitempty" json:"environment,omitempty" jsonschema:"description=Provide global environment variables injected into the container environment.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-environment-key"` + Worker Worker `yaml:"worker,omitempty" json:"worker,omitempty" jsonschema:"description=Limit the pipeline to certain types of workers.\nReference: https://go-vela.github.io/docs/reference/yaml/worker/"` + Secrets SecretSlice `yaml:"secrets,omitempty" json:"secrets,omitempty" jsonschema:"description=Provide sensitive information.\nReference: https://go-vela.github.io/docs/reference/yaml/secrets/"` + Services ServiceSlice `yaml:"services,omitempty" json:"services,omitempty" jsonschema:"description=Provide detached (headless) execution instructions.\nReference: https://go-vela.github.io/docs/reference/yaml/services/"` + Stages StageSlice `yaml:"stages,omitempty" json:"stages,omitempty" jsonschema:"oneof_required=stages,description=Provide parallel execution instructions.\nReference: https://go-vela.github.io/docs/reference/yaml/stages/"` + Steps StepSlice `yaml:"steps,omitempty" json:"steps,omitempty" jsonschema:"oneof_required=steps,description=Provide sequential execution instructions.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/"` + Templates TemplateSlice `yaml:"templates,omitempty" json:"templates,omitempty" jsonschema:"description=Provide the name of templates to expand.\nReference: 
https://go-vela.github.io/docs/reference/yaml/templates/"` +} + +// ToPipelineAPI converts the Build type to an API Pipeline type. +func (b *Build) ToPipelineAPI() *api.Pipeline { + pipeline := new(api.Pipeline) + + pipeline.SetFlavor(b.Worker.Flavor) + pipeline.SetPlatform(b.Worker.Platform) + pipeline.SetVersion(b.Version) + pipeline.SetServices(len(b.Services) > 0) + pipeline.SetStages(len(b.Stages) > 0) + pipeline.SetSteps(len(b.Steps) > 0) + pipeline.SetTemplates(len(b.Templates) > 0) + + // set default for external and internal secrets + external := false + internal := false + + // iterate through all secrets in the build + for _, secret := range b.Secrets { + // check if external and internal secrets have been found + if external && internal { + // exit the loop since both secrets have been found + break + } + + // check if the secret origin is empty + if secret.Origin.Empty() { + // origin was empty so an internal secret was found + internal = true + } else { + // origin was not empty so an external secret was found + external = true + } + } + + pipeline.SetExternalSecrets(external) + pipeline.SetInternalSecrets(internal) + + return pipeline +} + +// UnmarshalYAML implements the Unmarshaler interface for the Build type. 
+func (b *Build) UnmarshalYAML(unmarshal func(interface{}) error) error { + // build we try unmarshalling to + build := new(struct { + Version string + Metadata Metadata + Environment raw.StringSliceMap + Worker Worker + Secrets SecretSlice + Services ServiceSlice + Stages StageSlice + Steps StepSlice + Templates TemplateSlice + }) + + // attempt to unmarshal as a build type + err := unmarshal(build) + if err != nil { + return err + } + + // give the documented default value to metadata environment + if build.Metadata.Environment == nil { + build.Metadata.Environment = []string{"steps", "services", "secrets"} + } + + // override the values + b.Version = build.Version + b.Metadata = build.Metadata + b.Environment = build.Environment + b.Worker = build.Worker + b.Secrets = build.Secrets + b.Services = build.Services + b.Stages = build.Stages + b.Steps = build.Steps + b.Templates = build.Templates + + return nil +} diff --git a/compiler/types/yaml/build_test.go b/compiler/types/yaml/build_test.go new file mode 100644 index 000000000..f4095d48d --- /dev/null +++ b/compiler/types/yaml/build_test.go @@ -0,0 +1,686 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "os" + "reflect" + "testing" + + "github.com/buildkite/yaml" + + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/raw" +) + +func TestYaml_Build_ToLibrary(t *testing.T) { + build := new(api.Pipeline) + build.SetFlavor("16cpu8gb") + build.SetPlatform("gcp") + build.SetVersion("1") + build.SetExternalSecrets(true) + build.SetInternalSecrets(true) + build.SetServices(true) + build.SetStages(false) + build.SetSteps(true) + build.SetTemplates(true) + + stages := new(api.Pipeline) + stages.SetFlavor("") + stages.SetPlatform("") + stages.SetVersion("1") + stages.SetExternalSecrets(false) + stages.SetInternalSecrets(false) + stages.SetServices(false) + stages.SetStages(true) + stages.SetSteps(false) + stages.SetTemplates(false) + + steps := new(api.Pipeline) + 
steps.SetFlavor("") + steps.SetPlatform("") + steps.SetVersion("1") + steps.SetExternalSecrets(false) + steps.SetInternalSecrets(false) + steps.SetServices(false) + steps.SetStages(false) + steps.SetSteps(true) + steps.SetTemplates(false) + + // setup tests + tests := []struct { + name string + file string + want *api.Pipeline + }{ + { + name: "build", + file: "testdata/build.yml", + want: build, + }, + { + name: "stages", + file: "testdata/build_anchor_stage.yml", + want: stages, + }, + { + name: "steps", + file: "testdata/build_anchor_step.yml", + want: steps, + }, + } + + // run tests + for _, test := range tests { + b := new(Build) + + data, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file %s for %s: %v", test.file, test.name, err) + } + + err = yaml.Unmarshal(data, b) + if err != nil { + t.Errorf("unable to unmarshal YAML for %s: %v", test.name, err) + } + + got := b.ToPipelineAPI() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipelineLibrary for %s is %v, want %v", test.name, got, test.want) + } + } +} + +func TestYaml_Build_UnmarshalYAML(t *testing.T) { + // setup tests + tests := []struct { + file string + want *Build + }{ + { + file: "testdata/build.yml", + want: &Build{ + Version: "1", + Metadata: Metadata{ + Template: false, + Clone: nil, + Environment: []string{"steps", "services", "secrets"}, + }, + Environment: raw.StringSliceMap{ + "HELLO": "Hello, Global Message", + }, + Worker: Worker{ + Flavor: "16cpu8gb", + Platform: "gcp", + }, + Services: ServiceSlice{ + { + Ports: []string{"5432:5432"}, + Environment: raw.StringSliceMap{ + "POSTGRES_DB": "foo", + }, + Name: "postgres", + Image: "postgres:latest", + Pull: "not_present", + }, + }, + Steps: StepSlice{ + { + Commands: raw.StringSlice{"./gradlew downloadDependencies"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Image: 
"openjdk:latest", + Name: "install", + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push", "pull_request:opened", "pull_request:synchronize", "pull_request:edited"}}, + Matcher: "filepath", + Operator: "and", + }, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + }, + { + Commands: raw.StringSlice{"./gradlew check"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "test", + Image: "openjdk:latest", + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push", "pull_request:opened", "pull_request:synchronize", "pull_request:reopened"}}, + Matcher: "filepath", + Operator: "and", + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + }, + { + Commands: raw.StringSlice{"./gradlew build"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "build", + Image: "openjdk:latest", + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push", "pull_request:opened", "pull_request:synchronize", "pull_request:reopened"}}, + Matcher: "filepath", + Operator: "and", + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + }, + { + Name: "docker_build", + Parameters: map[string]interface{}{ + "dry_run": true, + "registry": "index.docker.io", + "repo": "github/octocat", + "tags": []interface{}{"latest", "dev"}, + }, + Image: "plugins/docker:18.09", + Pull: "always", + Ruleset: Ruleset{ + If: 
Rules{Event: []string{"push", "pull_request:opened", "pull_request:synchronize", "pull_request:reopened"}}, + Matcher: "filepath", + Operator: "and", + }, + }, + { + Name: "docker_publish", + Parameters: map[string]interface{}{ + "registry": "index.docker.io", + "repo": "github/octocat", + "tags": []interface{}{"latest", "dev"}, + }, + Image: "plugins/docker:18.09", + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Branch: []string{"main"}, Event: []string{"push"}}, + Matcher: "filepath", + Operator: "and", + }, + Secrets: StepSecretSlice{ + { + Source: "docker_username", + Target: "PLUGIN_USERNAME", + }, + { + Source: "docker_password", + Target: "PLUGIN_PASSWORD", + }, + }, + }, + }, + Secrets: SecretSlice{ + { + Name: "docker_username", + Key: "org/repo/docker/username", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + { + Name: "docker_password", + Key: "org/repo/docker/password", + Engine: "vault", + Type: "repo", + Pull: "build_start", + }, + { + Name: "docker_username", + Key: "org/docker/username", + Engine: "native", + Type: "org", + Pull: "build_start", + }, + { + Name: "docker_password", + Key: "org/docker/password", + Engine: "vault", + Type: "org", + Pull: "build_start", + }, + { + Name: "docker_username", + Key: "org/team/docker/username", + Engine: "native", + Type: "shared", + Pull: "build_start", + }, + { + Name: "docker_password", + Key: "org/team/docker/password", + Engine: "vault", + Type: "shared", + Pull: "build_start", + }, + { + Origin: Origin{ + Image: "target/vela-vault:latest", + Parameters: map[string]interface{}{ + "addr": "vault.example.com", + }, + Pull: "always", + Secrets: StepSecretSlice{ + { + Source: "docker_username", + Target: "DOCKER_USERNAME", + }, + { + Source: "docker_password", + Target: "DOCKER_PASSWORD", + }, + }, + }, + }, + }, + Templates: TemplateSlice{ + { + Name: "docker_publish", + Source: "github.com/go-vela/atlas/stable/docker_publish", + Type: "github", + }, + }, + }, + }, + { + file: 
"testdata/build_anchor_stage.yml", + want: &Build{ + Version: "1", + Metadata: Metadata{ + Template: false, + Clone: nil, + Environment: []string{"steps", "services", "secrets"}, + }, + Stages: StageSlice{ + { + Name: "dependencies", + Needs: []string{"clone"}, + Independent: false, + Steps: StepSlice{ + { + Commands: raw.StringSlice{"./gradlew downloadDependencies"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Image: "openjdk:latest", + Name: "install", + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push", "pull_request:opened", "pull_request:synchronize", "pull_request:reopened"}}, + Matcher: "filepath", + Operator: "and", + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + }, + }, + }, + { + Name: "test", + Needs: []string{"dependencies", "clone"}, + Independent: false, + Steps: StepSlice{ + { + Commands: raw.StringSlice{"./gradlew check"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "test", + Image: "openjdk:latest", + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push", "pull_request:opened", "pull_request:synchronize", "pull_request:reopened"}}, + Matcher: "filepath", + Operator: "and", + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + }, + }, + }, + { + Name: "build", + Needs: []string{"dependencies", "clone"}, + Independent: true, + Steps: StepSlice{ + { + Commands: raw.StringSlice{"./gradlew build"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false 
-Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "build", + Image: "openjdk:latest", + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push", "pull_request:opened", "pull_request:synchronize", "pull_request:reopened"}}, + Matcher: "filepath", + Operator: "and", + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + }, + }, + }, + }, + }, + }, + { + file: "testdata/build_anchor_step.yml", + want: &Build{ + Version: "1", + Metadata: Metadata{ + Template: false, + Clone: nil, + Environment: []string{"steps", "services", "secrets"}, + }, + Steps: StepSlice{ + { + Commands: raw.StringSlice{"./gradlew downloadDependencies"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Image: "openjdk:latest", + Name: "install", + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push", "pull_request:opened", "pull_request:synchronize", "pull_request:reopened"}}, + Matcher: "filepath", + Operator: "and", + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + }, + { + Commands: raw.StringSlice{"./gradlew check"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "test", + Image: "openjdk:latest", + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push", "pull_request:opened", "pull_request:synchronize", "pull_request:reopened"}}, + Matcher: "filepath", + Operator: "and", + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + 
}, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + }, + { + Commands: raw.StringSlice{"./gradlew build"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "build", + Image: "openjdk:latest", + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push", "pull_request:opened", "pull_request:synchronize", "pull_request:reopened"}}, + Matcher: "filepath", + Operator: "and", + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + }, + }, + }, + }, + { + file: "testdata/build_empty_env.yml", + want: &Build{ + Version: "1", + Metadata: Metadata{ + Template: false, + Clone: nil, + Environment: []string{}, + }, + Environment: raw.StringSliceMap{ + "HELLO": "Hello, Global Message", + }, + Worker: Worker{ + Flavor: "16cpu8gb", + Platform: "gcp"}, + Steps: StepSlice{ + { + Commands: raw.StringSlice{"./gradlew downloadDependencies"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Image: "openjdk:latest", + Name: "install", + Pull: "always", + Ruleset: Ruleset{ + If: Rules{Event: []string{"push", "pull_request:opened", "pull_request:synchronize", "pull_request:reopened"}}, + Matcher: "filepath", + Operator: "and", + }, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + }, + }, + }, + }, + { + file: "testdata/merge_anchor.yml", + want: &Build{ + Version: "1", + Metadata: Metadata{ + Template: false, + Clone: nil, + Environment: []string{"steps", "services", "secrets"}, + }, + Services: ServiceSlice{ + { + Name: 
"service-a", + Ports: []string{"5432:5432"}, + Environment: raw.StringSliceMap{ + "REGION": "dev", + }, + Image: "postgres", + Pull: "not_present", + }, + }, + Steps: StepSlice{ + { + Commands: raw.StringSlice{"echo alpha"}, + Name: "alpha", + Image: "alpine:latest", + Pull: "not_present", + Ruleset: Ruleset{ + If: Rules{ + Event: []string{"push"}, + }, + Matcher: "filepath", + Operator: "and", + }, + }, + { + Commands: raw.StringSlice{"echo beta"}, + Name: "beta", + Image: "alpine:latest", + Pull: "not_present", + Ruleset: Ruleset{ + If: Rules{ + Event: []string{"push"}, + }, + Matcher: "filepath", + Operator: "and", + }, + }, + { + Commands: raw.StringSlice{"echo gamma"}, + Name: "gamma", + Image: "alpine:latest", + Pull: "not_present", + Environment: raw.StringSliceMap{ + "REGION": "dev", + }, + Ruleset: Ruleset{ + If: Rules{ + Event: []string{"push"}, + }, + Matcher: "filepath", + Operator: "and", + }, + }, + }, + }, + }, + } + + // run tests + for _, test := range tests { + got := new(Build) + + b, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("Reading file for UnmarshalYAML returned err: %v", err) + } + + err = yaml.Unmarshal(b, got) + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} diff --git a/compiler/types/yaml/doc.go b/compiler/types/yaml/doc.go new file mode 100644 index 000000000..2a2e39cd3 --- /dev/null +++ b/compiler/types/yaml/doc.go @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: Apache-2.0 + +// Package yaml provides the defined yaml types for Vela. 
+// +// Usage: +// +// import "github.com/go-vela/server/compiler/types/yaml" +package yaml diff --git a/compiler/types/yaml/metadata.go b/compiler/types/yaml/metadata.go new file mode 100644 index 000000000..83dfe5c31 --- /dev/null +++ b/compiler/types/yaml/metadata.go @@ -0,0 +1,81 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "github.com/go-vela/server/compiler/types/pipeline" +) + +type ( + // Metadata is the yaml representation of + // the metadata block for a pipeline. + Metadata struct { + Template bool `yaml:"template,omitempty" json:"template,omitempty" jsonschema:"description=Enables compiling the pipeline as a template.\nReference: https://go-vela.github.io/docs/reference/yaml/metadata/#the-template-key"` + RenderInline bool `yaml:"render_inline,omitempty" json:"render_inline,omitempty" jsonschema:"description=Enables inline compiling for the pipeline templates.\nReference: https://go-vela.github.io/docs/reference/yaml/metadata/#the-render-inline-key"` + Clone *bool `yaml:"clone,omitempty" json:"clone,omitempty" jsonschema:"default=true,description=Enables injecting the default clone process.\nReference: https://go-vela.github.io/docs/reference/yaml/metadata/#the-clone-key"` + Environment []string `yaml:"environment,omitempty" json:"environment,omitempty" jsonschema:"description=Controls which containers processes can have global env injected.\nReference: https://go-vela.github.io/docs/reference/yaml/metadata/#the-environment-key"` + AutoCancel *CancelOptions `yaml:"auto_cancel,omitempty" json:"auto_cancel,omitempty" jsonschema:"description=Enables auto canceling of queued or running pipelines that become stale due to new push.\nReference: https://go-vela.github.io/docs/reference/yaml/metadata/#the-auto-cancel-key"` + } + + // CancelOptions is the yaml representation of + // the auto_cancel block for a pipeline. 
+ CancelOptions struct { + Running *bool `yaml:"running,omitempty" json:"running,omitempty" jsonschema:"description=Enables auto canceling of running pipelines that become stale due to new push.\nReference: https://go-vela.github.io/docs/reference/yaml/metadata/#the-auto-cancel-key"` + Pending *bool `yaml:"pending,omitempty" json:"pending,omitempty" jsonschema:"description=Enables auto canceling of queued pipelines that become stale due to new push.\nReference: https://go-vela.github.io/docs/reference/yaml/metadata/#the-auto-cancel-key"` + DefaultBranch *bool `yaml:"default_branch,omitempty" json:"default_branch,omitempty" jsonschema:"description=Enables auto canceling of queued or running pipelines that become stale due to new push to default branch.\nReference: https://go-vela.github.io/docs/reference/yaml/metadata/#the-auto-cancel-key"` + } +) + +// ToPipeline converts the Metadata type +// to a pipeline Metadata type. +func (m *Metadata) ToPipeline() *pipeline.Metadata { + var clone bool + if m.Clone == nil { + clone = true + } else { + clone = *m.Clone + } + + autoCancel := new(pipeline.CancelOptions) + + // default to false for all fields if block isn't found + if m.AutoCancel == nil { + autoCancel.Pending = false + autoCancel.Running = false + autoCancel.DefaultBranch = false + } else { + // if block is found but pending field isn't, default to true + if m.AutoCancel.Pending != nil { + autoCancel.Pending = *m.AutoCancel.Pending + } else { + autoCancel.Pending = true + } + + if m.AutoCancel.Running != nil { + autoCancel.Running = *m.AutoCancel.Running + } + + if m.AutoCancel.DefaultBranch != nil { + autoCancel.DefaultBranch = *m.AutoCancel.DefaultBranch + } + } + + return &pipeline.Metadata{ + Template: m.Template, + Clone: clone, + Environment: m.Environment, + AutoCancel: autoCancel, + } +} + +// HasEnvironment checks if the container type +// is contained within the environment list. 
+func (m *Metadata) HasEnvironment(container string) bool { + for _, e := range m.Environment { + if e == container { + return true + } + } + + return false +} diff --git a/compiler/types/yaml/metadata_test.go b/compiler/types/yaml/metadata_test.go new file mode 100644 index 000000000..65baa2be8 --- /dev/null +++ b/compiler/types/yaml/metadata_test.go @@ -0,0 +1,130 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "reflect" + "testing" + + "github.com/go-vela/server/compiler/types/pipeline" +) + +func TestYaml_Metadata_ToPipeline(t *testing.T) { + tBool := true + fBool := false + // setup tests + tests := []struct { + metadata *Metadata + want *pipeline.Metadata + }{ + { + metadata: &Metadata{ + Template: false, + Clone: &fBool, + Environment: []string{"steps", "services", "secrets"}, + AutoCancel: &CancelOptions{ + Pending: &tBool, + Running: &tBool, + DefaultBranch: &fBool, + }, + }, + want: &pipeline.Metadata{ + Template: false, + Clone: false, + Environment: []string{"steps", "services", "secrets"}, + AutoCancel: &pipeline.CancelOptions{ + Pending: true, + Running: true, + DefaultBranch: false, + }, + }, + }, + { + metadata: &Metadata{ + Template: false, + Clone: &tBool, + Environment: []string{"steps", "services"}, + }, + want: &pipeline.Metadata{ + Template: false, + Clone: true, + Environment: []string{"steps", "services"}, + AutoCancel: &pipeline.CancelOptions{ + Pending: false, + Running: false, + DefaultBranch: false, + }, + }, + }, + { + metadata: &Metadata{ + Template: false, + Clone: nil, + Environment: []string{"steps"}, + AutoCancel: &CancelOptions{ + Running: &tBool, + DefaultBranch: &tBool, + }, + }, + want: &pipeline.Metadata{ + Template: false, + Clone: true, + Environment: []string{"steps"}, + AutoCancel: &pipeline.CancelOptions{ + Pending: true, + Running: true, + DefaultBranch: true, + }, + }, + }, + } + + // run tests + for _, test := range tests { + got := test.metadata.ToPipeline() + + if !reflect.DeepEqual(got, 
test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} + +func TestYaml_Metadata_HasEnvironment(t *testing.T) { + // setup tests + tests := []struct { + metadata *Metadata + container string + want bool + }{ + { + metadata: &Metadata{ + Environment: []string{"steps", "services", "secrets"}, + }, + container: "steps", + want: true, + }, + { + metadata: &Metadata{ + Environment: []string{"services", "secrets"}, + }, + container: "services", + want: true, + }, + { + metadata: &Metadata{ + Environment: []string{"steps", "services", "secrets"}, + }, + container: "notacontainer", + want: false, + }, + } + + // run tests + for _, test := range tests { + got := test.metadata.HasEnvironment(test.container) + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} diff --git a/compiler/types/yaml/ruleset.go b/compiler/types/yaml/ruleset.go new file mode 100644 index 000000000..f0d4078d8 --- /dev/null +++ b/compiler/types/yaml/ruleset.go @@ -0,0 +1,183 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/types/constants" +) + +type ( + // Ruleset is the yaml representation of a + // ruleset block for a step in a pipeline. 
+ Ruleset struct { + If Rules `yaml:"if,omitempty" json:"if,omitempty" jsonschema:"description=Limit execution to when all rules match.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Unless Rules `yaml:"unless,omitempty" json:"unless,omitempty" jsonschema:"description=Limit execution to when all rules do not match.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Matcher string `yaml:"matcher,omitempty" json:"matcher,omitempty" jsonschema:"enum=filepath,enum=regexp,default=filepath,description=Use the defined matching method.\nReference: coming soon"` + Operator string `yaml:"operator,omitempty" json:"operator,omitempty" jsonschema:"enum=or,enum=and,default=and,description=Whether all rule conditions must be met or just any one of them.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Continue bool `yaml:"continue,omitempty" json:"continue,omitempty" jsonschema:"default=false,description=Limits the execution of a step to continuing on any failure.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + } + + // Rules is the yaml representation of the ruletypes + // from a ruleset block for a step in a pipeline. 
+ Rules struct { + Branch []string `yaml:"branch,omitempty,flow" json:"branch,omitempty" jsonschema:"description=Limits the execution of a step to matching build branches.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Comment []string `yaml:"comment,omitempty,flow" json:"comment,omitempty" jsonschema:"description=Limits the execution of a step to matching a pull request comment.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Event []string `yaml:"event,omitempty,flow" json:"event,omitempty" jsonschema:"description=Limits the execution of a step to matching build events.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Path []string `yaml:"path,omitempty,flow" json:"path,omitempty" jsonschema:"description=Limits the execution of a step to matching files changed in a repository.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Repo []string `yaml:"repo,omitempty,flow" json:"repo,omitempty" jsonschema:"description=Limits the execution of a step to matching repos.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Status []string `yaml:"status,omitempty,flow" json:"status,omitempty" jsonschema:"enum=[failure],enum=[success],description=Limits the execution of a step to matching build statuses.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Tag []string `yaml:"tag,omitempty,flow" json:"tag,omitempty" jsonschema:"description=Limits the execution of a step to matching build tag references.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Target []string `yaml:"target,omitempty,flow" json:"target,omitempty" jsonschema:"description=Limits the execution of a step to matching build deployment targets.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Label []string 
`yaml:"label,omitempty,flow" json:"label,omitempty" jsonschema:"description=Limits step execution to match on pull requests labels.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Instance []string `yaml:"instance,omitempty,flow" json:"instance,omitempty" jsonschema:"description=Limits step execution to match on certain instances.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + } +) + +// ToPipeline converts the Ruleset type +// to a pipeline Ruleset type. +func (r *Ruleset) ToPipeline() *pipeline.Ruleset { + return &pipeline.Ruleset{ + If: *r.If.ToPipeline(), + Unless: *r.Unless.ToPipeline(), + Matcher: r.Matcher, + Operator: r.Operator, + Continue: r.Continue, + } +} + +// UnmarshalYAML implements the Unmarshaler interface for the Ruleset type. +func (r *Ruleset) UnmarshalYAML(unmarshal func(interface{}) error) error { + // simple struct we try unmarshalling to + simple := new(Rules) + + // advanced struct we try unmarshalling to + advanced := new(struct { + If Rules + Unless Rules + Matcher string + Operator string + Continue bool + }) + + // attempt to unmarshal simple ruleset + //nolint:errcheck // intentionally not handling error + unmarshal(simple) + // attempt to unmarshal advanced ruleset + //nolint:errcheck // intentionally not handling error + unmarshal(advanced) + + // set ruleset `unless` to advanced `unless` rules + r.Unless = advanced.Unless + // set ruleset `matcher` to advanced `matcher` + r.Matcher = advanced.Matcher + // set ruleset `operator` to advanced `operator` + r.Operator = advanced.Operator + // set ruleset `continue` to advanced `continue` + r.Continue = advanced.Continue + + // implicitly add simple ruleset to the advanced ruleset for each rule type + advanced.If.Branch = append(advanced.If.Branch, simple.Branch...) + advanced.If.Comment = append(advanced.If.Comment, simple.Comment...) + advanced.If.Event = append(advanced.If.Event, simple.Event...) 
+ advanced.If.Path = append(advanced.If.Path, simple.Path...) + advanced.If.Repo = append(advanced.If.Repo, simple.Repo...) + advanced.If.Status = append(advanced.If.Status, simple.Status...) + advanced.If.Tag = append(advanced.If.Tag, simple.Tag...) + advanced.If.Target = append(advanced.If.Target, simple.Target...) + advanced.If.Label = append(advanced.If.Label, simple.Label...) + advanced.If.Instance = append(advanced.If.Instance, simple.Instance...) + + // set ruleset `if` to advanced `if` rules + r.If = advanced.If + + // implicitly set `matcher` field if empty for ruleset + if len(r.Matcher) == 0 { + r.Matcher = constants.MatcherFilepath + } + + // implicitly set `operator` field if empty for ruleset + if len(r.Operator) == 0 { + r.Operator = constants.OperatorAnd + } + + return nil +} + +// ToPipeline converts the Rules +// type to a pipeline Rules type. +func (r *Rules) ToPipeline() *pipeline.Rules { + return &pipeline.Rules{ + Branch: r.Branch, + Comment: r.Comment, + Event: r.Event, + Path: r.Path, + Repo: r.Repo, + Status: r.Status, + Tag: r.Tag, + Target: r.Target, + Label: r.Label, + Instance: r.Instance, + } +} + +// UnmarshalYAML implements the Unmarshaler interface for the Rules type. 
+func (r *Rules) UnmarshalYAML(unmarshal func(interface{}) error) error { + // rules struct we try unmarshalling to + rules := new(struct { + Branch raw.StringSlice + Comment raw.StringSlice + Event raw.StringSlice + Path raw.StringSlice + Repo raw.StringSlice + Status raw.StringSlice + Tag raw.StringSlice + Target raw.StringSlice + Label raw.StringSlice + Instance raw.StringSlice + }) + + // attempt to unmarshal rules + err := unmarshal(rules) + if err == nil { + r.Branch = rules.Branch + r.Comment = rules.Comment + r.Path = rules.Path + r.Repo = rules.Repo + r.Status = rules.Status + r.Tag = rules.Tag + r.Target = rules.Target + r.Label = rules.Label + r.Instance = rules.Instance + + // account for users who use non-scoped pull_request event + events := []string{} + + for _, e := range rules.Event { + switch e { + // backwards compatibility + // pull_request = pull_request:opened + pull_request:synchronize + pull_request:reopened + // comment = comment:created + comment:edited + case constants.EventPull: + events = append(events, + constants.EventPull+":"+constants.ActionOpened, + constants.EventPull+":"+constants.ActionSynchronize, + constants.EventPull+":"+constants.ActionReopened) + case constants.EventDeploy: + events = append(events, + constants.EventDeploy+":"+constants.ActionCreated) + case constants.EventComment: + events = append(events, + constants.EventComment+":"+constants.ActionCreated, + constants.EventComment+":"+constants.ActionEdited) + default: + events = append(events, e) + } + } + + r.Event = events + } + + return err +} diff --git a/compiler/types/yaml/ruleset_test.go b/compiler/types/yaml/ruleset_test.go new file mode 100644 index 000000000..d1f559daa --- /dev/null +++ b/compiler/types/yaml/ruleset_test.go @@ -0,0 +1,274 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "os" + "reflect" + "testing" + + "github.com/buildkite/yaml" + + "github.com/go-vela/server/compiler/types/pipeline" +) + +func 
TestYaml_Ruleset_ToPipeline(t *testing.T) { + // setup tests + tests := []struct { + ruleset *Ruleset + want *pipeline.Ruleset + }{ + { + ruleset: &Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Comment: []string{"test comment"}, + Event: []string{"push", "pull_request:labeled"}, + Path: []string{"foo.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"success"}, + Tag: []string{"v0.1.0"}, + Target: []string{"production"}, + Label: []string{"enhancement"}, + Instance: []string{"http://localhost:8080"}, + }, + Unless: Rules{ + Branch: []string{"main"}, + Comment: []string{"real comment"}, + Event: []string{"pull_request"}, + Path: []string{"bar.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"failure"}, + Tag: []string{"v0.2.0"}, + Target: []string{"production"}, + Instance: []string{"http://localhost:8080"}, + }, + Matcher: "filepath", + Operator: "and", + Continue: false, + }, + want: &pipeline.Ruleset{ + If: pipeline.Rules{ + Branch: []string{"main"}, + Comment: []string{"test comment"}, + Event: []string{"push", "pull_request:labeled"}, + Path: []string{"foo.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"success"}, + Tag: []string{"v0.1.0"}, + Target: []string{"production"}, + Label: []string{"enhancement"}, + Instance: []string{"http://localhost:8080"}, + }, + Unless: pipeline.Rules{ + Branch: []string{"main"}, + Comment: []string{"real comment"}, + Event: []string{"pull_request"}, + Path: []string{"bar.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"failure"}, + Tag: []string{"v0.2.0"}, + Target: []string{"production"}, + Instance: []string{"http://localhost:8080"}, + }, + Matcher: "filepath", + Operator: "and", + Continue: false, + }, + }, + } + + // run tests + for _, test := range tests { + got := test.ruleset.ToPipeline() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} + +func TestYaml_Ruleset_UnmarshalYAML(t *testing.T) { + // setup 
tests + tests := []struct { + file string + want *Ruleset + }{ + { + file: "testdata/ruleset_simple.yml", + want: &Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Comment: []string{"test comment"}, + Event: []string{"push"}, + Path: []string{"foo.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"success"}, + Tag: []string{"v0.1.0"}, + Target: []string{"production"}, + }, + Matcher: "filepath", + Operator: "and", + Continue: true, + }, + }, + { + file: "testdata/ruleset_advanced.yml", + want: &Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Event: []string{"push"}, + Tag: []string{"^refs/tags/(\\d+\\.)+\\d+$"}, + }, + Unless: Rules{ + Event: []string{"deployment:created", "pull_request:opened", "pull_request:synchronize", "pull_request:reopened", "comment:created", "comment:edited", "schedule"}, + Path: []string{"foo.txt", "/foo/bar.txt"}, + }, + Matcher: "regexp", + Operator: "or", + Continue: true, + }, + }, + { + file: "testdata/ruleset_regex.yml", + want: &Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Event: []string{"tag"}, + Tag: []string{"^refs/tags/(\\d+\\.)+\\d+$"}, + }, + Operator: "and", + Matcher: "regex", + }, + }, + } + + // run tests + for _, test := range tests { + got := new(Ruleset) + + b, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file: %v", err) + } + + err = yaml.Unmarshal(b, got) + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} + +func TestYaml_Rules_ToPipeline(t *testing.T) { + // setup tests + tests := []struct { + rules *Rules + want *pipeline.Rules + }{ + { + rules: &Rules{ + Branch: []string{"main"}, + Comment: []string{"test comment"}, + Event: []string{"push", "pull_request:labeled"}, + Path: []string{"foo.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"success"}, + Tag: []string{"v0.1.0"}, + Target: []string{"production"}, + 
Label: []string{"enhancement"}, + }, + want: &pipeline.Rules{ + Branch: []string{"main"}, + Comment: []string{"test comment"}, + Event: []string{"push", "pull_request:labeled"}, + Path: []string{"foo.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"success"}, + Tag: []string{"v0.1.0"}, + Target: []string{"production"}, + Label: []string{"enhancement"}, + }, + }, + } + + // run tests + for _, test := range tests { + got := test.rules.ToPipeline() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} + +func TestYaml_Rules_UnmarshalYAML(t *testing.T) { + // setup types + var ( + b []byte + err error + ) + + // setup tests + tests := []struct { + failure bool + file string + want *Rules + }{ + { + failure: false, + file: "testdata/ruleset_simple.yml", + want: &Rules{ + Branch: []string{"main"}, + Comment: []string{"test comment"}, + Event: []string{"push"}, + Path: []string{"foo.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"success"}, + Tag: []string{"v0.1.0"}, + Target: []string{"production"}, + }, + }, + { + failure: true, + file: "", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(Rules) + + if len(test.file) > 0 { + b, err = os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file: %v", err) + } + } else { + b = []byte("``") + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} diff --git a/compiler/types/yaml/secret.go b/compiler/types/yaml/secret.go new file mode 100644 index 000000000..887e1a21c --- /dev/null +++ b/compiler/types/yaml/secret.go @@ -0,0 +1,271 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "errors" + 
"fmt" + "strings" + + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/types/constants" +) + +type ( + // SecretSlice is the yaml representation + // of the secrets block for a pipeline. + SecretSlice []*Secret + + // Secret is the yaml representation of a secret + // from the secrets block for a pipeline. + Secret struct { + Name string `yaml:"name,omitempty" json:"name,omitempty" jsonschema:"required,minLength=1,description=Name of secret to reference in the pipeline.\nReference: https://go-vela.github.io/docs/reference/yaml/secrets/#the-name-key"` + Key string `yaml:"key,omitempty" json:"key,omitempty" jsonschema:"minLength=1,description=Path to secret to fetch from storage backend.\nReference: https://go-vela.github.io/docs/reference/yaml/secrets/#the-key-key"` + Engine string `yaml:"engine,omitempty" json:"engine,omitempty" jsonschema:"enum=native,enum=vault,default=native,description=Name of storage backend to fetch secret from.\nReference: https://go-vela.github.io/docs/reference/yaml/secrets/#the-engine-key"` + Type string `yaml:"type,omitempty" json:"type,omitempty" jsonschema:"enum=repo,enum=org,enum=shared,default=repo,description=Type of secret to fetch from storage backend.\nReference: https://go-vela.github.io/docs/reference/yaml/secrets/#the-type-key"` + Origin Origin `yaml:"origin,omitempty" json:"origin,omitempty" jsonschema:"description=Declaration to pull secrets from non-internal secret providers.\nReference: https://go-vela.github.io/docs/reference/yaml/secrets/#the-origin-key"` + Pull string `yaml:"pull,omitempty" json:"pull,omitempty" jsonschema:"enum=step_start,enum=build_start,default=build_start,description=When to pull in secrets from storage backend.\nReference: https://go-vela.github.io/docs/reference/yaml/secrets/#the-pull-key"` + } + + // Origin is the yaml representation of a method + // for looking up secrets with a secret plugin. 
+ Origin struct { + Environment raw.StringSliceMap `yaml:"environment,omitempty" json:"environment,omitempty" jsonschema:"description=Variables to inject into the container environment.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-environment-key"` + Image string `yaml:"image,omitempty" json:"image,omitempty" jsonschema:"required,minLength=1,description=Docker image to use to create the ephemeral container.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-image-key"` + Name string `yaml:"name,omitempty" json:"name,omitempty" jsonschema:"required,minLength=1,description=Unique name for the secret origin.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-name-key"` + Parameters map[string]interface{} `yaml:"parameters,omitempty" json:"parameters,omitempty" jsonschema:"description=Extra configuration variables for the secret plugin.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-parameters-key"` + Secrets StepSecretSlice `yaml:"secrets,omitempty" json:"secrets,omitempty" jsonschema:"description=Secrets to inject that are necessary to retrieve the secrets.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-secrets-key"` + Pull string `yaml:"pull,omitempty" json:"pull,omitempty" jsonschema:"enum=always,enum=not_present,enum=on_start,enum=never,default=not_present,description=Declaration to configure if and when the Docker image is pulled.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-pull-key"` + Ruleset Ruleset `yaml:"ruleset,omitempty" json:"ruleset,omitempty" jsonschema:"description=Conditions to limit the execution of the container.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + } +) + +// ToPipeline converts the SecretSlice type +// to a pipeline SecretSlice type. 
+func (s *SecretSlice) ToPipeline() *pipeline.SecretSlice { + // secret slice we want to return + secretSlice := new(pipeline.SecretSlice) + + // iterate through each element in the secret slice + for _, secret := range *s { + // append the element to the pipeline secret slice + *secretSlice = append(*secretSlice, &pipeline.Secret{ + Name: secret.Name, + Key: secret.Key, + Engine: secret.Engine, + Type: secret.Type, + Origin: secret.Origin.ToPipeline(), + Pull: secret.Pull, + }) + } + + return secretSlice +} + +// UnmarshalYAML implements the Unmarshaler interface for the SecretSlice type. +func (s *SecretSlice) UnmarshalYAML(unmarshal func(interface{}) error) error { + // secret slice we try unmarshalling to + secretSlice := new([]*Secret) + + // attempt to unmarshal as a secret slice type + err := unmarshal(secretSlice) + if err != nil { + return err + } + + tmp := SecretSlice{} + + // iterate through each element in the secret slice + for _, secret := range *secretSlice { + if secret.Origin.Empty() && len(secret.Name) == 0 { + continue + } + + if secret.Origin.Empty() && len(secret.Key) == 0 { + secret.Key = secret.Name + } + + // implicitly set `engine` field if empty + if secret.Origin.Empty() && len(secret.Engine) == 0 { + secret.Engine = constants.DriverNative + } + + // implicitly set `type` field if empty + if secret.Origin.Empty() && len(secret.Type) == 0 { + secret.Type = constants.SecretRepo + } + + // implicitly set `pull` field if empty + if secret.Origin.Empty() && len(secret.Pull) == 0 { + secret.Pull = constants.SecretPullBuild + } + + // implicitly set origin `pull` field if empty + if !secret.Origin.Empty() && len(secret.Origin.Pull) == 0 { + secret.Origin.Pull = constants.PullNotPresent + } + + // TODO: remove this in a future release + // + // handle true deprecated pull policy + // + // a `true` pull policy equates to `always` + if !secret.Origin.Empty() && strings.EqualFold(secret.Origin.Pull, "true") { + secret.Origin.Pull = constants.PullAlways + 
} + + // TODO: remove this in a future release + // + // handle false deprecated pull policy + // + // a `false` pull policy equates to `not_present` + if !secret.Origin.Empty() && strings.EqualFold(secret.Origin.Pull, "false") { + secret.Origin.Pull = constants.PullNotPresent + } + + tmp = append(tmp, secret) + } + + // overwrite existing SecretSlice + *s = tmp + + return nil +} + +// Empty returns true if the provided origin is empty. +func (o *Origin) Empty() bool { + // return true if the origin is nil + if o == nil { + return true + } + + // return true if every origin field is empty + if o.Environment == nil && + len(o.Image) == 0 && + len(o.Name) == 0 && + o.Parameters == nil && + len(o.Secrets) == 0 && + len(o.Pull) == 0 { + return true + } + + return false +} + +// MergeEnv takes a list of environment variables and attempts +// to set them in the secret environment. If the environment +// variable already exists in the secret, then this will +// overwrite the existing environment variable. +func (o *Origin) MergeEnv(environment map[string]string) error { + // check if the secret container is empty + if o.Empty() { + // TODO: evaluate if we should error here + // + // immediately return and do nothing + // + // treated as a no-op + return nil + } + + // check if the environment provided is empty + if environment == nil { + return fmt.Errorf("empty environment provided for secret %s", o.Name) + } + + // iterate through all environment variables provided + for key, value := range environment { + // set or update the secret environment variable + o.Environment[key] = value + } + + return nil +} + +// ToPipeline converts the Origin type +// to a pipeline Container type. 
+func (o *Origin) ToPipeline() *pipeline.Container { + return &pipeline.Container{ + Environment: o.Environment, + Image: o.Image, + Name: o.Name, + Pull: o.Pull, + Ruleset: *o.Ruleset.ToPipeline(), + Secrets: *o.Secrets.ToPipeline(), + } +} + +type ( + // StepSecretSlice is the yaml representation of + // the secrets block for a step in a pipeline. + StepSecretSlice []*StepSecret + + // StepSecret is the yaml representation of a secret + // from a secrets block for a step in a pipeline. + StepSecret struct { + Source string `yaml:"source,omitempty"` + Target string `yaml:"target,omitempty"` + } +) + +// ToPipeline converts the StepSecretSlice type +// to a pipeline StepSecretSlice type. +func (s *StepSecretSlice) ToPipeline() *pipeline.StepSecretSlice { + // step secret slice we want to return + secretSlice := new(pipeline.StepSecretSlice) + + // iterate through each element in the step secret slice + for _, secret := range *s { + // append the element to the pipeline step secret slice + *secretSlice = append(*secretSlice, &pipeline.StepSecret{ + Source: secret.Source, + Target: secret.Target, + }) + } + + return secretSlice +} + +// UnmarshalYAML implements the Unmarshaler interface for the StepSecretSlice type. 
+func (s *StepSecretSlice) UnmarshalYAML(unmarshal func(interface{}) error) error { + // string slice we try unmarshalling to + stringSlice := new(raw.StringSlice) + + // attempt to unmarshal as a string slice type + err := unmarshal(stringSlice) + if err == nil { + // iterate through each element in the string slice + for _, secret := range *stringSlice { + // append the element to the step secret slice + *s = append(*s, &StepSecret{ + Source: secret, + Target: strings.ToUpper(secret), + }) + } + + return nil + } + + // step secret slice we try unmarshalling to + secrets := new([]*StepSecret) + + // attempt to unmarshal as a step secret slice type + err = unmarshal(secrets) + if err == nil { + // check for secret source and target + for _, secret := range *secrets { + if len(secret.Source) == 0 || len(secret.Target) == 0 { + return fmt.Errorf("no secret source or target found") + } + + secret.Target = strings.ToUpper(secret.Target) + } + + // overwrite existing StepSecretSlice + *s = StepSecretSlice(*secrets) + + return nil + } + + return errors.New("failed to unmarshal StepSecretSlice") +} diff --git a/compiler/types/yaml/secret_test.go b/compiler/types/yaml/secret_test.go new file mode 100644 index 000000000..68a9ed4d6 --- /dev/null +++ b/compiler/types/yaml/secret_test.go @@ -0,0 +1,460 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "os" + "reflect" + "testing" + + "github.com/buildkite/yaml" + + "github.com/go-vela/server/compiler/types/pipeline" +) + +func TestYaml_Origin_MergeEnv(t *testing.T) { + // setup tests + tests := []struct { + origin *Origin + environment map[string]string + failure bool + }{ + { + origin: &Origin{ + Name: "vault", + Environment: map[string]string{"FOO": "bar"}, + Image: "target/vela-vault:latest", + Parameters: map[string]interface{}{ + "addr": "vault.example.com", + "auth_method": "token", + "items": []interface{}{ + map[string]string{"source": "secret/docker", "path": "docker"}, + }, + }, + Pull: 
"always", + Secrets: StepSecretSlice{ + { + Source: "vault_token", + Target: "vault_token", + }, + }, + }, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + origin: &Origin{}, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + origin: nil, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + origin: &Origin{ + Name: "vault", + Environment: map[string]string{"FOO": "bar"}, + Image: "target/vela-vault:latest", + Parameters: map[string]interface{}{ + "addr": "vault.example.com", + "auth_method": "token", + "items": []interface{}{ + map[string]string{"source": "secret/docker", "path": "docker"}, + }, + }, + Pull: "always", + Secrets: StepSecretSlice{ + { + Source: "vault_token", + Target: "vault_token", + }, + }, + }, + environment: nil, + failure: true, + }, + } + + // run tests + for _, test := range tests { + err := test.origin.MergeEnv(test.environment) + + if test.failure { + if err == nil { + t.Errorf("MergeEnv should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("MergeEnv returned err: %v", err) + } + } +} + +func TestYaml_SecretSlice_ToPipeline(t *testing.T) { + // setup tests + tests := []struct { + secrets *SecretSlice + want *pipeline.SecretSlice + }{ + { + secrets: &SecretSlice{ + { + Name: "docker_username", + Key: "github/octocat/docker/username", + Engine: "native", + Type: "repo", + Origin: Origin{}, + Pull: "build_start", + }, + { + Name: "docker_username", + Key: "", + Engine: "", + Type: "", + Origin: Origin{ + Name: "vault", + Environment: map[string]string{"FOO": "bar"}, + Image: "target/vela-vault:latest", + Parameters: map[string]interface{}{ + "addr": "vault.company.com", + }, + Pull: "always", + Ruleset: Ruleset{ + If: Rules{ + Event: []string{"push"}, + }, + Operator: "and", + }, + Secrets: StepSecretSlice{ + { + Source: "foo", + Target: "foo", + }, + { + Source: "foobar", + Target: "foobar", + }, + }, + }, + Pull: "build_start", + }, + }, + 
want: &pipeline.SecretSlice{ + { + Name: "docker_username", + Key: "github/octocat/docker/username", + Engine: "native", + Type: "repo", + Origin: &pipeline.Container{}, + Pull: "build_start", + }, + { + Name: "docker_username", + Key: "", + Engine: "", + Type: "", + Origin: &pipeline.Container{ + Name: "vault", + Environment: map[string]string{"FOO": "bar"}, + Image: "target/vela-vault:latest", + Pull: "always", + Ruleset: pipeline.Ruleset{ + If: pipeline.Rules{ + Event: []string{"push"}, + }, + Operator: "and", + }, + Secrets: pipeline.StepSecretSlice{ + { + Source: "foo", + Target: "foo", + }, + { + Source: "foobar", + Target: "foobar", + }, + }, + }, + Pull: "build_start", + }, + }, + }, + } + + // run tests + for _, test := range tests { + got := test.secrets.ToPipeline() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} + +func TestYaml_SecretSlice_UnmarshalYAML(t *testing.T) { + // setup tests + tests := []struct { + failure bool + file string + want *SecretSlice + }{ + { + failure: false, + file: "testdata/secret.yml", + want: &SecretSlice{ + { + Name: "foo", + Key: "bar", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + { + Name: "noKey", + Key: "noKey", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + { + Name: "noType", + Key: "bar", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + { + Name: "noEngine", + Key: "bar", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + { + Name: "noKeyEngineAndType", + Key: "noKeyEngineAndType", + Engine: "native", + Type: "repo", + Pull: "build_start", + }, + { + Name: "externalSecret", + Key: "", + Engine: "", + Type: "", + Origin: Origin{ + Environment: map[string]string{"FOO": "bar"}, + Image: "target/vela-vault:latest", + Parameters: map[string]interface{}{ + "addr": "vault.company.com", + }, + Pull: "always", + Ruleset: Ruleset{ + If: Rules{ + Event: []string{"push"}, + }, + Operator: "and", 
+ Matcher: "filepath", + }, + Secrets: StepSecretSlice{ + { + Source: "foo", + Target: "FOO", + }, + { + Source: "foobar", + Target: "FOOBAR", + }, + }, + }, + Pull: "", + }, + { + Name: "", + Key: "", + Engine: "", + Type: "", + Origin: Origin{ + Environment: map[string]string{"FOO": "bar"}, + Image: "target/vela-vault:latest", + Parameters: map[string]interface{}{ + "addr": "vault.company.com", + }, + Pull: "always", + Ruleset: Ruleset{ + If: Rules{ + Event: []string{"push"}, + }, + Operator: "and", + Matcher: "filepath", + }, + Secrets: StepSecretSlice{ + { + Source: "foo", + Target: "FOO", + }, + { + Source: "foobar", + Target: "FOOBAR", + }, + }, + }, + Pull: "", + }, + }, + }, + { + failure: true, + file: "testdata/invalid.yml", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(SecretSlice) + + // run test + b, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file: %v", err) + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} + +func TestYaml_StepSecretSlice_ToPipeline(t *testing.T) { + // setup tests + tests := []struct { + secrets *StepSecretSlice + want *pipeline.StepSecretSlice + }{ + { + secrets: &StepSecretSlice{ + { + Source: "docker_username", + Target: "plugin_username", + }, + }, + want: &pipeline.StepSecretSlice{ + { + Source: "docker_username", + Target: "plugin_username", + }, + }, + }, + } + + // run tests + for _, test := range tests { + got := test.secrets.ToPipeline() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} + +func TestYaml_StepSecretSlice_UnmarshalYAML(t *testing.T) { + // setup tests + tests := []struct { + failure bool + file 
string + want *StepSecretSlice + }{ + { + failure: false, + file: "testdata/step_secret_slice.yml", + want: &StepSecretSlice{ + { + Source: "foo", + Target: "BAR", + }, + { + Source: "hello", + Target: "WORLD", + }, + }, + }, + { + failure: false, + file: "testdata/step_secret_string.yml", + want: &StepSecretSlice{ + { + Source: "foo", + Target: "FOO", + }, + { + Source: "hello", + Target: "HELLO", + }, + }, + }, + { + failure: true, + file: "testdata/step_secret_slice_invalid_no_source.yml", + want: nil, + }, + { + failure: true, + file: "testdata/step_secret_slice_invalid_no_target.yml", + want: nil, + }, + { + failure: true, + file: "testdata/invalid.yml", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(StepSecretSlice) + + // run test + b, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file: %v", err) + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} diff --git a/compiler/types/yaml/service.go b/compiler/types/yaml/service.go new file mode 100644 index 000000000..38fa87a90 --- /dev/null +++ b/compiler/types/yaml/service.go @@ -0,0 +1,135 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "fmt" + "strings" + + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/types/constants" +) + +type ( + // ServiceSlice is the yaml representation + // of the Services block for a pipeline. + ServiceSlice []*Service + + // Service is the yaml representation + // of a Service in a pipeline. 
+ Service struct { + Image string `yaml:"image,omitempty" json:"image,omitempty" jsonschema:"required,minLength=1,description=Docker image used to create ephemeral container.\nReference: https://go-vela.github.io/docs/reference/yaml/services/#the-image-key"` + Name string `yaml:"name,omitempty" json:"name,omitempty" jsonschema:"required,minLength=1,description=Unique identifier for the container in the pipeline.\nReference: https://go-vela.github.io/docs/reference/yaml/services/#the-name-key"` + Entrypoint raw.StringSlice `yaml:"entrypoint,omitempty" json:"entrypoint,omitempty" jsonschema:"description=Commands to execute inside the container.\nReference: https://go-vela.github.io/docs/reference/yaml/services/#the-entrypoint-key"` + Environment raw.StringSliceMap `yaml:"environment,omitempty" json:"environment,omitempty" jsonschema:"description=Variables to inject into the container environment.\nReference: https://go-vela.github.io/docs/reference/yaml/services/#the-environment-key"` + Ports raw.StringSlice `yaml:"ports,omitempty" json:"ports,omitempty" jsonschema:"description=List of ports to map for the container in the pipeline.\nReference: https://go-vela.github.io/docs/reference/yaml/services/#the-ports-key"` + Pull string `yaml:"pull,omitempty" json:"pull,omitempty" jsonschema:"enum=always,enum=not_present,enum=on_start,enum=never,default=not_present,description=Declaration to configure if and when the Docker image is pulled.\nReference: https://go-vela.github.io/docs/reference/yaml/services/#the-pull-key"` + Ulimits UlimitSlice `yaml:"ulimits,omitempty" json:"ulimits,omitempty" jsonschema:"description=Set the user limits for the container.\nReference: https://go-vela.github.io/docs/reference/yaml/services/#the-ulimits-key"` + User string `yaml:"user,omitempty" json:"user,omitempty" jsonschema:"description=Set the user for the container.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-user-key"` + } +) + +// ToPipeline converts the 
ServiceSlice type +// to a pipeline ContainerSlice type. +func (s *ServiceSlice) ToPipeline() *pipeline.ContainerSlice { + // service slice we want to return + serviceSlice := new(pipeline.ContainerSlice) + + // iterate through each element in the service slice + for _, service := range *s { + // append the element to the pipeline container slice + *serviceSlice = append(*serviceSlice, &pipeline.Container{ + Detach: true, + Image: service.Image, + Name: service.Name, + Entrypoint: service.Entrypoint, + Environment: service.Environment, + Ports: service.Ports, + Pull: service.Pull, + Ulimits: *service.Ulimits.ToPipeline(), + User: service.User, + }) + } + + return serviceSlice +} + +// UnmarshalYAML implements the Unmarshaler interface for the ServiceSlice type. +// +//nolint:dupl // accepting duplicative code that exists in step.go as well +func (s *ServiceSlice) UnmarshalYAML(unmarshal func(interface{}) error) error { + // service slice we try unmarshalling to + serviceSlice := new([]*Service) + + // attempt to unmarshal as a service slice type + err := unmarshal(serviceSlice) + if err != nil { + return err + } + + // iterate through each element in the service slice + for _, service := range *serviceSlice { + // handle nil service to avoid panic + if service == nil { + return fmt.Errorf("invalid service with nil content found") + } + + // implicitly set `pull` field if empty + if len(service.Pull) == 0 { + service.Pull = constants.PullNotPresent + } + + // TODO: remove this in a future release + // + // handle true deprecated pull policy + // + // a `true` pull policy equates to `always` + if strings.EqualFold(service.Pull, "true") { + service.Pull = constants.PullAlways + } + + // TODO: remove this in a future release + // + // handle false deprecated pull policy + // + // a `false` pull policy equates to `not_present` + if strings.EqualFold(service.Pull, "false") { + service.Pull = constants.PullNotPresent + } + } + + // overwrite existing ServiceSlice + *s = 
ServiceSlice(*serviceSlice) + + return nil +} + +// MergeEnv takes a list of environment variables and attempts +// to set them in the service environment. If the environment +// variable already exists in the service, than this will +// overwrite the existing environment variable. +func (s *Service) MergeEnv(environment map[string]string) error { + // check if the service container is empty + if s == nil || s.Environment == nil { + // TODO: evaluate if we should error here + // + // immediately return and do nothing + // + // treated as a no-op + return nil + } + + // check if the environment provided is empty + if environment == nil { + return fmt.Errorf("empty environment provided for service %s", s.Name) + } + + // iterate through all environment variables provided + for key, value := range environment { + // set or update the service environment variable + s.Environment[key] = value + } + + return nil +} diff --git a/compiler/types/yaml/service_test.go b/compiler/types/yaml/service_test.go new file mode 100644 index 000000000..09f4fbc16 --- /dev/null +++ b/compiler/types/yaml/service_test.go @@ -0,0 +1,186 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "os" + "reflect" + "testing" + + "github.com/buildkite/yaml" + + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" +) + +func TestYaml_ServiceSlice_ToPipeline(t *testing.T) { + // setup tests + tests := []struct { + services *ServiceSlice + want *pipeline.ContainerSlice + }{ + { + services: &ServiceSlice{ + { + Entrypoint: []string{"/usr/local/bin/docker-entrypoint.sh"}, + Environment: map[string]string{"FOO": "bar"}, + Image: "postgres:12-alpine", + Name: "postgres", + Ports: []string{"5432:5432"}, + Pull: "not_present", + }, + }, + want: &pipeline.ContainerSlice{ + { + Detach: true, + Entrypoint: []string{"/usr/local/bin/docker-entrypoint.sh"}, + Environment: map[string]string{"FOO": "bar"}, + Image: "postgres:12-alpine", + Name: 
"postgres", + Ports: []string{"5432:5432"}, + Pull: "not_present", + }, + }, + }, + } + + // run tests + for _, test := range tests { + got := test.services.ToPipeline() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} + +func TestYaml_ServiceSlice_UnmarshalYAML(t *testing.T) { + // setup tests + tests := []struct { + failure bool + file string + want *ServiceSlice + }{ + { + failure: false, + file: "testdata/service.yml", + want: &ServiceSlice{ + { + Environment: raw.StringSliceMap{ + "POSTGRES_DB": "foo", + }, + Image: "postgres:latest", + Name: "postgres", + Ports: []string{"5432:5432"}, + Pull: "not_present", + }, + { + Environment: raw.StringSliceMap{ + "MYSQL_DATABASE": "foo", + }, + Image: "mysql:latest", + Name: "mysql", + Ports: []string{"3061:3061"}, + Pull: "not_present", + }, + }, + }, + { + failure: true, + file: "testdata/invalid.yml", + want: nil, + }, + { + failure: true, + file: "testdata/service_nil.yml", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(ServiceSlice) + + b, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file: %v", err) + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} + +func TestYaml_Service_MergeEnv(t *testing.T) { + // setup tests + tests := []struct { + service *Service + environment map[string]string + failure bool + }{ + { + service: &Service{ + Environment: map[string]string{"FOO": "bar"}, + Image: "postgres:latest", + Name: "postgres", + Ports: []string{"5432:5432"}, + Pull: "not_present", + }, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + service: &Service{}, + environment: 
map[string]string{"BAR": "baz"}, + failure: false, + }, + { + service: nil, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + service: &Service{ + Environment: map[string]string{"FOO": "bar"}, + Image: "postgres:latest", + Name: "postgres", + Ports: []string{"5432:5432"}, + Pull: "not_present", + }, + environment: nil, + failure: true, + }, + } + + // run tests + for _, test := range tests { + err := test.service.MergeEnv(test.environment) + + if test.failure { + if err == nil { + t.Errorf("MergeEnv should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("MergeEnv returned err: %v", err) + } + } +} diff --git a/compiler/types/yaml/stage.go b/compiler/types/yaml/stage.go new file mode 100644 index 000000000..f62ce6353 --- /dev/null +++ b/compiler/types/yaml/stage.go @@ -0,0 +1,153 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "fmt" + + "github.com/buildkite/yaml" + + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" +) + +type ( + // StageSlice is the yaml representation + // of the stages block for a pipeline. + StageSlice []*Stage + + // Stage is the yaml representation + // of a stage in a pipeline. 
+ Stage struct { + Environment raw.StringSliceMap `yaml:"environment,omitempty" json:"environment,omitempty" jsonschema:"description=Provide environment variables injected into the container environment.\nReference: https://go-vela.github.io/docs/reference/yaml/stages/#the-environment-key"` + Name string `yaml:"name,omitempty" json:"name,omitempty" jsonschema:"minLength=1,description=Unique identifier for the stage in the pipeline.\nReference: https://go-vela.github.io/docs/reference/yaml/stages/#the-name-key"` + Needs raw.StringSlice `yaml:"needs,omitempty,flow" json:"needs,omitempty" jsonschema:"description=Stages that must complete before starting the current one.\nReference: https://go-vela.github.io/docs/reference/yaml/stages/#the-needs-key"` + Independent bool `yaml:"independent,omitempty" json:"independent,omitempty" jsonschema:"description=Stage will continue executing if other stage fails"` + Steps StepSlice `yaml:"steps,omitempty" json:"steps,omitempty" jsonschema:"required,description=Sequential execution instructions for the stage.\nReference: https://go-vela.github.io/docs/reference/yaml/stages/#the-steps-key"` + } +) + +// ToPipeline converts the StageSlice type +// to a pipeline StageSlice type. +func (s *StageSlice) ToPipeline() *pipeline.StageSlice { + // stage slice we want to return + stageSlice := new(pipeline.StageSlice) + + // iterate through each element in the stage slice + for _, stage := range *s { + // append the element to the pipeline stage slice + *stageSlice = append(*stageSlice, &pipeline.Stage{ + Done: make(chan error, 1), + Environment: stage.Environment, + Name: stage.Name, + Needs: stage.Needs, + Independent: stage.Independent, + Steps: *stage.Steps.ToPipeline(), + }) + } + + return stageSlice +} + +// UnmarshalYAML implements the Unmarshaler interface for the StageSlice type. 
+func (s *StageSlice) UnmarshalYAML(unmarshal func(interface{}) error) error { + // map slice we try unmarshalling to + mapSlice := new(yaml.MapSlice) + + // attempt to unmarshal as a map slice type + err := unmarshal(mapSlice) + if err != nil { + return err + } + + // iterate through each element in the map slice + for _, v := range *mapSlice { + // stage we try unmarshalling to + stage := new(Stage) + + // marshal interface value from ordered map + out, _ := yaml.Marshal(v.Value) + + // unmarshal interface value as stage + err = yaml.Unmarshal(out, stage) + if err != nil { + return err + } + + // implicitly set stage `name` if empty + if len(stage.Name) == 0 { + stage.Name = fmt.Sprintf("%v", v.Key) + } + + // implicitly set the stage `needs` + if stage.Name != "clone" && stage.Name != "init" { + // add clone if not present + stage.Needs = func(needs []string) []string { + for _, s := range needs { + if s == "clone" { + return needs + } + } + return append(needs, "clone") + }(stage.Needs) + } + // append stage to stage slice + *s = append(*s, stage) + } + return nil +} + +// MarshalYAML implements the marshaler interface for the StageSlice type. +func (s StageSlice) MarshalYAML() (interface{}, error) { + // map slice to return as marshaled output + var output yaml.MapSlice + + // loop over the input stages + for _, inputStage := range s { + // create a new stage + outputStage := new(Stage) + + // add the existing needs to the new stage + outputStage.Needs = inputStage.Needs + + // add the existing dependent tag to the new stage + outputStage.Independent = inputStage.Independent + + // add the existing steps to the new stage + outputStage.Steps = inputStage.Steps + + // append stage to MapSlice + output = append(output, yaml.MapItem{Key: inputStage.Name, Value: outputStage}) + } + + return output, nil +} + +// MergeEnv takes a list of environment variables and attempts +// to set them in the stage environment. 
If the environment +// variable already exists in the stage, than this will +// overwrite the existing environment variable. +func (s *Stage) MergeEnv(environment map[string]string) error { + // check if the stage is empty + if s == nil || s.Environment == nil { + // TODO: evaluate if we should error here + // + // immediately return and do nothing + // + // treated as a no-op + return nil + } + + // check if the environment provided is empty + if environment == nil { + return fmt.Errorf("empty environment provided for stage %s", s.Name) + } + + // iterate through all environment variables provided + for key, value := range environment { + // set or update the stage environment variable + s.Environment[key] = value + } + + return nil +} diff --git a/compiler/types/yaml/stage_test.go b/compiler/types/yaml/stage_test.go new file mode 100644 index 000000000..7c63253b8 --- /dev/null +++ b/compiler/types/yaml/stage_test.go @@ -0,0 +1,474 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "os" + "reflect" + "testing" + + "github.com/buildkite/yaml" + "github.com/google/go-cmp/cmp" + + "github.com/go-vela/server/compiler/types/pipeline" +) + +func TestYaml_StageSlice_ToPipeline(t *testing.T) { + // setup tests + tests := []struct { + stages *StageSlice + want *pipeline.StageSlice + }{ + { + stages: &StageSlice{ + { + Name: "echo", + Needs: []string{"clone"}, + Steps: StepSlice{ + { + Commands: []string{"echo hello"}, + Detach: false, + Entrypoint: []string{"/bin/sh"}, + Environment: map[string]string{"FOO": "bar"}, + Image: "alpine:latest", + Name: "echo", + Privileged: false, + Pull: "not_present", + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Comment: []string{"test comment"}, + Event: []string{"push"}, + Path: []string{"foo.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"success"}, + Tag: []string{"v0.1.0"}, + Target: []string{"production"}, + }, + Unless: Rules{ + Branch: []string{"main"}, + Comment: []string{"real 
comment"}, + Event: []string{"pull_request"}, + Path: []string{"bar.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"failure"}, + Tag: []string{"v0.2.0"}, + Target: []string{"production"}, + }, + Operator: "and", + Continue: false, + }, + Secrets: StepSecretSlice{ + { + Source: "docker_username", + Target: "plugin_username", + }, + }, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + }, + }, + }, + }, + want: &pipeline.StageSlice{ + { + Name: "echo", + Needs: []string{"clone"}, + Steps: pipeline.ContainerSlice{ + { + Commands: []string{"echo hello"}, + Detach: false, + Entrypoint: []string{"/bin/sh"}, + Environment: map[string]string{"FOO": "bar"}, + Image: "alpine:latest", + Name: "echo", + Privileged: false, + Pull: "not_present", + Ruleset: pipeline.Ruleset{ + If: pipeline.Rules{ + Branch: []string{"main"}, + Comment: []string{"test comment"}, + Event: []string{"push"}, + Path: []string{"foo.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"success"}, + Tag: []string{"v0.1.0"}, + Target: []string{"production"}, + }, + Unless: pipeline.Rules{ + Branch: []string{"main"}, + Comment: []string{"real comment"}, + Event: []string{"pull_request"}, + Path: []string{"bar.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"failure"}, + Tag: []string{"v0.2.0"}, + Target: []string{"production"}, + }, + Operator: "and", + Continue: false, + }, + Secrets: pipeline.StepSecretSlice{ + { + Source: "docker_username", + Target: "plugin_username", + }, + }, + Ulimits: pipeline.UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + Volumes: pipeline.VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + }, + }, + }, + }, + }, + } + + // run tests + for _, test := range tests { + got := test.stages.ToPipeline() + + // WARNING: hack to compare stages + // + // Channel 
values can only be compared for equality. + // Two channel values are considered equal if they + // originated from the same make call meaning they + // refer to the same channel value in memory. + for i, stage := range *got { + tmp := *test.want + + tmp[i].Done = stage.Done + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} + +func TestYaml_StageSlice_UnmarshalYAML(t *testing.T) { + // setup types + var ( + b []byte + err error + ) + + // setup tests + tests := []struct { + failure bool + file string + want *StageSlice + }{ + { + failure: false, + file: "testdata/stage.yml", + want: &StageSlice{ + { + Name: "dependencies", + Needs: []string{"clone"}, + Environment: map[string]string{ + "STAGE_ENV_VAR": "stage", + }, + Independent: true, + Steps: StepSlice{ + { + Commands: []string{"./gradlew downloadDependencies"}, + Environment: map[string]string{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Image: "openjdk:latest", + Name: "install", + Pull: "always", + }, + }, + }, + { + Name: "test", + Needs: []string{"dependencies", "clone"}, + Environment: map[string]string{ + "STAGE_ENV_VAR": "stage", + "SECOND_STAGE_ENV": "stage2", + }, + Independent: false, + Steps: StepSlice{ + { + Commands: []string{"./gradlew check"}, + Environment: map[string]string{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "test", + Image: "openjdk:latest", + Pull: "always", + }, + }, + }, + { + Name: "build", + Needs: []string{"dependencies", "clone"}, + Environment: map[string]string{ + "STAGE_ENV_VAR": "stage", + }, + Independent: false, + Steps: StepSlice{ + { + Commands: []string{"./gradlew build"}, + Environment: map[string]string{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + 
"GRADLE_USER_HOME": ".gradle", + }, + Name: "build", + Image: "openjdk:latest", + Pull: "always", + }, + }, + }, + }, + }, + { + failure: true, + file: "testdata/invalid.yml", + want: nil, + }, + { + failure: true, + file: "", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(StageSlice) + + if len(test.file) > 0 { + b, err = os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file: %v", err) + } + } else { + b = []byte("- foo") + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if diff := cmp.Diff(test.want, got); diff != "" { + t.Errorf("(Unmarshal mismatch: -want +got):\n%s", diff) + } + } +} + +func TestYaml_StageSlice_MarshalYAML(t *testing.T) { + // setup types + var ( + b []byte + err error + ) + + // setup tests + tests := []struct { + failure bool + file string + want *StageSlice + }{ + { + failure: false, + file: "testdata/stage.yml", + want: &StageSlice{ + { + Name: "dependencies", + Needs: []string{"clone"}, + Independent: true, + Steps: StepSlice{ + { + Commands: []string{"./gradlew downloadDependencies"}, + Environment: map[string]string{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Image: "openjdk:latest", + Name: "install", + Pull: "always", + }, + }, + }, + { + Name: "test", + Needs: []string{"dependencies", "clone"}, + Independent: false, + Steps: StepSlice{ + { + Commands: []string{"./gradlew check"}, + Environment: map[string]string{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "test", + Image: "openjdk:latest", + Pull: "always", + }, + }, + }, + { + Name: "build", + Needs: []string{"dependencies", "clone"}, + Independent: 
false, + Steps: StepSlice{ + { + Commands: []string{"./gradlew build"}, + Environment: map[string]string{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "build", + Image: "openjdk:latest", + Pull: "always", + }, + }, + }, + }, + }, + { + failure: true, + file: "testdata/invalid.yml", + want: nil, + }, + { + failure: true, + file: "", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(StageSlice) + got2 := new(StageSlice) + + if len(test.file) > 0 { + b, err = os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file: %v", err) + } + } else { + b = []byte("- foo") + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + out, err := yaml.Marshal(got) + if err != nil { + t.Errorf("MarshalYAML returned err: %v", err) + } + + err = yaml.Unmarshal(out, got2) + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if diff := cmp.Diff(got2, test.want); diff != "" { + t.Errorf("(Marshal mismatch: -got +want):\n%s", diff) + } + } +} + +func TestYaml_Stage_MergeEnv(t *testing.T) { + // setup tests + tests := []struct { + stage *Stage + environment map[string]string + failure bool + }{ + { + stage: &Stage{ + Environment: map[string]string{"FOO": "bar"}, + Name: "testStage", + }, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + stage: &Stage{}, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + stage: nil, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + stage: &Stage{ + Environment: map[string]string{"FOO": "bar"}, + Name: "testStage", + }, + environment: nil, + failure: true, + }, + } + + // run tests + for _, test := range tests { + err := 
test.stage.MergeEnv(test.environment) + + if test.failure { + if err == nil { + t.Errorf("MergeEnv should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("MergeEnv returned err: %v", err) + } + } +} diff --git a/compiler/types/yaml/step.go b/compiler/types/yaml/step.go new file mode 100644 index 000000000..42dd7c320 --- /dev/null +++ b/compiler/types/yaml/step.go @@ -0,0 +1,150 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "fmt" + "strings" + + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" + "github.com/go-vela/types/constants" +) + +type ( + // StepSlice is the yaml representation + // of the steps block for a pipeline. + StepSlice []*Step + + // Step is the yaml representation of a step + // from the steps block for a pipeline. + Step struct { + Ruleset Ruleset `yaml:"ruleset,omitempty" json:"ruleset,omitempty" jsonschema:"description=Conditions to limit the execution of the container.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ruleset-key"` + Commands raw.StringSlice `yaml:"commands,omitempty" json:"commands,omitempty" jsonschema:"description=Execution instructions to run inside the container.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-commands-key"` + Entrypoint raw.StringSlice `yaml:"entrypoint,omitempty" json:"entrypoint,omitempty" jsonschema:"description=Command to execute inside the container.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-entrypoint-key"` + Secrets StepSecretSlice `yaml:"secrets,omitempty" json:"secrets,omitempty" jsonschema:"description=Sensitive variables injected into the container environment.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-secrets-key"` + Template StepTemplate `yaml:"template,omitempty" json:"template,omitempty" jsonschema:"oneof_required=template,description=Name of template to expand in the pipeline.\nReference: 
https://go-vela.github.io/docs/reference/yaml/steps/#the-template-key"` + Ulimits UlimitSlice `yaml:"ulimits,omitempty" json:"ulimits,omitempty" jsonschema:"description=Set the user limits for the container.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ulimits-key"` + Volumes VolumeSlice `yaml:"volumes,omitempty" json:"volumes,omitempty" jsonschema:"description=Mount volumes for the container.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-volume-key"` + Image string `yaml:"image,omitempty" json:"image,omitempty" jsonschema:"oneof_required=image,minLength=1,description=Docker image to use to create the ephemeral container.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-image-key"` + Name string `yaml:"name,omitempty" json:"name,omitempty" jsonschema:"required,minLength=1,description=Unique name for the step.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-name-key"` + Pull string `yaml:"pull,omitempty" json:"pull,omitempty" jsonschema:"enum=always,enum=not_present,enum=on_start,enum=never,default=not_present,description=Declaration to configure if and when the Docker image is pulled.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-pull-key"` + Environment raw.StringSliceMap `yaml:"environment,omitempty" json:"environment,omitempty" jsonschema:"description=Provide environment variables injected into the container environment.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-environment-key"` + Parameters map[string]interface{} `yaml:"parameters,omitempty" json:"parameters,omitempty" jsonschema:"description=Extra configuration variables for a plugin.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-parameters-key"` + Detach bool `yaml:"detach,omitempty" json:"detach,omitempty" jsonschema:"description=Run the container in a detached (headless) state.\nReference: 
https://go-vela.github.io/docs/reference/yaml/steps/#the-detach-key"` + Privileged bool `yaml:"privileged,omitempty" json:"privileged,omitempty" jsonschema:"description=Run the container with extra privileges.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-privileged-key"` + User string `yaml:"user,omitempty" json:"user,omitempty" jsonschema:"description=Set the user for the container.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-user-key"` + ReportAs string `yaml:"report_as,omitempty" json:"report_as,omitempty" jsonschema:"description=Set the name of the step to report as.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-report_as-key"` + IDRequest string `yaml:"id_request,omitempty" json:"id_request,omitempty" jsonschema:"description=Request ID Request Token for the step.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-id_request-key"` + } +) + +// ToPipeline converts the StepSlice type +// to a pipeline ContainerSlice type. +func (s *StepSlice) ToPipeline() *pipeline.ContainerSlice { + // step slice we want to return + stepSlice := new(pipeline.ContainerSlice) + + // iterate through each element in the step slice + for _, step := range *s { + // append the element to the pipeline container slice + *stepSlice = append(*stepSlice, &pipeline.Container{ + Commands: step.Commands, + Detach: step.Detach, + Entrypoint: step.Entrypoint, + Environment: step.Environment, + Image: step.Image, + Name: step.Name, + Privileged: step.Privileged, + Pull: step.Pull, + Ruleset: *step.Ruleset.ToPipeline(), + Secrets: *step.Secrets.ToPipeline(), + Ulimits: *step.Ulimits.ToPipeline(), + Volumes: *step.Volumes.ToPipeline(), + User: step.User, + ReportAs: step.ReportAs, + IDRequest: step.IDRequest, + }) + } + + return stepSlice +} + +// UnmarshalYAML implements the Unmarshaler interface for the StepSlice type. 
+// +//nolint:dupl // accepting duplicative code that exits in service.go as well +func (s *StepSlice) UnmarshalYAML(unmarshal func(interface{}) error) error { + // step slice we try unmarshalling to + stepSlice := new([]*Step) + + // attempt to unmarshal as a step slice type + err := unmarshal(stepSlice) + if err != nil { + return err + } + + // iterate through each element in the step slice + for _, step := range *stepSlice { + // handle nil step to avoid panic + if step == nil { + return fmt.Errorf("invalid step with nil content found") + } + + // implicitly set `pull` field if empty + if len(step.Pull) == 0 { + step.Pull = constants.PullNotPresent + } + + // TODO: remove this in a future release + // + // handle true deprecated pull policy + // + // a `true` pull policy equates to `always` + if strings.EqualFold(step.Pull, "true") { + step.Pull = constants.PullAlways + } + + // TODO: remove this in a future release + // + // handle false deprecated pull policy + // + // a `false` pull policy equates to `not_present` + if strings.EqualFold(step.Pull, "false") { + step.Pull = constants.PullNotPresent + } + } + + // overwrite existing StepSlice + *s = StepSlice(*stepSlice) + + return nil +} + +// MergeEnv takes a list of environment variables and attempts +// to set them in the step environment. If the environment +// variable already exists in the step, than this will +// overwrite the existing environment variable. 
+func (s *Step) MergeEnv(environment map[string]string) error { + // check if the step container is empty + if s == nil || s.Environment == nil { + // TODO: evaluate if we should error here + // + // immediately return and do nothing + // + // treated as a no-op + return nil + } + + // check if the environment provided is empty + if environment == nil { + return fmt.Errorf("empty environment provided for step %s", s.Name) + } + + // iterate through all environment variables provided + for key, value := range environment { + // set or update the step environment variable + s.Environment[key] = value + } + + return nil +} diff --git a/compiler/types/yaml/step_test.go b/compiler/types/yaml/step_test.go new file mode 100644 index 000000000..2f5336be4 --- /dev/null +++ b/compiler/types/yaml/step_test.go @@ -0,0 +1,327 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "os" + "reflect" + "testing" + + "github.com/buildkite/yaml" + + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" +) + +func TestYaml_StepSlice_ToPipeline(t *testing.T) { + // setup tests + tests := []struct { + steps *StepSlice + want *pipeline.ContainerSlice + }{ + { + steps: &StepSlice{ + { + Commands: []string{"echo hello"}, + Detach: false, + Entrypoint: []string{"/bin/sh"}, + Environment: map[string]string{"FOO": "bar"}, + Image: "alpine:latest", + Name: "echo", + Privileged: false, + Pull: "not_present", + ReportAs: "my-step", + IDRequest: "yes", + Ruleset: Ruleset{ + If: Rules{ + Branch: []string{"main"}, + Comment: []string{"test comment"}, + Event: []string{"push"}, + Path: []string{"foo.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"success"}, + Tag: []string{"v0.1.0"}, + Target: []string{"production"}, + }, + Unless: Rules{ + Branch: []string{"main"}, + Comment: []string{"real comment"}, + Event: []string{"pull_request"}, + Path: []string{"bar.txt"}, + Repo: []string{"github/octocat"}, + Status: 
[]string{"failure"}, + Tag: []string{"v0.2.0"}, + Target: []string{"production"}, + }, + Operator: "and", + Continue: false, + }, + Secrets: StepSecretSlice{ + { + Source: "docker_username", + Target: "plugin_username", + }, + }, + Ulimits: UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + Volumes: VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + }, + }, + want: &pipeline.ContainerSlice{ + { + Commands: []string{"echo hello"}, + Detach: false, + Entrypoint: []string{"/bin/sh"}, + Environment: map[string]string{"FOO": "bar"}, + Image: "alpine:latest", + Name: "echo", + Privileged: false, + Pull: "not_present", + ReportAs: "my-step", + IDRequest: "yes", + Ruleset: pipeline.Ruleset{ + If: pipeline.Rules{ + Branch: []string{"main"}, + Comment: []string{"test comment"}, + Event: []string{"push"}, + Path: []string{"foo.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"success"}, + Tag: []string{"v0.1.0"}, + Target: []string{"production"}, + }, + Unless: pipeline.Rules{ + Branch: []string{"main"}, + Comment: []string{"real comment"}, + Event: []string{"pull_request"}, + Path: []string{"bar.txt"}, + Repo: []string{"github/octocat"}, + Status: []string{"failure"}, + Tag: []string{"v0.2.0"}, + Target: []string{"production"}, + }, + Operator: "and", + Continue: false, + }, + Secrets: pipeline.StepSecretSlice{ + { + Source: "docker_username", + Target: "plugin_username", + }, + }, + Ulimits: pipeline.UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + Volumes: pipeline.VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + }, + }, + }, + } + + // run tests + for _, test := range tests { + got := test.steps.ToPipeline() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} + +func TestYaml_StepSlice_UnmarshalYAML(t *testing.T) { + // setup tests + tests := []struct { + failure bool + file 
string + want *StepSlice + }{ + { + failure: false, + file: "testdata/step.yml", + want: &StepSlice{ + { + Commands: raw.StringSlice{"./gradlew downloadDependencies"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "install", + Image: "openjdk:latest", + Pull: "always", + }, + { + Commands: raw.StringSlice{"./gradlew check"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "test", + Image: "openjdk:latest", + Pull: "always", + }, + { + Commands: raw.StringSlice{"./gradlew build"}, + Environment: raw.StringSliceMap{ + "GRADLE_OPTS": "-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false", + "GRADLE_USER_HOME": ".gradle", + }, + Name: "build", + Image: "openjdk:latest", + Pull: "always", + }, + { + Name: "docker_build", + Image: "plugins/docker:18.09", + Pull: "always", + ReportAs: "docker", + Parameters: map[string]interface{}{ + "registry": "index.docker.io", + "repo": "github/octocat", + "tags": []interface{}{"latest", "dev"}, + }, + }, + { + Name: "templated_publish", + Pull: "not_present", + Template: StepTemplate{ + Name: "docker_publish", + Variables: map[string]interface{}{ + "registry": "index.docker.io", + "repo": "github/octocat", + "tags": []interface{}{"latest", "dev"}, + }, + }, + }, + }, + }, + { + failure: true, + file: "testdata/invalid.yml", + want: nil, + }, + { + failure: true, + file: "testdata/step_malformed.yml", + want: nil, + }, + { + failure: true, + file: "testdata/step_nil.yml", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(StepSlice) + + b, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file: %v", err) + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + 
t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} + +func TestYaml_Step_MergeEnv(t *testing.T) { + // setup tests + tests := []struct { + step *Step + environment map[string]string + failure bool + }{ + { + step: &Step{ + Commands: []string{"echo hello"}, + Detach: false, + Entrypoint: []string{"/bin/sh"}, + Environment: map[string]string{"FOO": "bar"}, + Image: "alpine:latest", + Name: "echo", + Privileged: false, + Pull: "not_present", + }, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + step: &Step{}, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + step: nil, + environment: map[string]string{"BAR": "baz"}, + failure: false, + }, + { + step: &Step{ + Commands: []string{"echo hello"}, + Detach: false, + Entrypoint: []string{"/bin/sh"}, + Environment: map[string]string{"FOO": "bar"}, + Image: "alpine:latest", + Name: "echo", + Privileged: false, + Pull: "not_present", + }, + environment: nil, + failure: true, + }, + } + + // run tests + for _, test := range tests { + err := test.step.MergeEnv(test.environment) + + if test.failure { + if err == nil { + t.Errorf("MergeEnv should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("MergeEnv returned err: %v", err) + } + } +} diff --git a/compiler/types/yaml/template.go b/compiler/types/yaml/template.go new file mode 100644 index 000000000..22dbf2753 --- /dev/null +++ b/compiler/types/yaml/template.go @@ -0,0 +1,86 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "github.com/go-vela/types/library" +) + +type ( + // TemplateSlice is the yaml representation + // of the templates block for a pipeline. 
+ TemplateSlice []*Template + + // Template is the yaml representation of a template + // from the templates block for a pipeline. + Template struct { + Name string `yaml:"name,omitempty" json:"name,omitempty" jsonschema:"required,minLength=1,description=Unique identifier for the template.\nReference: https://go-vela.github.io/docs/reference/yaml/templates/#the-name-key"` + Source string `yaml:"source,omitempty" json:"source,omitempty" jsonschema:"required,minLength=1,description=Path to template in remote system.\nReference: https://go-vela.github.io/docs/reference/yaml/templates/#the-source-key"` + Format string `yaml:"format,omitempty" json:"format,omitempty" jsonschema:"enum=starlark,enum=golang,enum=go,default=go,minLength=1,description=language used within the template file \nReference: https://go-vela.github.io/docs/reference/yaml/templates/#the-format-key"` + Type string `yaml:"type,omitempty" json:"type,omitempty" jsonschema:"minLength=1,example=github,description=Type of template provided from the remote system.\nReference: https://go-vela.github.io/docs/reference/yaml/templates/#the-type-key"` + Variables map[string]interface{} `yaml:"vars,omitempty" json:"vars,omitempty" jsonschema:"description=Variables injected into the template.\nReference: https://go-vela.github.io/docs/reference/yaml/templates/#the-variables-key"` + } + + // StepTemplate is the yaml representation of the + // template block for a step in a pipeline. 
+ StepTemplate struct { + Name string `yaml:"name,omitempty" json:"name,omitempty" jsonschema:"required,minLength=1,description=Unique identifier for the template.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-template-key"` + Variables map[string]interface{} `yaml:"vars,omitempty" json:"vars,omitempty" jsonschema:"description=Variables injected into the template.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-template-key"` + } +) + +// UnmarshalYAML implements the Unmarshaler interface for the TemplateSlice type. +func (t *TemplateSlice) UnmarshalYAML(unmarshal func(interface{}) error) error { + // template slice we try unmarshalling to + templateSlice := new([]*Template) + + // attempt to unmarshal as a template slice type + err := unmarshal(templateSlice) + if err != nil { + return err + } + + // overwrite existing TemplateSlice + *t = TemplateSlice(*templateSlice) + + return nil +} + +// ToLibrary converts the Template type +// to a library Template type. +func (t *Template) ToLibrary() *library.Template { + template := new(library.Template) + + template.SetName(t.Name) + template.SetSource(t.Source) + template.SetType(t.Type) + + return template +} + +// TemplateFromLibrary converts the library Template type +// to a yaml Template type. +func TemplateFromLibrary(t *library.Template) *Template { + template := &Template{ + Name: t.GetName(), + Source: t.GetSource(), + Type: t.GetType(), + } + + return template +} + +// Map helper function that creates a map of templates from a slice of templates. 
+func (t *TemplateSlice) Map() map[string]*Template { + m := make(map[string]*Template) + + if t == nil { + return m + } + + for _, tmpl := range *t { + m[tmpl.Name] = tmpl + } + + return m +} diff --git a/compiler/types/yaml/template_test.go b/compiler/types/yaml/template_test.go new file mode 100644 index 000000000..e7f760cb7 --- /dev/null +++ b/compiler/types/yaml/template_test.go @@ -0,0 +1,121 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "os" + "reflect" + "testing" + + "github.com/buildkite/yaml" + + "github.com/go-vela/types/library" +) + +func TestBuild_TemplateSlice_UnmarshalYAML(t *testing.T) { + // setup tests + tests := []struct { + failure bool + file string + want *TemplateSlice + }{ + { + failure: false, + file: "testdata/template.yml", + want: &TemplateSlice{ + { + Name: "docker_build", + Source: "github.com/go-vela/atlas/stable/docker_create", + Type: "github", + }, + { + Name: "docker_build", + Source: "github.com/go-vela/atlas/stable/docker_build", + Format: "go", + Type: "github", + }, + { + Name: "docker_publish", + Source: "github.com/go-vela/atlas/stable/docker_publish", + Format: "starlark", + Type: "github", + }, + }, + }, + { + failure: true, + file: "testdata/invalid.yml", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(TemplateSlice) + + b, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file: %v", err) + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} + +func TestYAML_Template_ToLibrary(t *testing.T) { + // setup types + want := new(library.Template) + want.SetName("docker_build") + want.SetSource("github.com/go-vela/atlas/stable/docker_build") + 
want.SetType("github") + + tmpl := &Template{ + Name: "docker_build", + Source: "github.com/go-vela/atlas/stable/docker_build", + Type: "github", + } + + // run test + got := tmpl.ToLibrary() + + if !reflect.DeepEqual(got, want) { + t.Errorf("ToLibrary is %v, want %v", got, want) + } +} + +func TestYAML_TemplateFromLibrary(t *testing.T) { + // setup types + want := &Template{ + Name: "docker_build", + Source: "github.com/go-vela/atlas/stable/docker_build", + Type: "github", + } + + tmpl := new(library.Template) + tmpl.SetName("docker_build") + tmpl.SetSource("github.com/go-vela/atlas/stable/docker_build") + tmpl.SetType("github") + + // run test + got := TemplateFromLibrary(tmpl) + + if !reflect.DeepEqual(got, want) { + t.Errorf("TemplateFromLibrary is %v, want %v", got, want) + } +} diff --git a/compiler/types/yaml/testdata/build.yml b/compiler/types/yaml/testdata/build.yml new file mode 100644 index 000000000..e1a7fbc9f --- /dev/null +++ b/compiler/types/yaml/testdata/build.yml @@ -0,0 +1,144 @@ +--- +version: "1" + +environment: + HELLO: "Hello, Global Message" + +templates: + - name: docker_publish + source: github.com/go-vela/atlas/stable/docker_publish + type: github + +worker: + flavor: 16cpu8gb + platform: gcp + +services: + - name: postgres + image: postgres:latest + environment: + POSTGRES_DB: foo + ports: + - "5432:5432" + +steps: + - name: install + commands: + - ./gradlew downloadDependencies + environment: + GRADLE_OPTS: -Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + GRADLE_USER_HOME: .gradle + image: openjdk:latest + pull: true + ruleset: + event: [ push, pull_request:opened, pull_request:synchronize, pull_request:edited ] + volumes: [ /foo:/bar:ro ] + ulimits: [ foo=1024:2048 ] + + - name: test + commands: + - ./gradlew check + environment: + GRADLE_OPTS: -Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + GRADLE_USER_HOME: .gradle + image: openjdk:latest + pull: true + ruleset: + 
event: [ push, pull_request ] + volumes: [ /foo:/bar:ro ] + ulimits: [ foo=1024:2048 ] + + - name: build + commands: + - ./gradlew build + environment: + - GRADLE_OPTS=-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + - GRADLE_USER_HOME=.gradle + image: openjdk:latest + pull: true + ruleset: + event: [ push, pull_request ] + volumes: + - source: /foo + destination: /bar + access_mode: ro + ulimits: + - name: foo + soft: 1024 + hard: 2048 + + - name: docker_build + image: plugins/docker:18.09 + parameters: + dry_run: true + registry: index.docker.io + repo: github/octocat + tags: + - latest + - dev + pull: true + ruleset: + if: + event: [ push, pull_request ] + operator: and + + - name: docker_publish + image: plugins/docker:18.09 + parameters: + registry: index.docker.io + repo: github/octocat + tags: + - latest + - dev + pull: true + ruleset: + if: + branch: main + event: push + operator: and + secrets: + - source: docker_username + target: plugin_username + - source: docker_password + target: plugin_password + +secrets: + # Repo secrets + - name: docker_username + key: org/repo/docker/username + engine: native + type: repo + + - name: docker_password + key: org/repo/docker/password + engine: vault + type: repo + + # Org secrets + - name: docker_username + key: org/docker/username + engine: native + type: org + + - name: docker_password + key: org/docker/password + engine: vault + type: org + + # Shared secrets + - name: docker_username + key: org/team/docker/username + engine: native + type: shared + + - name: docker_password + key: org/team/docker/password + engine: vault + type: shared + + - origin: + image: target/vela-vault:latest + pull: always + parameters: + addr: vault.example.com + secrets: [ docker_username, docker_password ] diff --git a/compiler/types/yaml/testdata/build/validate/bad_pipeline0.yml b/compiler/types/yaml/testdata/build/validate/bad_pipeline0.yml new file mode 100644 index 000000000..8cbe12806 --- 
/dev/null +++ b/compiler/types/yaml/testdata/build/validate/bad_pipeline0.yml @@ -0,0 +1 @@ +version: 1 \ No newline at end of file diff --git a/compiler/types/yaml/testdata/build/validate/bad_pipeline1.yml b/compiler/types/yaml/testdata/build/validate/bad_pipeline1.yml new file mode 100644 index 000000000..b1db03665 --- /dev/null +++ b/compiler/types/yaml/testdata/build/validate/bad_pipeline1.yml @@ -0,0 +1,3 @@ +version: "1" +steps: +stages: \ No newline at end of file diff --git a/compiler/types/yaml/testdata/build/validate/bad_version.yml b/compiler/types/yaml/testdata/build/validate/bad_version.yml new file mode 100644 index 000000000..e2489f424 --- /dev/null +++ b/compiler/types/yaml/testdata/build/validate/bad_version.yml @@ -0,0 +1,2 @@ +--- +steps: \ No newline at end of file diff --git a/compiler/types/yaml/testdata/build/validate/step.yml b/compiler/types/yaml/testdata/build/validate/step.yml new file mode 100644 index 000000000..a70942591 --- /dev/null +++ b/compiler/types/yaml/testdata/build/validate/step.yml @@ -0,0 +1,47 @@ +--- +version: 1 +steps: + - name: install + commands: + - ./gradlew downloadDependencies + environment: + GRADLE_OPTS: -Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + GRADLE_USER_HOME: .gradle + image: openjdk:latest + pull: true + + - name: test + commands: + - ./gradlew check + environment: + GRADLE_OPTS: -Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + GRADLE_USER_HOME: .gradle + image: openjdk:latest + pull: true + + - name: build + commands: + - ./gradlew build + environment: + - GRADLE_OPTS=-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + - GRADLE_USER_HOME=.gradle + image: openjdk:latest + pull: true + + - name: docker_build + image: plugins/docker:18.09 + parameters: + registry: index.docker.io + repo: github/octocat + tags: + - latest + - dev + pull: true + + - name: templated_publish + template: + name: docker_publish 
+ vars: + registry: index.docker.io + repo: github/octocat + tags: [ latest, dev ] diff --git a/compiler/types/yaml/testdata/build_anchor_stage.yml b/compiler/types/yaml/testdata/build_anchor_stage.yml new file mode 100644 index 000000000..2fc87932b --- /dev/null +++ b/compiler/types/yaml/testdata/build_anchor_stage.yml @@ -0,0 +1,57 @@ +--- +version: "1" + +metadata: + template: false + +stage-anchor: &stage-anchor + environment: + GRADLE_OPTS: -Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + GRADLE_USER_HOME: .gradle + image: openjdk:latest + +stages: + dependencies: + steps: + - name: install + commands: + - ./gradlew downloadDependencies + <<: *stage-anchor + pull: true + ruleset: + event: [ push, pull_request ] + volumes: [ /foo:/bar:ro ] + ulimits: [ foo=1024:2048 ] + + test: + needs: [ dependencies ] + steps: + - name: test + commands: + - ./gradlew check + <<: *stage-anchor + pull: true + ruleset: + event: [ push, pull_request ] + volumes: [ /foo:/bar:ro ] + ulimits: [ foo=1024:2048 ] + + build: + needs: [ dependencies ] + independent: true + steps: + - name: build + commands: + - ./gradlew build + <<: *stage-anchor + pull: true + ruleset: + event: [ push, pull_request ] + volumes: + - source: /foo + destination: /bar + access_mode: ro + ulimits: + - name: foo + soft: 1024 + hard: 2048 diff --git a/compiler/types/yaml/testdata/build_anchor_step.yml b/compiler/types/yaml/testdata/build_anchor_step.yml new file mode 100644 index 000000000..ffc7abf50 --- /dev/null +++ b/compiler/types/yaml/testdata/build_anchor_step.yml @@ -0,0 +1,48 @@ +--- +version: "1" + +metadata: + template: false + +step-anchor: &step-anchor + environment: + GRADLE_OPTS: -Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + GRADLE_USER_HOME: .gradle + image: openjdk:latest + +steps: + - name: install + commands: + - ./gradlew downloadDependencies + <<: *step-anchor + pull: true + ruleset: + event: [ push, pull_request ] + 
volumes: [ /foo:/bar:ro ] + ulimits: [ foo=1024:2048 ] + + - name: test + commands: + - ./gradlew check + <<: *step-anchor + pull: true + ruleset: + event: [ push, pull_request ] + volumes: [ /foo:/bar:ro ] + ulimits: [ foo=1024:2048 ] + + - name: build + commands: + - ./gradlew build + <<: *step-anchor + pull: true + ruleset: + event: [ push, pull_request ] + volumes: + - source: /foo + destination: /bar + access_mode: ro + ulimits: + - name: foo + soft: 1024 + hard: 2048 diff --git a/compiler/types/yaml/testdata/build_empty_env.yml b/compiler/types/yaml/testdata/build_empty_env.yml new file mode 100644 index 000000000..6b4f7d063 --- /dev/null +++ b/compiler/types/yaml/testdata/build_empty_env.yml @@ -0,0 +1,27 @@ +--- +version: "1" + +metadata: + template: false + environment: [] + +environment: + HELLO: "Hello, Global Message" + +worker: + flavor: 16cpu8gb + platform: gcp + +steps: + - name: install + commands: + - ./gradlew downloadDependencies + environment: + GRADLE_OPTS: -Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + GRADLE_USER_HOME: .gradle + image: openjdk:latest + pull: true + ruleset: + event: [ push, pull_request ] + volumes: [ /foo:/bar:ro ] + ulimits: [ foo=1024:2048 ] \ No newline at end of file diff --git a/compiler/types/yaml/testdata/invalid.yml b/compiler/types/yaml/testdata/invalid.yml new file mode 100644 index 000000000..23809fe06 --- /dev/null +++ b/compiler/types/yaml/testdata/invalid.yml @@ -0,0 +1,2 @@ +--- +foo: bar diff --git a/compiler/types/yaml/testdata/merge_anchor.yml b/compiler/types/yaml/testdata/merge_anchor.yml new file mode 100644 index 000000000..3de1e64b4 --- /dev/null +++ b/compiler/types/yaml/testdata/merge_anchor.yml @@ -0,0 +1,46 @@ +# test file that uses the non-standard multiple anchor keys in one step to test custom step unmarshaler + +version: "1" + +aliases: + images: + alpine: &alpine-image + image: alpine:latest + postgres: &pg-image + image: postgres + + events: + push: 
&event-push + ruleset: + event: + - push + env: + dev-env: &dev-environment + environment: + REGION: dev + +services: + - name: service-a + <<: *pg-image + <<: *dev-environment + ports: + - "5432:5432" + +steps: + - name: alpha + <<: *alpine-image + <<: *event-push + commands: + - echo alpha + + - name: beta + <<: [ *alpine-image, *event-push ] + commands: + - echo beta + + - name: gamma + <<: *alpine-image + <<: *event-push + <<: *dev-environment + commands: + - echo gamma \ No newline at end of file diff --git a/compiler/types/yaml/testdata/metadata.yml b/compiler/types/yaml/testdata/metadata.yml new file mode 100644 index 000000000..6946050b0 --- /dev/null +++ b/compiler/types/yaml/testdata/metadata.yml @@ -0,0 +1,2 @@ +--- +template: false diff --git a/compiler/types/yaml/testdata/metadata_env.yml b/compiler/types/yaml/testdata/metadata_env.yml new file mode 100644 index 000000000..0b7932a30 --- /dev/null +++ b/compiler/types/yaml/testdata/metadata_env.yml @@ -0,0 +1,3 @@ +--- +template: false +environment: [ steps ] diff --git a/compiler/types/yaml/testdata/ruleset_advanced.yml b/compiler/types/yaml/testdata/ruleset_advanced.yml new file mode 100644 index 000000000..24039e2a6 --- /dev/null +++ b/compiler/types/yaml/testdata/ruleset_advanced.yml @@ -0,0 +1,15 @@ +--- +if: + branch: [ main ] + event: push + tag: "^refs/tags/(\\d+\\.)+\\d+$" +unless: + event: + - deployment + - pull_request + - comment + - schedule + path: [ foo.txt, /foo/bar.txt ] +matcher: regexp +operator: or +continue: true \ No newline at end of file diff --git a/compiler/types/yaml/testdata/ruleset_regex.yml b/compiler/types/yaml/testdata/ruleset_regex.yml new file mode 100644 index 000000000..eb6b1fd31 --- /dev/null +++ b/compiler/types/yaml/testdata/ruleset_regex.yml @@ -0,0 +1,7 @@ +--- +if: + branch: main + event: tag + tag: [ "^refs/tags/(\\d+\\.)+\\d+$" ] + operator: and +matcher: regex diff --git a/compiler/types/yaml/testdata/ruleset_simple.yml 
b/compiler/types/yaml/testdata/ruleset_simple.yml new file mode 100644 index 000000000..7696e49b1 --- /dev/null +++ b/compiler/types/yaml/testdata/ruleset_simple.yml @@ -0,0 +1,10 @@ +--- +branch: main +comment: "test comment" +continue: true +event: push +path: foo.txt +repo: github/octocat +status: success +tag: v0.1.0 +target: production diff --git a/compiler/types/yaml/testdata/secret.yml b/compiler/types/yaml/testdata/secret.yml new file mode 100644 index 000000000..c432eb291 --- /dev/null +++ b/compiler/types/yaml/testdata/secret.yml @@ -0,0 +1,39 @@ +--- +- source: foo + target: bar +- name: foo + key: bar + engine: native + type: repo + pull: build_start +- name: noKey + engine: native + type: repo +- name: noType + key: bar + engine: native +- name: noEngine + key: bar + type: repo +- name: noKeyEngineAndType +- name: externalSecret + origin: + environment: + FOO: bar + image: target/vela-vault:latest + pull: true + parameters: + addr: vault.company.com + ruleset: + event: [ push ] + secrets: [ foo, foobar ] +- origin: + environment: + FOO: bar + image: target/vela-vault:latest + pull: true + parameters: + addr: vault.company.com + ruleset: + event: [ push ] + secrets: [ foo, foobar ] diff --git a/compiler/types/yaml/testdata/secret/validate/no_name.yml b/compiler/types/yaml/testdata/secret/validate/no_name.yml new file mode 100644 index 000000000..d674817c5 --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/no_name.yml @@ -0,0 +1,11 @@ +secrets: +# Declarative repository secret definition. 
+ - key: github/ocotocat/foob + engine: native + type: repo + - key: github/ocotocat + engine: native + type: org + - key: github/octokitties/foobar + engine: native + type: org \ No newline at end of file diff --git a/compiler/types/yaml/testdata/secret/validate/org.yml b/compiler/types/yaml/testdata/secret/validate/org.yml new file mode 100644 index 000000000..a5aad5e0d --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/org.yml @@ -0,0 +1,5 @@ +secrets: + - name: foobar + key: github/foobar + engine: native + type: org \ No newline at end of file diff --git a/compiler/types/yaml/testdata/secret/validate/org_bad_engine.yml b/compiler/types/yaml/testdata/secret/validate/org_bad_engine.yml new file mode 100644 index 000000000..18f9f8c6f --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/org_bad_engine.yml @@ -0,0 +1,9 @@ +secrets: + - name: foo + key: github/foobar + type: org + + - name: foobar + key: github/foobar + engine: badengine + type: org \ No newline at end of file diff --git a/compiler/types/yaml/testdata/secret/validate/org_bad_key.yml b/compiler/types/yaml/testdata/secret/validate/org_bad_key.yml new file mode 100644 index 000000000..bae2fcc9a --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/org_bad_key.yml @@ -0,0 +1,9 @@ +secrets: + - name: foo + engine: native + type: org + + - name: foobar + key: github + engine: native + type: org \ No newline at end of file diff --git a/compiler/types/yaml/testdata/secret/validate/plugin.yml b/compiler/types/yaml/testdata/secret/validate/plugin.yml new file mode 100644 index 000000000..180ab5da4 --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/plugin.yml @@ -0,0 +1,8 @@ +secrets: + - origin: + name: vault secrets + image: target/vela/secret-vault:latest + parameters: + items: + - source: secret/vela/dev/docker + path: docker \ No newline at end of file diff --git a/compiler/types/yaml/testdata/secret/validate/plugin_bad_image.yml 
b/compiler/types/yaml/testdata/secret/validate/plugin_bad_image.yml new file mode 100644 index 000000000..c21be424e --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/plugin_bad_image.yml @@ -0,0 +1,15 @@ +secrets: + - origin: + name: vault secrets + parameters: + items: + - source: secret/vela/dev/docker + path: docker + + - origin: + name: vault secrets + image: bazel/:java:3240943c9ea3f72db51bea0a2428e83f3c5fa1312e19af017d026f9bcf70f84b + parameters: + items: + - source: secret/vela/dev/docker + path: docker \ No newline at end of file diff --git a/compiler/types/yaml/testdata/secret/validate/plugin_bad_name.yml b/compiler/types/yaml/testdata/secret/validate/plugin_bad_name.yml new file mode 100644 index 000000000..6ebb1505f --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/plugin_bad_name.yml @@ -0,0 +1,7 @@ +secrets: + - origin: + image: target/vela/secret-vault:latest + parameters: + items: + - source: secret/vela/dev/docker + path: docker \ No newline at end of file diff --git a/compiler/types/yaml/testdata/secret/validate/repo.yml b/compiler/types/yaml/testdata/secret/validate/repo.yml new file mode 100644 index 000000000..fcad31edb --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/repo.yml @@ -0,0 +1,13 @@ +secrets: + # Implicit native secret definition. + - name: foo + + # Declarative repository secret definition. 
+ - name: foob + key: github/ocotocat/foob + engine: native + type: repo + - name: foo_bar + key: github/ocotocat/foo/bar + engine: native + type: repo \ No newline at end of file diff --git a/compiler/types/yaml/testdata/secret/validate/repo_bad_engine.yml b/compiler/types/yaml/testdata/secret/validate/repo_bad_engine.yml new file mode 100644 index 000000000..3eac1d359 --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/repo_bad_engine.yml @@ -0,0 +1,5 @@ +secrets: + - name: foobar + key: github/ocotocat/foobar + engine: badengine + type: repo \ No newline at end of file diff --git a/compiler/types/yaml/testdata/secret/validate/repo_bad_key.yml b/compiler/types/yaml/testdata/secret/validate/repo_bad_key.yml new file mode 100644 index 000000000..cf031b30c --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/repo_bad_key.yml @@ -0,0 +1,14 @@ +secrets: + - name: foo + engine: native + type: repo + + - name: bar + key: github/ocotocat + engine: native + type: repo + + - name: foobar + key: github + engine: native + type: repo \ No newline at end of file diff --git a/compiler/types/yaml/testdata/secret/validate/shared.yml b/compiler/types/yaml/testdata/secret/validate/shared.yml new file mode 100644 index 000000000..4037a97a3 --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/shared.yml @@ -0,0 +1,5 @@ +secrets: + - name: foobar + key: github/ocotokitties/foo + engine: native + type: shared \ No newline at end of file diff --git a/compiler/types/yaml/testdata/secret/validate/shared_bad_engine.yml b/compiler/types/yaml/testdata/secret/validate/shared_bad_engine.yml new file mode 100644 index 000000000..ca22067dd --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/shared_bad_engine.yml @@ -0,0 +1,9 @@ +secrets: + - name: foo + key: github/ocotokitties/foo + type: shared + + - name: foobar + key: github/ocotokitties/foo + engine: badengine + type: shared \ No newline at end of file diff --git 
a/compiler/types/yaml/testdata/secret/validate/shared_bad_key.yml b/compiler/types/yaml/testdata/secret/validate/shared_bad_key.yml new file mode 100644 index 000000000..b80945618 --- /dev/null +++ b/compiler/types/yaml/testdata/secret/validate/shared_bad_key.yml @@ -0,0 +1,9 @@ +secrets: + - name: foo + engine: native + type: shared + + - name: foobar + key: github/ocotokitties + engine: native + type: shared \ No newline at end of file diff --git a/compiler/types/yaml/testdata/service.yml b/compiler/types/yaml/testdata/service.yml new file mode 100644 index 000000000..b6bc0456f --- /dev/null +++ b/compiler/types/yaml/testdata/service.yml @@ -0,0 +1,14 @@ +--- +- name: postgres + image: postgres:latest + environment: + POSTGRES_DB: foo + ports: + - "5432:5432" + +- name: mysql + image: mysql:latest + environment: + MYSQL_DATABASE: foo + ports: + - "3061:3061" \ No newline at end of file diff --git a/compiler/types/yaml/testdata/service/validate/bad_image.yml b/compiler/types/yaml/testdata/service/validate/bad_image.yml new file mode 100644 index 000000000..47a875e67 --- /dev/null +++ b/compiler/types/yaml/testdata/service/validate/bad_image.yml @@ -0,0 +1,3 @@ +services: + - name: badimage + image: bazel/:java:3240943c9ea3f72db51bea0a2428e83f3c5fa1312e19af017d026f9bcf70f84b \ No newline at end of file diff --git a/compiler/types/yaml/testdata/service/validate/minimal.yml b/compiler/types/yaml/testdata/service/validate/minimal.yml new file mode 100644 index 000000000..469d2dddf --- /dev/null +++ b/compiler/types/yaml/testdata/service/validate/minimal.yml @@ -0,0 +1,3 @@ +services: + - name: postgres + image: postgres:latest diff --git a/compiler/types/yaml/testdata/service/validate/missing_image.yml b/compiler/types/yaml/testdata/service/validate/missing_image.yml new file mode 100644 index 000000000..3c2b76f1f --- /dev/null +++ b/compiler/types/yaml/testdata/service/validate/missing_image.yml @@ -0,0 +1,2 @@ +services: + - name: postgres \ No newline at end of 
file diff --git a/compiler/types/yaml/testdata/service/validate/missing_name.yml b/compiler/types/yaml/testdata/service/validate/missing_name.yml new file mode 100644 index 000000000..0c1034e5d --- /dev/null +++ b/compiler/types/yaml/testdata/service/validate/missing_name.yml @@ -0,0 +1,2 @@ +services: + - image: postgres:latest \ No newline at end of file diff --git a/compiler/types/yaml/testdata/service_nil.yml b/compiler/types/yaml/testdata/service_nil.yml new file mode 100644 index 000000000..41cd65e60 --- /dev/null +++ b/compiler/types/yaml/testdata/service_nil.yml @@ -0,0 +1,2 @@ +--- +- diff --git a/compiler/types/yaml/testdata/stage.yml b/compiler/types/yaml/testdata/stage.yml new file mode 100644 index 000000000..543ffdf83 --- /dev/null +++ b/compiler/types/yaml/testdata/stage.yml @@ -0,0 +1,44 @@ +--- +dependencies: + environment: + STAGE_ENV_VAR: stage + independent: true + steps: + - name: install + commands: + - ./gradlew downloadDependencies + environment: + GRADLE_OPTS: -Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + GRADLE_USER_HOME: .gradle + image: openjdk:latest + pull: true + +test: + needs: [ dependencies ] + environment: + STAGE_ENV_VAR: stage + SECOND_STAGE_ENV: stage2 + independent: false + steps: + - name: test + commands: + - ./gradlew check + environment: + GRADLE_OPTS: -Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + GRADLE_USER_HOME: .gradle + image: openjdk:latest + pull: true + +build: + needs: [ dependencies ] + environment: + STAGE_ENV_VAR: stage + steps: + - name: build + commands: + - ./gradlew build + environment: + - GRADLE_OPTS=-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + - GRADLE_USER_HOME=.gradle + image: openjdk:latest + pull: true diff --git a/compiler/types/yaml/testdata/stage/validate/bad_image.yml b/compiler/types/yaml/testdata/stage/validate/bad_image.yml new file mode 100644 index 000000000..3dbed3107 --- 
/dev/null +++ b/compiler/types/yaml/testdata/stage/validate/bad_image.yml @@ -0,0 +1,7 @@ +stages: + badimage: + steps: + - name: badimage + image: bazel/:java:3240943c9ea3f72db51bea0a2428e83f3c5fa1312e19af017d026f9bcf70f84b + commands: + - echo "hello vela" \ No newline at end of file diff --git a/compiler/types/yaml/testdata/stage/validate/minimal.yml b/compiler/types/yaml/testdata/stage/validate/minimal.yml new file mode 100644 index 000000000..665887c87 --- /dev/null +++ b/compiler/types/yaml/testdata/stage/validate/minimal.yml @@ -0,0 +1,7 @@ +stages: + hello: + steps: + - name: hello + image: alpine:latest + commands: + - echo "hello vela" \ No newline at end of file diff --git a/compiler/types/yaml/testdata/stage/validate/missing.yml b/compiler/types/yaml/testdata/stage/validate/missing.yml new file mode 100644 index 000000000..954fdeabe --- /dev/null +++ b/compiler/types/yaml/testdata/stage/validate/missing.yml @@ -0,0 +1,5 @@ +stages: + hello: + steps: + - name: hello + image: alpine:latest \ No newline at end of file diff --git a/compiler/types/yaml/testdata/stage/validate/missing_image.yml b/compiler/types/yaml/testdata/stage/validate/missing_image.yml new file mode 100644 index 000000000..90b361748 --- /dev/null +++ b/compiler/types/yaml/testdata/stage/validate/missing_image.yml @@ -0,0 +1,6 @@ +stages: + hello: + steps: + - name: hello + commands: + - echo "hello vela" \ No newline at end of file diff --git a/compiler/types/yaml/testdata/stage/validate/missing_name.yml b/compiler/types/yaml/testdata/stage/validate/missing_name.yml new file mode 100644 index 000000000..1394939a3 --- /dev/null +++ b/compiler/types/yaml/testdata/stage/validate/missing_name.yml @@ -0,0 +1,6 @@ +stages: + hello: + steps: + - image: alpine:latest + commands: + - echo "hello vela" \ No newline at end of file diff --git a/compiler/types/yaml/testdata/step.yml b/compiler/types/yaml/testdata/step.yml new file mode 100644 index 000000000..1d6d9cc93 --- /dev/null +++ 
b/compiler/types/yaml/testdata/step.yml @@ -0,0 +1,46 @@ +--- +- name: install + commands: + - ./gradlew downloadDependencies + environment: + GRADLE_OPTS: -Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + GRADLE_USER_HOME: .gradle + image: openjdk:latest + pull: true + +- name: test + commands: + - ./gradlew check + environment: + GRADLE_OPTS: -Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + GRADLE_USER_HOME: .gradle + image: openjdk:latest + pull: true + +- name: build + commands: + - ./gradlew build + environment: + - GRADLE_OPTS=-Dorg.gradle.daemon=false -Dorg.gradle.workers.max=1 -Dorg.gradle.parallel=false + - GRADLE_USER_HOME=.gradle + image: openjdk:latest + pull: true + +- name: docker_build + image: plugins/docker:18.09 + report_as: docker + parameters: + registry: index.docker.io + repo: github/octocat + tags: + - latest + - dev + pull: true + +- name: templated_publish + template: + name: docker_publish + vars: + registry: index.docker.io + repo: github/octocat + tags: [ latest, dev ] diff --git a/compiler/types/yaml/testdata/step/validate/bad_image.yml b/compiler/types/yaml/testdata/step/validate/bad_image.yml new file mode 100644 index 000000000..a97e8f12c --- /dev/null +++ b/compiler/types/yaml/testdata/step/validate/bad_image.yml @@ -0,0 +1,5 @@ +steps: + - name: badimage + image: bazel/:java:3240943c9ea3f72db51bea0a2428e83f3c5fa1312e19af017d026f9bcf70f84b + commands: + - echo "hello vela" \ No newline at end of file diff --git a/compiler/types/yaml/testdata/step/validate/minimal.yml b/compiler/types/yaml/testdata/step/validate/minimal.yml new file mode 100644 index 000000000..da2283d4b --- /dev/null +++ b/compiler/types/yaml/testdata/step/validate/minimal.yml @@ -0,0 +1,5 @@ +steps: + - name: hello + image: alpine:latest + commands: + - echo "hello vela" \ No newline at end of file diff --git a/compiler/types/yaml/testdata/step/validate/missing.yml 
b/compiler/types/yaml/testdata/step/validate/missing.yml new file mode 100644 index 000000000..0aa30db52 --- /dev/null +++ b/compiler/types/yaml/testdata/step/validate/missing.yml @@ -0,0 +1,3 @@ +steps: + - name: hello + image: alpine:latest \ No newline at end of file diff --git a/compiler/types/yaml/testdata/step/validate/missing_image.yml b/compiler/types/yaml/testdata/step/validate/missing_image.yml new file mode 100644 index 000000000..b36acf70d --- /dev/null +++ b/compiler/types/yaml/testdata/step/validate/missing_image.yml @@ -0,0 +1,4 @@ +steps: + - name: hello + commands: + - echo "hello vela" \ No newline at end of file diff --git a/compiler/types/yaml/testdata/step/validate/missing_name.yml b/compiler/types/yaml/testdata/step/validate/missing_name.yml new file mode 100644 index 000000000..228ac30ec --- /dev/null +++ b/compiler/types/yaml/testdata/step/validate/missing_name.yml @@ -0,0 +1,4 @@ +steps: + - image: alpine:latest + commands: + - echo "hello vela" \ No newline at end of file diff --git a/compiler/types/yaml/testdata/step_malformed.yml b/compiler/types/yaml/testdata/step_malformed.yml new file mode 100644 index 000000000..5d70e5a4f --- /dev/null +++ b/compiler/types/yaml/testdata/step_malformed.yml @@ -0,0 +1,4 @@ +--- +- name: Testing + environment: + - 'This: Shouldnt Panic' diff --git a/compiler/types/yaml/testdata/step_nil.yml b/compiler/types/yaml/testdata/step_nil.yml new file mode 100644 index 000000000..41cd65e60 --- /dev/null +++ b/compiler/types/yaml/testdata/step_nil.yml @@ -0,0 +1,2 @@ +--- +- diff --git a/compiler/types/yaml/testdata/step_secret_slice.yml b/compiler/types/yaml/testdata/step_secret_slice.yml new file mode 100644 index 000000000..64bc68b7f --- /dev/null +++ b/compiler/types/yaml/testdata/step_secret_slice.yml @@ -0,0 +1,5 @@ +--- +- source: foo + target: bar +- source: hello + target: world diff --git a/compiler/types/yaml/testdata/step_secret_slice_invalid_no_source.yml 
b/compiler/types/yaml/testdata/step_secret_slice_invalid_no_source.yml new file mode 100644 index 000000000..1f7e6fc3e --- /dev/null +++ b/compiler/types/yaml/testdata/step_secret_slice_invalid_no_source.yml @@ -0,0 +1,2 @@ +--- +- target: foo \ No newline at end of file diff --git a/compiler/types/yaml/testdata/step_secret_slice_invalid_no_target.yml b/compiler/types/yaml/testdata/step_secret_slice_invalid_no_target.yml new file mode 100644 index 000000000..3e0e29b1c --- /dev/null +++ b/compiler/types/yaml/testdata/step_secret_slice_invalid_no_target.yml @@ -0,0 +1,2 @@ +--- +- source: foo \ No newline at end of file diff --git a/compiler/types/yaml/testdata/step_secret_string.yml b/compiler/types/yaml/testdata/step_secret_string.yml new file mode 100644 index 000000000..930977980 --- /dev/null +++ b/compiler/types/yaml/testdata/step_secret_string.yml @@ -0,0 +1,2 @@ +--- +[ foo, hello ] diff --git a/compiler/types/yaml/testdata/template.yml b/compiler/types/yaml/testdata/template.yml new file mode 100644 index 000000000..6d5615e01 --- /dev/null +++ b/compiler/types/yaml/testdata/template.yml @@ -0,0 +1,12 @@ +--- +- name: docker_build + source: github.com/go-vela/atlas/stable/docker_create + type: github +- name: docker_build + source: github.com/go-vela/atlas/stable/docker_build + format: go + type: github +- name: docker_publish + source: github.com/go-vela/atlas/stable/docker_publish + format: starlark + type: github diff --git a/compiler/types/yaml/testdata/ulimit_colon_error.yml b/compiler/types/yaml/testdata/ulimit_colon_error.yml new file mode 100644 index 000000000..3e948cb08 --- /dev/null +++ b/compiler/types/yaml/testdata/ulimit_colon_error.yml @@ -0,0 +1,2 @@ +--- +[ foo=bar:1024:2048 ] diff --git a/compiler/types/yaml/testdata/ulimit_equal_error.yml b/compiler/types/yaml/testdata/ulimit_equal_error.yml new file mode 100644 index 000000000..f72b3b461 --- /dev/null +++ b/compiler/types/yaml/testdata/ulimit_equal_error.yml @@ -0,0 +1,2 @@ +--- +[ 
foo=1024=2048 ] diff --git a/compiler/types/yaml/testdata/ulimit_hardlimit1_error.yml b/compiler/types/yaml/testdata/ulimit_hardlimit1_error.yml new file mode 100644 index 000000000..1472c22b7 --- /dev/null +++ b/compiler/types/yaml/testdata/ulimit_hardlimit1_error.yml @@ -0,0 +1,2 @@ +--- +[ foo=bar:1024 ] diff --git a/compiler/types/yaml/testdata/ulimit_hardlimit2_error.yml b/compiler/types/yaml/testdata/ulimit_hardlimit2_error.yml new file mode 100644 index 000000000..4569bc3ad --- /dev/null +++ b/compiler/types/yaml/testdata/ulimit_hardlimit2_error.yml @@ -0,0 +1,2 @@ +--- +[ foo=1024:bar ] diff --git a/compiler/types/yaml/testdata/ulimit_slice.yml b/compiler/types/yaml/testdata/ulimit_slice.yml new file mode 100644 index 000000000..9ee862c06 --- /dev/null +++ b/compiler/types/yaml/testdata/ulimit_slice.yml @@ -0,0 +1,6 @@ +--- +- name: foo + soft: 1024 +- name: bar + soft: 1024 + hard: 2048 diff --git a/compiler/types/yaml/testdata/ulimit_softlimit_error.yml b/compiler/types/yaml/testdata/ulimit_softlimit_error.yml new file mode 100644 index 000000000..63f68f1c4 --- /dev/null +++ b/compiler/types/yaml/testdata/ulimit_softlimit_error.yml @@ -0,0 +1,2 @@ +--- +[ foo=bar ] diff --git a/compiler/types/yaml/testdata/ulimit_string.yml b/compiler/types/yaml/testdata/ulimit_string.yml new file mode 100644 index 000000000..59669af36 --- /dev/null +++ b/compiler/types/yaml/testdata/ulimit_string.yml @@ -0,0 +1,2 @@ +--- +[ foo=1024, bar=1024:2048 ] diff --git a/compiler/types/yaml/testdata/volume_error.yml b/compiler/types/yaml/testdata/volume_error.yml new file mode 100644 index 000000000..8c36e5057 --- /dev/null +++ b/compiler/types/yaml/testdata/volume_error.yml @@ -0,0 +1,2 @@ +--- +[ /foo:/bar:/foo:bar ] diff --git a/compiler/types/yaml/testdata/volume_slice.yml b/compiler/types/yaml/testdata/volume_slice.yml new file mode 100644 index 000000000..fbad0133b --- /dev/null +++ b/compiler/types/yaml/testdata/volume_slice.yml @@ -0,0 +1,7 @@ +--- +- source: /foo +- 
source: /foo + destination: /bar +- source: /foo + destination: /foobar + access_mode: ro diff --git a/compiler/types/yaml/testdata/volume_string.yml b/compiler/types/yaml/testdata/volume_string.yml new file mode 100644 index 000000000..a596a9116 --- /dev/null +++ b/compiler/types/yaml/testdata/volume_string.yml @@ -0,0 +1,2 @@ +--- +[ /foo, /foo:/bar, /foo:/foobar:ro ] diff --git a/compiler/types/yaml/ulimit.go b/compiler/types/yaml/ulimit.go new file mode 100644 index 000000000..14d96c287 --- /dev/null +++ b/compiler/types/yaml/ulimit.go @@ -0,0 +1,132 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "fmt" + "strconv" + "strings" + + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" +) + +type ( + // UlimitSlice is the yaml representation of + // the ulimits block for a step in a pipeline. + UlimitSlice []*Ulimit + + // Ulimit is the yaml representation of a ulimit + // from the ulimits block for a step in a pipeline. + Ulimit struct { + Name string `yaml:"name,omitempty" json:"name,omitempty" jsonschema:"required,minLength=1,description=Unique name of the user limit.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ulimits-key"` + Soft int64 `yaml:"soft,omitempty" json:"soft,omitempty" jsonschema:"description=Set the soft limit.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ulimits-key"` + Hard int64 `yaml:"hard,omitempty" json:"hard,omitempty" jsonschema:"description=Set the hard limit.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-ulimits-key"` + } +) + +// ToPipeline converts the UlimitSlice type +// to a pipeline UlimitSlice type. 
+func (u *UlimitSlice) ToPipeline() *pipeline.UlimitSlice { + // ulimit slice we want to return + ulimitSlice := new(pipeline.UlimitSlice) + + // iterate through each element in the ulimit slice + for _, ulimit := range *u { + // append the element to the pipeline ulimit slice + *ulimitSlice = append(*ulimitSlice, &pipeline.Ulimit{ + Name: ulimit.Name, + Soft: ulimit.Soft, + Hard: ulimit.Hard, + }) + } + + return ulimitSlice +} + +// UnmarshalYAML implements the Unmarshaler interface for the UlimitSlice type. +func (u *UlimitSlice) UnmarshalYAML(unmarshal func(interface{}) error) error { + // string slice we try unmarshalling to + stringSlice := new(raw.StringSlice) + + // attempt to unmarshal as a string slice type + err := unmarshal(stringSlice) + if err == nil { + // iterate through each element in the string slice + for _, ulimit := range *stringSlice { + // split each slice element into key/value pairs + parts := strings.Split(ulimit, "=") + if len(parts) != 2 { + return fmt.Errorf("ulimit %s must contain 1 `=` (equal)", ulimit) + } + + // split each value into soft and hard limits + limitParts := strings.Split(parts[1], ":") + + switch { + case len(limitParts) == 1: + // capture value for soft and hard limit + value, err := strconv.ParseInt(limitParts[0], 10, 64) + if err != nil { + return err + } + + // append the element to the ulimit slice + *u = append(*u, &Ulimit{ + Name: parts[0], + Soft: value, + Hard: value, + }) + + continue + case len(limitParts) == 2: + // capture value for soft limit + firstValue, err := strconv.ParseInt(limitParts[0], 10, 64) + if err != nil { + return err + } + + // capture value for hard limit + secondValue, err := strconv.ParseInt(limitParts[1], 10, 64) + if err != nil { + return err + } + + // append the element to the ulimit slice + *u = append(*u, &Ulimit{ + Name: parts[0], + Soft: firstValue, + Hard: secondValue, + }) + + continue + default: + return fmt.Errorf("ulimit %s can only contain 1 `:` (colon)", ulimit) + } + } + 
+ return nil + } + + // ulimit slice we try unmarshalling to + ulimits := new([]*Ulimit) + + // attempt to unmarshal as a ulimit slice type + err = unmarshal(ulimits) + if err != nil { + return err + } + + // iterate through each element in the ulimit slice + for _, ulimit := range *ulimits { + // implicitly set `hard` field if empty + if ulimit.Hard == 0 { + ulimit.Hard = ulimit.Soft + } + } + + // overwrite existing UlimitSlice + *u = UlimitSlice(*ulimits) + + return nil +} diff --git a/compiler/types/yaml/ulimit_test.go b/compiler/types/yaml/ulimit_test.go new file mode 100644 index 000000000..3a2d9fcfb --- /dev/null +++ b/compiler/types/yaml/ulimit_test.go @@ -0,0 +1,147 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "os" + "reflect" + "testing" + + "github.com/buildkite/yaml" + + "github.com/go-vela/server/compiler/types/pipeline" +) + +func TestYaml_UlimitSlice_ToPipeline(t *testing.T) { + // setup tests + tests := []struct { + ulimits *UlimitSlice + want *pipeline.UlimitSlice + }{ + { + ulimits: &UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + want: &pipeline.UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 2048, + }, + }, + }, + } + + // run tests + for _, test := range tests { + got := test.ulimits.ToPipeline() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} + +func TestYaml_UlimitSlice_UnmarshalYAML(t *testing.T) { + // setup tests + tests := []struct { + failure bool + file string + want *UlimitSlice + }{ + { + failure: false, + file: "testdata/ulimit_slice.yml", + want: &UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 1024, + }, + { + Name: "bar", + Soft: 1024, + Hard: 2048, + }, + }, + }, + { + failure: false, + file: "testdata/ulimit_string.yml", + want: &UlimitSlice{ + { + Name: "foo", + Soft: 1024, + Hard: 1024, + }, + { + Name: "bar", + Soft: 1024, + Hard: 2048, + }, + }, + }, + { + failure: true, + file: 
"testdata/invalid.yml", + want: nil, + }, + { + failure: true, + file: "testdata/ulimit_equal_error.yml", + want: nil, + }, + { + failure: true, + file: "testdata/ulimit_colon_error.yml", + want: nil, + }, + { + failure: true, + file: "testdata/ulimit_softlimit_error.yml", + want: nil, + }, + { + failure: true, + file: "testdata/ulimit_hardlimit1_error.yml", + want: nil, + }, + { + failure: true, + file: "testdata/ulimit_hardlimit2_error.yml", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(UlimitSlice) + + b, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file: %v", err) + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} diff --git a/compiler/types/yaml/volume.go b/compiler/types/yaml/volume.go new file mode 100644 index 000000000..cc6cd991e --- /dev/null +++ b/compiler/types/yaml/volume.go @@ -0,0 +1,121 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "fmt" + "strings" + + "github.com/go-vela/server/compiler/types/pipeline" + "github.com/go-vela/server/compiler/types/raw" +) + +type ( + // VolumeSlice is the yaml representation of + // the volumes block for a step in a pipeline. + VolumeSlice []*Volume + + // Volume is the yaml representation of a volume + // from a volumes block for a step in a pipeline. 
+ Volume struct { + Source string `yaml:"source,omitempty" json:"source,omitempty" jsonschema:"required,minLength=1,description=Set the source directory to be mounted.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-volume-key"` + Destination string `yaml:"destination,omitempty" json:"destination,omitempty" jsonschema:"required,minLength=1,description=Set the destination directory for the mount in the container.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-volume-key"` + AccessMode string `yaml:"access_mode,omitempty" json:"access_mode,omitempty" jsonschema:"default=ro,description=Set the access mode for the mounted volume.\nReference: https://go-vela.github.io/docs/reference/yaml/steps/#the-volume-key"` + } +) + +// ToPipeline converts the VolumeSlice type +// to a pipeline VolumeSlice type. +func (v *VolumeSlice) ToPipeline() *pipeline.VolumeSlice { + // volume slice we want to return + volumes := new(pipeline.VolumeSlice) + + // iterate through each element in the volume slice + for _, volume := range *v { + // append the element to the pipeline volume slice + *volumes = append(*volumes, &pipeline.Volume{ + Source: volume.Source, + Destination: volume.Destination, + AccessMode: volume.AccessMode, + }) + } + + return volumes +} + +// UnmarshalYAML implements the Unmarshaler interface for the VolumeSlice type. 
+func (v *VolumeSlice) UnmarshalYAML(unmarshal func(interface{}) error) error { + // string slice we try unmarshalling to + stringSlice := new(raw.StringSlice) + + // attempt to unmarshal as a string slice type + err := unmarshal(stringSlice) + if err == nil { + // iterate through each element in the string slice + for _, volume := range *stringSlice { + // split each slice element into source, destination and access mode + parts := strings.Split(volume, ":") + + switch { + case len(parts) == 1: + // append the element to the volume slice + *v = append(*v, &Volume{ + Source: parts[0], + Destination: parts[0], + AccessMode: "ro", + }) + + continue + case len(parts) == 2: + // append the element to the volume slice + *v = append(*v, &Volume{ + Source: parts[0], + Destination: parts[1], + AccessMode: "ro", + }) + + continue + case len(parts) == 3: + // append the element to the volume slice + *v = append(*v, &Volume{ + Source: parts[0], + Destination: parts[1], + AccessMode: parts[2], + }) + + continue + default: + return fmt.Errorf("volume %s must contain at least 1 but no more than 2 `:`(colons)", volume) + } + } + + return nil + } + + // volume slice we try unmarshalling to + volumes := new([]*Volume) + + // attempt to unmarshal as a volume slice type + err = unmarshal(volumes) + if err != nil { + return err + } + + // iterate through each element in the volume slice + for _, volume := range *volumes { + // implicitly set `destination` field if empty + if len(volume.Destination) == 0 { + volume.Destination = volume.Source + } + + // implicitly set `access_mode` field if empty + if len(volume.AccessMode) == 0 { + volume.AccessMode = "ro" + } + } + + // overwrite existing VolumeSlice + *v = VolumeSlice(*volumes) + + return nil +} diff --git a/compiler/types/yaml/volume_test.go b/compiler/types/yaml/volume_test.go new file mode 100644 index 000000000..f52aff9a4 --- /dev/null +++ b/compiler/types/yaml/volume_test.go @@ -0,0 +1,137 @@ +// SPDX-License-Identifier: 
Apache-2.0 + +package yaml + +import ( + "os" + "reflect" + "testing" + + "github.com/buildkite/yaml" + + "github.com/go-vela/server/compiler/types/pipeline" +) + +func TestYaml_VolumeSlice_ToPipeline(t *testing.T) { + // setup tests + tests := []struct { + volumes *VolumeSlice + want *pipeline.VolumeSlice + }{ + { + volumes: &VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + want: &pipeline.VolumeSlice{ + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + }, + }, + } + + // run tests + for _, test := range tests { + got := test.volumes.ToPipeline() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} + +func TestYaml_VolumeSlice_UnmarshalYAML(t *testing.T) { + // setup tests + tests := []struct { + failure bool + file string + want *VolumeSlice + }{ + { + failure: false, + file: "testdata/volume_slice.yml", + want: &VolumeSlice{ + { + Source: "/foo", + Destination: "/foo", + AccessMode: "ro", + }, + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + { + Source: "/foo", + Destination: "/foobar", + AccessMode: "ro", + }, + }, + }, + { + failure: false, + file: "testdata/volume_string.yml", + want: &VolumeSlice{ + { + Source: "/foo", + Destination: "/foo", + AccessMode: "ro", + }, + { + Source: "/foo", + Destination: "/bar", + AccessMode: "ro", + }, + { + Source: "/foo", + Destination: "/foobar", + AccessMode: "ro", + }, + }, + }, + { + failure: true, + file: "testdata/invalid.yml", + want: nil, + }, + { + failure: true, + file: "testdata/volume_error.yml", + want: nil, + }, + } + + // run tests + for _, test := range tests { + got := new(VolumeSlice) + + b, err := os.ReadFile(test.file) + if err != nil { + t.Errorf("unable to read file: %v", err) + } + + err = yaml.Unmarshal(b, got) + + if test.failure { + if err == nil { + t.Errorf("UnmarshalYAML should have returned err") + } + + continue + } + + if err != nil { + 
t.Errorf("UnmarshalYAML returned err: %v", err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("UnmarshalYAML is %v, want %v", got, test.want) + } + } +} diff --git a/compiler/types/yaml/worker.go b/compiler/types/yaml/worker.go new file mode 100644 index 000000000..85c2a91cd --- /dev/null +++ b/compiler/types/yaml/worker.go @@ -0,0 +1,21 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import "github.com/go-vela/server/compiler/types/pipeline" + +// Worker is the yaml representation of a worker +// from a worker block in a pipeline. +type Worker struct { + Flavor string `yaml:"flavor,omitempty" json:"flavor,omitempty" jsonschema:"minLength=1,description=Flavor identifier for worker.\nReference: https://go-vela.github.io/docs/reference/yaml/worker/#the-flavor-key,example=large"` + Platform string `yaml:"platform,omitempty" json:"platform,omitempty" jsonschema:"minLength=1,description=Platform identifier for the worker.\nReference: https://go-vela.github.io/docs/reference/yaml/worker/#the-platform-key,example=kubernetes"` +} + +// ToPipeline converts the Worker type +// to a pipeline Worker type. 
+func (w *Worker) ToPipeline() *pipeline.Worker { + return &pipeline.Worker{ + Flavor: w.Flavor, + Platform: w.Platform, + } +} diff --git a/compiler/types/yaml/worker_test.go b/compiler/types/yaml/worker_test.go new file mode 100644 index 000000000..4c3bd97f6 --- /dev/null +++ b/compiler/types/yaml/worker_test.go @@ -0,0 +1,38 @@ +// SPDX-License-Identifier: Apache-2.0 + +package yaml + +import ( + "reflect" + "testing" + + "github.com/go-vela/server/compiler/types/pipeline" +) + +func TestYaml_Worker_ToPipeline(t *testing.T) { + // setup tests + tests := []struct { + worker *Worker + want *pipeline.Worker + }{ + { + worker: &Worker{ + Flavor: "8cpu16gb", + Platform: "gcp", + }, + want: &pipeline.Worker{ + Flavor: "8cpu16gb", + Platform: "gcp", + }, + }, + } + + // run tests + for _, test := range tests { + got := test.worker.ToPipeline() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("ToPipeline is %v, want %v", got, test.want) + } + } +} diff --git a/database/build/get_repo.go b/database/build/get_repo.go index 8ac935881..1bc9dfb39 100644 --- a/database/build/get_repo.go +++ b/database/build/get_repo.go @@ -27,8 +27,6 @@ func (e *engine) GetBuildForRepo(ctx context.Context, r *api.Repo, number int) ( err := e.client. WithContext(ctx). Table(constants.TableBuild). - Preload("Repo"). - Preload("Repo.Owner"). Where("repo_id = ?", r.GetID()). Where("number = ?", number). Take(b). 
@@ -37,10 +35,8 @@ func (e *engine) GetBuildForRepo(ctx context.Context, r *api.Repo, number int) ( return nil, err } - err = b.Repo.Decrypt(e.config.EncryptionKey) - if err != nil { - e.logger.Errorf("unable to decrypt repo %s/%s: %v", r.GetOrg(), r.GetName(), err) - } + result := b.ToAPI() + result.SetRepo(r) - return b.ToAPI(), nil + return result, nil } diff --git a/database/build/get_repo_test.go b/database/build/get_repo_test.go index e2db55642..327b2f14d 100644 --- a/database/build/get_repo_test.go +++ b/database/build/get_repo_test.go @@ -11,7 +11,6 @@ import ( api "github.com/go-vela/server/api/types" "github.com/go-vela/server/database/testutils" - "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" ) @@ -49,18 +48,8 @@ func TestBuild_Engine_GetBuildForRepo(t *testing.T) { []string{"id", "repo_id", "pipeline_id", "number", "parent", "event", "event_action", "status", "error", "enqueued", "created", "started", "finished", "deploy", "deploy_number", "deploy_payload", "clone", "source", "title", "message", "commit", "sender", "author", "email", "link", "branch", "ref", "base_ref", "head_ref", "host", "runtime", "distribution", "timestamp"}). AddRow(1, 1, nil, 1, 0, "", "", "", "", 0, 0, 0, 0, "", 0, nil, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0) - _repoRows := sqlmock.NewRows( - []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). - AddRow(1, 1, "baz", "foo", "bar", "foo/bar", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", "") - - _userRows := sqlmock.NewRows( - []string{"id", "name", "token", "hash", "active", "admin"}). 
- AddRow(1, "foo", "bar", "baz", false, false) - // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "builds" WHERE repo_id = $1 AND number = $2 LIMIT $3`).WithArgs(1, 1, 1).WillReturnRows(_rows) - _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) - _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() @@ -70,26 +59,6 @@ func TestBuild_Engine_GetBuildForRepo(t *testing.T) { t.Errorf("unable to create test build for sqlite: %v", err) } - err = _sqlite.client.AutoMigrate(&types.Repo{}) - if err != nil { - t.Errorf("unable to create build table for sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableRepo).Create(types.RepoFromAPI(_repo)).Error - if err != nil { - t.Errorf("unable to create test user for sqlite: %v", err) - } - - err = _sqlite.client.AutoMigrate(&types.User{}) - if err != nil { - t.Errorf("unable to create build table for sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableUser).Create(types.UserFromAPI(_owner)).Error - if err != nil { - t.Errorf("unable to create test user for sqlite: %v", err) - } - // setup tests tests := []struct { failure bool diff --git a/database/build/last_repo.go b/database/build/last_repo.go index ccade3cc8..517579240 100644 --- a/database/build/last_repo.go +++ b/database/build/last_repo.go @@ -28,8 +28,6 @@ func (e *engine) LastBuildForRepo(ctx context.Context, r *api.Repo, branch strin err := e.client. WithContext(ctx). Table(constants.TableBuild). - Preload("Repo"). - Preload("Repo.Owner"). Where("repo_id = ?", r.GetID()). Where("branch = ?", branch). Order("number DESC"). 
@@ -45,10 +43,8 @@ func (e *engine) LastBuildForRepo(ctx context.Context, r *api.Repo, branch strin return nil, err } - err = b.Repo.Decrypt(e.config.EncryptionKey) - if err != nil { - e.logger.Errorf("unable to decrypt repo %s/%s: %v", r.GetOrg(), r.GetName(), err) - } + result := b.ToAPI() + result.SetRepo(r) - return b.ToAPI(), nil + return result, nil } diff --git a/database/build/last_repo_test.go b/database/build/last_repo_test.go index 140b79651..7b518afee 100644 --- a/database/build/last_repo_test.go +++ b/database/build/last_repo_test.go @@ -11,7 +11,6 @@ import ( api "github.com/go-vela/server/api/types" "github.com/go-vela/server/database/testutils" - "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" ) @@ -49,18 +48,8 @@ func TestBuild_Engine_LastBuildForRepo(t *testing.T) { []string{"id", "repo_id", "pipeline_id", "number", "parent", "event", "event_action", "status", "error", "enqueued", "created", "started", "finished", "deploy", "deploy_payload", "clone", "source", "title", "message", "commit", "sender", "author", "email", "link", "branch", "ref", "base_ref", "head_ref", "host", "runtime", "distribution", "approved_at", "approved_by", "timestamp"}). AddRow(1, 1, nil, 1, 0, "", "", "", "", 0, 0, 0, 0, "", nil, "", "", "", "", "", "", "", "", "", "main", "", "", "", "", "", "", 0, "", 0) - _repoRows := sqlmock.NewRows( - []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). - AddRow(1, 1, "baz", "foo", "bar", "foo/bar", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", "") - - _userRows := sqlmock.NewRows( - []string{"id", "name", "token", "hash", "active", "admin"}). 
- AddRow(1, "foo", "bar", "baz", false, false) - // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "builds" WHERE repo_id = $1 AND branch = $2 ORDER BY number DESC LIMIT $3`).WithArgs(1, "main", 1).WillReturnRows(_rows) - _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) - _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() @@ -70,26 +59,6 @@ func TestBuild_Engine_LastBuildForRepo(t *testing.T) { t.Errorf("unable to create test build for sqlite: %v", err) } - err = _sqlite.client.AutoMigrate(&types.Repo{}) - if err != nil { - t.Errorf("unable to create build table for sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableRepo).Create(types.RepoFromAPI(_repo)).Error - if err != nil { - t.Errorf("unable to create test user for sqlite: %v", err) - } - - err = _sqlite.client.AutoMigrate(&types.User{}) - if err != nil { - t.Errorf("unable to create build table for sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableUser).Create(types.UserFromAPI(_owner)).Error - if err != nil { - t.Errorf("unable to create test user for sqlite: %v", err) - } - // setup tests tests := []struct { failure bool diff --git a/database/build/list_pending_running_repo.go b/database/build/list_pending_running_repo.go index f4028a57a..c475ccc1a 100644 --- a/database/build/list_pending_running_repo.go +++ b/database/build/list_pending_running_repo.go @@ -22,8 +22,6 @@ func (e *engine) ListPendingAndRunningBuildsForRepo(ctx context.Context, repo *a err := e.client. WithContext(ctx). Table(constants.TableBuild). - Preload("Repo"). - Preload("Repo.Owner"). Select("*"). Where("repo_id = ?", repo.GetID()). Where("status = 'running' OR status = 'pending' OR status = 'pending approval'"). 
@@ -38,12 +36,10 @@ func (e *engine) ListPendingAndRunningBuildsForRepo(ctx context.Context, repo *a // https://golang.org/doc/faq#closures_and_goroutines tmp := build - err = tmp.Repo.Decrypt(e.config.EncryptionKey) - if err != nil { - e.logger.Errorf("unable to decrypt repo %s/%s: %v", repo.GetOrg(), repo.GetName(), err) - } + result := tmp.ToAPI() + result.SetRepo(repo) - builds = append(builds, tmp.ToAPI()) + builds = append(builds, result) } return builds, nil diff --git a/database/build/list_pending_running_repo_test.go b/database/build/list_pending_running_repo_test.go index 286b68141..e8a9b323b 100644 --- a/database/build/list_pending_running_repo_test.go +++ b/database/build/list_pending_running_repo_test.go @@ -11,8 +11,6 @@ import ( api "github.com/go-vela/server/api/types" "github.com/go-vela/server/database/testutils" - "github.com/go-vela/server/database/types" - "github.com/go-vela/types/constants" ) func TestBuild_Engine_ListPendingAndRunningBuildsForRepo(t *testing.T) { @@ -79,18 +77,8 @@ func TestBuild_Engine_ListPendingAndRunningBuildsForRepo(t *testing.T) { AddRow(2, 1, nil, 2, 0, "", "", "pending", "", 0, 1, 0, 0, "", nil, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0, "", 0). AddRow(1, 1, nil, 1, 0, "", "", "running", "", 0, 1, 0, 0, "", nil, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0, "", 0) - _repoRows := sqlmock.NewRows( - []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). - AddRow(1, 1, "baz", "foo", "bar", "foo/bar", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", "") - - _userRows := sqlmock.NewRows( - []string{"id", "name", "token", "hash", "active", "admin"}). 
- AddRow(1, "foo", "bar", "baz", false, false) - // ensure the mock expects the name query _mock.ExpectQuery(`SELECT * FROM "builds" WHERE repo_id = $1 AND (status = 'running' OR status = 'pending' OR status = 'pending approval')`).WithArgs(1).WillReturnRows(_rows) - _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) - _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() @@ -110,31 +98,6 @@ func TestBuild_Engine_ListPendingAndRunningBuildsForRepo(t *testing.T) { t.Errorf("unable to create test build for sqlite: %v", err) } - err = _sqlite.client.AutoMigrate(&types.Repo{}) - if err != nil { - t.Errorf("unable to create repo table for sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableRepo).Create(types.RepoFromAPI(_repoOne)).Error - if err != nil { - t.Errorf("unable to create test repo for sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableRepo).Create(types.RepoFromAPI(_repoTwo)).Error - if err != nil { - t.Errorf("unable to create test repo for sqlite: %v", err) - } - - err = _sqlite.client.AutoMigrate(&types.User{}) - if err != nil { - t.Errorf("unable to create build table for sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableUser).Create(types.UserFromAPI(_owner)).Error - if err != nil { - t.Errorf("unable to create test user for sqlite: %v", err) - } - // setup tests tests := []struct { failure bool diff --git a/database/build/list_repo.go b/database/build/list_repo.go index 880ea8e8c..772a7ee6b 100644 --- a/database/build/list_repo.go +++ b/database/build/list_repo.go @@ -43,8 +43,6 @@ func (e *engine) ListBuildsForRepo(ctx context.Context, r *api.Repo, filters map err = e.client. WithContext(ctx). Table(constants.TableBuild). - Preload("Repo"). - Preload("Repo.Owner"). Where("repo_id = ?", r.GetID()). 
Where("created < ?", before). Where("created > ?", after). @@ -63,12 +61,10 @@ func (e *engine) ListBuildsForRepo(ctx context.Context, r *api.Repo, filters map // https://golang.org/doc/faq#closures_and_goroutines tmp := build - err = tmp.Repo.Decrypt(e.config.EncryptionKey) - if err != nil { - e.logger.Errorf("unable to decrypt repo %s/%s: %v", r.GetOrg(), r.GetName(), err) - } + result := tmp.ToAPI() + result.SetRepo(r) - builds = append(builds, tmp.ToAPI()) + builds = append(builds, result) } return builds, count, nil diff --git a/database/build/list_repo_test.go b/database/build/list_repo_test.go index 96368faec..0933d0ea3 100644 --- a/database/build/list_repo_test.go +++ b/database/build/list_repo_test.go @@ -12,7 +12,6 @@ import ( api "github.com/go-vela/server/api/types" "github.com/go-vela/server/database/testutils" - "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" ) @@ -64,18 +63,8 @@ func TestBuild_Engine_ListBuildsForRepo(t *testing.T) { AddRow(2, 1, nil, 2, 0, "", "", "", "", 0, 2, 0, 0, "", nil, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0, "", 0). AddRow(1, 1, nil, 1, 0, "", "", "", "", 0, 1, 0, 0, "", nil, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0, "", 0) - _repoRows := sqlmock.NewRows( - []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). - AddRow(1, 1, "baz", "foo", "bar", "foo/bar", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", "") - - _userRows := sqlmock.NewRows( - []string{"id", "name", "token", "hash", "active", "admin"}). 
- AddRow(1, "foo", "bar", "baz", false, false) - // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "builds" WHERE repo_id = $1 AND created < $2 AND created > $3 ORDER BY number DESC LIMIT $4`).WithArgs(1, AnyArgument{}, 0, 10).WillReturnRows(_rows) - _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) - _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() @@ -90,26 +79,6 @@ func TestBuild_Engine_ListBuildsForRepo(t *testing.T) { t.Errorf("unable to create test build for sqlite: %v", err) } - err = _sqlite.client.AutoMigrate(&types.Repo{}) - if err != nil { - t.Errorf("unable to create build table for sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableRepo).Create(types.RepoFromAPI(_repo)).Error - if err != nil { - t.Errorf("unable to create test user for sqlite: %v", err) - } - - err = _sqlite.client.AutoMigrate(&types.User{}) - if err != nil { - t.Errorf("unable to create build table for sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableUser).Create(types.UserFromAPI(_owner)).Error - if err != nil { - t.Errorf("unable to create test user for sqlite: %v", err) - } - // setup tests tests := []struct { failure bool diff --git a/database/deployment/get_repo.go b/database/deployment/get_repo.go index c96c8c6be..f9894789a 100644 --- a/database/deployment/get_repo.go +++ b/database/deployment/get_repo.go @@ -28,8 +28,6 @@ func (e *engine) GetDeploymentForRepo(ctx context.Context, r *api.Repo, number i err := e.client. WithContext(ctx). Table(constants.TableDeployment). - Preload("Repo"). - Preload("Repo.Owner"). Where("repo_id = ?", r.GetID()). Where("number = ?", number). Take(d). 
@@ -62,10 +60,8 @@ func (e *engine) GetDeploymentForRepo(ctx context.Context, r *api.Repo, number i builds = append(builds, b.ToAPI()) } - err = d.Repo.Decrypt(e.config.EncryptionKey) - if err != nil { - e.logger.Errorf("unable to decrypt repo %s/%s: %v", r.GetOrg(), r.GetName(), err) - } + result := d.ToAPI(builds) + result.SetRepo(r) - return d.ToAPI(builds), nil + return result, nil } diff --git a/database/deployment/get_repo_test.go b/database/deployment/get_repo_test.go index e79de6407..f4931b3b3 100644 --- a/database/deployment/get_repo_test.go +++ b/database/deployment/get_repo_test.go @@ -66,22 +66,12 @@ func TestDeployment_Engine_GetDeploymentForRepo(t *testing.T) { []string{"id", "repo_id", "number", "url", "commit", "ref", "task", "target", "description", "payload", "created_at", "created_by", "builds"}). AddRow(1, 1, 1, "https://github.com/github/octocat/deployments/1", "48afb5bdc41ad69bf22588491333f7cf71135163", "refs/heads/master", "vela-deploy", "production", "Deployment request from Vela", "{\"foo\":\"test1\"}", 1, "octocat", "{1}") - _repoRows := sqlmock.NewRows( - []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). - AddRow(1, 1, "baz", "foo", "bar", "foo/bar", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", "") - - _userRows := sqlmock.NewRows( - []string{"id", "name", "token", "hash", "active", "admin"}). 
- AddRow(1, "foo", "bar", "baz", false, false) - _buildRows := sqlmock.NewRows( []string{"id", "repo_id", "pipeline_id", "number", "parent", "event", "event_action", "status", "error", "enqueued", "created", "started", "finished", "deploy", "deploy_number", "deploy_payload", "clone", "source", "title", "message", "commit", "sender", "author", "email", "link", "branch", "ref", "base_ref", "head_ref", "host", "runtime", "distribution", "timestamp"}). AddRow(1, 1, nil, 1, 0, "", "", "", "", 0, 0, 0, 0, "", 0, nil, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0) // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "deployments" WHERE repo_id = $1 AND number = $2 LIMIT $3`).WithArgs(1, 1, 1).WillReturnRows(_rows) - _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) - _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _mock.ExpectQuery(`SELECT * FROM "builds" WHERE id = $1 LIMIT $2`).WithArgs(1, 1).WillReturnRows(_buildRows) _sqlite := testSqlite(t) @@ -91,8 +81,8 @@ func TestDeployment_Engine_GetDeploymentForRepo(t *testing.T) { t, _sqlite, []*api.Deployment{_deploymentOne}, - []*api.User{_owner}, - []*api.Repo{_repo}, + []*api.User{}, + []*api.Repo{}, []*api.Build{_build}, ) diff --git a/database/deployment/list_repo.go b/database/deployment/list_repo.go index b8b77fdad..08419397a 100644 --- a/database/deployment/list_repo.go +++ b/database/deployment/list_repo.go @@ -31,8 +31,6 @@ func (e *engine) ListDeploymentsForRepo(ctx context.Context, r *api.Repo, page, err := e.client. WithContext(ctx). Table(constants.TableDeployment). - Preload("Repo"). - Preload("Repo.Owner"). Where("repo_id = ?", r.GetID()). Order("number DESC"). Limit(perPage). 
@@ -72,13 +70,11 @@ func (e *engine) ListDeploymentsForRepo(ctx context.Context, r *api.Repo, page, builds = append(builds, b.ToAPI()) } - err = tmp.Repo.Decrypt(e.config.EncryptionKey) - if err != nil { - e.logger.Errorf("unable to decrypt repo %s/%s: %v", r.GetOrg(), r.GetName(), err) - } + result := tmp.ToAPI(builds) + result.SetRepo(r) // convert query result to API type - deployments = append(deployments, tmp.ToAPI(builds)) + deployments = append(deployments, result) } return deployments, nil diff --git a/database/deployment/list_repo_test.go b/database/deployment/list_repo_test.go index 76b3fa1ea..50b20760d 100644 --- a/database/deployment/list_repo_test.go +++ b/database/deployment/list_repo_test.go @@ -92,22 +92,12 @@ func TestDeployment_Engine_ListDeploymentsForRepo(t *testing.T) { []string{"id", "repo_id", "number", "url", "commit", "ref", "task", "target", "description", "payload", "created_at", "created_by", "builds"}). AddRow(1, 1, 1, "https://github.com/github/octocat/deployments/1", "48afb5bdc41ad69bf22588491333f7cf71135163", "refs/heads/master", "vela-deploy", "production", "Deployment request from Vela", "{\"foo\":\"test1\"}", 1, "octocat", "{1}") - _repoRows := sqlmock.NewRows( - []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). - AddRow(1, 1, "baz", "foo", "bar", "foo/bar", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", "") - - _userRows := sqlmock.NewRows( - []string{"id", "name", "token", "hash", "active", "admin"}). 
- AddRow(1, "foo", "bar", "baz", false, false) - _buildRows := sqlmock.NewRows( []string{"id", "repo_id", "pipeline_id", "number", "parent", "event", "event_action", "status", "error", "enqueued", "created", "started", "finished", "deploy", "deploy_number", "deploy_payload", "clone", "source", "title", "message", "commit", "sender", "author", "email", "link", "branch", "ref", "base_ref", "head_ref", "host", "runtime", "distribution", "timestamp"}). AddRow(1, 1, nil, 1, 0, "", "", "", "", 0, 0, 0, 0, "", 0, nil, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0) // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "deployments" WHERE repo_id = $1 ORDER BY number DESC LIMIT $2`).WithArgs(1, 10).WillReturnRows(_rows) - _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) - _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _mock.ExpectQuery(`SELECT * FROM "builds" WHERE id = $1 LIMIT $2`).WithArgs(1, 1).WillReturnRows(_buildRows) _sqlite := testSqlite(t) @@ -117,8 +107,8 @@ func TestDeployment_Engine_ListDeploymentsForRepo(t *testing.T) { t, _sqlite, []*api.Deployment{_deploymentOne, _deploymentTwo}, - []*api.User{_owner}, - []*api.Repo{_repoOne, _repoTwo}, + []*api.User{}, + []*api.Repo{}, []*api.Build{_build}, ) diff --git a/database/hook/get_repo.go b/database/hook/get_repo.go index 896275a16..e88f67ab3 100644 --- a/database/hook/get_repo.go +++ b/database/hook/get_repo.go @@ -27,8 +27,6 @@ func (e *engine) GetHookForRepo(ctx context.Context, r *api.Repo, number int) (* err := e.client. WithContext(ctx). Table(constants.TableHook). - Preload("Repo"). - Preload("Repo.Owner"). Preload("Build"). Where("repo_id = ?", r.GetID()). Where("number = ?", number). 
@@ -38,10 +36,8 @@ func (e *engine) GetHookForRepo(ctx context.Context, r *api.Repo, number int) (* return nil, err } - err = h.Repo.Decrypt(e.config.EncryptionKey) - if err != nil { - e.logger.Errorf("unable to decrypt repo %s/%s: %v", r.GetOrg(), r.GetName(), err) - } + result := h.ToAPI() + result.SetRepo(r) - return h.ToAPI(), nil + return result, nil } diff --git a/database/hook/get_repo_test.go b/database/hook/get_repo_test.go index eb65a23db..bc1eb0fb9 100644 --- a/database/hook/get_repo_test.go +++ b/database/hook/get_repo_test.go @@ -63,19 +63,9 @@ func TestHook_Engine_GetHookForRepo(t *testing.T) { []string{"id", "repo_id", "pipeline_id", "number", "parent", "event", "event_action", "status", "error", "enqueued", "created", "started", "finished", "deploy", "deploy_number", "deploy_payload", "clone", "source", "title", "message", "commit", "sender", "author", "email", "link", "branch", "ref", "base_ref", "head_ref", "host", "runtime", "distribution", "timestamp"}). AddRow(1, 1, nil, 1, 0, "", "", "", "", 0, 0, 0, 0, "", 0, nil, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0) - _repoRows := sqlmock.NewRows( - []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). - AddRow(1, 1, "baz", "foo", "bar", "foo/bar", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", "") - - _userRows := sqlmock.NewRows( - []string{"id", "name", "token", "hash", "active", "admin"}). 
- AddRow(1, "foo", "bar", "baz", false, false) - // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "hooks" WHERE repo_id = $1 AND number = $2 LIMIT $3`).WithArgs(1, 1, 1).WillReturnRows(_rows) _mock.ExpectQuery(`SELECT * FROM "builds" WHERE "builds"."id" = $1`).WithArgs(1).WillReturnRows(_buildRows) - _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) - _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() @@ -84,8 +74,8 @@ func TestHook_Engine_GetHookForRepo(t *testing.T) { t, _sqlite, []*api.Hook{_hook}, - []*api.User{_owner}, - []*api.Repo{_repo}, + []*api.User{}, + []*api.Repo{}, []*api.Build{_build}, ) diff --git a/database/hook/last_repo.go b/database/hook/last_repo.go index 8299864fb..0073d8a8b 100644 --- a/database/hook/last_repo.go +++ b/database/hook/last_repo.go @@ -28,8 +28,6 @@ func (e *engine) LastHookForRepo(ctx context.Context, r *api.Repo) (*api.Hook, e err := e.client. WithContext(ctx). Table(constants.TableHook). - Preload("Repo"). - Preload("Repo.Owner"). Preload("Build"). Where("repo_id = ?", r.GetID()). Order("number DESC"). 
@@ -45,13 +43,8 @@ func (e *engine) LastHookForRepo(ctx context.Context, r *api.Repo) (*api.Hook, e return nil, err } - err = h.Repo.Decrypt(e.config.EncryptionKey) - if err != nil { - e.logger.Errorf("unable to decrypt repo for hook %d: %v", h.ID.Int64, err) - } + result := h.ToAPI() + result.SetRepo(r) - // return the hook - // - // https://pkg.go.dev/github.com/go-vela/types/database#Hook.ToLibrary - return h.ToAPI(), nil + return result, nil } diff --git a/database/hook/last_repo_test.go b/database/hook/last_repo_test.go index 2e1bf1b80..70af398ec 100644 --- a/database/hook/last_repo_test.go +++ b/database/hook/last_repo_test.go @@ -62,24 +62,14 @@ func TestHook_Engine_LastHookForRepo(t *testing.T) { []string{"id", "repo_id", "pipeline_id", "number", "parent", "event", "event_action", "status", "error", "enqueued", "created", "started", "finished", "deploy", "deploy_number", "deploy_payload", "clone", "source", "title", "message", "commit", "sender", "author", "email", "link", "branch", "ref", "base_ref", "head_ref", "host", "runtime", "distribution", "timestamp"}). AddRow(1, 1, nil, 1, 0, "", "", "", "", 0, 0, 0, 0, "", 0, nil, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0) - _repoRows := sqlmock.NewRows( - []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). - AddRow(1, 1, "baz", "foo", "bar", "foo/bar", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", "") - - _userRows := sqlmock.NewRows( - []string{"id", "name", "token", "hash", "active", "admin"}). 
- AddRow(1, "foo", "bar", "baz", false, false) - // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "hooks" WHERE repo_id = $1 ORDER BY number DESC LIMIT $2`).WithArgs(1, 1).WillReturnRows(_rows) _mock.ExpectQuery(`SELECT * FROM "builds" WHERE "builds"."id" = $1`).WithArgs(1).WillReturnRows(_buildRows) - _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) - _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() - sqlitePopulateTables(t, _sqlite, []*api.Hook{_hook}, []*api.User{_owner}, []*api.Repo{_repo}, []*api.Build{_build}) + sqlitePopulateTables(t, _sqlite, []*api.Hook{_hook}, []*api.User{}, []*api.Repo{}, []*api.Build{_build}) // setup tests tests := []struct { diff --git a/database/hook/list_repo.go b/database/hook/list_repo.go index 7371ddcf5..dea9b350e 100644 --- a/database/hook/list_repo.go +++ b/database/hook/list_repo.go @@ -42,8 +42,6 @@ func (e *engine) ListHooksForRepo(ctx context.Context, r *api.Repo, page, perPag err = e.client. WithContext(ctx). Table(constants.TableHook). - Preload("Repo"). - Preload("Repo.Owner"). Preload("Build"). Where("repo_id = ?", r.GetID()). Order("id DESC"). 
@@ -60,12 +58,10 @@ func (e *engine) ListHooksForRepo(ctx context.Context, r *api.Repo, page, perPag // https://golang.org/doc/faq#closures_and_goroutines tmp := hook - err = tmp.Repo.Decrypt(e.config.EncryptionKey) - if err != nil { - e.logger.Errorf("unable to decrypt repo for hook %d: %v", tmp.ID.Int64, err) - } + result := tmp.ToAPI() + result.SetRepo(r) - hooks = append(hooks, tmp.ToAPI()) + hooks = append(hooks, result) } return hooks, count, nil diff --git a/database/hook/list_repo_test.go b/database/hook/list_repo_test.go index d14c6f375..ea3301f2c 100644 --- a/database/hook/list_repo_test.go +++ b/database/hook/list_repo_test.go @@ -76,19 +76,9 @@ func TestHook_Engine_ListHooksForRepo(t *testing.T) { []string{"id", "repo_id", "pipeline_id", "number", "parent", "event", "event_action", "status", "error", "enqueued", "created", "started", "finished", "deploy", "deploy_number", "deploy_payload", "clone", "source", "title", "message", "commit", "sender", "author", "email", "link", "branch", "ref", "base_ref", "head_ref", "host", "runtime", "distribution", "timestamp"}). AddRow(1, 1, nil, 1, 0, "", "", "", "", 0, 0, 0, 0, "", 0, nil, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0) - _repoRows := sqlmock.NewRows( - []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). - AddRow(1, 1, "baz", "foo", "bar", "foo/bar", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", "") - - _userRows := sqlmock.NewRows( - []string{"id", "name", "token", "hash", "active", "admin"}). 
- AddRow(1, "foo", "bar", "baz", false, false) - // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "hooks" WHERE repo_id = $1 ORDER BY id DESC LIMIT $2`).WithArgs(1, 10).WillReturnRows(_rows) _mock.ExpectQuery(`SELECT * FROM "builds" WHERE "builds"."id" IN ($1,$2)`).WithArgs(0, 1).WillReturnRows(_buildRows) - _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) - _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() @@ -97,8 +87,8 @@ func TestHook_Engine_ListHooksForRepo(t *testing.T) { t, _sqlite, []*api.Hook{_hookOne, _hookTwo}, - []*api.User{_owner}, - []*api.Repo{_repo}, + []*api.User{}, + []*api.Repo{}, []*api.Build{_build}, ) diff --git a/database/integration_test.go b/database/integration_test.go index b30295422..5fb10fe5f 100644 --- a/database/integration_test.go +++ b/database/integration_test.go @@ -16,6 +16,7 @@ import ( api "github.com/go-vela/server/api/types" "github.com/go-vela/server/api/types/settings" + "github.com/go-vela/server/compiler/types/raw" "github.com/go-vela/server/database/build" "github.com/go-vela/server/database/dashboard" "github.com/go-vela/server/database/deployment" @@ -36,7 +37,6 @@ import ( "github.com/go-vela/server/tracing" "github.com/go-vela/types/constants" "github.com/go-vela/types/library" - "github.com/go-vela/types/raw" ) // Resources represents the object containing test resources. 
@@ -48,7 +48,7 @@ type Resources struct { Hooks []*api.Hook JWKs jwk.Set Logs []*library.Log - Pipelines []*library.Pipeline + Pipelines []*api.Pipeline Repos []*api.Repo Schedules []*api.Schedule Secrets []*library.Secret @@ -1202,6 +1202,22 @@ func testPipelines(t *testing.T, db Interface, resources *Resources) { methods[element.Method(i).Name] = false } + // create owners + for _, user := range resources.Users { + _, err := db.CreateUser(context.TODO(), user) + if err != nil { + t.Errorf("unable to create user %d: %v", user.GetID(), err) + } + } + + // create the repos + for _, repo := range resources.Repos { + _, err := db.CreateRepo(context.TODO(), repo) + if err != nil { + t.Errorf("unable to create repo %d: %v", repo.GetID(), err) + } + } + // create the pipelines for _, pipeline := range resources.Pipelines { _, err := db.CreatePipeline(context.TODO(), pipeline) @@ -1256,7 +1272,7 @@ func testPipelines(t *testing.T, db Interface, resources *Resources) { // lookup the pipelines by name for _, pipeline := range resources.Pipelines { - repo := resources.Repos[pipeline.GetRepoID()-1] + repo := resources.Repos[pipeline.GetRepo().GetID()-1] got, err := db.GetPipelineForRepo(context.TODO(), pipeline.GetCommit(), repo) if err != nil { t.Errorf("unable to get pipeline %d for repo %d: %v", pipeline.GetID(), repo.GetID(), err) @@ -1296,6 +1312,22 @@ func testPipelines(t *testing.T, db Interface, resources *Resources) { } methods["DeletePipeline"] = true + // delete the repos + for _, repo := range resources.Repos { + err = db.DeleteRepo(context.TODO(), repo) + if err != nil { + t.Errorf("unable to delete repo %d: %v", repo.GetID(), err) + } + } + + // delete the owners + for _, user := range resources.Users { + err := db.DeleteUser(context.TODO(), user) + if err != nil { + t.Errorf("unable to delete user %d: %v", user.GetID(), err) + } + } + // ensure we called all the methods we expected to for method, called := range methods { if !called { @@ -2724,9 +2756,9 @@ func 
newResources() *Resources { logStepTwo.SetStepID(2) logStepTwo.SetData([]byte("foo")) - pipelineOne := new(library.Pipeline) + pipelineOne := new(api.Pipeline) pipelineOne.SetID(1) - pipelineOne.SetRepoID(1) + pipelineOne.SetRepo(repoOne) pipelineOne.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") pipelineOne.SetFlavor("large") pipelineOne.SetPlatform("docker") @@ -2741,9 +2773,9 @@ func newResources() *Resources { pipelineOne.SetTemplates(false) pipelineOne.SetData([]byte("version: 1")) - pipelineTwo := new(library.Pipeline) + pipelineTwo := new(api.Pipeline) pipelineTwo.SetID(2) - pipelineTwo.SetRepoID(1) + pipelineTwo.SetRepo(repoOne) pipelineTwo.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135164") pipelineTwo.SetFlavor("large") pipelineTwo.SetPlatform("docker") @@ -2959,7 +2991,7 @@ func newResources() *Resources { Hooks: []*api.Hook{hookOne, hookTwo, hookThree}, JWKs: jwkSet, Logs: []*library.Log{logServiceOne, logServiceTwo, logStepOne, logStepTwo}, - Pipelines: []*library.Pipeline{pipelineOne, pipelineTwo}, + Pipelines: []*api.Pipeline{pipelineOne, pipelineTwo}, Repos: []*api.Repo{repoOne, repoTwo}, Schedules: []*api.Schedule{scheduleOne, scheduleTwo}, Secrets: []*library.Secret{secretOrg, secretRepo, secretShared}, diff --git a/database/pipeline/count_repo_test.go b/database/pipeline/count_repo_test.go index ae306e9e5..b8be352d9 100644 --- a/database/pipeline/count_repo_test.go +++ b/database/pipeline/count_repo_test.go @@ -9,15 +9,17 @@ import ( "github.com/DATA-DOG/go-sqlmock" - api "github.com/go-vela/server/api/types" "github.com/go-vela/server/database/testutils" ) func TestPipeline_Engine_CountPipelinesForRepo(t *testing.T) { // setup types + _repo := testutils.APIRepo() + _repo.SetID(1) + _pipelineOne := testutils.APIPipeline() _pipelineOne.SetID(1) - _pipelineOne.SetRepoID(1) + _pipelineOne.SetRepo(_repo) _pipelineOne.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") _pipelineOne.SetRef("refs/heads/main") _pipelineOne.SetType("yaml") @@ 
-25,7 +27,7 @@ func TestPipeline_Engine_CountPipelinesForRepo(t *testing.T) { _pipelineTwo := testutils.APIPipeline() _pipelineTwo.SetID(2) - _pipelineTwo.SetRepoID(1) + _pipelineTwo.SetRepo(_repo) _pipelineTwo.SetCommit("a49aaf4afae6431a79239c95247a2b169fd9f067") _pipelineTwo.SetRef("refs/heads/main") _pipelineTwo.SetType("yaml") @@ -77,7 +79,7 @@ func TestPipeline_Engine_CountPipelinesForRepo(t *testing.T) { // run tests for _, test := range tests { t.Run(test.name, func(t *testing.T) { - got, err := test.database.CountPipelinesForRepo(context.TODO(), &api.Repo{ID: _pipelineOne.RepoID}) + got, err := test.database.CountPipelinesForRepo(context.TODO(), _repo) if test.failure { if err == nil { diff --git a/database/pipeline/count_test.go b/database/pipeline/count_test.go index 351e84050..56767e26c 100644 --- a/database/pipeline/count_test.go +++ b/database/pipeline/count_test.go @@ -14,9 +14,12 @@ import ( func TestPipeline_Engine_CountPipelines(t *testing.T) { // setup types + _repo := testutils.APIRepo() + _repo.SetID(1) + _pipelineOne := testutils.APIPipeline() _pipelineOne.SetID(1) - _pipelineOne.SetRepoID(1) + _pipelineOne.SetRepo(_repo) _pipelineOne.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") _pipelineOne.SetRef("refs/heads/main") _pipelineOne.SetType("yaml") @@ -24,7 +27,7 @@ func TestPipeline_Engine_CountPipelines(t *testing.T) { _pipelineTwo := testutils.APIPipeline() _pipelineTwo.SetID(2) - _pipelineTwo.SetRepoID(2) + _pipelineTwo.SetRepo(_repo) _pipelineTwo.SetCommit("a49aaf4afae6431a79239c95247a2b169fd9f067") _pipelineTwo.SetRef("refs/heads/main") _pipelineTwo.SetType("yaml") diff --git a/database/pipeline/create.go b/database/pipeline/create.go index e2910e6a1..d6ce01cd0 100644 --- a/database/pipeline/create.go +++ b/database/pipeline/create.go @@ -7,33 +7,24 @@ import ( "github.com/sirupsen/logrus" + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" - 
"github.com/go-vela/types/database" - "github.com/go-vela/types/library" ) // CreatePipeline creates a new pipeline in the database. -func (e *engine) CreatePipeline(ctx context.Context, p *library.Pipeline) (*library.Pipeline, error) { +func (e *engine) CreatePipeline(ctx context.Context, p *api.Pipeline) (*api.Pipeline, error) { e.logger.WithFields(logrus.Fields{ "pipeline": p.GetCommit(), }).Tracef("creating pipeline %s in the database", p.GetCommit()) - // cast the library type to database type - // - // https://pkg.go.dev/github.com/go-vela/types/database#PipelineFromLibrary - pipeline := database.PipelineFromLibrary(p) + pipeline := types.PipelineFromAPI(p) - // validate the necessary fields are populated - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.Validate err := pipeline.Validate() if err != nil { return nil, err } - // compress data for the pipeline - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.Compress err = pipeline.Compress(e.config.CompressionLevel) if err != nil { return nil, err @@ -53,5 +44,8 @@ func (e *engine) CreatePipeline(ctx context.Context, p *library.Pipeline) (*libr return nil, err } - return pipeline.ToLibrary(), nil + result := pipeline.ToAPI() + result.SetRepo(p.GetRepo()) + + return result, nil } diff --git a/database/pipeline/create_test.go b/database/pipeline/create_test.go index 58f52ffcc..5768e6f88 100644 --- a/database/pipeline/create_test.go +++ b/database/pipeline/create_test.go @@ -4,19 +4,22 @@ package pipeline import ( "context" - "reflect" "testing" "github.com/DATA-DOG/go-sqlmock" + "github.com/google/go-cmp/cmp" "github.com/go-vela/server/database/testutils" ) func TestPipeline_Engine_CreatePipeline(t *testing.T) { // setup types + _repo := testutils.APIRepo() + _repo.SetID(1) + _pipeline := testutils.APIPipeline() _pipeline.SetID(1) - _pipeline.SetRepoID(1) + _pipeline.SetRepo(_repo) _pipeline.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") 
_pipeline.SetRef("refs/heads/main") _pipeline.SetType("yaml") @@ -74,8 +77,8 @@ VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15) RETURNING "id"`). t.Errorf("CreatePipeline for %s returned err: %v", test.name, err) } - if !reflect.DeepEqual(got, _pipeline) { - t.Errorf("CreatePipeline for %s returned %s, want %s", test.name, got, _pipeline) + if diff := cmp.Diff(_pipeline, got); diff != "" { + t.Errorf("CreatePipeline for %s mismatch (-want +got):\n%s", test.name, diff) } }) } diff --git a/database/pipeline/delete.go b/database/pipeline/delete.go index ff49a8406..ecd87611e 100644 --- a/database/pipeline/delete.go +++ b/database/pipeline/delete.go @@ -7,21 +7,18 @@ import ( "github.com/sirupsen/logrus" + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" - "github.com/go-vela/types/database" - "github.com/go-vela/types/library" ) // DeletePipeline deletes an existing pipeline from the database. -func (e *engine) DeletePipeline(ctx context.Context, p *library.Pipeline) error { +func (e *engine) DeletePipeline(ctx context.Context, p *api.Pipeline) error { e.logger.WithFields(logrus.Fields{ "pipeline": p.GetCommit(), }).Tracef("deleting pipeline %s", p.GetCommit()) - // cast the library type to database type - // - // https://pkg.go.dev/github.com/go-vela/types/database#PipelineFromLibrary - pipeline := database.PipelineFromLibrary(p) + pipeline := types.PipelineFromAPI(p) // send query to the database return e.client. 
diff --git a/database/pipeline/delete_test.go b/database/pipeline/delete_test.go index 35a03d463..85321381c 100644 --- a/database/pipeline/delete_test.go +++ b/database/pipeline/delete_test.go @@ -13,9 +13,12 @@ import ( func TestPipeline_Engine_DeletePipeline(t *testing.T) { // setup types + _repo := testutils.APIRepo() + _repo.SetID(1) + _pipeline := testutils.APIPipeline() _pipeline.SetID(1) - _pipeline.SetRepoID(1) + _pipeline.SetRepo(_repo) _pipeline.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") _pipeline.SetRef("refs/heads/main") _pipeline.SetType("yaml") diff --git a/database/pipeline/get.go b/database/pipeline/get.go index e685a9178..0721ba36d 100644 --- a/database/pipeline/get.go +++ b/database/pipeline/get.go @@ -5,22 +5,24 @@ package pipeline import ( "context" + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" - "github.com/go-vela/types/database" - "github.com/go-vela/types/library" ) // GetPipeline gets a pipeline by ID from the database. -func (e *engine) GetPipeline(ctx context.Context, id int64) (*library.Pipeline, error) { +func (e *engine) GetPipeline(ctx context.Context, id int64) (*api.Pipeline, error) { e.logger.Tracef("getting pipeline %d", id) // variable to store query results - p := new(database.Pipeline) + p := new(types.Pipeline) // send query to the database and store result in variable err := e.client. WithContext(ctx). Table(constants.TablePipeline). + Preload("Repo"). + Preload("Repo.Owner"). Where("id = ?", id). Take(p). 
Error @@ -28,16 +30,15 @@ func (e *engine) GetPipeline(ctx context.Context, id int64) (*library.Pipeline, return nil, err } - // decompress data for the pipeline - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.Decompress err = p.Decompress() if err != nil { return nil, err } - // return the decompressed pipeline - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.ToLibrary - return p.ToLibrary(), nil + err = p.Repo.Decrypt(e.config.EncryptionKey) + if err != nil { + e.logger.Errorf("unable to decrypt repo: %v", err) + } + + return p.ToAPI(), nil } diff --git a/database/pipeline/get_repo.go b/database/pipeline/get_repo.go index 5724dd0a7..b84c320f3 100644 --- a/database/pipeline/get_repo.go +++ b/database/pipeline/get_repo.go @@ -8,13 +8,12 @@ import ( "github.com/sirupsen/logrus" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" - "github.com/go-vela/types/database" - "github.com/go-vela/types/library" ) // GetPipelineForRepo gets a pipeline by number and repo ID from the database. -func (e *engine) GetPipelineForRepo(ctx context.Context, commit string, r *api.Repo) (*library.Pipeline, error) { +func (e *engine) GetPipelineForRepo(ctx context.Context, commit string, r *api.Repo) (*api.Pipeline, error) { e.logger.WithFields(logrus.Fields{ "org": r.GetOrg(), "pipeline": commit, @@ -22,7 +21,7 @@ func (e *engine) GetPipelineForRepo(ctx context.Context, commit string, r *api.R }).Tracef("getting pipeline %s/%s", r.GetFullName(), commit) // variable to store query results - p := new(database.Pipeline) + p := new(types.Pipeline) // send query to the database and store result in variable err := e.client. 
@@ -36,16 +35,13 @@ func (e *engine) GetPipelineForRepo(ctx context.Context, commit string, r *api.R return nil, err } - // decompress data for the pipeline - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.Decompress err = p.Decompress() if err != nil { return nil, err } - // return the decompressed pipeline - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.ToLibrary - return p.ToLibrary(), nil + result := p.ToAPI() + result.SetRepo(r) + + return result, nil } diff --git a/database/pipeline/get_repo_test.go b/database/pipeline/get_repo_test.go index d62612cd2..f7de4f72b 100644 --- a/database/pipeline/get_repo_test.go +++ b/database/pipeline/get_repo_test.go @@ -10,15 +10,32 @@ import ( "github.com/DATA-DOG/go-sqlmock" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/constants" "github.com/go-vela/server/database/testutils" - "github.com/go-vela/types/library" ) func TestPipeline_Engine_GetPipelineForRepo(t *testing.T) { // setup types + _owner := testutils.APIUser().Crop() + _owner.SetID(1) + _owner.SetName("foo") + _owner.SetToken("bar") + + _repo := testutils.APIRepo() + _repo.SetID(1) + _repo.SetOwner(_owner) + _repo.SetHash("baz") + _repo.SetOrg("foo") + _repo.SetName("bar") + _repo.SetFullName("foo/bar") + _repo.SetVisibility("public") + _repo.SetAllowEvents(api.NewEventsFromMask(1)) + _repo.SetPipelineType(constants.PipelineTypeYAML) + _repo.SetTopics([]string{}) + _pipeline := testutils.APIPipeline() _pipeline.SetID(1) - _pipeline.SetRepoID(1) + _pipeline.SetRepo(_repo) _pipeline.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") _pipeline.SetRef("refs/heads/main") _pipeline.SetType("yaml") @@ -39,17 +56,20 @@ func TestPipeline_Engine_GetPipelineForRepo(t *testing.T) { _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() - _, err := _sqlite.CreatePipeline(context.TODO(), _pipeline) - if err != nil { - t.Errorf("unable to create test pipeline for sqlite: 
%v", err) - } + sqlitePopulateTables( + t, + _sqlite, + []*api.Pipeline{_pipeline}, + []*api.User{}, + []*api.Repo{}, + ) // setup tests tests := []struct { failure bool name string database *engine - want *library.Pipeline + want *api.Pipeline }{ { failure: false, @@ -68,7 +88,7 @@ func TestPipeline_Engine_GetPipelineForRepo(t *testing.T) { // run tests for _, test := range tests { t.Run(test.name, func(t *testing.T) { - got, err := test.database.GetPipelineForRepo(context.TODO(), "48afb5bdc41ad69bf22588491333f7cf71135163", &api.Repo{ID: _pipeline.RepoID}) + got, err := test.database.GetPipelineForRepo(context.TODO(), "48afb5bdc41ad69bf22588491333f7cf71135163", _repo) if test.failure { if err == nil { diff --git a/database/pipeline/get_test.go b/database/pipeline/get_test.go index c24f97da0..8022eb410 100644 --- a/database/pipeline/get_test.go +++ b/database/pipeline/get_test.go @@ -9,15 +9,33 @@ import ( "github.com/DATA-DOG/go-sqlmock" + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/constants" "github.com/go-vela/server/database/testutils" - "github.com/go-vela/types/library" ) func TestPipeline_Engine_GetPipeline(t *testing.T) { // setup types + _owner := testutils.APIUser().Crop() + _owner.SetID(1) + _owner.SetName("foo") + _owner.SetToken("bar") + + _repo := testutils.APIRepo() + _repo.SetID(1) + _repo.SetOwner(_owner) + _repo.SetHash("baz") + _repo.SetOrg("foo") + _repo.SetName("bar") + _repo.SetFullName("foo/bar") + _repo.SetVisibility("public") + _repo.SetAllowEvents(api.NewEventsFromMask(1)) + _repo.SetPipelineType(constants.PipelineTypeYAML) + _repo.SetTopics([]string{}) + _pipeline := testutils.APIPipeline() _pipeline.SetID(1) - _pipeline.SetRepoID(1) + _pipeline.SetRepo(_repo) _pipeline.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") _pipeline.SetRef("refs/heads/main") _pipeline.SetType("yaml") @@ -32,23 +50,36 @@ func TestPipeline_Engine_GetPipeline(t *testing.T) { []string{"id", "repo_id", "commit", "flavor", 
"platform", "ref", "type", "version", "services", "stages", "steps", "templates", "data"}). AddRow(1, 1, "48afb5bdc41ad69bf22588491333f7cf71135163", "", "", "refs/heads/main", "yaml", "1", false, false, false, false, []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}) + _repoRows := sqlmock.NewRows( + []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). + AddRow(1, 1, "baz", "foo", "bar", "foo/bar", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", "") + + _userRows := sqlmock.NewRows( + []string{"id", "name", "token", "hash", "active", "admin"}). + AddRow(1, "foo", "bar", "baz", false, false) + // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "pipelines" WHERE id = $1 LIMIT $2`).WithArgs(1, 1).WillReturnRows(_rows) + _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) + _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() - _, err := _sqlite.CreatePipeline(context.TODO(), _pipeline) - if err != nil { - t.Errorf("unable to create test pipeline for sqlite: %v", err) - } + sqlitePopulateTables( + t, + _sqlite, + []*api.Pipeline{_pipeline}, + []*api.User{_owner}, + []*api.Repo{_repo}, + ) // setup tests tests := []struct { failure bool name string database *engine - want *library.Pipeline + want *api.Pipeline }{ { failure: false, diff --git a/database/pipeline/interface.go b/database/pipeline/interface.go index 64666952e..62ffac726 100644 --- a/database/pipeline/interface.go +++ b/database/pipeline/interface.go @@ -6,7 +6,6 @@ import ( "context" api "github.com/go-vela/server/api/types" - "github.com/go-vela/types/library" ) 
// PipelineInterface represents the Vela interface for pipeline @@ -32,17 +31,17 @@ type PipelineInterface interface { // CountPipelinesForRepo defines a function that gets the count of pipelines by repo ID. CountPipelinesForRepo(context.Context, *api.Repo) (int64, error) // CreatePipeline defines a function that creates a new pipeline. - CreatePipeline(context.Context, *library.Pipeline) (*library.Pipeline, error) + CreatePipeline(context.Context, *api.Pipeline) (*api.Pipeline, error) // DeletePipeline defines a function that deletes an existing pipeline. - DeletePipeline(context.Context, *library.Pipeline) error + DeletePipeline(context.Context, *api.Pipeline) error // GetPipeline defines a function that gets a pipeline by ID. - GetPipeline(context.Context, int64) (*library.Pipeline, error) + GetPipeline(context.Context, int64) (*api.Pipeline, error) // GetPipelineForRepo defines a function that gets a pipeline by commit SHA and repo ID. - GetPipelineForRepo(context.Context, string, *api.Repo) (*library.Pipeline, error) + GetPipelineForRepo(context.Context, string, *api.Repo) (*api.Pipeline, error) // ListPipelines defines a function that gets a list of all pipelines. - ListPipelines(context.Context) ([]*library.Pipeline, error) + ListPipelines(context.Context) ([]*api.Pipeline, error) // ListPipelinesForRepo defines a function that gets a list of pipelines by repo ID. - ListPipelinesForRepo(context.Context, *api.Repo, int, int) ([]*library.Pipeline, int64, error) + ListPipelinesForRepo(context.Context, *api.Repo, int, int) ([]*api.Pipeline, int64, error) // UpdatePipeline defines a function that updates an existing pipeline. 
- UpdatePipeline(context.Context, *library.Pipeline) (*library.Pipeline, error) + UpdatePipeline(context.Context, *api.Pipeline) (*api.Pipeline, error) } diff --git a/database/pipeline/list.go b/database/pipeline/list.go index 12fac0bf1..e288965bd 100644 --- a/database/pipeline/list.go +++ b/database/pipeline/list.go @@ -5,19 +5,19 @@ package pipeline import ( "context" + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" - "github.com/go-vela/types/database" - "github.com/go-vela/types/library" ) // ListPipelines gets a list of all pipelines from the database. -func (e *engine) ListPipelines(ctx context.Context) ([]*library.Pipeline, error) { +func (e *engine) ListPipelines(ctx context.Context) ([]*api.Pipeline, error) { e.logger.Trace("listing all pipelines") // variables to store query results and return value count := int64(0) - p := new([]database.Pipeline) - pipelines := []*library.Pipeline{} + p := new([]types.Pipeline) + pipelines := []*api.Pipeline{} // count the results count, err := e.CountPipelines(ctx) @@ -34,6 +34,8 @@ func (e *engine) ListPipelines(ctx context.Context) ([]*library.Pipeline, error) err = e.client. WithContext(ctx). Table(constants.TablePipeline). + Preload("Repo"). + Preload("Repo.Owner"). Find(&p). 
Error if err != nil { @@ -45,18 +47,17 @@ func (e *engine) ListPipelines(ctx context.Context) ([]*library.Pipeline, error) // https://golang.org/doc/faq#closures_and_goroutines tmp := pipeline - // decompress data for the pipeline - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.Decompress err = tmp.Decompress() if err != nil { return nil, err } - // convert query result to library type - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.ToLibrary - pipelines = append(pipelines, tmp.ToLibrary()) + err = tmp.Repo.Decrypt(e.config.EncryptionKey) + if err != nil { + e.logger.Errorf("unable to decrypt repo: %v", err) + } + + pipelines = append(pipelines, tmp.ToAPI()) } return pipelines, nil diff --git a/database/pipeline/list_repo.go b/database/pipeline/list_repo.go index 22e9032cb..5476cd925 100644 --- a/database/pipeline/list_repo.go +++ b/database/pipeline/list_repo.go @@ -8,15 +8,14 @@ import ( "github.com/sirupsen/logrus" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" - "github.com/go-vela/types/database" - "github.com/go-vela/types/library" ) // ListPipelinesForRepo gets a list of pipelines by repo ID from the database. 
// //nolint:lll // ignore long line length due to variable names -func (e *engine) ListPipelinesForRepo(ctx context.Context, r *api.Repo, page, perPage int) ([]*library.Pipeline, int64, error) { +func (e *engine) ListPipelinesForRepo(ctx context.Context, r *api.Repo, page, perPage int) ([]*api.Pipeline, int64, error) { e.logger.WithFields(logrus.Fields{ "org": r.GetOrg(), "repo": r.GetName(), @@ -24,8 +23,8 @@ func (e *engine) ListPipelinesForRepo(ctx context.Context, r *api.Repo, page, pe // variables to store query results and return values count := int64(0) - p := new([]database.Pipeline) - pipelines := []*library.Pipeline{} + p := new([]types.Pipeline) + pipelines := []*api.Pipeline{} // count the results count, err := e.CountPipelinesForRepo(ctx, r) @@ -58,18 +57,15 @@ func (e *engine) ListPipelinesForRepo(ctx context.Context, r *api.Repo, page, pe // https://golang.org/doc/faq#closures_and_goroutines tmp := pipeline - // decompress data for the pipeline - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.Decompress err = tmp.Decompress() if err != nil { return nil, count, err } - // convert query result to library type - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.ToLibrary - pipelines = append(pipelines, tmp.ToLibrary()) + result := tmp.ToAPI() + result.SetRepo(r) + + pipelines = append(pipelines, result) } return pipelines, count, nil diff --git a/database/pipeline/list_repo_test.go b/database/pipeline/list_repo_test.go index 4acc87e20..77696e05d 100644 --- a/database/pipeline/list_repo_test.go +++ b/database/pipeline/list_repo_test.go @@ -10,15 +10,32 @@ import ( "github.com/DATA-DOG/go-sqlmock" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/constants" "github.com/go-vela/server/database/testutils" - "github.com/go-vela/types/library" ) func TestPipeline_Engine_ListPipelinesForRepo(t *testing.T) { // setup types + _owner := testutils.APIUser().Crop() + _owner.SetID(1) + 
_owner.SetName("foo") + _owner.SetToken("bar") + + _repo := testutils.APIRepo() + _repo.SetID(1) + _repo.SetOwner(_owner) + _repo.SetHash("baz") + _repo.SetOrg("foo") + _repo.SetName("bar") + _repo.SetFullName("foo/bar") + _repo.SetVisibility("public") + _repo.SetAllowEvents(api.NewEventsFromMask(1)) + _repo.SetPipelineType(constants.PipelineTypeYAML) + _repo.SetTopics([]string{}) + _pipelineOne := testutils.APIPipeline() _pipelineOne.SetID(1) - _pipelineOne.SetRepoID(1) + _pipelineOne.SetRepo(_repo) _pipelineOne.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") _pipelineOne.SetRef("refs/heads/main") _pipelineOne.SetType("yaml") @@ -27,7 +44,7 @@ func TestPipeline_Engine_ListPipelinesForRepo(t *testing.T) { _pipelineTwo := testutils.APIPipeline() _pipelineTwo.SetID(2) - _pipelineTwo.SetRepoID(1) + _pipelineTwo.SetRepo(_repo) _pipelineTwo.SetCommit("a49aaf4afae6431a79239c95247a2b169fd9f067") _pipelineTwo.SetRef("refs/heads/main") _pipelineTwo.SetType("yaml") @@ -55,41 +72,39 @@ func TestPipeline_Engine_ListPipelinesForRepo(t *testing.T) { _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() - _, err := _sqlite.CreatePipeline(context.TODO(), _pipelineOne) - if err != nil { - t.Errorf("unable to create test pipeline for sqlite: %v", err) - } - - _, err = _sqlite.CreatePipeline(context.TODO(), _pipelineTwo) - if err != nil { - t.Errorf("unable to create test pipeline for sqlite: %v", err) - } + sqlitePopulateTables( + t, + _sqlite, + []*api.Pipeline{_pipelineOne, _pipelineTwo}, + []*api.User{}, + []*api.Repo{}, + ) // setup tests tests := []struct { failure bool name string database *engine - want []*library.Pipeline + want []*api.Pipeline }{ { failure: false, name: "postgres", database: _postgres, - want: []*library.Pipeline{_pipelineOne, _pipelineTwo}, + want: []*api.Pipeline{_pipelineOne, _pipelineTwo}, }, { failure: false, name: "sqlite3", database: _sqlite, - want: []*library.Pipeline{_pipelineOne, _pipelineTwo}, + want: 
[]*api.Pipeline{_pipelineOne, _pipelineTwo}, }, } // run tests for _, test := range tests { t.Run(test.name, func(t *testing.T) { - got, _, err := test.database.ListPipelinesForRepo(context.TODO(), &api.Repo{ID: _pipelineOne.RepoID}, 1, 10) + got, _, err := test.database.ListPipelinesForRepo(context.TODO(), _repo, 1, 10) if test.failure { if err == nil { diff --git a/database/pipeline/list_test.go b/database/pipeline/list_test.go index b485bab6f..4dc624a55 100644 --- a/database/pipeline/list_test.go +++ b/database/pipeline/list_test.go @@ -4,20 +4,50 @@ package pipeline import ( "context" - "reflect" "testing" "github.com/DATA-DOG/go-sqlmock" + "github.com/google/go-cmp/cmp" + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/constants" "github.com/go-vela/server/database/testutils" - "github.com/go-vela/types/library" ) func TestPipeline_Engine_ListPipelines(t *testing.T) { // setup types + _owner := testutils.APIUser().Crop() + _owner.SetID(1) + _owner.SetName("foo") + _owner.SetToken("bar") + + _repoOne := testutils.APIRepo() + _repoOne.SetID(1) + _repoOne.SetOwner(_owner) + _repoOne.SetHash("baz") + _repoOne.SetOrg("foo") + _repoOne.SetName("bar") + _repoOne.SetFullName("foo/bar") + _repoOne.SetVisibility("public") + _repoOne.SetAllowEvents(api.NewEventsFromMask(1)) + _repoOne.SetPipelineType(constants.PipelineTypeYAML) + _repoOne.SetTopics([]string{}) + + _repoTwo := testutils.APIRepo() + _repoTwo.SetID(2) + _repoTwo.SetOwner(_owner) + _repoTwo.SetHash("bazey") + _repoTwo.SetOrg("fooey") + _repoTwo.SetName("barey") + _repoTwo.SetFullName("fooey/barey") + _repoTwo.SetVisibility("public") + _repoTwo.SetAllowEvents(api.NewEventsFromMask(1)) + _repoTwo.SetPipelineType(constants.PipelineTypeYAML) + _repoTwo.SetTopics([]string{}) + _pipelineOne := testutils.APIPipeline() _pipelineOne.SetID(1) - _pipelineOne.SetRepoID(1) + _pipelineOne.SetRepo(_repoOne) _pipelineOne.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") 
_pipelineOne.SetRef("refs/heads/main") _pipelineOne.SetType("yaml") @@ -26,7 +56,7 @@ func TestPipeline_Engine_ListPipelines(t *testing.T) { _pipelineTwo := testutils.APIPipeline() _pipelineTwo.SetID(2) - _pipelineTwo.SetRepoID(2) + _pipelineTwo.SetRepo(_repoTwo) _pipelineTwo.SetCommit("a49aaf4afae6431a79239c95247a2b169fd9f067") _pipelineTwo.SetRef("refs/heads/main") _pipelineTwo.SetType("yaml") @@ -48,40 +78,49 @@ func TestPipeline_Engine_ListPipelines(t *testing.T) { AddRow(1, 1, "48afb5bdc41ad69bf22588491333f7cf71135163", "", "", "refs/heads/main", "yaml", "1", false, false, false, false, []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}). AddRow(2, 2, "a49aaf4afae6431a79239c95247a2b169fd9f067", "", "", "refs/heads/main", "yaml", "1", false, false, false, false, []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}) + _repoRows := sqlmock.NewRows( + []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). + AddRow(1, 1, "baz", "foo", "bar", "foo/bar", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", ""). + AddRow(2, 1, "bazey", "fooey", "barey", "fooey/barey", "", "", "", "{}", 0, 0, 0, "public", false, false, false, 1, "yaml", "", "") + + _userRows := sqlmock.NewRows( + []string{"id", "name", "token", "hash", "active", "admin"}). 
+ AddRow(1, "foo", "bar", "baz", false, false) + // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "pipelines"`).WillReturnRows(_rows) + _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" IN ($1,$2)`).WithArgs(1, 2).WillReturnRows(_repoRows) + _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() - _, err := _sqlite.CreatePipeline(context.TODO(), _pipelineOne) - if err != nil { - t.Errorf("unable to create test pipeline for sqlite: %v", err) - } - - _, err = _sqlite.CreatePipeline(context.TODO(), _pipelineTwo) - if err != nil { - t.Errorf("unable to create test pipeline for sqlite: %v", err) - } + sqlitePopulateTables( + t, + _sqlite, + []*api.Pipeline{_pipelineOne, _pipelineTwo}, + []*api.User{_owner}, + []*api.Repo{_repoOne, _repoTwo}, + ) // setup tests tests := []struct { failure bool name string database *engine - want []*library.Pipeline + want []*api.Pipeline }{ { failure: false, name: "postgres", database: _postgres, - want: []*library.Pipeline{_pipelineOne, _pipelineTwo}, + want: []*api.Pipeline{_pipelineOne, _pipelineTwo}, }, { failure: false, name: "sqlite3", database: _sqlite, - want: []*library.Pipeline{_pipelineOne, _pipelineTwo}, + want: []*api.Pipeline{_pipelineOne, _pipelineTwo}, }, } @@ -102,8 +141,8 @@ func TestPipeline_Engine_ListPipelines(t *testing.T) { t.Errorf("ListPipelines for %s returned err: %v", test.name, err) } - if !reflect.DeepEqual(got, test.want) { - t.Errorf("ListPipelines for %s is %v, want %v", test.name, got, test.want) + if diff := cmp.Diff(test.want, got); diff != "" { + t.Errorf("ListPipelines for %s mismatch (-want +got):\n%s", test.name, diff) } }) } diff --git a/database/pipeline/opts.go b/database/pipeline/opts.go index 609181652..3d7dfac44 100644 --- a/database/pipeline/opts.go +++ b/database/pipeline/opts.go @@ -32,6 +32,16 @@ func 
WithCompressionLevel(level int) EngineOpt { } } +// WithEncryptionKey sets the encryption key in the database engine for Pipelines. +func WithEncryptionKey(key string) EngineOpt { + return func(e *engine) error { + // set the encryption key in the build engine + e.config.EncryptionKey = key + + return nil + } +} + // WithLogger sets the github.com/sirupsen/logrus logger in the database engine for Pipelines. func WithLogger(logger *logrus.Entry) EngineOpt { return func(e *engine) error { diff --git a/database/pipeline/pipeline.go b/database/pipeline/pipeline.go index 0860a64f6..433f9d2fc 100644 --- a/database/pipeline/pipeline.go +++ b/database/pipeline/pipeline.go @@ -15,6 +15,8 @@ import ( type ( // config represents the settings required to create the engine that implements the PipelineInterface interface. config struct { + // specifies the encryption key to use for the Hook engine + EncryptionKey string // specifies the level of compression to use for the Pipeline engine CompressionLevel int // specifies to skip creating tables and indexes for the Pipeline engine diff --git a/database/pipeline/pipeline_test.go b/database/pipeline/pipeline_test.go index e748b21f1..92d4d2372 100644 --- a/database/pipeline/pipeline_test.go +++ b/database/pipeline/pipeline_test.go @@ -13,6 +13,10 @@ import ( "gorm.io/driver/postgres" "gorm.io/driver/sqlite" "gorm.io/gorm" + + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/constants" + "github.com/go-vela/server/database/types" ) func TestPipeline_New(t *testing.T) { @@ -172,6 +176,40 @@ func testSqlite(t *testing.T) *engine { return _engine } +// sqlitePopulateTables is a helper function to populate tables for testing. 
+func sqlitePopulateTables(t *testing.T, e *engine, pipelines []*api.Pipeline, users []*api.User, repos []*api.Repo) { + for _, _pipeline := range pipelines { + _, err := e.CreatePipeline(context.TODO(), _pipeline) + if err != nil { + t.Errorf("unable to create test pipeline for sqlite: %v", err) + } + } + + err := e.client.AutoMigrate(&types.User{}) + if err != nil { + t.Errorf("unable to create user table for sqlite: %v", err) + } + + for _, _user := range users { + err = e.client.Table(constants.TableUser).Create(types.UserFromAPI(_user)).Error + if err != nil { + t.Errorf("unable to create test user for sqlite: %v", err) + } + } + + err = e.client.AutoMigrate(&types.Repo{}) + if err != nil { + t.Errorf("unable to create repo table for sqlite: %v", err) + } + + for _, _repo := range repos { + err = e.client.Table(constants.TableRepo).Create(types.RepoFromAPI(_repo)).Error + if err != nil { + t.Errorf("unable to create test repo for sqlite: %v", err) + } + } +} + // This will be used with the github.com/DATA-DOG/go-sqlmock library to compare values // that are otherwise not easily compared. These typically would be values generated // before adding or updating them in the database. diff --git a/database/pipeline/update.go b/database/pipeline/update.go index 93cff9351..23251f1a1 100644 --- a/database/pipeline/update.go +++ b/database/pipeline/update.go @@ -7,33 +7,24 @@ import ( "github.com/sirupsen/logrus" + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" - "github.com/go-vela/types/database" - "github.com/go-vela/types/library" ) // UpdatePipeline updates an existing pipeline in the database. 
-func (e *engine) UpdatePipeline(ctx context.Context, p *library.Pipeline) (*library.Pipeline, error) { +func (e *engine) UpdatePipeline(ctx context.Context, p *api.Pipeline) (*api.Pipeline, error) { e.logger.WithFields(logrus.Fields{ "pipeline": p.GetCommit(), }).Tracef("updating pipeline %s in the database", p.GetCommit()) - // cast the library type to database type - // - // https://pkg.go.dev/github.com/go-vela/types/database#PipelineFromLibrary - pipeline := database.PipelineFromLibrary(p) + pipeline := types.PipelineFromAPI(p) - // validate the necessary fields are populated - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.Validate err := pipeline.Validate() if err != nil { return nil, err } - // compress data for the pipeline - // - // https://pkg.go.dev/github.com/go-vela/types/database#Pipeline.Compress err = pipeline.Compress(e.config.CompressionLevel) if err != nil { return nil, err @@ -54,5 +45,8 @@ func (e *engine) UpdatePipeline(ctx context.Context, p *library.Pipeline) (*libr return nil, err } - return pipeline.ToLibrary(), nil + result := pipeline.ToAPI() + result.SetRepo(p.GetRepo()) + + return result, nil } diff --git a/database/pipeline/update_test.go b/database/pipeline/update_test.go index bb0464550..e8be0e457 100644 --- a/database/pipeline/update_test.go +++ b/database/pipeline/update_test.go @@ -14,9 +14,12 @@ import ( func TestPipeline_Engine_UpdatePipeline(t *testing.T) { // setup types + _repo := testutils.APIRepo() + _repo.SetID(1) + _pipeline := testutils.APIPipeline() _pipeline.SetID(1) - _pipeline.SetRepoID(1) + _pipeline.SetRepo(_repo) _pipeline.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") _pipeline.SetRef("refs/heads/main") _pipeline.SetType("yaml") diff --git a/database/resource.go b/database/resource.go index c17dc0f0b..43e659fd9 100644 --- a/database/resource.go +++ b/database/resource.go @@ -127,6 +127,7 @@ func (e *engine) NewResources(ctx context.Context) error { pipeline.WithContext(e.ctx), 
pipeline.WithClient(e.client), pipeline.WithCompressionLevel(e.config.CompressionLevel), + pipeline.WithEncryptionKey(e.config.EncryptionKey), pipeline.WithLogger(e.logger), pipeline.WithSkipCreation(e.config.SkipCreation), ) diff --git a/database/schedule/get_repo.go b/database/schedule/get_repo.go index 57c9f485a..8f0cff429 100644 --- a/database/schedule/get_repo.go +++ b/database/schedule/get_repo.go @@ -27,8 +27,6 @@ func (e *engine) GetScheduleForRepo(ctx context.Context, r *api.Repo, name strin err := e.client. WithContext(ctx). Table(constants.TableSchedule). - Preload("Repo"). - Preload("Repo.Owner"). Where("repo_id = ?", r.GetID()). Where("name = ?", name). Take(s). @@ -37,11 +35,8 @@ func (e *engine) GetScheduleForRepo(ctx context.Context, r *api.Repo, name strin return nil, err } - // decrypt hash value for repo - err = s.Repo.Decrypt(e.config.EncryptionKey) - if err != nil { - e.logger.Errorf("unable to decrypt repo %d: %v", s.Repo.ID.Int64, err) - } + result := s.ToAPI() + result.SetRepo(r) - return s.ToAPI(), nil + return result, nil } diff --git a/database/schedule/get_repo_test.go b/database/schedule/get_repo_test.go index 704443112..346119b3b 100644 --- a/database/schedule/get_repo_test.go +++ b/database/schedule/get_repo_test.go @@ -13,7 +13,6 @@ import ( api "github.com/go-vela/server/api/types" "github.com/go-vela/server/database/testutils" - "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" ) @@ -78,18 +77,8 @@ func TestSchedule_Engine_GetScheduleForRepo(t *testing.T) { []string{"id", "repo_id", "active", "name", "entry", "created_at", "created_by", "updated_at", "updated_by", "scheduled_at", "branch", "error"}). 
AddRow(1, 1, true, "nightly", "0 0 * * *", 1713476291, "octocat", 3013476291, "octokitty", 2013476291, "main", "no version: YAML property provided") - _repoRows := sqlmock.NewRows( - []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). - AddRow(1, 1, "MzM4N2MzMDAtNmY4Mi00OTA5LWFhZDAtNWIzMTlkNTJkODMy", "github", "octocat", "github/octocat", "https://github.com/github/octocat", "https://github.com/github/octocat.git", "main", "{cloud,security}", 10, 30, 0, "public", false, false, true, 1, "", "", constants.ApproveNever) - - _userRows := sqlmock.NewRows( - []string{"id", "name", "token", "refresh_token", "favorites", "active", "admin", "dashboards"}). - AddRow(1, "octocat", "superSecretToken", "superSecretRefreshToken", "{}", true, false, "{}") - // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "schedules" WHERE repo_id = $1 AND name = $2 LIMIT $3`).WithArgs(1, "nightly", 1).WillReturnRows(_rows) - _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) - _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() @@ -99,26 +88,6 @@ func TestSchedule_Engine_GetScheduleForRepo(t *testing.T) { t.Errorf("unable to create test schedule for sqlite: %v", err) } - err = _sqlite.client.AutoMigrate(&types.Repo{}) - if err != nil { - t.Errorf("unable to create build table for sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableRepo).Create(types.RepoFromAPI(_repo)).Error - if err != nil { - t.Errorf("unable to create test repo for sqlite: %v", err) - } - - err = _sqlite.client.AutoMigrate(&types.User{}) - if err != nil { - t.Errorf("unable to create build table for 
sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableUser).Create(types.UserFromAPI(_owner)).Error - if err != nil { - t.Errorf("unable to create test user for sqlite: %v", err) - } - // setup tests tests := []struct { failure bool diff --git a/database/schedule/list_repo.go b/database/schedule/list_repo.go index 38e0d1b3e..766e5e1c0 100644 --- a/database/schedule/list_repo.go +++ b/database/schedule/list_repo.go @@ -42,8 +42,6 @@ func (e *engine) ListSchedulesForRepo(ctx context.Context, r *api.Repo, page, pe err = e.client. WithContext(ctx). Table(constants.TableSchedule). - Preload("Repo"). - Preload("Repo.Owner"). Where("repo_id = ?", r.GetID()). Order("id DESC"). Limit(perPage). @@ -59,14 +57,11 @@ func (e *engine) ListSchedulesForRepo(ctx context.Context, r *api.Repo, page, pe // https://golang.org/doc/faq#closures_and_goroutines tmp := schedule - // decrypt hash value for repo - err = tmp.Repo.Decrypt(e.config.EncryptionKey) - if err != nil { - e.logger.Errorf("unable to decrypt repo %d: %v", tmp.Repo.ID.Int64, err) - } + result := tmp.ToAPI() + result.SetRepo(r) // convert query result to API type - schedules = append(schedules, tmp.ToAPI()) + schedules = append(schedules, result) } return schedules, count, nil diff --git a/database/schedule/list_repo_test.go b/database/schedule/list_repo_test.go index 0719e6b68..d73107f6e 100644 --- a/database/schedule/list_repo_test.go +++ b/database/schedule/list_repo_test.go @@ -13,7 +13,6 @@ import ( api "github.com/go-vela/server/api/types" "github.com/go-vela/server/database/testutils" - "github.com/go-vela/server/database/types" "github.com/go-vela/types/constants" ) @@ -103,18 +102,8 @@ func TestSchedule_Engine_ListSchedulesForRepo(t *testing.T) { AddRow(1, 1, true, "nightly", "0 0 * * *", 1713476291, "octocat", 3013476291, "octokitty", 2013476291, "main", "no version: YAML property provided"). 
AddRow(2, 1, false, "hourly", "0 * * * *", 1713476291, "octocat", 3013476291, "octokitty", 2013476291, "main", "no version: YAML property provided") - _repoRows := sqlmock.NewRows( - []string{"id", "user_id", "hash", "org", "name", "full_name", "link", "clone", "branch", "topics", "build_limit", "timeout", "counter", "visibility", "private", "trusted", "active", "allow_events", "pipeline_type", "previous_name", "approve_build"}). - AddRow(1, 1, "MzM4N2MzMDAtNmY4Mi00OTA5LWFhZDAtNWIzMTlkNTJkODMy", "github", "octocat", "github/octocat", "https://github.com/github/octocat", "https://github.com/github/octocat.git", "main", "{cloud,security}", 10, 30, 0, "public", false, false, true, 1, "", "", constants.ApproveNever) - - _userRows := sqlmock.NewRows( - []string{"id", "name", "token", "refresh_token", "favorites", "active", "admin", "dashboards"}). - AddRow(1, "octocat", "superSecretToken", "superSecretRefreshToken", "{}", true, false, "{}") - // ensure the mock expects the query _mock.ExpectQuery(`SELECT * FROM "schedules" WHERE repo_id = $1 ORDER BY id DESC LIMIT $2`).WithArgs(1, 10).WillReturnRows(_rows) - _mock.ExpectQuery(`SELECT * FROM "repos" WHERE "repos"."id" = $1`).WithArgs(1).WillReturnRows(_repoRows) - _mock.ExpectQuery(`SELECT * FROM "users" WHERE "users"."id" = $1`).WithArgs(1).WillReturnRows(_userRows) _sqlite := testSqlite(t) defer func() { _sql, _ := _sqlite.client.DB(); _sql.Close() }() @@ -129,26 +118,6 @@ func TestSchedule_Engine_ListSchedulesForRepo(t *testing.T) { t.Errorf("unable to create test schedule for sqlite: %v", err) } - err = _sqlite.client.AutoMigrate(&types.Repo{}) - if err != nil { - t.Errorf("unable to create build table for sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableRepo).Create(types.RepoFromAPI(_repo)).Error - if err != nil { - t.Errorf("unable to create test repo for sqlite: %v", err) - } - - err = _sqlite.client.AutoMigrate(&types.User{}) - if err != nil { - t.Errorf("unable to create build table for 
sqlite: %v", err) - } - - err = _sqlite.client.Table(constants.TableUser).Create(types.UserFromAPI(_owner)).Error - if err != nil { - t.Errorf("unable to create test user for sqlite: %v", err) - } - // setup tests tests := []struct { failure bool diff --git a/database/testutils/api_resources.go b/database/testutils/api_resources.go index 71f14e703..a183ada1f 100644 --- a/database/testutils/api_resources.go +++ b/database/testutils/api_resources.go @@ -11,8 +11,8 @@ import ( api "github.com/go-vela/server/api/types" "github.com/go-vela/server/api/types/actions" + "github.com/go-vela/server/compiler/types/raw" "github.com/go-vela/types/library" - "github.com/go-vela/types/raw" ) // API TEST RESOURCES @@ -232,10 +232,10 @@ func APIStep() *library.Step { } } -func APIPipeline() *library.Pipeline { - return &library.Pipeline{ +func APIPipeline() *api.Pipeline { + return &api.Pipeline{ ID: new(int64), - RepoID: new(int64), + Repo: APIRepo(), Commit: new(string), Flavor: new(string), Platform: new(string), diff --git a/database/types/build.go b/database/types/build.go index 146c3dc5a..43471e34e 100644 --- a/database/types/build.go +++ b/database/types/build.go @@ -7,8 +7,8 @@ import ( "errors" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/raw" "github.com/go-vela/server/util" - "github.com/go-vela/types/raw" ) var ( diff --git a/database/types/build_test.go b/database/types/build_test.go index 3eb12f142..84cababf8 100644 --- a/database/types/build_test.go +++ b/database/types/build_test.go @@ -11,8 +11,8 @@ import ( "github.com/google/go-cmp/cmp" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/raw" "github.com/go-vela/server/database/testutils" - "github.com/go-vela/types/raw" ) func TestTypes_Build_Crop(t *testing.T) { diff --git a/database/types/deployment.go b/database/types/deployment.go index ec3453651..df300d6be 100644 --- a/database/types/deployment.go +++ b/database/types/deployment.go 
@@ -10,9 +10,9 @@ import ( "github.com/lib/pq" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/raw" "github.com/go-vela/server/util" "github.com/go-vela/types/constants" - "github.com/go-vela/types/raw" ) var ( diff --git a/database/types/deployment_test.go b/database/types/deployment_test.go index b9289047f..7f5db4d9f 100644 --- a/database/types/deployment_test.go +++ b/database/types/deployment_test.go @@ -10,7 +10,7 @@ import ( "github.com/lib/pq" api "github.com/go-vela/server/api/types" - "github.com/go-vela/types/raw" + "github.com/go-vela/server/compiler/types/raw" ) func TestDatabase_Deployment_Nullify(t *testing.T) { diff --git a/database/types/pipeline.go b/database/types/pipeline.go new file mode 100644 index 000000000..425f193e0 --- /dev/null +++ b/database/types/pipeline.go @@ -0,0 +1,231 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "database/sql" + "errors" + + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/util" +) + +var ( + // ErrEmptyPipelineCommit defines the error type when a + // Pipeline type has an empty Commit field provided. + ErrEmptyPipelineCommit = errors.New("empty pipeline commit provided") + + // ErrEmptyPipelineRef defines the error type when a + // Pipeline type has an empty Ref field provided. + ErrEmptyPipelineRef = errors.New("empty pipeline ref provided") + + // ErrEmptyPipelineRepoID defines the error type when a + // Pipeline type has an empty RepoID field provided. + ErrEmptyPipelineRepoID = errors.New("empty pipeline repo_id provided") + + // ErrEmptyPipelineType defines the error type when a + // Pipeline type has an empty Type field provided. + ErrEmptyPipelineType = errors.New("empty pipeline type provided") + + // ErrEmptyPipelineVersion defines the error type when a + // Pipeline type has an empty Version field provided. 
+ ErrEmptyPipelineVersion = errors.New("empty pipeline version provided") +) + +// Pipeline is the database representation of a pipeline. +type Pipeline struct { + ID sql.NullInt64 `sql:"id"` + RepoID sql.NullInt64 `sql:"repo_id"` + Commit sql.NullString `sql:"commit"` + Flavor sql.NullString `sql:"flavor"` + Platform sql.NullString `sql:"platform"` + Ref sql.NullString `sql:"ref"` + Type sql.NullString `sql:"type"` + Version sql.NullString `sql:"version"` + ExternalSecrets sql.NullBool `sql:"external_secrets"` + InternalSecrets sql.NullBool `sql:"internal_secrets"` + Services sql.NullBool `sql:"services"` + Stages sql.NullBool `sql:"stages"` + Steps sql.NullBool `sql:"steps"` + Templates sql.NullBool `sql:"templates"` + Data []byte `sql:"data"` + + Repo Repo `gorm:"foreignKey:RepoID"` +} + +// Compress will manipulate the existing data for the +// pipeline by compressing that data. This produces +// a significantly smaller amount of data that is +// stored in the system. +func (p *Pipeline) Compress(level int) error { + // compress the database pipeline data + data, err := util.Compress(level, p.Data) + if err != nil { + return err + } + + // overwrite database pipeline data with compressed pipeline data + p.Data = data + + return nil +} + +// Decompress will manipulate the existing data for the +// pipeline by decompressing that data. This allows us +// to have a significantly smaller amount of data that +// is stored in the system. +func (p *Pipeline) Decompress() error { + // decompress the database pipeline data + data, err := util.Decompress(p.Data) + if err != nil { + return err + } + + // overwrite compressed pipeline data with decompressed pipeline data + p.Data = data + + return nil +} + +// Nullify ensures the valid flag for +// the sql.Null types are properly set. +// +// When a field within the Pipeline type is the zero +// value for the field, the valid flag is set to +// false causing it to be NULL in the database. 
+func (p *Pipeline) Nullify() *Pipeline { + if p == nil { + return nil + } + + // check if the ID field should be false + if p.ID.Int64 == 0 { + p.ID.Valid = false + } + + // check if the RepoID field should be false + if p.RepoID.Int64 == 0 { + p.RepoID.Valid = false + } + + // check if the Commit field should be false + if len(p.Commit.String) == 0 { + p.Commit.Valid = false + } + + // check if the Flavor field should be false + if len(p.Flavor.String) == 0 { + p.Flavor.Valid = false + } + + // check if the Platform field should be false + if len(p.Platform.String) == 0 { + p.Platform.Valid = false + } + + // check if the Ref field should be false + if len(p.Ref.String) == 0 { + p.Ref.Valid = false + } + + // check if the Type field should be false + if len(p.Type.String) == 0 { + p.Type.Valid = false + } + + // check if the Version field should be false + if len(p.Version.String) == 0 { + p.Version.Valid = false + } + + return p +} + +// ToAPI converts the Pipeline type +// to a API Pipeline type. +func (p *Pipeline) ToAPI() *api.Pipeline { + pipeline := new(api.Pipeline) + + pipeline.SetID(p.ID.Int64) + pipeline.SetRepo(p.Repo.ToAPI()) + pipeline.SetCommit(p.Commit.String) + pipeline.SetFlavor(p.Flavor.String) + pipeline.SetPlatform(p.Platform.String) + pipeline.SetRef(p.Ref.String) + pipeline.SetType(p.Type.String) + pipeline.SetVersion(p.Version.String) + pipeline.SetExternalSecrets(p.ExternalSecrets.Bool) + pipeline.SetInternalSecrets(p.InternalSecrets.Bool) + pipeline.SetServices(p.Services.Bool) + pipeline.SetStages(p.Stages.Bool) + pipeline.SetSteps(p.Steps.Bool) + pipeline.SetTemplates(p.Templates.Bool) + pipeline.SetData(p.Data) + + return pipeline +} + +// Validate verifies the necessary fields for +// the Pipeline type are populated correctly. 
+func (p *Pipeline) Validate() error { + // verify the Commit field is populated + if len(p.Commit.String) == 0 { + return ErrEmptyPipelineCommit + } + + // verify the Ref field is populated + if len(p.Ref.String) == 0 { + return ErrEmptyPipelineRef + } + + // verify the RepoID field is populated + if p.RepoID.Int64 <= 0 { + return ErrEmptyPipelineRepoID + } + + // verify the Type field is populated + if len(p.Type.String) == 0 { + return ErrEmptyPipelineType + } + + // verify the Version field is populated + if len(p.Version.String) == 0 { + return ErrEmptyPipelineVersion + } + + // ensure that all Pipeline string fields + // that can be returned as JSON are sanitized + // to avoid unsafe HTML content + p.Commit = sql.NullString{String: util.Sanitize(p.Commit.String), Valid: p.Commit.Valid} + p.Flavor = sql.NullString{String: util.Sanitize(p.Flavor.String), Valid: p.Flavor.Valid} + p.Platform = sql.NullString{String: util.Sanitize(p.Platform.String), Valid: p.Platform.Valid} + p.Ref = sql.NullString{String: util.Sanitize(p.Ref.String), Valid: p.Ref.Valid} + p.Type = sql.NullString{String: util.Sanitize(p.Type.String), Valid: p.Type.Valid} + p.Version = sql.NullString{String: util.Sanitize(p.Version.String), Valid: p.Version.Valid} + + return nil +} + +// PipelineFromAPI converts the API Pipeline type +// to a database Pipeline type. 
+func PipelineFromAPI(p *api.Pipeline) *Pipeline { + pipeline := &Pipeline{ + ID: sql.NullInt64{Int64: p.GetID(), Valid: true}, + RepoID: sql.NullInt64{Int64: p.GetRepo().GetID(), Valid: true}, + Commit: sql.NullString{String: p.GetCommit(), Valid: true}, + Flavor: sql.NullString{String: p.GetFlavor(), Valid: true}, + Platform: sql.NullString{String: p.GetPlatform(), Valid: true}, + Ref: sql.NullString{String: p.GetRef(), Valid: true}, + Type: sql.NullString{String: p.GetType(), Valid: true}, + Version: sql.NullString{String: p.GetVersion(), Valid: true}, + ExternalSecrets: sql.NullBool{Bool: p.GetExternalSecrets(), Valid: true}, + InternalSecrets: sql.NullBool{Bool: p.GetInternalSecrets(), Valid: true}, + Services: sql.NullBool{Bool: p.GetServices(), Valid: true}, + Stages: sql.NullBool{Bool: p.GetStages(), Valid: true}, + Steps: sql.NullBool{Bool: p.GetSteps(), Valid: true}, + Templates: sql.NullBool{Bool: p.GetTemplates(), Valid: true}, + Data: p.GetData(), + } + + return pipeline.Nullify() +} diff --git a/database/types/pipeline_test.go b/database/types/pipeline_test.go new file mode 100644 index 000000000..63648acdf --- /dev/null +++ b/database/types/pipeline_test.go @@ -0,0 +1,469 @@ +// SPDX-License-Identifier: Apache-2.0 + +package types + +import ( + "database/sql" + "reflect" + "testing" + + api "github.com/go-vela/server/api/types" + "github.com/go-vela/types/constants" +) + +func TestDatabase_Pipeline_Compress(t *testing.T) { + // setup tests + tests := []struct { + name string + failure bool + level int + pipeline *Pipeline + want []byte + }{ + { + name: "compression level -1", + failure: false, + level: constants.CompressionNegOne, + pipeline: &Pipeline{Data: testPipelineData()}, + want: []byte{120, 156, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 
208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}, + }, + { + name: "compression level 0", + failure: false, + level: constants.CompressionZero, + pipeline: &Pipeline{Data: testPipelineData()}, + want: []byte{120, 1, 0, 186, 0, 69, 255, 10, 118, 101, 114, 115, 105, 111, 110, 58, 32, 49, 10, 10, 119, 111, 114, 107, 101, 114, 58, 10, 32, 32, 102, 108, 97, 118, 111, 114, 58, 32, 108, 97, 114, 103, 101, 10, 32, 32, 112, 108, 97, 116, 102, 111, 114, 109, 58, 32, 100, 111, 99, 107, 101, 114, 10, 10, 115, 101, 114, 118, 105, 99, 101, 115, 58, 10, 32, 32, 45, 32, 110, 97, 109, 101, 58, 32, 114, 101, 100, 105, 115, 10, 32, 32, 32, 32, 105, 109, 97, 103, 101, 58, 32, 114, 101, 100, 105, 115, 10, 10, 115, 116, 101, 112, 115, 58, 10, 32, 32, 45, 32, 110, 97, 109, 101, 58, 32, 112, 105, 110, 103, 10, 32, 32, 32, 32, 105, 109, 97, 103, 101, 58, 32, 114, 101, 100, 105, 115, 10, 32, 32, 32, 32, 99, 111, 109, 109, 97, 110, 100, 115, 58, 10, 32, 32, 32, 32, 32, 32, 45, 32, 114, 101, 100, 105, 115, 45, 99, 108, 105, 32, 45, 104, 32, 114, 101, 100, 105, 115, 32, 112, 105, 110, 103, 10, 1, 0, 0, 255, 255, 33, 108, 56, 191}, + }, + { + name: "compression level 1", + failure: false, + level: constants.CompressionOne, + pipeline: &Pipeline{Data: testPipelineData()}, + want: []byte{120, 1, 100, 204, 189, 173, 195, 48, 12, 69, 225, 158, 83, 220, 5, 84, 188, 150, 219, 8, 210, 181, 30, 97, 253, 129, 52, 156, 245, 131, 36, 85, 144, 83, 127, 56, 114, 211, 195, 214, 84, 252, 137, 60, 150, 159, 116, 21, 224, 232, 249, 94, 174, 232, 217, 27, 5, 216, 61, 95, 199, 242, 161, 168, 171, 156, 116, 145, 160, 223, 86, 24, 42, 64, 194, 204, 131, 10, 103, 181, 16, 0, 176, 145, 27, 21, 206, 106, 33, 18, 23, 247, 23, 220, 54, 219, 175, 3, 128, 178, 198, 200, 179, 190, 
245, 171, 244, 153, 166, 210, 13, 233, 31, 206, 106, 129, 109, 179, 201, 51, 0, 0, 255, 255, 33, 108, 56, 191}, + }, + { + name: "compression level 2", + failure: false, + level: constants.CompressionTwo, + pipeline: &Pipeline{Data: testPipelineData()}, + want: []byte{120, 94, 100, 204, 65, 170, 3, 33, 12, 198, 241, 125, 78, 241, 93, 192, 197, 219, 122, 155, 160, 25, 95, 24, 53, 18, 7, 123, 253, 226, 12, 20, 74, 179, 75, 248, 253, 67, 75, 124, 170, 245, 136, 63, 162, 151, 249, 41, 30, 9, 56, 42, 47, 243, 136, 202, 94, 132, 128, 81, 249, 58, 204, 91, 68, 182, 116, 138, 19, 77, 241, 165, 73, 230, 214, 1, 157, 155, 68, 184, 100, 157, 4, 0, 218, 184, 124, 14, 52, 47, 25, 95, 112, 104, 47, 191, 110, 135, 201, 90, 227, 158, 111, 189, 39, 60, 79, 67, 170, 138, 240, 255, 44, 184, 243, 119, 0, 0, 0, 255, 255, 33, 108, 56, 191}, + }, + { + name: "compression level 3", + failure: false, + level: constants.CompressionThree, + pipeline: &Pipeline{Data: testPipelineData()}, + want: []byte{120, 94, 100, 204, 65, 170, 3, 33, 12, 198, 241, 125, 78, 241, 93, 192, 197, 219, 122, 155, 160, 25, 95, 24, 53, 18, 7, 123, 253, 226, 12, 20, 74, 179, 75, 248, 253, 67, 75, 124, 170, 245, 136, 63, 162, 151, 249, 41, 30, 9, 56, 42, 47, 243, 136, 202, 94, 132, 128, 81, 249, 58, 204, 91, 68, 182, 116, 138, 19, 77, 241, 165, 73, 230, 214, 1, 157, 155, 68, 184, 100, 157, 4, 0, 218, 184, 124, 14, 52, 47, 25, 95, 112, 104, 47, 191, 110, 135, 201, 90, 227, 158, 111, 189, 39, 60, 79, 67, 170, 138, 240, 255, 44, 184, 243, 119, 0, 0, 0, 255, 255, 33, 108, 56, 191}, + }, + { + name: "compression level 4", + failure: false, + level: constants.CompressionFour, + pipeline: &Pipeline{Data: testPipelineData()}, + want: []byte{120, 94, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 
44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}, + }, + { + name: "compression level 5", + failure: false, + level: constants.CompressionFive, + pipeline: &Pipeline{Data: testPipelineData()}, + want: []byte{120, 94, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}, + }, + { + name: "compression level 6", + failure: false, + level: constants.CompressionSix, + pipeline: &Pipeline{Data: testPipelineData()}, + want: []byte{120, 156, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}, + }, + { + name: "compression level 7", + failure: false, + level: constants.CompressionSeven, + pipeline: &Pipeline{Data: testPipelineData()}, + want: []byte{120, 218, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 
208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}, + }, + { + name: "compression level 8", + failure: false, + level: constants.CompressionEight, + pipeline: &Pipeline{Data: testPipelineData()}, + want: []byte{120, 218, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}, + }, + { + name: "compression level 9", + failure: false, + level: constants.CompressionNine, + pipeline: &Pipeline{Data: testPipelineData()}, + want: []byte{120, 218, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}, + }, + } + + // run tests + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := test.pipeline.Compress(test.level) + + if test.failure { + if err == nil { + 
t.Errorf("Compress for %s should have returned err", test.name) + } + + return + } + + if err != nil { + t.Errorf("Compress for %s returned err: %v", test.name, err) + } + + if !reflect.DeepEqual(test.pipeline.Data, test.want) { + t.Errorf("Compress for %s is %v, want %v", test.name, string(test.pipeline.Data), string(test.want)) + } + }) + } +} + +func TestDatabase_Pipeline_Decompress(t *testing.T) { + // setup tests + tests := []struct { + name string + failure bool + pipeline *Pipeline + want []byte + }{ + { + name: "compression level -1", + failure: false, + pipeline: &Pipeline{Data: []byte{120, 156, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}}, + want: testPipelineData(), + }, + { + name: "compression level 0", + failure: false, + pipeline: &Pipeline{Data: []byte{120, 1, 0, 186, 0, 69, 255, 10, 118, 101, 114, 115, 105, 111, 110, 58, 32, 49, 10, 10, 119, 111, 114, 107, 101, 114, 58, 10, 32, 32, 102, 108, 97, 118, 111, 114, 58, 32, 108, 97, 114, 103, 101, 10, 32, 32, 112, 108, 97, 116, 102, 111, 114, 109, 58, 32, 100, 111, 99, 107, 101, 114, 10, 10, 115, 101, 114, 118, 105, 99, 101, 115, 58, 10, 32, 32, 45, 32, 110, 97, 109, 101, 58, 32, 114, 101, 100, 105, 115, 10, 32, 32, 32, 32, 105, 109, 97, 103, 101, 58, 32, 114, 101, 100, 105, 115, 10, 10, 115, 116, 101, 112, 115, 58, 10, 32, 32, 45, 32, 110, 97, 109, 101, 58, 32, 112, 105, 110, 103, 10, 32, 32, 32, 32, 105, 109, 97, 103, 101, 58, 32, 114, 101, 100, 105, 115, 10, 32, 32, 32, 32, 99, 111, 109, 109, 97, 110, 100, 115, 58, 
10, 32, 32, 32, 32, 32, 32, 45, 32, 114, 101, 100, 105, 115, 45, 99, 108, 105, 32, 45, 104, 32, 114, 101, 100, 105, 115, 32, 112, 105, 110, 103, 10, 1, 0, 0, 255, 255, 33, 108, 56, 191}}, + want: testPipelineData(), + }, + { + name: "compression level 1", + failure: false, + pipeline: &Pipeline{Data: []byte{120, 1, 100, 204, 189, 173, 195, 48, 12, 69, 225, 158, 83, 220, 5, 84, 188, 150, 219, 8, 210, 181, 30, 97, 253, 129, 52, 156, 245, 131, 36, 85, 144, 83, 127, 56, 114, 211, 195, 214, 84, 252, 137, 60, 150, 159, 116, 21, 224, 232, 249, 94, 174, 232, 217, 27, 5, 216, 61, 95, 199, 242, 161, 168, 171, 156, 116, 145, 160, 223, 86, 24, 42, 64, 194, 204, 131, 10, 103, 181, 16, 0, 176, 145, 27, 21, 206, 106, 33, 18, 23, 247, 23, 220, 54, 219, 175, 3, 128, 178, 198, 200, 179, 190, 245, 171, 244, 153, 166, 210, 13, 233, 31, 206, 106, 129, 109, 179, 201, 51, 0, 0, 255, 255, 33, 108, 56, 191}}, + want: testPipelineData(), + }, + { + name: "compression level 2", + failure: false, + pipeline: &Pipeline{Data: []byte{120, 94, 100, 204, 65, 170, 3, 33, 12, 198, 241, 125, 78, 241, 93, 192, 197, 219, 122, 155, 160, 25, 95, 24, 53, 18, 7, 123, 253, 226, 12, 20, 74, 179, 75, 248, 253, 67, 75, 124, 170, 245, 136, 63, 162, 151, 249, 41, 30, 9, 56, 42, 47, 243, 136, 202, 94, 132, 128, 81, 249, 58, 204, 91, 68, 182, 116, 138, 19, 77, 241, 165, 73, 230, 214, 1, 157, 155, 68, 184, 100, 157, 4, 0, 218, 184, 124, 14, 52, 47, 25, 95, 112, 104, 47, 191, 110, 135, 201, 90, 227, 158, 111, 189, 39, 60, 79, 67, 170, 138, 240, 255, 44, 184, 243, 119, 0, 0, 0, 255, 255, 33, 108, 56, 191}}, + want: testPipelineData(), + }, + { + name: "compression level 3", + failure: false, + pipeline: &Pipeline{Data: []byte{120, 94, 100, 204, 65, 170, 3, 33, 12, 198, 241, 125, 78, 241, 93, 192, 197, 219, 122, 155, 160, 25, 95, 24, 53, 18, 7, 123, 253, 226, 12, 20, 74, 179, 75, 248, 253, 67, 75, 124, 170, 245, 136, 63, 162, 151, 249, 41, 30, 9, 56, 42, 47, 243, 136, 202, 94, 132, 128, 81, 249, 58, 204, 91, 68, 182, 
116, 138, 19, 77, 241, 165, 73, 230, 214, 1, 157, 155, 68, 184, 100, 157, 4, 0, 218, 184, 124, 14, 52, 47, 25, 95, 112, 104, 47, 191, 110, 135, 201, 90, 227, 158, 111, 189, 39, 60, 79, 67, 170, 138, 240, 255, 44, 184, 243, 119, 0, 0, 0, 255, 255, 33, 108, 56, 191}}, + want: testPipelineData(), + }, + { + name: "compression level 4", + failure: false, + pipeline: &Pipeline{Data: []byte{120, 94, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}}, + want: testPipelineData(), + }, + { + name: "compression level 5", + failure: false, + pipeline: &Pipeline{Data: []byte{120, 94, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}}, + want: testPipelineData(), + }, + { + name: "compression level 6", + failure: false, + pipeline: &Pipeline{Data: []byte{120, 156, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 
223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}}, + want: testPipelineData(), + }, + { + name: "compression level 7", + failure: false, + pipeline: &Pipeline{Data: []byte{120, 218, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}}, + want: testPipelineData(), + }, + { + name: "compression level 8", + failure: false, + pipeline: &Pipeline{Data: []byte{120, 218, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}}, + want: testPipelineData(), + }, + { + name: "compression level 9", + failure: false, + pipeline: &Pipeline{Data: []byte{120, 218, 100, 203, 65, 14, 3, 33, 8, 133, 225, 61, 167, 120, 23, 112, 209, 173, 183, 33, 14, 99, 201, 168, 24, 156, 216, 235, 55, 214, 164, 73, 83, 118, 252, 124, 208, 20, 31, 106, 45, 226, 65, 244, 50, 191, 196, 35, 1, 103, 225, 105, 30, 81, 
216, 179, 16, 208, 11, 223, 167, 121, 141, 56, 44, 93, 226, 68, 67, 124, 106, 146, 177, 116, 64, 227, 42, 17, 46, 135, 14, 2, 0, 173, 156, 191, 129, 198, 45, 253, 7, 118, 109, 249, 223, 173, 144, 172, 86, 110, 199, 71, 175, 9, 251, 22, 82, 81, 132, 231, 94, 246, 251, 59, 0, 0, 255, 255, 33, 108, 56, 191}}, + want: testPipelineData(), + }, + } + + // run tests + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + err := test.pipeline.Decompress() + + if test.failure { + if err == nil { + t.Errorf("Decompress for %s should have returned err", test.name) + } + + return + } + + if err != nil { + t.Errorf("Decompress for %s returned err: %v", test.name, err) + } + + if !reflect.DeepEqual(test.pipeline.Data, test.want) { + t.Errorf("Decompress for %s is %v, want %v", test.name, string(test.pipeline.Data), string(test.want)) + } + }) + } +} + +func TestDatabase_Pipeline_Nullify(t *testing.T) { + // setup types + var p *Pipeline + + want := &Pipeline{ + ID: sql.NullInt64{Int64: 0, Valid: false}, + RepoID: sql.NullInt64{Int64: 0, Valid: false}, + Commit: sql.NullString{String: "", Valid: false}, + Flavor: sql.NullString{String: "", Valid: false}, + Platform: sql.NullString{String: "", Valid: false}, + Ref: sql.NullString{String: "", Valid: false}, + Type: sql.NullString{String: "", Valid: false}, + Version: sql.NullString{String: "", Valid: false}, + } + + // setup tests + tests := []struct { + pipeline *Pipeline + want *Pipeline + }{ + { + pipeline: testPipeline(), + want: testPipeline(), + }, + { + pipeline: p, + want: nil, + }, + { + pipeline: new(Pipeline), + want: want, + }, + } + + // run tests + for _, test := range tests { + got := test.pipeline.Nullify() + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("Nullify is %v, want %v", got, test.want) + } + } +} + +func TestDatabase_Pipeline_ToAPI(t *testing.T) { + // setup types + want := new(api.Pipeline) + + want.SetID(1) + want.SetRepo(testRepo().ToAPI()) + 
want.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") + want.SetFlavor("large") + want.SetPlatform("docker") + want.SetRef("refs/heads/main") + want.SetType(constants.PipelineTypeYAML) + want.SetVersion("1") + want.SetExternalSecrets(false) + want.SetInternalSecrets(false) + want.SetServices(true) + want.SetStages(false) + want.SetSteps(true) + want.SetTemplates(false) + want.SetData(testPipelineData()) + + // run test + got := testPipeline().ToAPI() + + if !reflect.DeepEqual(got, want) { + t.Errorf("ToAPI is %v, want %v", got, want) + } +} + +func TestDatabase_Pipeline_Validate(t *testing.T) { + // setup tests + tests := []struct { + failure bool + pipeline *Pipeline + }{ + { + failure: false, + pipeline: testPipeline(), + }, + { // no commit set for pipeline + failure: true, + pipeline: &Pipeline{ + ID: sql.NullInt64{Int64: 1, Valid: true}, + RepoID: sql.NullInt64{Int64: 1, Valid: true}, + Ref: sql.NullString{String: "refs/heads/main", Valid: true}, + Type: sql.NullString{String: constants.PipelineTypeYAML, Valid: true}, + Version: sql.NullString{String: "1", Valid: true}, + }, + }, + { // no ref set for pipeline + failure: true, + pipeline: &Pipeline{ + ID: sql.NullInt64{Int64: 1, Valid: true}, + RepoID: sql.NullInt64{Int64: 1, Valid: true}, + Commit: sql.NullString{String: "48afb5bdc41ad69bf22588491333f7cf71135163", Valid: true}, + Type: sql.NullString{String: constants.PipelineTypeYAML, Valid: true}, + Version: sql.NullString{String: "1", Valid: true}, + }, + }, + { // no repo_id set for pipeline + failure: true, + pipeline: &Pipeline{ + ID: sql.NullInt64{Int64: 1, Valid: true}, + Commit: sql.NullString{String: "48afb5bdc41ad69bf22588491333f7cf71135163", Valid: true}, + Ref: sql.NullString{String: "refs/heads/main", Valid: true}, + Type: sql.NullString{String: constants.PipelineTypeYAML, Valid: true}, + Version: sql.NullString{String: "1", Valid: true}, + }, + }, + { // no type set for pipeline + failure: true, + pipeline: &Pipeline{ + ID: 
sql.NullInt64{Int64: 1, Valid: true}, + RepoID: sql.NullInt64{Int64: 1, Valid: true}, + Commit: sql.NullString{String: "48afb5bdc41ad69bf22588491333f7cf71135163", Valid: true}, + Ref: sql.NullString{String: "refs/heads/main", Valid: true}, + Version: sql.NullString{String: "1", Valid: true}, + }, + }, + { // no version set for pipeline + failure: true, + pipeline: &Pipeline{ + ID: sql.NullInt64{Int64: 1, Valid: true}, + RepoID: sql.NullInt64{Int64: 1, Valid: true}, + Commit: sql.NullString{String: "48afb5bdc41ad69bf22588491333f7cf71135163", Valid: true}, + Ref: sql.NullString{String: "refs/heads/main", Valid: true}, + + Type: sql.NullString{String: constants.PipelineTypeYAML, Valid: true}, + }, + }, + } + + // run tests + for _, test := range tests { + err := test.pipeline.Validate() + + if test.failure { + if err == nil { + t.Errorf("Validate should have returned err") + } + + continue + } + + if err != nil { + t.Errorf("Validate returned err: %v", err) + } + } +} + +func TestDatabase_PipelineFromAPI(t *testing.T) { + // setup types + want := &Pipeline{ + ID: sql.NullInt64{Int64: 1, Valid: true}, + RepoID: sql.NullInt64{Int64: 1, Valid: true}, + Commit: sql.NullString{String: "48afb5bdc41ad69bf22588491333f7cf71135163", Valid: true}, + Flavor: sql.NullString{String: "large", Valid: true}, + Platform: sql.NullString{String: "docker", Valid: true}, + Ref: sql.NullString{String: "refs/heads/main", Valid: true}, + Type: sql.NullString{String: constants.PipelineTypeYAML, Valid: true}, + Version: sql.NullString{String: "1", Valid: true}, + ExternalSecrets: sql.NullBool{Bool: false, Valid: true}, + InternalSecrets: sql.NullBool{Bool: false, Valid: true}, + Services: sql.NullBool{Bool: true, Valid: true}, + Stages: sql.NullBool{Bool: false, Valid: true}, + Steps: sql.NullBool{Bool: true, Valid: true}, + Templates: sql.NullBool{Bool: false, Valid: true}, + Data: testPipelineData(), + } + + p := new(api.Pipeline) + + p.SetID(1) + p.SetRepo(testRepo().ToAPI()) + 
p.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") + p.SetFlavor("large") + p.SetPlatform("docker") + p.SetRef("refs/heads/main") + p.SetType(constants.PipelineTypeYAML) + p.SetVersion("1") + p.SetExternalSecrets(false) + p.SetInternalSecrets(false) + p.SetServices(true) + p.SetStages(false) + p.SetSteps(true) + p.SetTemplates(false) + p.SetData(testPipelineData()) + + // run test + got := PipelineFromAPI(p) + + if !reflect.DeepEqual(got, want) { + t.Errorf("PipelineFromAPI is %v, want %v", got, want) + } +} + +// testPipeline is a test helper function to create a Pipeline +// type with all fields set to a fake value. +func testPipeline() *Pipeline { + return &Pipeline{ + ID: sql.NullInt64{Int64: 1, Valid: true}, + RepoID: sql.NullInt64{Int64: 1, Valid: true}, + Commit: sql.NullString{String: "48afb5bdc41ad69bf22588491333f7cf71135163", Valid: true}, + Flavor: sql.NullString{String: "large", Valid: true}, + Platform: sql.NullString{String: "docker", Valid: true}, + Ref: sql.NullString{String: "refs/heads/main", Valid: true}, + Type: sql.NullString{String: constants.PipelineTypeYAML, Valid: true}, + Version: sql.NullString{String: "1", Valid: true}, + ExternalSecrets: sql.NullBool{Bool: false, Valid: true}, + InternalSecrets: sql.NullBool{Bool: false, Valid: true}, + Services: sql.NullBool{Bool: true, Valid: true}, + Stages: sql.NullBool{Bool: false, Valid: true}, + Steps: sql.NullBool{Bool: true, Valid: true}, + Templates: sql.NullBool{Bool: false, Valid: true}, + Data: testPipelineData(), + + Repo: *testRepo(), + } +} + +// testPipelineData is a test helper function to create the +// content for the Data field for the Pipeline type. 
+func testPipelineData() []byte { + return []byte(` +version: 1 + +worker: + flavor: large + platform: docker + +services: + - name: redis + image: redis + +steps: + - name: ping + image: redis + commands: + - redis-cli -h redis ping +`) +} diff --git a/mock/server/pipeline.go b/mock/server/pipeline.go index 0d90cf45d..beb0b3b7a 100644 --- a/mock/server/pipeline.go +++ b/mock/server/pipeline.go @@ -11,9 +11,9 @@ import ( yml "github.com/buildkite/yaml" "github.com/gin-gonic/gin" + api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/yaml" "github.com/go-vela/types" - "github.com/go-vela/types/library" - "github.com/go-vela/types/yaml" ) const ( @@ -105,8 +105,60 @@ templates: // PipelineResp represents a JSON return for a single pipeline. PipelineResp = `{ "id": 1, - "repo_id": 1, - "commit": "48afb5bdc41ad69bf22588491333f7cf71135163", + "repo": { + "id": 1, + "owner": { + "id": 1, + "name": "Octocat", + "active": true + }, + "org": "Octocat", + "name": "myvela", + "full_name": "Octocat/myvela", + "link": "https://github.com/Octocat/myvela", + "clone": "https://github.com/Octocat/myvela.git", + "branch": "main", + "topics": [ + "example" + ], + "build_limit": 10, + "timeout": 30, + "counter": 1, + "visibility": "public", + "private": false, + "trusted": false, + "active": true, + "allow_events": { + "push": { + "branch": true, + "tag": false, + "delete_branch": false, + "delete_tag": false + }, + "pull_request": { + "opened": false, + "edited": false, + "synchronize": false, + "reopened": false, + "labeled": false, + "unlabeled": false + }, + "deployment": { + "created": false + }, + "comment": { + "created": false, + "edited": false + }, + "schedule": { + "run": false + } + }, + "pipeline_type": "yaml", + "previous_name": "", + "approve_build": "first-time" + }, + "commit": "8496deb0aeacd9d95078ac8d38edb447631ef369", "flavor": "", "platform": "", "ref": "refs/heads/main", @@ -125,7 +177,59 @@ templates: PipelinesResp = `[ { "id": 2 
- "repo_id": 1, + "repo": { + "id": 1, + "owner": { + "id": 1, + "name": "Octocat", + "active": true + }, + "org": "Octocat", + "name": "myvela", + "full_name": "Octocat/myvela", + "link": "https://github.com/Octocat/myvela", + "clone": "https://github.com/Octocat/myvela.git", + "branch": "main", + "topics": [ + "example" + ], + "build_limit": 10, + "timeout": 30, + "counter": 1, + "visibility": "public", + "private": false, + "trusted": false, + "active": true, + "allow_events": { + "push": { + "branch": true, + "tag": false, + "delete_branch": false, + "delete_tag": false + }, + "pull_request": { + "opened": false, + "edited": false, + "synchronize": false, + "reopened": false, + "labeled": false, + "unlabeled": false + }, + "deployment": { + "created": false + }, + "comment": { + "created": false, + "edited": false + }, + "schedule": { + "run": false + } + }, + "pipeline_type": "yaml", + "previous_name": "", + "approve_build": "first-time" + }, "commit": "a49aaf4afae6431a79239c95247a2b169fd9f067", "flavor": "", "platform": "", @@ -142,7 +246,59 @@ templates: }, { "id": 1, - "repo_id": 1, + "repo": { + "id": 1, + "owner": { + "id": 1, + "name": "Octocat", + "active": true + }, + "org": "Octocat", + "name": "myvela", + "full_name": "Octocat/myvela", + "link": "https://github.com/Octocat/myvela", + "clone": "https://github.com/Octocat/myvela.git", + "branch": "main", + "topics": [ + "example" + ], + "build_limit": 10, + "timeout": 30, + "counter": 1, + "visibility": "public", + "private": false, + "trusted": false, + "active": true, + "allow_events": { + "push": { + "branch": true, + "tag": false, + "delete_branch": false, + "delete_tag": false + }, + "pull_request": { + "opened": false, + "edited": false, + "synchronize": false, + "reopened": false, + "labeled": false, + "unlabeled": false + }, + "deployment": { + "created": false + }, + "comment": { + "created": false, + "edited": false + }, + "schedule": { + "run": false + } + }, + "pipeline_type": "yaml", + 
"previous_name": "", + "approve_build": "first-time" + }, "commit": "48afb5bdc41ad69bf22588491333f7cf71135163", "flavor": "", "platform": "", @@ -172,7 +328,7 @@ sample: func getPipelines(c *gin.Context) { data := []byte(PipelinesResp) - var body []library.Pipeline + var body []api.Pipeline _ = json.Unmarshal(data, &body) c.JSON(http.StatusOK, body) @@ -194,7 +350,7 @@ func getPipeline(c *gin.Context) { data := []byte(PipelineResp) - var body library.Pipeline + var body api.Pipeline _ = json.Unmarshal(data, &body) c.JSON(http.StatusOK, body) @@ -204,7 +360,7 @@ func getPipeline(c *gin.Context) { func addPipeline(c *gin.Context) { data := []byte(PipelineResp) - var body library.Pipeline + var body api.Pipeline _ = json.Unmarshal(data, &body) c.JSON(http.StatusCreated, body) @@ -228,7 +384,7 @@ func updatePipeline(c *gin.Context) { data := []byte(PipelineResp) - var body library.Pipeline + var body api.Pipeline _ = json.Unmarshal(data, &body) c.JSON(http.StatusOK, body) diff --git a/mock/server/pipeline_test.go b/mock/server/pipeline_test.go index e7f6afbf8..3f67c6461 100644 --- a/mock/server/pipeline_test.go +++ b/mock/server/pipeline_test.go @@ -7,11 +7,11 @@ import ( "reflect" "testing" - "github.com/go-vela/types/library" + api "github.com/go-vela/server/api/types" ) func TestPipeline_ActivePipelineResp(t *testing.T) { - testPipeline := library.Pipeline{} + testPipeline := api.Pipeline{} err := json.Unmarshal([]byte(PipelineResp), &testPipeline) if err != nil { diff --git a/queue/redis/route.go b/queue/redis/route.go index e4fc0ae05..5ee0d29a6 100644 --- a/queue/redis/route.go +++ b/queue/redis/route.go @@ -7,8 +7,8 @@ import ( "fmt" "strings" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/types/constants" - "github.com/go-vela/types/pipeline" ) // Route decides which route a build gets placed within the queue. 
diff --git a/queue/redis/route_test.go b/queue/redis/route_test.go index e849f085b..5750e9277 100644 --- a/queue/redis/route_test.go +++ b/queue/redis/route_test.go @@ -6,8 +6,8 @@ import ( "strings" "testing" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/types/constants" - "github.com/go-vela/types/pipeline" ) func TestRedis_Client_Route(t *testing.T) { diff --git a/queue/service.go b/queue/service.go index a976e7045..82bfe4d57 100644 --- a/queue/service.go +++ b/queue/service.go @@ -6,8 +6,8 @@ import ( "context" "github.com/go-vela/server/api/types/settings" + "github.com/go-vela/server/compiler/types/pipeline" "github.com/go-vela/server/queue/models" - "github.com/go-vela/types/pipeline" ) // Service represents the interface for Vela integrating diff --git a/router/middleware/pipeline/context.go b/router/middleware/pipeline/context.go index d5218c3d7..8d4ad3563 100644 --- a/router/middleware/pipeline/context.go +++ b/router/middleware/pipeline/context.go @@ -5,7 +5,7 @@ package pipeline import ( "context" - "github.com/go-vela/types/library" + api "github.com/go-vela/server/api/types" ) const key = "pipeline" @@ -16,13 +16,13 @@ type Setter interface { } // FromContext returns the Pipeline associated with this context. -func FromContext(c context.Context) *library.Pipeline { +func FromContext(c context.Context) *api.Pipeline { value := c.Value(key) if value == nil { return nil } - b, ok := value.(*library.Pipeline) + b, ok := value.(*api.Pipeline) if !ok { return nil } @@ -32,6 +32,6 @@ func FromContext(c context.Context) *library.Pipeline { // ToContext adds the Pipeline to this context if it supports // the Setter interface. 
-func ToContext(c Setter, b *library.Pipeline) { +func ToContext(c Setter, b *api.Pipeline) { c.Set(key, b) } diff --git a/router/middleware/pipeline/context_test.go b/router/middleware/pipeline/context_test.go index 4ec7c6607..a0ec48faf 100644 --- a/router/middleware/pipeline/context_test.go +++ b/router/middleware/pipeline/context_test.go @@ -8,12 +8,12 @@ import ( "github.com/gin-gonic/gin" - "github.com/go-vela/types/library" + api "github.com/go-vela/server/api/types" ) func TestPipeline_FromContext(t *testing.T) { // setup types - _pipeline := new(library.Pipeline) + _pipeline := new(api.Pipeline) gin.SetMode(gin.TestMode) _context, _ := gin.CreateTestContext(nil) @@ -31,7 +31,7 @@ func TestPipeline_FromContext(t *testing.T) { tests := []struct { name string context *gin.Context - want *library.Pipeline + want *api.Pipeline }{ { name: "context", @@ -69,7 +69,7 @@ func TestPipeline_FromContext(t *testing.T) { func TestPipeline_ToContext(t *testing.T) { // setup types - _pipeline := new(library.Pipeline) + _pipeline := new(api.Pipeline) gin.SetMode(gin.TestMode) _context, _ := gin.CreateTestContext(nil) @@ -78,7 +78,7 @@ func TestPipeline_ToContext(t *testing.T) { tests := []struct { name string context *gin.Context - want *library.Pipeline + want *api.Pipeline }{ { name: "context", diff --git a/router/middleware/pipeline/pipeline.go b/router/middleware/pipeline/pipeline.go index e523f8d1a..ab32e5efa 100644 --- a/router/middleware/pipeline/pipeline.go +++ b/router/middleware/pipeline/pipeline.go @@ -17,11 +17,10 @@ import ( "github.com/go-vela/server/router/middleware/user" "github.com/go-vela/server/scm" "github.com/go-vela/server/util" - "github.com/go-vela/types/library" ) // Retrieve gets the pipeline in the given context. 
-func Retrieve(c *gin.Context) *library.Pipeline { +func Retrieve(c *gin.Context) *api.Pipeline { return FromContext(c) } diff --git a/router/middleware/pipeline/pipeline_test.go b/router/middleware/pipeline/pipeline_test.go index 28a6dc3c5..bdc7d83f3 100644 --- a/router/middleware/pipeline/pipeline_test.go +++ b/router/middleware/pipeline/pipeline_test.go @@ -13,6 +13,7 @@ import ( "time" "github.com/gin-gonic/gin" + "github.com/google/go-cmp/cmp" "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" @@ -20,6 +21,7 @@ import ( "github.com/go-vela/server/compiler" "github.com/go-vela/server/compiler/native" "github.com/go-vela/server/database" + "github.com/go-vela/server/database/testutils" "github.com/go-vela/server/internal" "github.com/go-vela/server/internal/token" "github.com/go-vela/server/router/middleware/claims" @@ -29,12 +31,11 @@ import ( "github.com/go-vela/server/scm" "github.com/go-vela/server/scm/github" "github.com/go-vela/types/constants" - "github.com/go-vela/types/library" ) func TestPipeline_Retrieve(t *testing.T) { // setup types - _pipeline := new(library.Pipeline) + _pipeline := new(api.Pipeline) gin.SetMode(gin.TestMode) _context, _ := gin.CreateTestContext(nil) @@ -43,7 +44,7 @@ func TestPipeline_Retrieve(t *testing.T) { tests := []struct { name string context *gin.Context - want *library.Pipeline + want *api.Pipeline }{ { name: "context", @@ -68,10 +69,12 @@ func TestPipeline_Retrieve(t *testing.T) { func TestPipeline_Establish(t *testing.T) { // setup types - owner := new(api.User) + owner := testutils.APIUser().Crop() owner.SetID(1) + owner.SetName("octocat") + owner.SetToken("foo") - r := new(api.Repo) + r := testutils.APIRepo() r.SetID(1) r.SetOwner(owner) r.SetHash("baz") @@ -80,9 +83,9 @@ func TestPipeline_Establish(t *testing.T) { r.SetFullName("foo/bar") r.SetVisibility("public") - want := new(library.Pipeline) + want := new(api.Pipeline) want.SetID(1) - want.SetRepoID(1) + want.SetRepo(r) 
want.SetCommit("48afb5bdc41ad69bf22588491333f7cf71135163") want.SetFlavor("") want.SetPlatform("") @@ -97,7 +100,7 @@ func TestPipeline_Establish(t *testing.T) { want.SetTemplates(false) want.SetData([]byte{}) - got := new(library.Pipeline) + got := new(api.Pipeline) // setup database db, err := database.NewTest() @@ -106,11 +109,13 @@ func TestPipeline_Establish(t *testing.T) { } defer func() { + _ = db.DeleteUser(context.TODO(), owner) _ = db.DeletePipeline(context.TODO(), want) _ = db.DeleteRepo(context.TODO(), r) db.Close() }() + _, _ = db.CreateUser(context.TODO(), owner) _, _ = db.CreateRepo(context.TODO(), r) _, _ = db.CreatePipeline(context.TODO(), want) @@ -140,8 +145,8 @@ func TestPipeline_Establish(t *testing.T) { t.Errorf("Establish returned %v, want %v", resp.Code, http.StatusOK) } - if !reflect.DeepEqual(got, want) { - t.Errorf("Establish is %v, want %v", got, want) + if diff := cmp.Diff(got, want); diff != "" { + t.Errorf("Establish mismatch (-got +want):\n%v", diff) } } diff --git a/router/middleware/repo/repo.go b/router/middleware/repo/repo.go index bbc678660..9bd8ac8a8 100644 --- a/router/middleware/repo/repo.go +++ b/router/middleware/repo/repo.go @@ -57,3 +57,5 @@ func Establish() gin.HandlerFunc { c.Next() } } + +// EstablishSlim sets the repo name in the given context diff --git a/scm/github/deployment.go b/scm/github/deployment.go index 85f377494..b4f89627b 100644 --- a/scm/github/deployment.go +++ b/scm/github/deployment.go @@ -10,7 +10,7 @@ import ( "github.com/sirupsen/logrus" api "github.com/go-vela/server/api/types" - "github.com/go-vela/types/raw" + "github.com/go-vela/server/compiler/types/raw" ) // GetDeployment gets a deployment from the GitHub repo. 
diff --git a/scm/github/webhook_test.go b/scm/github/webhook_test.go index e53ab0b05..15fdb486d 100644 --- a/scm/github/webhook_test.go +++ b/scm/github/webhook_test.go @@ -16,9 +16,9 @@ import ( "github.com/google/go-cmp/cmp" api "github.com/go-vela/server/api/types" + "github.com/go-vela/server/compiler/types/raw" "github.com/go-vela/server/internal" "github.com/go-vela/types/constants" - "github.com/go-vela/types/raw" ) func TestGithub_ProcessWebhook_Push(t *testing.T) { diff --git a/util/compression.go b/util/compression.go new file mode 100644 index 000000000..59b34e9bc --- /dev/null +++ b/util/compression.go @@ -0,0 +1,71 @@ +// SPDX-License-Identifier: Apache-2.0 + +package util + +import ( + "bytes" + "compress/zlib" + "io" +) + +// Compress is a helper function to compress values. First, an +// empty buffer is created for storing compressed data. Then, +// a zlib writer, using the DEFLATE algorithm, is created with +// the provided compression level to output to this buffer. +// Finally, the provided value is compressed and written to the +// buffer and the writer is closed which flushes all bytes from +// the writer to the buffer. +func Compress(level int, value []byte) ([]byte, error) { + // create new buffer for storing compressed data + b := new(bytes.Buffer) + + // create new zlib writer for outputting data to the buffer in a compressed format + w, err := zlib.NewWriterLevel(b, level) + if err != nil { + return value, err + } + + // write data to the buffer in compressed format + _, err = w.Write(value) + if err != nil { + return value, err + } + + // close the writer + // + // compressed bytes are not flushed until the writer is closed or explicitly flushed + err = w.Close() + if err != nil { + return value, err + } + + // return compressed bytes from the buffer + return b.Bytes(), nil +} + +// Decompress is a helper function to decompress values. First, a +// buffer is created from the provided compressed data. 
Then, a +// zlib reader, using the DEFLATE algorithm, is created from the +// buffer as an input for reading data from the buffer. Finally, +// the data is decompressed and read from the buffer. +func Decompress(value []byte) ([]byte, error) { + // create new buffer from the compressed data + b := bytes.NewBuffer(value) + + // create new zlib reader for reading the compressed data from the buffer + r, err := zlib.NewReader(b) + if err != nil { + return value, err + } + + // close the reader after the data has been decompressed + defer r.Close() + + // capture decompressed data from the compressed data in the buffer + data, err := io.ReadAll(r) + if err != nil { + return value, err + } + + return data, nil +} diff --git a/util/compression_test.go b/util/compression_test.go new file mode 100644 index 000000000..4328b61a3 --- /dev/null +++ b/util/compression_test.go @@ -0,0 +1,222 @@ +// SPDX-License-Identifier: Apache-2.0 + +package util + +import ( + "reflect" + "testing" + + "github.com/go-vela/types/constants" +) + +func TestDatabase_compress(t *testing.T) { + // setup tests + tests := []struct { + name string + failure bool + level int + data []byte + want []byte + }{ + { + name: "compression level -1", + failure: false, + level: constants.CompressionNegOne, + data: []byte("foo"), + want: []byte{120, 156, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + }, + { + name: "compression level 0", + failure: false, + level: constants.CompressionZero, + data: []byte("foo"), + want: []byte{120, 1, 0, 3, 0, 252, 255, 102, 111, 111, 1, 0, 0, 255, 255, 2, 130, 1, 69}, + }, + { + name: "compression level 1", + failure: false, + level: constants.CompressionOne, + data: []byte("foo"), + want: []byte{120, 1, 0, 3, 0, 252, 255, 102, 111, 111, 1, 0, 0, 255, 255, 2, 130, 1, 69}, + }, + { + name: "compression level 2", + failure: false, + level: constants.CompressionTwo, + data: []byte("foo"), + want: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + }, + { 
+ name: "compression level 3", + failure: false, + level: constants.CompressionThree, + data: []byte("foo"), + want: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + }, + { + name: "compression level 4", + failure: false, + level: constants.CompressionFour, + data: []byte("foo"), + want: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + }, + { + name: "compression level 5", + failure: false, + level: constants.CompressionFive, + data: []byte("foo"), + want: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + }, + { + name: "compression level 6", + failure: false, + level: constants.CompressionSix, + data: []byte("foo"), + want: []byte{120, 156, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + }, + { + name: "compression level 7", + failure: false, + level: constants.CompressionSeven, + data: []byte("foo"), + want: []byte{120, 218, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + }, + { + name: "compression level 8", + failure: false, + level: constants.CompressionEight, + data: []byte("foo"), + want: []byte{120, 218, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + }, + { + name: "compression level 9", + failure: false, + level: constants.CompressionNine, + data: []byte("foo"), + want: []byte{120, 218, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + }, + } + + // run tests + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + got, err := Compress(test.level, test.data) + + if test.failure { + if err == nil { + t.Errorf("compress for %s should have returned err", test.name) + } + + return + } + + if err != nil { + t.Errorf("compress for %s returned err: %v", test.name, err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("compress for %s is %v, want %v", test.name, string(got), string(test.want)) + } + }) + } +} + +func TestDatabase_decompress(t *testing.T) { + // setup tests + tests := []struct { + name string + failure bool + data []byte + want []byte + }{ 
+ { + name: "compression level -1", + failure: false, + data: []byte{120, 156, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + want: []byte("foo"), + }, + { + name: "compression level 0", + failure: false, + data: []byte{120, 1, 0, 3, 0, 252, 255, 102, 111, 111, 1, 0, 0, 255, 255, 2, 130, 1, 69}, + want: []byte("foo"), + }, + { + name: "compression level 1", + failure: false, + data: []byte{120, 1, 0, 3, 0, 252, 255, 102, 111, 111, 1, 0, 0, 255, 255, 2, 130, 1, 69}, + want: []byte("foo"), + }, + { + name: "compression level 2", + failure: false, + data: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + want: []byte("foo"), + }, + { + name: "compression level 3", + failure: false, + data: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + want: []byte("foo"), + }, + { + name: "compression level 4", + failure: false, + data: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + want: []byte("foo"), + }, + { + name: "compression level 5", + failure: false, + data: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + want: []byte("foo"), + }, + { + name: "compression level 6", + failure: false, + data: []byte{120, 156, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + want: []byte("foo"), + }, + { + name: "compression level 7", + failure: false, + data: []byte{120, 218, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + want: []byte("foo"), + }, + { + name: "compression level 8", + failure: false, + data: []byte{120, 218, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + want: []byte("foo"), + }, + { + name: "compression level 9", + failure: false, + data: []byte{120, 218, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, + want: []byte("foo"), + }, + } + + // run tests + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + got, err := Decompress(test.data) + + if test.failure { + if err == nil { + t.Errorf("decompress for %s should have returned err", 
test.name) + } + + return + } + + if err != nil { + t.Errorf("decompress for %s returned err: %v", test.name, err) + } + + if !reflect.DeepEqual(got, test.want) { + t.Errorf("decompress for %s is %v, want %v", test.name, string(got), string(test.want)) + } + }) + } +}