Skip to content

Commit

Permalink
test(artifacts): Be more lenient when filtering expected artifacts
Browse files Browse the repository at this point in the history
It is currently hard/impossible to resolve artifacts in pipelines that don't have any triggers defined themselves and are instead triggered by a pipeline stage in another pipeline. Previously it was possible to get this to work by editing the pipeline's JSON, or in the UI by adding artifacts to a temporary trigger and then removing it (this would keep the expected artifacts around).
When I introduced trigger specific artifact constraints in spinnaker#4322, I made it a lot harder (if not impossible) to do this because no triggers are used and thus all expected artifacts are filtered out.

This commit contains tests that demonstrate the issue.
  • Loading branch information
jervi committed Feb 21, 2023
1 parent b0981e3 commit 0b14c2d
Show file tree
Hide file tree
Showing 2 changed files with 127 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,29 @@ class ArtifactUtilsSpec extends Specification {
artifact.name == 'build/libs/my-jar-100.jar'
}

def "should bind stage-inlined artifacts to trigger artifacts"() {
  setup:
  // A pipeline with one stage and no inline artifacts of its own.
  def pipelineExecution = pipeline {
    stage {
      name = "upstream stage"
      type = "stage1"
      refId = "1"
    }
  }

  // Attach one concrete artifact to a manual trigger on the execution.
  def triggerArtifact = Artifact.builder().type('http/file').name('build/libs/my-jar-100.jar').build()
  pipelineExecution.trigger = new DefaultTrigger('manual')
  pipelineExecution.trigger.artifacts.add(triggerArtifact)

  when:
  // The stage-inlined artifact names the jar via a regex; binding should
  // match it against the concrete artifact carried by the trigger.
  def requestedArtifact = Artifact.builder()
      .type('http/file')
      .name('build/libs/my-jar-\\d+.jar')
      .build()
  def boundArtifact = makeArtifactUtils().getBoundArtifactForStage(pipelineExecution.stages[0], null, requestedArtifact)

  then:
  boundArtifact.name == 'build/libs/my-jar-100.jar'
}

def "should find upstream artifacts in small pipeline"() {
when:
def desired = execution.getStages().find { it.name == "desired" }
Expand Down Expand Up @@ -392,17 +415,38 @@ class ArtifactUtilsSpec extends Specification {

// NOTE(review): this span is a unified-diff render, not plain source — it
// interleaves the removed and added lines of the same test and includes a
// diff-navigation marker below; it will not compile as written here.
def "resolveArtifacts ignores expected artifacts from unrelated triggers"() {
given:
// The next four compact builder lines are the PRE-change version of the
// fixtures (removed by this commit):
def matchArtifact = Artifact.builder().type("docker/.*").build()
def expectedArtifact1 = ExpectedArtifact.builder().id("expected-artifact-id").matchArtifact(matchArtifact).build()
def expectedArtifact2 = ExpectedArtifact.builder().id("irrelevant-artifact-id").matchArtifact(matchArtifact).build()
def receivedArtifact = Artifact.builder().name("my-artifact").type("docker/image").build()
// The multiline builders below are the POST-change version, which adds a
// third expected artifact (expectedArtifact3) that matches no trigger but
// carries a default artifact with useDefaultArtifact(true):
def matchArtifact = Artifact.builder()
.type("docker/.*")
.build()
def anotherArtifact = Artifact.builder()
.type("http/file")
.build()
def expectedArtifact1 = ExpectedArtifact.builder()
.id("expected-artifact-id")
.matchArtifact(matchArtifact)
.build()
def expectedArtifact2 = ExpectedArtifact.builder()
.id("irrelevant-artifact-id")
.matchArtifact(matchArtifact)
.build()
// New in this commit: an expected artifact resolvable purely via its
// default artifact, i.e. without any trigger association.
def expectedArtifact3 = ExpectedArtifact.builder()
.id("relevant-artifact-id")
.matchArtifact(anotherArtifact)
.defaultArtifact(anotherArtifact)
.useDefaultArtifact(true)
.build()
def receivedArtifact = Artifact.builder()
.name("my-artifact")
.type("docker/image")
.build()

// Pipeline config map: the jenkins trigger only declares
// "expected-artifact-id"; "irrelevant-artifact-id" should be ignored.
def pipeline = [
id: "abc",
trigger: [
type: "jenkins",
expectedArtifactIds: ["expected-artifact-id"]
],
// Old line (removed):
expectedArtifacts: [expectedArtifact1, expectedArtifact2],
// New line (added) — includes the trigger-less expectedArtifact3:
expectedArtifacts: [expectedArtifact1, expectedArtifact2, expectedArtifact3],
receivedArtifacts: [receivedArtifact],
]
def artifactUtils = makeArtifactUtils()
// Diff-navigation residue from the scraped page (not source code):
Expand All @@ -414,8 +458,8 @@ class ArtifactUtilsSpec extends Specification {
new TypeReference<List<ExpectedArtifact>>() {})

then:
// Old assertions (removed): only the trigger-declared artifact resolved.
resolvedArtifacts.size() == 1
resolvedArtifacts.get(0).getBoundArtifact() == receivedArtifact
// New assertions (added): the default-artifact one now resolves as well.
resolvedArtifacts.size() == 2
resolvedArtifacts*.getBoundArtifact() == [receivedArtifact, anotherArtifact]
}

def "resolveArtifacts adds received artifacts to the trigger, skipping duplicates"() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ import com.fasterxml.jackson.databind.JavaType
import com.fasterxml.jackson.databind.ObjectMapper
import com.netflix.spectator.api.NoopRegistry
import com.netflix.spinnaker.kork.artifacts.model.Artifact
import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact
import com.netflix.spinnaker.orca.api.pipeline.models.PipelineExecution
import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution
import com.netflix.spinnaker.orca.api.pipeline.ExecutionPreprocessor
Expand Down Expand Up @@ -497,6 +498,81 @@ class DependentPipelineStarterSpec extends Specification {
result.trigger.resolvedExpectedArtifacts.size() == 0
}

// Verifies that when a pipeline with no triggers and no expected artifacts of
// its own is started by a pipeline stage, the expected artifacts declared on
// the PARENT pipeline's trigger are used to resolve artifacts for the child.
def "should find expected artifacts from parent pipeline trigger if triggered by pipeline stage"() {
given:
// Child pipeline config deliberately declares no triggers and no
// expectedArtifacts — the scenario this commit's fix targets.
def triggeredPipelineConfig = [
name: "triggered",
id: "triggered",
expectedArtifacts: [],
triggers: [],
]
Artifact testArtifact1 = Artifact.builder().type("gcs/object").name("gs://test/file.yaml").build()
Artifact testArtifact2 = Artifact.builder().type("docker/image").name("gcr.io/project/image").build()
// Parent execution: a webhook trigger carrying both artifacts, and two
// stages; "stage1" is the one that triggers the child below.
def parentPipeline = pipeline {
name = "parent"
trigger = new DefaultTrigger("webhook", null, "test", [:], [testArtifact1, testArtifact2])
authentication = new PipelineExecution.AuthenticationDetails("parentUser", "acct1", "acct2")
pipelineConfigId = "5e96d1e8-a3c0-4458-b3a4-fda17e0d5ab5"
stage {
id = "stage1"
refId = "1"
}
stage {
id = "stage2"
refId = "2"
requisiteStageRefIds = ["1"]
}
}

// Only testArtifact1 is covered by an expected artifact on the parent's
// trigger; testArtifact2 should still flow through as a received artifact.
def uuid = "8f241d2a-7fee-4a95-8d84-0a508222032c"
ArrayList<ExpectedArtifact> expectedArtifacts = [
ExpectedArtifact.builder().id(uuid).matchArtifact(testArtifact1).build()
]
parentPipeline.trigger.setOther("expectedArtifacts", expectedArtifacts)
parentPipeline.trigger.resolvedExpectedArtifacts = expectedArtifacts
def executionLauncher = Mock(ExecutionLauncher)
def applicationContext = new StaticApplicationContext()
applicationContext.beanFactory.registerSingleton("pipelineLauncher", executionLauncher)
dependentPipelineStarter = new DependentPipelineStarter(
applicationContext,
mapper,
new ContextParameterProcessor(),
Optional.empty(),
Optional.of(artifactUtils),
new NoopRegistry()
)

and:
// Stub the launcher: echo back a pipeline whose trigger is the one the
// starter computed, so the assertions below can inspect it.
executionLauncher.start(*_) >> { _, p ->
return pipeline {
name = p.name
id = p.name
trigger = mapper.convertValue(p.trigger, Trigger)
}
}
// No artifacts come from prior executions of the child pipeline.
artifactUtils.getArtifactsForPipelineId(*_) >> {
return new ArrayList<Artifact>();
}

when:
// Trigger the child from parent stage "stage1".
def result = dependentPipelineStarter.trigger(
triggeredPipelineConfig,
null,
parentPipeline,
[:],
"stage1",
buildAuthenticatedUser("user", [])
)

then:
// Both parent trigger artifacts are passed through...
result.trigger.artifacts.size() == 2
result.trigger.artifacts*.name.contains(testArtifact1.name)
result.trigger.artifacts*.name.contains(testArtifact2.name)
// ...and the parent's expected artifact is resolved and bound to
// testArtifact1, keyed by the parent-declared id.
result.trigger.resolvedExpectedArtifacts.size() == 1
result.trigger.resolvedExpectedArtifacts*.boundArtifact.name == [testArtifact1.name]
result.trigger.resolvedExpectedArtifacts*.id == [uuid]
}

def "should resolve expressions in trigger"() {
given:
def triggeredPipelineConfig = [name: "triggered", id: "triggered", parameterConfig: [[name: 'a', default: '${2 == 2}']]]
Expand Down

0 comments on commit 0b14c2d

Please sign in to comment.