From 0b14c2d8e22cb2d8d020c32e6a06b7d844fa4881 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B8rgen=20Jervidalo?= Date: Fri, 17 Feb 2023 22:54:24 +0100 Subject: [PATCH] test(artifacts): Be more lenient when filtering expected artifacts It is currently hard/impossible to resolve artifacts in pipelines that don't have any triggers defined by themselves and are triggered by a pipeline stage in another pipeline. Previously it was possible to get it to work by editing the JSON of the pipeline or in the UI by adding artifacts in a temp trigger and then removing it (this would keep the expected artifacts around). When I introduced trigger-specific artifact constraints in #4322, I made it a lot harder (if not impossible) to do this because no triggers are used and thus all expected artifacts are filtered out. This commit contains tests that demonstrate the issue. --- .../pipeline/util/ArtifactUtilsSpec.groovy | 58 ++++++++++++-- .../DependentPipelineStarterSpec.groovy | 76 +++++++++++++++++++ 2 files changed, 127 insertions(+), 7 deletions(-) diff --git a/orca-core/src/test/groovy/com/netflix/spinnaker/orca/pipeline/util/ArtifactUtilsSpec.groovy b/orca-core/src/test/groovy/com/netflix/spinnaker/orca/pipeline/util/ArtifactUtilsSpec.groovy index b034a3202e..97ff6240d8 100644 --- a/orca-core/src/test/groovy/com/netflix/spinnaker/orca/pipeline/util/ArtifactUtilsSpec.groovy +++ b/orca-core/src/test/groovy/com/netflix/spinnaker/orca/pipeline/util/ArtifactUtilsSpec.groovy @@ -81,6 +81,29 @@ class ArtifactUtilsSpec extends Specification { artifact.name == 'build/libs/my-jar-100.jar' } + def "should bind stage-inlined artifacts to trigger artifacts"() { + setup: + def execution = pipeline { + stage { + name = "upstream stage" + type = "stage1" + refId = "1" + } + } + + execution.trigger = new DefaultTrigger('manual') + execution.trigger.artifacts.add(Artifact.builder().type('http/file').name('build/libs/my-jar-100.jar').build()) + + when: + def artifact = 
makeArtifactUtils().getBoundArtifactForStage(execution.stages[0], null, Artifact.builder() + .type('http/file') + .name('build/libs/my-jar-\\d+.jar') + .build()) + + then: + artifact.name == 'build/libs/my-jar-100.jar' + } + def "should find upstream artifacts in small pipeline"() { when: def desired = execution.getStages().find { it.name == "desired" } @@ -392,17 +415,38 @@ class ArtifactUtilsSpec extends Specification { def "resolveArtifacts ignores expected artifacts from unrelated triggers"() { given: - def matchArtifact = Artifact.builder().type("docker/.*").build() - def expectedArtifact1 = ExpectedArtifact.builder().id("expected-artifact-id").matchArtifact(matchArtifact).build() - def expectedArtifact2 = ExpectedArtifact.builder().id("irrelevant-artifact-id").matchArtifact(matchArtifact).build() - def receivedArtifact = Artifact.builder().name("my-artifact").type("docker/image").build() + def matchArtifact = Artifact.builder() + .type("docker/.*") + .build() + def anotherArtifact = Artifact.builder() + .type("http/file") + .build() + def expectedArtifact1 = ExpectedArtifact.builder() + .id("expected-artifact-id") + .matchArtifact(matchArtifact) + .build() + def expectedArtifact2 = ExpectedArtifact.builder() + .id("irrelevant-artifact-id") + .matchArtifact(matchArtifact) + .build() + def expectedArtifact3 = ExpectedArtifact.builder() + .id("relevant-artifact-id") + .matchArtifact(anotherArtifact) + .defaultArtifact(anotherArtifact) + .useDefaultArtifact(true) + .build() + def receivedArtifact = Artifact.builder() + .name("my-artifact") + .type("docker/image") + .build() + def pipeline = [ id: "abc", trigger: [ type: "jenkins", expectedArtifactIds: ["expected-artifact-id"] ], - expectedArtifacts: [expectedArtifact1, expectedArtifact2], + expectedArtifacts: [expectedArtifact1, expectedArtifact2, expectedArtifact3], receivedArtifacts: [receivedArtifact], ] def artifactUtils = makeArtifactUtils() @@ -414,8 +458,8 @@ class ArtifactUtilsSpec extends Specification { 
new TypeReference>() {}) then: - resolvedArtifacts.size() == 1 - resolvedArtifacts.get(0).getBoundArtifact() == receivedArtifact + resolvedArtifacts.size() == 2 + resolvedArtifacts*.getBoundArtifact() == [receivedArtifact, anotherArtifact] } def "resolveArtifacts adds received artifacts to the trigger, skipping duplicates"() { diff --git a/orca-front50/src/test/groovy/com/netflix/spinnaker/orca/front50/DependentPipelineStarterSpec.groovy b/orca-front50/src/test/groovy/com/netflix/spinnaker/orca/front50/DependentPipelineStarterSpec.groovy index aa08e08d3e..3ee087edc9 100644 --- a/orca-front50/src/test/groovy/com/netflix/spinnaker/orca/front50/DependentPipelineStarterSpec.groovy +++ b/orca-front50/src/test/groovy/com/netflix/spinnaker/orca/front50/DependentPipelineStarterSpec.groovy @@ -20,6 +20,7 @@ import com.fasterxml.jackson.databind.JavaType import com.fasterxml.jackson.databind.ObjectMapper import com.netflix.spectator.api.NoopRegistry import com.netflix.spinnaker.kork.artifacts.model.Artifact +import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact import com.netflix.spinnaker.orca.api.pipeline.models.PipelineExecution import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution import com.netflix.spinnaker.orca.api.pipeline.ExecutionPreprocessor @@ -497,6 +498,81 @@ class DependentPipelineStarterSpec extends Specification { result.trigger.resolvedExpectedArtifacts.size() == 0 } + def "should find expected artifacts from parent pipeline trigger if triggered by pipeline stage"() { + given: + def triggeredPipelineConfig = [ + name: "triggered", + id: "triggered", + expectedArtifacts: [], + triggers: [], + ] + Artifact testArtifact1 = Artifact.builder().type("gcs/object").name("gs://test/file.yaml").build() + Artifact testArtifact2 = Artifact.builder().type("docker/image").name("gcr.io/project/image").build() + def parentPipeline = pipeline { + name = "parent" + trigger = new DefaultTrigger("webhook", null, "test", [:], [testArtifact1, 
testArtifact2]) + authentication = new PipelineExecution.AuthenticationDetails("parentUser", "acct1", "acct2") + pipelineConfigId = "5e96d1e8-a3c0-4458-b3a4-fda17e0d5ab5" + stage { + id = "stage1" + refId = "1" + } + stage { + id = "stage2" + refId = "2" + requisiteStageRefIds = ["1"] + } + } + + def uuid = "8f241d2a-7fee-4a95-8d84-0a508222032c" + ArrayList expectedArtifacts = [ + ExpectedArtifact.builder().id(uuid).matchArtifact(testArtifact1).build() + ] + parentPipeline.trigger.setOther("expectedArtifacts", expectedArtifacts) + parentPipeline.trigger.resolvedExpectedArtifacts = expectedArtifacts + def executionLauncher = Mock(ExecutionLauncher) + def applicationContext = new StaticApplicationContext() + applicationContext.beanFactory.registerSingleton("pipelineLauncher", executionLauncher) + dependentPipelineStarter = new DependentPipelineStarter( + applicationContext, + mapper, + new ContextParameterProcessor(), + Optional.empty(), + Optional.of(artifactUtils), + new NoopRegistry() + ) + + and: + executionLauncher.start(*_) >> { _, p -> + return pipeline { + name = p.name + id = p.name + trigger = mapper.convertValue(p.trigger, Trigger) + } + } + artifactUtils.getArtifactsForPipelineId(*_) >> { + return new ArrayList(); + } + + when: + def result = dependentPipelineStarter.trigger( + triggeredPipelineConfig, + null, + parentPipeline, + [:], + "stage1", + buildAuthenticatedUser("user", []) + ) + + then: + result.trigger.artifacts.size() == 2 + result.trigger.artifacts*.name.contains(testArtifact1.name) + result.trigger.artifacts*.name.contains(testArtifact2.name) + result.trigger.resolvedExpectedArtifacts.size() == 1 + result.trigger.resolvedExpectedArtifacts*.boundArtifact.name == [testArtifact1.name] + result.trigger.resolvedExpectedArtifacts*.id == [uuid] + } + def "should resolve expressions in trigger"() { given: def triggeredPipelineConfig = [name: "triggered", id: "triggered", parameterConfig: [[name: 'a', default: '${2 == 2}']]]