Skip to content

Commit

Permalink
Always upload throughput results with the same name
Browse files Browse the repository at this point in the history
This solves problems around retrying throughput tests
  • Loading branch information
andrewlock committed Oct 23, 2023
1 parent 61abf67 commit 133db1c
Showing 1 changed file with 20 additions and 20 deletions.
40 changes: 20 additions & 20 deletions .azure-pipelines/ultimate-pipeline.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3813,14 +3813,14 @@ stages:
- script: |
cp $(CrankDir)/*.json $(CrankDir)/results
displayName: Copy the results to results dir
condition: succeededOrFailed()
continueOnError: true
- publish: "$(CrankDir)/results"
displayName: Publish results
artifact: crank_linux_x64_$(System.JobAttempt)
condition: succeededOrFailed()
continueOnError: true
# We don't include the JobAttempt in this case, because we rely on a specific name
# and an error in the throughput tests probably means no usable data, so don't
# bother trying to upload these in case of failure, which means we can retry the
# stages without issue
artifact: crank_linux_x64_1

- job: Windows64
timeoutInMinutes: 60
Expand Down Expand Up @@ -3857,14 +3857,14 @@ stages:
- script: |
cp $(CrankDir)/*.json $(CrankDir)/results
displayName: Copy the results to results dir
condition: succeededOrFailed()
continueOnError: true
- publish: "$(CrankDir)/results"
displayName: Publish results
artifact: crank_windows_x64_$(System.JobAttempt)
condition: succeededOrFailed()
continueOnError: true
# We don't include the JobAttempt in this case, because we rely on a specific name
# and an error in the throughput tests probably means no usable data, so don't
# bother trying to upload these in case of failure, which means we can retry the
# stages without issue
artifact: crank_windows_x64_1

- job: LinuxArm64
timeoutInMinutes: 60
Expand Down Expand Up @@ -3900,14 +3900,14 @@ stages:
- script: |
cp $(CrankDir)/*.json $(CrankDir)/results
displayName: Copy the results to results dir
condition: succeededOrFailed()
continueOnError: true
- publish: "$(CrankDir)/results"
displayName: Publish results
artifact: crank_linux_arm64_$(System.JobAttempt)
condition: succeededOrFailed()
continueOnError: true
# We don't include the JobAttempt in this case, because we rely on a specific name
# and an error in the throughput tests probably means no usable data, so don't
# bother trying to upload these in case of failure, which means we can retry the
# stages without issue
artifact: crank_linux_arm64_1

- stage: throughput_profiler
condition: >
Expand Down Expand Up @@ -4062,14 +4062,14 @@ stages:
- script: |
cp $(CrankDir)/*.json $(CrankDir)/results
displayName: Copy the results to results dir
condition: succeededOrFailed()
continueOnError: true
- publish: "$(CrankDir)/results"
displayName: Publish results
artifact: crank_linux_x64_asm_$(System.JobAttempt)
condition: succeededOrFailed()
continueOnError: true
# We don't include the JobAttempt in this case, because we rely on a specific name
# and an error in the throughput tests probably means no usable data, so don't
# bother trying to upload these in case of failure, which means we can retry the
# stages without issue
artifact: crank_linux_x64_asm_1

- stage: coverage
condition: and(succeeded(), eq(variables['isBenchmarksOnlyBuild'], 'False'), eq(variables['runCodeCoverage'], 'True'))
Expand Down

0 comments on commit 133db1c

Please sign in to comment.