From 62e02e9de687578c3dc4ab51d1fb47f2b79971e2 Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Mon, 21 Mar 2022 20:29:21 +0000 Subject: [PATCH 01/58] Update cromwell version from 78 to 79 --- project/Version.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Version.scala b/project/Version.scala index fb665bce138..38b33e4d386 100644 --- a/project/Version.scala +++ b/project/Version.scala @@ -5,7 +5,7 @@ import sbt._ object Version { // Upcoming release, or current if we're on a master / hotfix branch - val cromwellVersion = "78" + val cromwellVersion = "79" /** * Returns true if this project should be considered a snapshot. From 1ca4d67f48a47f8ed644a8f7a1e32074caf0efbb Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Tue, 22 Mar 2022 17:12:11 -0400 Subject: [PATCH 02/58] Update release docs [BW-1101] (#6717) --- processes/release_processes/README.MD | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/processes/release_processes/README.MD b/processes/release_processes/README.MD index cb709a72769..6994fd6f715 100644 --- a/processes/release_processes/README.MD +++ b/processes/release_processes/README.MD @@ -33,12 +33,13 @@ The release WDL uses a github token to perform actions on your behalf. * the full set of `repo` permissions * the `workflow` permission * only the `read:org` permission (do not let this token administer your organizations!) - * only the `user:email` permission (do not let this token administer your organizations!) -* Copy the token and save it to a file on your local machine. You'll use it in a second. + * only the `user:email` permission (do not let this token administer your user!) +* When viewing the token in the [list-view of tokens](https://github.com/settings/tokens), its scopes string should read exactly `read:org, repo, user:email, workflow` +* Copy the token and save it to a file on your local machine, for example `~/.github-token`. You'll use it in a second. #### Prepare a temporary `release` directory -This is optional, but I find it useful. Make or copy the following files into some temporary `releases/` directory: +Make or copy the following files into a temporary `release/` directory outside the Cromwell repository. This removes any chance of committing your token. * A copy of the workflow file to run (https://github.com/broadinstitute/cromwell/blob/develop/publish/publish_workflow.wdl) * An inputs json like this: @@ -60,18 +61,21 @@ Ensure you have at least 8GB; 4GB is not sufficient. #### Let people know the publish is underway -Post another message in `#ftfy-private` that the release is underway, asking everyone to hold off merges to develop until +Post another message in `#ftfy-private` that the release is underway, asking everyone to hold off merges to `develop` until the release is published. #### Run the `publish_workflow.wdl` Workflow -Run Cromwell in server mode with a persistent backing database, using Docker containers. This allows call caching to happen if you need to restart for any reason. +Run Cromwell in server mode with a persistent backing database, using Docker containers. This allows call caching to happen if you need to restart for any reason. See instructions for using a Dockerized MySQL server and CI config [here](#cromwell-setup-for-publishing). -Note that the publish workflow is quite resource intensive; it's a good idea to -shut down other resource intensive apps before launching it to avoid painfully slow or failed executions. 
+Note that the publish workflow is quite resource intensive; it's a good idea to shut down other resource intensive apps before launching it to avoid painfully slow or failed executions. -Using the Swagger API, submit the workflow to Cromwell along with the inputs file. +Make sure to plug in your laptop. + +Using the Swagger API at `http://localhost:8000`, submit the workflow to Cromwell along with the inputs file. + +The workflow outputs its status to the console. #### Make sure it all went swimmingly @@ -81,6 +85,9 @@ Using the Swagger API, submit the workflow to Cromwell along with the inputs fil * Look [in Travis](https://app.travis-ci.com/github/broadinstitute/cromwell/branches) for the release tag build that will publish Docker images for the new version. * Let `#ftfy-private` know that it's okay to resume merges to `develop`. * Announce release in `#dsp-batch`, set expectations about when the new version will be available in Terra. +* It will take about an additional hour for the Docker image to build in Travis before its tag appears on the [Cromwell Docker Hub page](https://hub.docker.com/r/broadinstitute/cromwell/tags). + * The relevant build is the one named `XX_hotfix` in [this list](https://app.travis-ci.com/github/broadinstitute/cromwell/builds). + * See the `cromwell::build::publish_artifacts()` function for details ### How to Release Cromwell into Firecloud / Terra From 0e91d20e6d5c6e56e4a8ec04ff424e698d99e9db Mon Sep 17 00:00:00 2001 From: Miguel Covarrubias Date: Tue, 22 Mar 2022 17:32:20 -0400 Subject: [PATCH 03/58] Call caching test fixes [BT-594] (#6715) --- .../centaur/AbstractCentaurTestCaseSpec.scala | 44 +++++++++---------- .../centaur/reporting/BigQueryReporter.scala | 2 +- .../centaur/reporting/Slf4jReporter.scala | 2 +- .../centaur/reporting/TestEnvironment.scala | 6 ++- .../backendWithNoDocker.test | 3 -- .../standardTestCases/cacheBetweenWf.test | 3 -- ...ll_cache_hit_prefixes_empty_hint_papi.test | 3 -- .../call_cache_hit_prefixes_no_hint.test | 3 -- ...s_two_roots_empty_hint_cache_hit_papi.test | 3 -- .../cwl_cache_between_workflows.test | 3 -- .../standardTestCases/floating_tags.test | 3 -- .../standardTestCases/fofn_caching.test | 3 -- .../google_artifact_registry.test | 3 -- .../standardTestCases/hello_private_repo.test | 3 -- .../standardTestCases/use_cacheCopy_dir.test | 3 -- .../centaur/api/CentaurCromwellClient.scala | 5 ++- .../test/standard/CentaurTestCase.scala | 10 +++-- .../workflow/SubmittedWorkflowTracker.scala | 31 +++++++++++++ .../centaur/test/workflow/Workflow.scala | 19 +++----- .../centaur/test/CentaurOperationsSpec.scala | 2 +- .../scala/centaur/cwl/CentaurCwlRunner.scala | 7 ++- 21 files changed, 81 insertions(+), 80 deletions(-) create mode 100644 centaur/src/main/scala/centaur/test/workflow/SubmittedWorkflowTracker.scala diff --git a/centaur/src/it/scala/centaur/AbstractCentaurTestCaseSpec.scala b/centaur/src/it/scala/centaur/AbstractCentaurTestCaseSpec.scala index 3346181259b..1ddebb6a09a 100644 --- a/centaur/src/it/scala/centaur/AbstractCentaurTestCaseSpec.scala +++ b/centaur/src/it/scala/centaur/AbstractCentaurTestCaseSpec.scala @@ -12,6 +12,7 @@ import centaur.test.CentaurTestException import centaur.test.standard.CentaurTestCase import centaur.test.submit.{SubmitResponse, SubmitWorkflowResponse} import centaur.test.workflow.WorkflowData +import cromwell.api.model.WorkflowId import org.scalatest._ import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers @@ -55,8 +56,6 @@ abstract class 
AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromw } def executeStandardTest(testCase: CentaurTestCase): Unit = { - def nameTest = s"${testCase.testFormat.testSpecString} ${testCase.workflow.testName}" - def runTestAndDeleteZippedImports(): IO[SubmitResponse] = for { submitResponse <- testCase.testFunction.run _ = cleanUpImports(testCase.workflow.data) // cleanup imports in the end of test @@ -65,9 +64,9 @@ abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromw // Make tags, but enforce lowercase: val tags = (testCase.testOptions.tags :+ testCase.workflow.testName :+ testCase.testFormat.name) map { x => Tag(x.toLowerCase) } val isIgnored = testCase.isIgnored(cromwellBackends) - val retries = ErrorReporters.retryAttempts + val retries = if (testCase.workflow.retryTestFailures) ErrorReporters.retryAttempts else 0 - runOrDont(nameTest, tags, isIgnored, retries, runTestAndDeleteZippedImports()) + runOrDont(testCase, tags, isIgnored, retries, runTestAndDeleteZippedImports()) } def executeWdlUpgradeTest(testCase: CentaurTestCase): Unit = @@ -120,66 +119,67 @@ abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromw newCase } - private def runOrDont(testName: String, + private def runOrDont(testCase: CentaurTestCase, tags: List[Tag], ignore: Boolean, retries: Int, runTest: => IO[SubmitResponse]): Unit = { - val itShould: ItVerbString = it should testName + val itShould: ItVerbString = it should testCase.name tags match { - case Nil => runOrDont(itShould, ignore, testName, retries, runTest) - case head :: Nil => runOrDont(itShould taggedAs head, ignore, testName, retries, runTest) - case head :: tail => runOrDont(itShould taggedAs(head, tail: _*), ignore, testName, retries, runTest) + case Nil => runOrDont(itShould, ignore, testCase, retries, runTest) + case head :: Nil => runOrDont(itShould taggedAs head, ignore, testCase, retries, runTest) + case head :: tail => runOrDont(itShould taggedAs(head, tail: _*), ignore, testCase, retries, runTest) } } private def runOrDont(itVerbString: ItVerbString, ignore: Boolean, - testName: String, + testCase: CentaurTestCase, retries: Int, runTest: => IO[SubmitResponse]): Unit = { if (ignore) { itVerbString ignore Future.successful(succeed) } else { - itVerbString in tryTryAgain(testName, runTest, retries).unsafeToFuture().map(_ => succeed) + itVerbString in tryTryAgain(testCase, runTest, retries).unsafeToFuture().map(_ => succeed) } } private def runOrDont(itVerbStringTaggedAs: ItVerbStringTaggedAs, ignore: Boolean, - testName: String, + testCase: CentaurTestCase, retries: Int, runTest: => IO[SubmitResponse]): Unit = { if (ignore) { itVerbStringTaggedAs ignore Future.successful(succeed) } else { itVerbStringTaggedAs in - tryTryAgain(testName, runTest, retries).unsafeToFuture().map(_ => succeed) + tryTryAgain(testCase, runTest, retries).unsafeToFuture().map(_ => succeed) } } /** * Returns an IO effect that will recursively try to run a test. * - * @param testName Name of the ScalaTest. + * @param testCase CentaurTestCase. * @param runTest Thunk to run the test. * @param retries Total number of attempts to retry. * @param attempt Current zero based attempt. * @return IO effect that will run the test, possibly retrying. 
*/ - private def tryTryAgain(testName: String, runTest: => IO[SubmitResponse], retries: Int, attempt: Int = 0): IO[SubmitResponse] = { + private def tryTryAgain(testCase: CentaurTestCase, runTest: => IO[SubmitResponse], retries: Int, attempt: Int = 0): IO[SubmitResponse] = { + def maybeRetry(centaurTestException: CentaurTestException): IO[SubmitResponse] = { - val testEnvironment = TestEnvironment(testName, retries, attempt) + + def clearCachedResults(workflowId: WorkflowId): IO[Unit] = CromwellDatabaseCallCaching.clearCachedResults(workflowId.toString) + + val testEnvironment = TestEnvironment(testCase, retries, attempt) for { _ <- ErrorReporters.logFailure(testEnvironment, centaurTestException) r <- if (attempt < retries) { - centaurTestException - .workflowIdOption - .map(CromwellDatabaseCallCaching.clearCachedResults) - .getOrElse(IO.unit) *> - tryTryAgain(testName, runTest, retries, attempt + 1) + testCase.submittedWorkflowTracker.cleanUpBeforeRetry(clearCachedResults) *> + tryTryAgain(testCase, runTest, retries, attempt + 1) } else { IO.raiseError(centaurTestException) } @@ -193,7 +193,7 @@ abstract class AbstractCentaurTestCaseSpec(cromwellBackends: List[String], cromw case centaurTestException: CentaurTestException => maybeRetry(centaurTestException) case nonCentaurThrowable: Throwable => - val testEnvironment = TestEnvironment(testName, retries = attempt + 1, attempt) // allow one last retry + val testEnvironment = TestEnvironment(testCase, retries = attempt + 1, attempt) // allow one last retry ErrorReporters.logFailure(testEnvironment, nonCentaurThrowable) runTestIo }, diff --git a/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala b/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala index e89dc361be1..a73f26d2f19 100644 --- a/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala +++ b/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala @@ -162,7 +162,7 @@ class BigQueryReporter(override val params: ErrorReporterParams) extends ErrorRe "ci_env_centaur_type" -> ciEnvironment.centaurType, "test_attempt" -> Option(testEnvironment.attempt + 1), "test_message" -> Option(centaurTestException.message), - "test_name" -> Option(testEnvironment.name), + "test_name" -> Option(testEnvironment.testCase.name), "test_stack_trace" -> Option(ExceptionUtils.getStackTrace(centaurTestException)), "test_timestamp" -> Option(OffsetDateTime.now.toUtcMilliString), "test_workflow_id" -> centaurTestException.workflowIdOption, diff --git a/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala b/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala index 12b14f3fdaa..4f2eca11b1c 100644 --- a/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala +++ b/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala @@ -32,7 +32,7 @@ class Slf4jReporter(override val params: ErrorReporterParams) } val message = - s"Test '${testEnvironment.name}' " + + s"Test '${testEnvironment.testCase.name}' " + s"failed on attempt ${testEnvironment.attempt + 1} " + s"of ${testEnvironment.retries + 1} " + errorMessage diff --git a/centaur/src/it/scala/centaur/reporting/TestEnvironment.scala b/centaur/src/it/scala/centaur/reporting/TestEnvironment.scala index abeb725ceb1..650c7df883c 100644 --- a/centaur/src/it/scala/centaur/reporting/TestEnvironment.scala +++ b/centaur/src/it/scala/centaur/reporting/TestEnvironment.scala @@ -1,10 +1,12 @@ package centaur.reporting +import centaur.test.standard.CentaurTestCase + /** * Information about a test. * - * @param name The test name. 
+ * @param testCase The Centaur test case. * @param retries The total number of retries. * @param attempt The zero based attempt. */ -case class TestEnvironment(name: String, retries: Int, attempt: Int) +case class TestEnvironment(testCase: CentaurTestCase, retries: Int, attempt: Int) diff --git a/centaur/src/main/resources/standardTestCases/backendWithNoDocker.test b/centaur/src/main/resources/standardTestCases/backendWithNoDocker.test index 493cd9988d8..7c2ef56192f 100644 --- a/centaur/src/main/resources/standardTestCases/backendWithNoDocker.test +++ b/centaur/src/main/resources/standardTestCases/backendWithNoDocker.test @@ -2,9 +2,6 @@ name: backendWithNoDocker backends: [LocalNoDocker] testFormat: runtwiceexpectingcallcaching -# CROM-6807 Don't retry failures, subsequent runs will fail because of unexpected cache hits from the initial run -retryTestFailures: false - files { workflow: backendWithNoDocker/backendWithNoDocker.wdl } diff --git a/centaur/src/main/resources/standardTestCases/cacheBetweenWf.test b/centaur/src/main/resources/standardTestCases/cacheBetweenWf.test index 53c1847c686..c366b0c22bd 100644 --- a/centaur/src/main/resources/standardTestCases/cacheBetweenWf.test +++ b/centaur/src/main/resources/standardTestCases/cacheBetweenWf.test @@ -1,9 +1,6 @@ name: cacheBetweenWF testFormat: runtwiceexpectingcallcaching -# CROM-6807 Don't retry failures, subsequent runs will fail because of unexpected cache hits from the initial run -retryTestFailures: false - files { workflow: cacheBetweenWF/cacheBetweenWF.wdl options: common_options/cache_read_off_write_on.options diff --git a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_empty_hint_papi.test b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_empty_hint_papi.test index f148f234dba..d9c7ee419ed 100644 --- a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_empty_hint_papi.test +++ b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_empty_hint_papi.test @@ -3,9 +3,6 @@ name: call_cache_hit_prefixes_empty_hint_papi testFormat: runtwiceexpectingcallcaching backends: [Papi] -# CROM-6807 Don't retry failures, subsequent runs will fail because of unexpected cache hits from the initial run -retryTestFailures: false - files { workflow: call_cache_hit_prefixes/call_cache_hit_prefixes.wdl inputs: call_cache_hit_prefixes/call_cache_hit_prefixes_empty_hint.inputs diff --git a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_no_hint.test b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_no_hint.test index e4aca42b5cf..cd35a78dbee 100644 --- a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_no_hint.test +++ b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_no_hint.test @@ -2,9 +2,6 @@ name: call_cache_hit_prefixes_no_hint testFormat: runtwiceexpectingcallcaching -# CROM-6807 Don't retry failures, subsequent runs will fail because of unexpected cache hits from the initial run -retryTestFailures: false - files { workflow: call_cache_hit_prefixes/call_cache_hit_prefixes.wdl inputs: call_cache_hit_prefixes/call_cache_hit_prefixes_no_hint.inputs diff --git a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_two_roots_empty_hint_cache_hit_papi.test b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_two_roots_empty_hint_cache_hit_papi.test index 8558f7fec8a..a0ef536165e 100644 --- 
a/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_two_roots_empty_hint_cache_hit_papi.test +++ b/centaur/src/main/resources/standardTestCases/call_cache_hit_prefixes_two_roots_empty_hint_cache_hit_papi.test @@ -5,9 +5,6 @@ name: call_cache_hit_prefixes_two_roots_empty_hint_cache_hit_papi testFormat: runthriceexpectingcallcaching backends: [Papi] -# CROM-6807 Don't retry failures, subsequent runs will fail because of unexpected cache hits from the initial run -retryTestFailures: false - files { workflow: call_cache_hit_prefixes/call_cache_hit_prefixes.wdl inputs: call_cache_hit_prefixes/call_cache_hit_prefixes_two_roots_empty_hint_hit_papi.inputs diff --git a/centaur/src/main/resources/standardTestCases/cwl_cache_between_workflows.test b/centaur/src/main/resources/standardTestCases/cwl_cache_between_workflows.test index 1e3b6065617..e5381017829 100644 --- a/centaur/src/main/resources/standardTestCases/cwl_cache_between_workflows.test +++ b/centaur/src/main/resources/standardTestCases/cwl_cache_between_workflows.test @@ -5,9 +5,6 @@ workflowType: CWL workflowTypeVersion: v1.0 skipDescribeEndpointValidation: true -# CROM-6807 Don't retry failures, subsequent runs will fail because of unexpected cache hits from the initial run -retryTestFailures: false - files { workflow: cwl_cache_between_workflows/cwl_cache_between_workflows.cwl inputs: cwl_cache_between_workflows/cwl_cache_between_workflows.json diff --git a/centaur/src/main/resources/standardTestCases/floating_tags.test b/centaur/src/main/resources/standardTestCases/floating_tags.test index f4be0030b4f..fc2c077d0a2 100644 --- a/centaur/src/main/resources/standardTestCases/floating_tags.test +++ b/centaur/src/main/resources/standardTestCases/floating_tags.test @@ -1,9 +1,6 @@ name: floating_tags testFormat: runtwiceexpectingcallcaching -# CROM-6807 Don't retry failures, subsequent runs will fail because of unexpected cache hits from the initial run -retryTestFailures: false - files { workflow: floating_tags/floating_tags.wdl options: floating_tags/floating_tags.options diff --git a/centaur/src/main/resources/standardTestCases/fofn_caching.test b/centaur/src/main/resources/standardTestCases/fofn_caching.test index 115f3dd476b..1864bce29d5 100644 --- a/centaur/src/main/resources/standardTestCases/fofn_caching.test +++ b/centaur/src/main/resources/standardTestCases/fofn_caching.test @@ -2,9 +2,6 @@ name: fofn_caching testFormat: runtwiceexpectingcallcaching backends: [Papi-Caching-No-Copy] -# CROM-6807 Don't retry failures, subsequent runs will fail because of unexpected cache hits from the initial run -retryTestFailures: false - files { workflow: fofn_caching/fofn_caching.wdl } diff --git a/centaur/src/main/resources/standardTestCases/google_artifact_registry.test b/centaur/src/main/resources/standardTestCases/google_artifact_registry.test index 384dede4973..44d1e2a725c 100644 --- a/centaur/src/main/resources/standardTestCases/google_artifact_registry.test +++ b/centaur/src/main/resources/standardTestCases/google_artifact_registry.test @@ -1,9 +1,6 @@ name: google_artifact_registry testFormat: runtwiceexpectingcallcaching -# CROM-6807 Don't retry failures, subsequent runs will fail because of unexpected cache hits from the initial run -retryTestFailures: false - files { workflow: google_artifact_registry/google_artifact_registry.wdl } diff --git a/centaur/src/main/resources/standardTestCases/hello_private_repo.test b/centaur/src/main/resources/standardTestCases/hello_private_repo.test index 5ec7aa6a46e..00be69dd7f9 100644 
--- a/centaur/src/main/resources/standardTestCases/hello_private_repo.test +++ b/centaur/src/main/resources/standardTestCases/hello_private_repo.test @@ -2,9 +2,6 @@ name: hello_private_repo testFormat: runtwiceexpectingcallcaching backends: [LocalDockerSecure] -# CROM-6807 Don't retry failures, subsequent runs will fail because of unexpected cache hits from the initial run -retryTestFailures: false - files { workflow: hello_private_repo/hello_private_repo.wdl inputs: hello_private_repo/hello_private_repo.inputs.json diff --git a/centaur/src/main/resources/standardTestCases/use_cacheCopy_dir.test b/centaur/src/main/resources/standardTestCases/use_cacheCopy_dir.test index 70bfef2594f..1b33b90fe22 100644 --- a/centaur/src/main/resources/standardTestCases/use_cacheCopy_dir.test +++ b/centaur/src/main/resources/standardTestCases/use_cacheCopy_dir.test @@ -2,9 +2,6 @@ name: use_cache_copy_dir testFormat: runtwiceexpectingcallcaching backends: [Papiv2] -# CROM-6807 Don't retry failures, subsequent runs will fail because of unexpected cache hits from the initial run -retryTestFailures: false - files { workflow: use_cacheCopy_dir/use_cacheCopy_dir.wdl } diff --git a/centaur/src/main/scala/centaur/api/CentaurCromwellClient.scala b/centaur/src/main/scala/centaur/api/CentaurCromwellClient.scala index ec35eb8f238..33f96974b42 100644 --- a/centaur/src/main/scala/centaur/api/CentaurCromwellClient.scala +++ b/centaur/src/main/scala/centaur/api/CentaurCromwellClient.scala @@ -38,7 +38,7 @@ object CentaurCromwellClient extends StrictLogging { final implicit val materializer: ActorMaterializer = ActorMaterializer(ActorMaterializerSettings(system)) final val apiVersion = "v1" val cromwellClient = new CromwellClient(CentaurConfig.cromwellUrl, apiVersion) - + val defaultMetadataArgs: Option[Map[String, List[String]]] = config.getAs[Map[String, List[String]]]("centaur.metadata-args") @@ -56,6 +56,7 @@ object CentaurCromwellClient extends StrictLogging { submittedWorkflow => for { _ <- IO(logger.info(s"Submitting ${workflow.testName} returned workflow id ${submittedWorkflow.id}")) + _ = workflow.submittedWorkflowTracker.add(submittedWorkflow) } yield submittedWorkflow ) }) @@ -124,7 +125,7 @@ object CentaurCromwellClient extends StrictLogging { def archiveStatus(id: WorkflowId): IO[String] = { sendReceiveFutureCompletion(() => cromwellClient.query(id)).map(_.results.head.metadataArchiveStatus) } - + implicit private val timer: Timer[IO] = IO.timer(blockingEc) implicit private val contextShift: ContextShift[IO] = IO.contextShift(blockingEc) diff --git a/centaur/src/main/scala/centaur/test/standard/CentaurTestCase.scala b/centaur/src/main/scala/centaur/test/standard/CentaurTestCase.scala index d24149ee98c..258c211c900 100644 --- a/centaur/src/main/scala/centaur/test/standard/CentaurTestCase.scala +++ b/centaur/src/main/scala/centaur/test/standard/CentaurTestCase.scala @@ -8,7 +8,7 @@ import centaur.test._ import centaur.test.formulas.TestFormulas import centaur.test.standard.CentaurTestFormat._ import centaur.test.submit.{SubmitHttpResponse, SubmitResponse} -import centaur.test.workflow.{AllBackendsRequired, AnyBackendRequired, OnlyBackendsAllowed, Workflow} +import centaur.test.workflow._ import com.typesafe.config.{Config, ConfigFactory} import common.validation.ErrorOr._ import cromwell.api.model.{Failed, Succeeded} @@ -18,6 +18,7 @@ import scala.util.{Failure, Success, Try} case class CentaurTestCase(workflow: Workflow, testFormat: CentaurTestFormat, testOptions: TestOptions, + submittedWorkflowTracker: 
SubmittedWorkflowTracker, submitResponseOption: Option[SubmitHttpResponse])( implicit cromwellTracker: Option[CromwellTracker]) { @@ -52,6 +53,8 @@ case class CentaurTestCase(workflow: Workflow, } def containsTag(tag: String): Boolean = testOptions.tags.contains(tag) + + def name: String = s"${testFormat.testSpecString} ${workflow.testName}" } object CentaurTestCase { @@ -64,12 +67,13 @@ object CentaurTestCase { } def fromConfig(conf: Config, configFile: File, cromwellTracker: Option[CromwellTracker]): ErrorOr[CentaurTestCase] = { - val workflow = Workflow.fromConfig(conf, configFile) + val submittedWorkflowTracker = new SubmittedWorkflowTracker() + val workflow = Workflow.fromConfig(conf, configFile, submittedWorkflowTracker) val format: ErrorOr[CentaurTestFormat] = CentaurTestFormat.fromConfig(conf).toValidated val options = TestOptions.fromConfig(conf) val submit = SubmitHttpResponse.fromConfig(conf) (workflow, format, options, submit) mapN { - CentaurTestCase(_, _, _, _)(cromwellTracker) + CentaurTestCase(_, _, _, submittedWorkflowTracker, _)(cromwellTracker) } } diff --git a/centaur/src/main/scala/centaur/test/workflow/SubmittedWorkflowTracker.scala b/centaur/src/main/scala/centaur/test/workflow/SubmittedWorkflowTracker.scala new file mode 100644 index 00000000000..dc3f0355f80 --- /dev/null +++ b/centaur/src/main/scala/centaur/test/workflow/SubmittedWorkflowTracker.scala @@ -0,0 +1,31 @@ +package centaur.test.workflow + +import cats.effect.IO +import cats.instances.list._ +import cats.syntax.traverse._ +import cromwell.api.model.{SubmittedWorkflow, WorkflowId} + +/** + * Tracks submitted workflow ids to enable cleanup should a test need to be retried. + */ +class SubmittedWorkflowTracker { + private var submittedWorkflowIds: List[WorkflowId] = List.empty + + /** + * Run the specified cleanup function on the submitted workflow IDs tracked by this `CentaurTestCase`, clearing out + * the list of submitted workflow IDs afterward. + */ + def cleanUpBeforeRetry(cleanUpFunction: WorkflowId => IO[Unit]): IO[Unit] = for { + _ <- submittedWorkflowIds.traverse(cleanUpFunction) + _ = submittedWorkflowIds = List.empty + } yield () + + /** + * Add a `SubmittedWorkflow` to the list of `SubmittedWorkflow`s to clean up should the test case represented by this + * object require a retry. Prevents unwanted cache hits from partially successful attempts when retrying a call + * caching test case. 
+ */ + def add(submittedWorkflow: SubmittedWorkflow): Unit = { + submittedWorkflowIds = submittedWorkflow.id :: submittedWorkflowIds + } +} diff --git a/centaur/src/main/scala/centaur/test/workflow/Workflow.scala b/centaur/src/main/scala/centaur/test/workflow/Workflow.scala index 953f6fdef9a..743924bb9c6 100644 --- a/centaur/src/main/scala/centaur/test/workflow/Workflow.scala +++ b/centaur/src/main/scala/centaur/test/workflow/Workflow.scala @@ -1,21 +1,19 @@ package centaur.test.workflow -import java.nio.file.Path - import better.files._ import cats.data.Validated._ import cats.syntax.apply._ import cats.syntax.validated._ import centaur.test.metadata.WorkflowFlatMetadata -import com.typesafe.config.{Config, ConfigFactory} +import com.typesafe.config.Config import common.validation.ErrorOr.ErrorOr import common.validation.Validation._ import configs.Result import configs.syntax._ import cromwell.api.model.{WorkflowDescribeRequest, WorkflowSingleSubmission} +import java.nio.file.Path import scala.concurrent.duration.FiniteDuration -import scala.util.{Failure, Success, Try} final case class Workflow private(testName: String, data: WorkflowData, @@ -26,7 +24,9 @@ final case class Workflow private(testName: String, retryTestFailures: Boolean, allowOtherOutputs: Boolean, skipDescribeEndpointValidation: Boolean, + submittedWorkflowTracker: SubmittedWorkflowTracker, maximumAllowedTime: Option[FiniteDuration]) { + def toWorkflowSubmission: WorkflowSingleSubmission = WorkflowSingleSubmission( workflowSource = data.workflowContent, workflowUrl = data.workflowUrl, @@ -57,14 +57,7 @@ final case class Workflow private(testName: String, object Workflow { - def fromPath(path: Path): ErrorOr[Workflow] = { - Try(ConfigFactory.parseFile(path.toFile).resolve()) match { - case Success(c) => Workflow.fromConfig(c, path.getParent) - case Failure(_) => invalidNel(s"Invalid test config: $path") - } - } - - def fromConfig(conf: Config, configFile: File): ErrorOr[Workflow] = { + def fromConfig(conf: Config, configFile: File, submittedWorkflowTracker: SubmittedWorkflowTracker): ErrorOr[Workflow] = { conf.get[String]("name") match { case Result.Success(n) => // If backend is provided, Centaur will only run this test if that backend is available on Cromwell @@ -96,7 +89,7 @@ object Workflow { val maximumTime: Option[FiniteDuration] = conf.get[Option[FiniteDuration]]("maximumTime").value (files, directoryContentCheckValidation, metadata, retryTestFailuresErrorOr) mapN { - (f, d, m, retryTestFailures) => Workflow(n, f, m, absentMetadata, d, backendsRequirement, retryTestFailures, allowOtherOutputs, validateDescription, maximumTime) + (f, d, m, retryTestFailures) => Workflow(n, f, m, absentMetadata, d, backendsRequirement, retryTestFailures, allowOtherOutputs, validateDescription, submittedWorkflowTracker, maximumTime) } case Result.Failure(_) => invalidNel(s"No test 'name' for: $configFile") diff --git a/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala b/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala index 7789194a258..7cb8b5cef7e 100644 --- a/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala +++ b/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala @@ -16,7 +16,7 @@ class CentaurOperationsSpec extends AnyFlatSpec with Matchers { behavior of "validateMetadataJson" val placeholderSubmittedWorkflow: SubmittedWorkflow = SubmittedWorkflow(id = WorkflowId(UUID.randomUUID()), null, null) - val placeholderWorkflow: Workflow = Workflow(testName = "", null, null, null, 
null, null, false, false, false, null) + val placeholderWorkflow: Workflow = Workflow(testName = "", null, null, null, null, null, false, false, false, null, null) val allowableOneWordAdditions = List("farmer") diff --git a/centaurCwlRunner/src/main/scala/centaur/cwl/CentaurCwlRunner.scala b/centaurCwlRunner/src/main/scala/centaur/cwl/CentaurCwlRunner.scala index 2b02bb77308..e561934ab53 100644 --- a/centaurCwlRunner/src/main/scala/centaur/cwl/CentaurCwlRunner.scala +++ b/centaurCwlRunner/src/main/scala/centaur/cwl/CentaurCwlRunner.scala @@ -7,7 +7,7 @@ import centaur.cwl.Outputs._ import centaur.test.TestOptions import centaur.test.standard.{CentaurTestCase, CentaurTestFormat} import centaur.test.submit.{SubmitHttpResponse, SubmitWorkflowResponse} -import centaur.test.workflow.{AllBackendsRequired, Workflow, WorkflowData} +import centaur.test.workflow.{AllBackendsRequired, SubmittedWorkflowTracker, Workflow, WorkflowData} import com.typesafe.scalalogging.StrictLogging import common.util.VersionUtil import cromwell.api.model.{Aborted, Failed, NonTerminalStatus, Succeeded} @@ -155,6 +155,8 @@ object CentaurCwlRunner extends StrictLogging { val testOptions = TestOptions(List.empty, ignore = false) val submitResponseOption = None + val submittedWorkflowTracker = new SubmittedWorkflowTracker() + val workflowData = WorkflowData( Option(workflowContents), None, @@ -176,10 +178,11 @@ object CentaurCwlRunner extends StrictLogging { retryTestFailures = false, allowOtherOutputs = true, skipDescribeEndpointValidation = true, + submittedWorkflowTracker = submittedWorkflowTracker, maximumAllowedTime = None ) - val testCase = CentaurTestCase(workflow, testFormat, testOptions, submitResponseOption)(cromwellTracker = None) + val testCase = CentaurTestCase(workflow, testFormat, testOptions, submittedWorkflowTracker, submitResponseOption)(cromwellTracker = None) if (!args.quiet) { logger.info(s"Starting test for $workflowPath") From 008231d8fa03d7ff543da1ffcf32645cbdd40145 Mon Sep 17 00:00:00 2001 From: Miguel Covarrubias Date: Thu, 24 Mar 2022 06:39:45 -0400 Subject: [PATCH 04/58] Upgrade Aliyun dependencies [BT-612] (#6722) --- project/Dependencies.scala | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 9cdd3f73cb1..1c56bef6d17 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -5,9 +5,9 @@ object Dependencies { private val akkaHttpV = "10.1.15" // (CROM-6619) private val akkaV = "2.5.32" // scala-steward:off (CROM-6637) private val aliyunBcsV = "6.2.4" - private val aliyunCoreV = "4.5.25" - private val aliyunCrV = "4.1.2" - private val aliyunOssV = "3.13.1" + private val aliyunCoreV = "4.6.0" + private val aliyunCrV = "4.1.4" + private val aliyunOssV = "3.14.0" private val ammoniteOpsV = "2.4.1" private val apacheHttpClientV = "4.5.13" private val awsSdkV = "2.17.50" @@ -330,16 +330,19 @@ object Dependencies { private val aliyunOssDependencies = List( "com.aliyun.oss" % "aliyun-sdk-oss" % aliyunOssV + exclude("com.sun.activation", "jakarta.activation") ) private val aliyunBatchComputeDependencies = List( "com.aliyun" % "aliyun-java-sdk-batchcompute" % aliyunBcsV, "com.aliyun" % "aliyun-java-sdk-core" % aliyunCoreV + exclude("com.sun.activation", "jakarta.activation") ) private val aliyunCrDependencies = List( "com.aliyun" % "aliyun-java-sdk-cr" % aliyunCrV, - "com.aliyun" % "aliyun-java-sdk-core" % aliyunCoreV, + "com.aliyun" % "aliyun-java-sdk-core" % aliyunCoreV + 
exclude("com.sun.activation", "jakarta.activation"), "com.typesafe.akka" %% "akka-http-spray-json" % akkaHttpV ) From 07d756e2e1bbc19fd129d7eaff786b33d0a8f8e8 Mon Sep 17 00:00:00 2001 From: Miguel Covarrubias Date: Thu, 24 Mar 2022 06:42:07 -0400 Subject: [PATCH 05/58] Upgrade typelevel stuff [BT-609] (#6720) --- project/Dependencies.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 1c56bef6d17..428b94f97cc 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -21,7 +21,7 @@ object Dependencies { cats-effect, fs2, http4s, and sttp (also to v3) should all be upgraded at the same time to use cats-effect 3.x. */ private val catsEffectV = "2.5.3" // scala-steward:off (CROM-6564) - private val catsV = "2.6.1" + private val catsV = "2.7.0" private val circeConfigV = "0.8.0" private val circeGenericExtrasV = "0.14.1" private val circeOpticsV = "0.14.1" @@ -69,7 +69,7 @@ object Dependencies { private val jacksonV = "2.13.0" private val janinoV = "3.1.6" private val jsr305V = "3.0.2" - private val kindProjectorV = "0.10.0" + private val kindProjectorV = "0.13.2" private val kittensV = "2.3.2" private val liquibaseV = "4.8.0" private val logbackV = "1.2.10" @@ -83,7 +83,7 @@ object Dependencies { private val metrics3StatsdV = "4.2.0" private val mockFtpServerV = "3.0.0" private val mockserverNettyV = "5.11.2" - private val mouseV = "1.0.5" + private val mouseV = "1.0.10" private val mysqlV = "8.0.26" private val nettyV = "4.1.72.Final" private val owlApiV = "5.1.19" @@ -534,7 +534,7 @@ object Dependencies { "com.dimafeng" %% "testcontainers-scala-postgresql" % testContainersScalaV ) ++ slf4jBindingDependencies // During testing, add an slf4j binding for _all_ libraries. - val kindProjectorPlugin = "org.typelevel" %% "kind-projector" % kindProjectorV + val kindProjectorPlugin = "org.typelevel" % "kind-projector" % kindProjectorV cross CrossVersion.full val paradisePlugin = "org.scalamacros" % "paradise" % paradiseV cross CrossVersion.full // Version of the swagger UI to write into config files From 6a8f3360cd6be4c5ec34d4eaa3964ee275e26db9 Mon Sep 17 00:00:00 2001 From: Miguel Covarrubias Date: Thu, 24 Mar 2022 06:49:41 -0400 Subject: [PATCH 06/58] Update Jackson 2.13.0 => 2.13.2 [BT-611] (#6718) --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 428b94f97cc..f28f74be81d 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -66,7 +66,7 @@ object Dependencies { private val heterodonV = "1.0.0-beta3" private val hsqldbV = "2.6.1" private val http4sV = "0.21.31" // this release is EOL. We need to upgrade further for cats3. 
https://http4s.org/versions/ - private val jacksonV = "2.13.0" + private val jacksonV = "2.13.2" private val janinoV = "3.1.6" private val jsr305V = "3.0.2" private val kindProjectorV = "0.13.2" From 4c4890ad922e13f8026cc34adca4f2c31d9fe128 Mon Sep 17 00:00:00 2001 From: Miguel Covarrubias Date: Thu, 24 Mar 2022 08:43:11 -0400 Subject: [PATCH 07/58] Upgrade mysql-connector-java 8.0.26 => 8.0.28 [BT-610] (#6719) --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index f28f74be81d..e4cd096b548 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -84,7 +84,7 @@ object Dependencies { private val mockFtpServerV = "3.0.0" private val mockserverNettyV = "5.11.2" private val mouseV = "1.0.10" - private val mysqlV = "8.0.26" + private val mysqlV = "8.0.28" private val nettyV = "4.1.72.Final" private val owlApiV = "5.1.19" private val paradiseV = "2.1.1" From 5c98d3835c50ec3082f4b4477087388fd016cec3 Mon Sep 17 00:00:00 2001 From: Miguel Covarrubias Date: Sat, 26 Mar 2022 15:11:09 -0400 Subject: [PATCH 08/58] restructure --- src/ci/bin/test.inc.sh | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/ci/bin/test.inc.sh b/src/ci/bin/test.inc.sh index b1caa9d5654..32fe6ef65b3 100644 --- a/src/ci/bin/test.inc.sh +++ b/src/ci/bin/test.inc.sh @@ -51,10 +51,13 @@ cromwell::private::set_variable_if_only_some_files_changed() { if [[ "${TRAVIS_EVENT_TYPE:-unset}" != "pull_request" ]]; then export "${variable_to_set}=false" - elif git diff --name-only "origin/${TRAVIS_BRANCH}" 2>&1 | grep -E -q --invert-match "${files_changed_regex}"; then - export "${variable_to_set}=false" else + git diff --name-only "origin/${TRAVIS_BRANCH}" 2>&1 | grep -E -q --invert-match "${files_changed_regex}" + if [[ $? ]]; then + export "${variable_to_set}=false" + else export "${variable_to_set}=true" + fi fi } From 869116b85ec7932ba9f6ca29b39651a00c74acde Mon Sep 17 00:00:00 2001 From: Miguel Covarrubias Date: Sat, 26 Mar 2022 15:12:20 -0400 Subject: [PATCH 09/58] Revert "restructure" This reverts commit 5c98d3835c50ec3082f4b4477087388fd016cec3. --- src/ci/bin/test.inc.sh | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/ci/bin/test.inc.sh b/src/ci/bin/test.inc.sh index 32fe6ef65b3..b1caa9d5654 100644 --- a/src/ci/bin/test.inc.sh +++ b/src/ci/bin/test.inc.sh @@ -51,13 +51,10 @@ cromwell::private::set_variable_if_only_some_files_changed() { if [[ "${TRAVIS_EVENT_TYPE:-unset}" != "pull_request" ]]; then export "${variable_to_set}=false" - else - git diff --name-only "origin/${TRAVIS_BRANCH}" 2>&1 | grep -E -q --invert-match "${files_changed_regex}" - if [[ $? ]]; then + elif git diff --name-only "origin/${TRAVIS_BRANCH}" 2>&1 | grep -E -q --invert-match "${files_changed_regex}"; then export "${variable_to_set}=false" - else + else export "${variable_to_set}=true" - fi fi } From 3f8039a8844bd22cfbf098a95d644c2e7d7ab12d Mon Sep 17 00:00:00 2001 From: Chris Llanwarne Date: Mon, 28 Mar 2022 14:36:22 -0400 Subject: [PATCH 10/58] Shift update poll date by 2 months [BW-1156] (#6723) --- .scala-steward.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.scala-steward.conf b/.scala-steward.conf index 02e2704e901..f28c2b39dd4 100644 --- a/.scala-steward.conf +++ b/.scala-steward.conf @@ -25,7 +25,7 @@ # # Default: @asap # -pullRequests.frequency = "0 0 1 1,4,7,10 ?" 
# Run at 00:00 on the 1st day of Jan,Apr,Jul,Oct (whatever day that is) +pullRequests.frequency = "0 0 1 3,6,9,12 ?" # Run at 00:00 on the 1st day of Mar,Jun,Sep,Dec (whatever day of the week that is) # Only these dependencies which match the given patterns are updated. # From a7a6d617564edde1d4b2115c9d345e0d8ecd07c3 Mon Sep 17 00:00:00 2001 From: Miguel Covarrubias Date: Tue, 29 Mar 2022 11:00:36 -0400 Subject: [PATCH 11/58] Update AWS deps 2.17.50 => 2.17.152 [BT-608] (#6721) --- .../cromwell/cloudsupport/aws/s3/S3Storage.scala | 13 +++++++++---- project/Dependencies.scala | 2 +- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/s3/S3Storage.scala b/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/s3/S3Storage.scala index def1455fcc4..8ab07f37064 100644 --- a/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/s3/S3Storage.scala +++ b/cloudSupport/src/main/scala/cromwell/cloudsupport/aws/s3/S3Storage.scala @@ -32,6 +32,7 @@ package cromwell.cloudsupport.aws.s3 import com.typesafe.config.ConfigFactory import net.ceedubs.ficus.Ficus._ +import scala.annotation.nowarn import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider import software.amazon.awssdk.regions.Region import software.amazon.awssdk.services.s3.{S3Client, S3Configuration} @@ -42,11 +43,13 @@ object S3Storage { val dualstackEnabled = ConfigFactory.load().as[Option[Boolean]]("s3.dual-stack").getOrElse(false) val pathStyleAccessEnabled = ConfigFactory.load().as[Option[Boolean]]("s3.path-style-access").getOrElse(false) - S3Configuration.builder + @nowarn("msg=method dualstackEnabled in trait Builder is deprecated") + val builder = S3Configuration.builder .accelerateModeEnabled(accelerateModeEnabled) .dualstackEnabled(dualstackEnabled) .pathStyleAccessEnabled(pathStyleAccessEnabled) - .build + + builder.build } def s3Client(configuration: S3Configuration, provider: AwsCredentialsProvider, region: Option[Region]): S3Client = { @@ -65,10 +68,12 @@ object S3Storage { dualstackEnabled: Boolean = false, pathStyleAccessEnabled: Boolean = false): S3Configuration = { - S3Configuration.builder + @nowarn("msg=method dualstackEnabled in trait Builder is deprecated") + val builder = S3Configuration.builder .accelerateModeEnabled(accelerateModeEnabled) .dualstackEnabled(dualstackEnabled) .pathStyleAccessEnabled(pathStyleAccessEnabled) - .build + + builder.build() } } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index e4cd096b548..b98b4d4ccd9 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -10,7 +10,7 @@ object Dependencies { private val aliyunOssV = "3.14.0" private val ammoniteOpsV = "2.4.1" private val apacheHttpClientV = "4.5.13" - private val awsSdkV = "2.17.50" + private val awsSdkV = "2.17.152" // We would like to use the BOM to manage Azure SDK versions, but SBT doesn't support it. 
// https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/boms/azure-sdk-bom // https://github.com/sbt/sbt/issues/4531 From 0bb86b95b8f5c2c193c2fe726def0e0be11b8a0f Mon Sep 17 00:00:00 2001 From: Brian Reilly Date: Wed, 13 Apr 2022 15:01:49 -0400 Subject: [PATCH 12/58] Temporarily disable ssh access test (#6728) --- centaur/src/main/resources/standardTestCases/ssh_access.test | 2 ++ 1 file changed, 2 insertions(+) diff --git a/centaur/src/main/resources/standardTestCases/ssh_access.test b/centaur/src/main/resources/standardTestCases/ssh_access.test index f0ff6df0943..1ea20bc3abd 100644 --- a/centaur/src/main/resources/standardTestCases/ssh_access.test +++ b/centaur/src/main/resources/standardTestCases/ssh_access.test @@ -1,6 +1,8 @@ name: ssh_access testFormat: workflowsuccess backends: [Papiv2] +# CROM-6872: ignoring for now until we figure out the problem +ignore: true files { workflow: ssh_access/ssh_access.wdl From a69d12ec71bd453bf50be563f0666e7fb65c6874 Mon Sep 17 00:00:00 2001 From: Miguel Covarrubias Date: Wed, 13 Apr 2022 16:55:47 -0400 Subject: [PATCH 13/58] Scala 2.13 [CROM-6036] (#6724) * wip * wip * wip * wip * checkpoint * wip * wip * wip * wip * wip * compiles??? * cleanup * grrr * fix test * fix test * fix bug in choosing default language factory * fix tests * sheesh * oops * some test fixes, some commenting out * fix ordering * forcing with some [force ci] * temporarily comment out more problematic varargs stuff * fix test * why do I have to do [force ci] to get most sub-builds to run? * wip, varargs broken * yaass * checkpoint * checkpoint [force ci] * more [force ci] * restore comment * grrr * grrr [force ci] * cleanup * debug attempt * force debug * grrr * restructure * fixy fix? * cleanup * oops * Revert "debug attempt" This reverts commit 59b4e01a6a694186a0903a652d92cfd9fea5a9e5. * Revert "force debug" This reverts commit 6b1fba2054705749789ed1148df9dbed29211e9b. 
* sweepy sweep * try * more * wip * docs * Scala 2.13 codegen fixes * oops * PR feedback * PR feedback * PR feedback * CI bump * Poke CI * Unpoke CI Co-authored-by: Janet Gainer-Dewar --- CHANGELOG.md | 6 ++ .../scala/cromiam/server/CromIamServer.scala | 2 +- .../server/config/CromIamServerConfig.scala | 2 +- .../status/StatusCheckedSubsystem.scala | 2 +- .../cromiam/webservice/QuerySupport.scala | 3 +- .../webservice/SubmissionSupport.scala | 2 +- .../webservice/SwaggerServiceSpec.scala | 2 +- .../backend/BackendLifecycleActor.scala | 2 +- .../BackendWorkflowFinalizationActor.scala | 2 +- .../cromwell/backend/OutputEvaluator.scala | 4 +- .../dummy/DummyInitializationActor.scala | 2 +- .../StandardAsyncExecutionActor.scala | 11 ++-- .../RootWorkflowFileHashCacheActor.scala | 6 +- .../validation/RuntimeAttributesDefault.scala | 4 +- .../RuntimeAttributesValidation.scala | 10 ++-- .../ValidatedRuntimeAttributesBuilder.scala | 4 +- .../ValidationAggregatedException.scala | 2 +- ...ckendWorkflowInitializationActorSpec.scala | 14 ++--- ...alidatedRuntimeAttributesBuilderSpec.scala | 4 +- .../RuntimeAttributesValidationSpec.scala | 2 +- .../centaur/reporting/BigQueryReporter.scala | 2 +- .../centaur/reporting/ErrorReporters.scala | 4 +- .../scala/centaur/CromwellConfiguration.scala | 2 +- .../scala/centaur/test/ObjectCounter.scala | 2 +- .../centaur/test/CentaurOperationsSpec.scala | 1 + centaur/test_cromwell.sh | 4 +- .../src/bin/centaur-cwl-runner.bash | 2 +- .../scala/centaur/cwl/CentaurCwlRunner.scala | 10 ++-- .../test/scala/CloudPreprocessorSpec.scala | 8 +-- .../impl/ftp/FtpCloudNioFileProvider.scala | 20 +++---- .../nio/impl/ftp/LeasedInputStream.scala | 2 +- .../nio/impl/ftp/LeasedOutputStream.scala | 2 +- .../nio/impl/ftp/MockFtpFileSystem.scala | 10 ++-- .../nio/spi/CloudNioDirectoryStream.scala | 10 ++-- .../cloud/nio/spi/CloudNioFileSystem.scala | 4 +- .../nio/spi/CloudNioFileSystemProvider.scala | 6 +- .../scala/cloud/nio/spi/CloudNioPath.scala | 4 +- .../main/scala/cloud/nio/spi/UnixPath.scala | 12 ++-- .../scala/cloud/nio/spi/HashTypeSpec.scala | 4 +- .../scala/cloud/nio/util/CloudNioFiles.scala | 2 +- .../scala/cloud/nio/util/VersionUtil.scala | 2 +- .../gcp/auth/GoogleAuthMode.scala | 2 +- .../aws/AwsConfigurationSpec.scala | 4 +- codegen_java/build.sbt | 22 ++++--- .../collections/EnhancedCollections.scala | 15 +++-- .../exception/ExceptionAggregation.scala | 16 ++--- .../main/scala/common/util/TerminalUtil.scala | 2 +- .../src/main/scala/common/util/TryUtil.scala | 12 ++-- .../main/scala/common/util/VersionUtil.scala | 2 +- .../collections/EnhancedCollectionsSpec.scala | 10 ++++ .../exception/ExceptionAggregationSpec.scala | 14 ++--- .../scala/common/util/StringUtilSpec.scala | 6 +- .../test/scala/common/util/TryUtilSpec.scala | 7 +-- .../common/validation/ValidationSpec.scala | 4 +- .../main/scala/cromwell/core/ConfigUtil.scala | 2 +- .../scala/cromwell/core/WorkflowOptions.scala | 4 +- .../cromwell/core/actor/BatchActor.scala | 14 ++--- .../core/callcaching/HashResultMessage.scala | 2 +- .../core/filesystem/CromwellFileSystems.scala | 2 +- .../main/scala/cromwell/core/io/AsyncIo.scala | 6 +- .../scala/cromwell/core/io/IoCommand.scala | 12 ++-- .../cromwell/core/io/IoCommandBuilder.scala | 20 +++---- .../core/io/IoPromiseProxyActor.scala | 6 +- .../scala/cromwell/core/labels/Labels.scala | 5 +- .../core/path/BetterFileMethods.scala | 10 ++-- .../core/path/EvenBetterPathMethods.scala | 2 +- .../cromwell/core/path/NioPathMethods.scala | 2 +- 
.../scala/cromwell/core/path/PathCopier.scala | 2 +- .../core/simpleton/WomValueBuilder.scala | 40 ++++++------- .../WomValueJsonFormatter.scala | 2 +- .../cromwell/core/actor/BatchActorSpec.scala | 12 ++-- .../filesystem/CromwellFileSystemsSpec.scala | 20 +++---- .../cromwell/core/io/IoClientHelperSpec.scala | 18 +++--- .../core/logging/LoggerWrapperSpec.scala | 20 +++---- .../cromwell/core/retry/BackoffSpec.scala | 4 +- .../scala/cromwell/util/AkkaTestUtil.scala | 2 +- .../drs/localizer/CommandLineParser.scala | 1 + .../GoogleAccessTokenStrategy.scala | 2 +- .../scala/cromwell/api/model/ShardIndex.scala | 4 +- .../cromwell/api/CromwellClientSpec.scala | 6 +- cwl/src/main/scala/cwl/CommandLineTool.scala | 21 ++++++- .../main/scala/cwl/CommandOutputBinding.scala | 7 ++- cwl/src/main/scala/cwl/CwlType.scala | 9 ++- cwl/src/main/scala/cwl/CwlWomExpression.scala | 12 ++-- .../scala/cwl/ExpressionInterpolator.scala | 2 +- cwl/src/main/scala/cwl/ExpressionTool.scala | 3 +- .../cwl/MyriadInputTypeToSecondaryFiles.scala | 2 +- cwl/src/main/scala/cwl/Workflow.scala | 6 +- cwl/src/main/scala/cwl/WorkflowStep.scala | 21 ++++--- .../cwl/WorkflowStepInputExpression.scala | 1 + .../WorkflowStepInputMergeExpression.scala | 1 + .../cwl/internal/CwlEcmaScriptDecoder.scala | 2 +- .../cwl/internal/EnhancedRhinoSandbox.scala | 2 +- cwl/src/main/scala/cwl/ontology/Schema.scala | 2 +- cwl/src/main/scala/cwl/package.scala | 9 ++- .../cwl/preprocessor/CwlPreProcessor.scala | 8 +-- cwl/src/test/scala/cwl/CwlDecoderSpec.scala | 12 ++-- .../scala/cwl/CwlEcmaScriptEncoderSpec.scala | 2 +- .../scala/cwl/CwlInputValidationSpec.scala | 18 +++--- .../test/scala/cwl/CwlWorkflowWomSpec.scala | 15 ++--- cwl/src/test/scala/cwl/DirectorySpec.scala | 4 +- cwl/src/test/scala/cwl/FileSpec.scala | 8 +-- .../test/scala/cwl/LocalIoFunctionSet.scala | 4 +- .../test/scala/cwl/ParseBigThreeSpec.scala | 6 +- .../liquibase/DiffResultFilter.scala | 2 +- .../migration/liquibase/LiquibaseUtils.scala | 4 +- .../database/slick/SlickDatabase.scala | 4 +- .../database/sql/MetadataSqlDatabase.scala | 2 +- .../docker/DockerImageIdentifier.scala | 33 +++++----- .../AlibabaCloudCRRegistry.scala | 8 +-- .../DockerRegistryV2AbstractSpec.scala | 8 +-- docs/WOMtool.md | 4 +- docs/developers/Building.md | 4 +- docs/tutorials/HPCSlurmWithLocalScratch.md | 2 +- .../engine/backend/BackendConfiguration.scala | 2 +- .../io/gcs/GcsBatchCommandContext.scala | 8 +-- .../cromwell/engine/io/nio/NioFlow.scala | 2 +- .../engine/workflow/WorkflowActor.scala | 2 +- .../workflow/WorkflowDockerLookupActor.scala | 12 ++-- .../workflow/WorkflowManagerActor.scala | 6 +- .../lifecycle/WorkflowLifecycleActor.scala | 6 +- .../deletion/DeleteWorkflowFilesActor.scala | 12 ++-- .../execution/CallMetadataHelper.scala | 12 ++-- .../execution/WorkflowExecutionActor.scala | 4 +- .../WorkflowExecutionActorData.scala | 4 +- .../callcaching/CallCacheDiffActor.scala | 2 +- .../job/EngineJobExecutionActor.scala | 26 ++++---- .../job/preparation/JobPreparationActor.scala | 8 ++- .../SubWorkflowPreparationActor.scala | 2 +- .../execution/stores/ExecutionStore.scala | 4 +- .../CopyWorkflowOutputsActor.scala | 8 +-- .../WorkflowFinalizationActor.scala | 12 ++-- .../WorkflowInitializationActor.scala | 16 ++--- .../MaterializeWorkflowDescriptorActor.scala | 6 +- .../tokens/JobTokenDispenserActor.scala | 6 +- .../tokens/RoundRobinQueueIterator.scala | 2 +- .../workflow/tokens/TokenEventLogger.scala | 4 +- .../workflow/tokens/UnhoggableTokenPool.scala | 14 ++--- 
.../AbortRequestScanningActor.scala | 2 +- .../workflowstore/WorkflowStoreActor.scala | 2 +- .../WorkflowStoreEngineActor.scala | 10 ++-- .../jobstore/EmptyJobStoreActor.scala | 4 +- .../cromwell/server/CromwellRootActor.scala | 4 +- .../webservice/EngineStatsActor.scala | 6 +- .../cromwell/webservice/WebServiceUtils.scala | 9 +++ .../routes/MetadataRouteSupport.scala | 2 +- .../scala/cromwell/MetadataWatchActor.scala | 12 ++-- .../engine/io/gcs/GcsBatchFlowSpec.scala | 2 +- .../execution/ExecutionStoreBenchmark.scala | 16 ++--- .../CallCachingSlickDatabaseSpec.scala | 2 +- .../preparation/JobPreparationActorSpec.scala | 4 +- .../workflow/mocks/DeclarationMock.scala | 1 - .../tokens/JobTokenDispenserActorSpec.scala | 10 ++-- .../large/MultipleTokenUsingActor.scala | 4 +- .../large/PatientTokenNeedingActor.scala | 2 +- .../workflowstore/SqlWorkflowStoreSpec.scala | 2 +- .../webservice/EngineStatsActorSpec.scala | 2 +- .../webservice/SwaggerServiceSpec.scala | 2 +- .../routes/CromwellApiServiceSpec.scala | 57 +++++++++--------- .../drs/DrsPathBuilderFactorySpec.scala | 2 +- .../filesystems/drs/DrsResolverSpec.scala | 2 +- .../gcs/batch/GcsBatchIoCommand.scala | 2 +- .../gcs/batch/GcsBatchIoCommandSpec.scala | 18 +++--- .../oss/nio/OssStorageFileSystem.scala | 2 +- .../nio/OssStorageFileSystemProvider.scala | 3 +- .../oss/nio/OssStorageObjectAttributes.scala | 2 +- .../filesystems/oss/nio/OssStoragePath.scala | 2 +- .../filesystems/oss/nio/OssStorageRetry.scala | 1 + .../filesystems/oss/nio/UnixPath.scala | 60 +++++++++---------- .../oss/nio/OssFileReadChannelSpec.scala | 4 +- .../OssStorageFileSystemProviderSpec.scala | 2 +- .../nio/OssStorageObjectAttributesSpec.scala | 4 +- .../oss/nio/OssStoragePathSpec.scala | 2 +- .../filesystems/oss/nio/UnixPathSpec.scala | 2 +- .../config/LanguageConfiguration.scala | 7 ++- .../languages/util/ImportResolver.scala | 18 ++++-- .../languages/util/LanguageFactoryUtil.scala | 2 +- .../NamespaceCacheSpec.scala | 2 +- project/Dependencies.scala | 9 +-- project/Publishing.scala | 2 +- project/Settings.scala | 28 ++------- publish/publish_workflow.wdl | 16 ++--- scripts/docker-develop/Dockerfile | 4 +- scripts/docker-develop/README.md | 2 +- scripts/gen_java_client.sh | 19 ++++-- scripts/publish-client.sh | 8 +-- .../scala/cromwell/CromwellEntryPoint.scala | 4 +- .../scala/cromwell/CromwellTestKitSpec.scala | 11 ++-- .../scala/cromwell/ReferenceConfSpec.scala | 2 +- .../engine/WorkflowStoreActorSpec.scala | 2 +- .../execution/ejea/EjeaRunningJobSpec.scala | 4 +- .../ejea/EjeaUpdatingJobStoreSpec.scala | 8 +-- .../cromwell/services/IoActorRequester.scala | 2 +- .../services/ServiceRegistryActor.scala | 14 ++--- .../ProtoHealthMonitorServiceActor.scala | 2 +- ...ynchronousThrottlingGaugeMetricActor.scala | 2 +- ...ackdriverInstrumentationServiceActor.scala | 3 +- .../StatsDInstrumentationServiceActor.scala | 3 +- .../impl/LoadControllerServiceActor.scala | 14 ++--- .../services/metadata/MetadataService.scala | 2 +- .../services/metadata/WorkflowQueryKey.scala | 3 + .../impl/MetadataDatabaseAccess.scala | 2 +- .../metadata/impl/MetadataServiceActor.scala | 4 +- .../impl/ReadMetadataRegulatorActor.scala | 2 +- .../ArchiveMetadataSchedulerActor.scala | 2 +- .../impl/builder/MetadataBuilderActor.scala | 6 +- .../impl/builder/MetadataComponent.scala | 12 ++-- .../impl/deleter/DeleteMetadataActor.scala | 2 +- .../database/LiquibaseChangeSetSpec.scala | 2 +- .../database/LiquibaseComparisonSpec.scala | 4 +- .../cromwell/services/database/LobSpec.scala | 2 +- 
.../database/MetadataSlickDatabaseSpec.scala | 2 +- .../services/database/QueryTimeoutSpec.scala | 2 +- .../RootAndSubworkflowLabelsSpec.scala | 2 +- ...ackdriverInstrumentationServiceActor.scala | 2 +- ...rumentationServiceActorBenchmarkSpec.scala | 4 +- .../keyvalue/InMemoryKvServiceActor.scala | 4 +- .../keyvalue/impl/KeyValueDatabaseSpec.scala | 2 +- .../services/metadata/MetadataQuerySpec.scala | 4 +- ...ryForWorkflowsMatchingParametersSpec.scala | 26 ++++---- .../impl/MetadataDatabaseAccessSpec.scala | 2 +- .../impl/MetadataServiceActorSpec.scala | 2 +- .../impl/WriteMetadataActorBenchmark.scala | 2 +- .../PubSubMetadataServiceActorSpec.scala | 14 ++--- .../services/womtool/DescriberSpec.scala | 2 +- src/ci/bin/test.inc.sh | 26 +++++++- ...wsBatchAsyncBackendJobExecutionActor.scala | 4 +- .../backend/impl/aws/AwsBatchAttributes.scala | 16 +++-- .../aws/AwsBatchInitializationActor.scala | 6 +- .../backend/impl/aws/AwsBatchJob.scala | 11 ++-- .../impl/aws/AwsBatchJobDefinition.scala | 3 +- .../impl/aws/AwsBatchRuntimeAttributes.scala | 4 +- .../aws/OccasionalStatusPollingActor.scala | 4 +- .../cromwell/backend/impl/aws/package.scala | 6 +- .../backend/impl/aws/AwsBatchJobSpec.scala | 4 +- .../aws/AwsBatchRuntimeAttributesSpec.scala | 12 ++-- .../BcsAsyncBackendJobExecutionActor.scala | 2 + .../cromwell/backend/impl/bcs/BcsJob.scala | 2 +- .../impl/bcs/BcsRuntimeAttributes.scala | 14 ++--- .../backend/impl/bcs/BcsJobSpec.scala | 2 +- .../pipelines/common/GpuTypeValidation.scala | 2 +- .../PipelinesApiConfigurationAttributes.scala | 4 +- ...esApiReferenceFilesMappingOperations.scala | 8 +-- .../PipelinesApiRuntimeAttributes.scala | 4 +- .../common/PreviousRetryReasons.scala | 10 ++-- .../api/PipelinesApiRequestManager.scala | 14 ++--- .../google/pipelines/common/io/package.scala | 4 +- ...sApiBackendLifecycleActorFactorySpec.scala | 4 +- .../common/PipelinesApiCallPathsSpec.scala | 6 +- ...elinesApiConfigurationAttributesSpec.scala | 2 +- ...sApiDockerCacheMappingOperationsSpec.scala | 2 +- .../PipelinesApiRuntimeAttributesSpec.scala | 12 ++-- .../PipelinesApiWorkflowPathsSpec.scala | 4 +- .../pipelines/v2alpha1/GenomicsFactory.scala | 4 +- .../PipelinesParameterConversions.scala | 2 +- .../v2alpha1/api/ActionBuilder.scala | 4 +- .../v2alpha1/api/Delocalization.scala | 2 +- .../v2alpha1/api/Deserialization.scala | 8 +-- .../v2alpha1/api/SSHAccessAction.scala | 2 +- .../v2alpha1/api/request/ErrorReporter.scala | 2 +- .../api/request/GetRequestHandler.scala | 4 +- .../v2alpha1/api/request/RequestHandler.scala | 2 +- .../v2alpha1/PipelinesConversionsSpec.scala | 2 +- .../v2alpha1/api/ActionBuilderSpec.scala | 2 +- .../v2alpha1/api/DeserializationSpec.scala | 16 ++--- .../v2beta/LifeSciencesFactory.scala | 4 +- .../PipelinesParameterConversions.scala | 2 +- .../pipelines/v2beta/api/ActionBuilder.scala | 4 +- .../pipelines/v2beta/api/Delocalization.scala | 2 +- .../v2beta/api/Deserialization.scala | 2 +- .../v2beta/api/SSHAccessAction.scala | 2 +- .../v2beta/api/request/ErrorReporter.scala | 2 +- .../api/request/GetRequestHandler.scala | 4 +- .../v2beta/api/request/RequestHandler.scala | 2 +- .../v2beta/PipelinesConversionsSpec.scala | 2 +- .../v2beta/api/ActionBuilderSpec.scala | 2 +- .../v2beta/api/DeserializationSpec.scala | 14 ++--- .../config/ConfigAsyncJobExecutionActor.scala | 4 +- .../BackgroundAsyncJobExecutionActor.scala | 2 +- .../backend/sfs/SharedFileSystem.scala | 14 ++--- .../SharedFileSystemExpressionFunctions.scala | 4 +- ...haredFileSystemJobExecutionActorSpec.scala | 2 +- 
.../impl/tes/TesRuntimeAttributes.scala | 4 +- .../impl/tes/TesRuntimeAttributesSpec.scala | 4 +- .../scala/wdl/draft2/model/AstTools.scala | 12 ++-- .../main/scala/wdl/draft2/model/WdlCall.scala | 1 - .../wdl/draft2/model/WdlExpression.scala | 6 +- .../scala/wdl/draft2/model/WdlNamespace.scala | 4 +- .../draft2/model/WdlRuntimeAttributes.scala | 2 +- .../model/WdlSyntaxErrorFormatter.scala | 2 +- .../main/scala/wdl/draft2/model/WdlTask.scala | 4 +- .../draft2/model/command/WdlCommandPart.scala | 1 - .../WdlStandardLibraryFunctions.scala | 8 +-- .../model/formatter/SyntaxFormatter.scala | 2 +- .../model/types/WdlFlavoredWomType.scala | 2 +- .../test/scala/wdl/SyntaxHighlightSpec.scala | 7 ++- .../expression/Draft2SizeFunctionSpec.scala | 20 +++---- .../wdl/expression/ValueEvaluatorSpec.scala | 4 +- .../ast2wdlom/BiscayneGenericAstNode.scala | 2 +- .../biscayne/ast2wdlom/ast2wdlom.scala | 2 +- .../biscayne/parsing/BiscayneParser.scala | 2 +- .../WdlBiscayneSyntaxErrorFormatter.scala | 2 +- .../transforms/biscayne/Ast2WdlomSpec.scala | 2 +- .../wdlom2wom/WdlDraft2WomCallNodeMaker.scala | 2 + .../wdlom2wom/WdlDraft2WomCallableMaker.scala | 5 +- .../WdlDraft2WomConditionalNodeMaker.scala | 1 - .../transforms/wdlwom/WdlScatterWomSpec.scala | 8 +-- .../ast2wdlom/Draft3GenericAstNode.scala | 2 +- .../transforms/ast2wdlom/ast2wdlom.scala | 2 +- .../transforms/parsing/Draft3Parser.scala | 2 +- .../WdlDraft3SyntaxErrorFormatter.scala | 2 +- .../transforms/ast2wdlom/Ast2WdlomSpec.scala | 2 +- .../values/Draft3SizeFunctionSpec.scala | 18 +++--- .../values/EngineFunctionEvaluators.scala | 4 +- .../base/wdlom2wdl/WdlWriterImpl.scala | 2 +- .../graph/CallElementToGraphNode.scala | 1 + .../scala/wes2cromwell/WesRunRoutes.scala | 2 +- .../main/scala/wom/types/WomArrayType.scala | 4 +- .../main/scala/wom/types/WomFloatType.scala | 2 +- .../main/scala/wom/types/WomIntegerType.scala | 4 +- .../main/scala/wom/types/WomLongType.scala | 2 +- wom/src/main/scala/wom/util/YamlUtils.scala | 2 +- wom/src/main/scala/wom/values/WomArray.scala | 2 +- wom/src/main/scala/wom/values/WomFloat.scala | 2 +- .../main/scala/wom/values/WomInteger.scala | 2 +- wom/src/main/scala/wom/values/WomMap.scala | 1 - wom/src/main/scala/wom/values/WomObject.scala | 7 +-- .../scala/wom/values/WomOptionalValue.scala | 7 +-- wom/src/main/scala/wom/values/WomValue.scala | 6 +- .../scala/wom/types/WomArrayTypeSpec.scala | 2 +- .../test/scala/wom/values/WomObjectSpec.scala | 14 ++--- .../src/main/scala/womtool/WomtoolMain.scala | 2 +- .../cmdline/WomtoolCommandLineParser.scala | 10 ++-- .../main/scala/womtool/graph/WomGraph.scala | 6 +- .../main/scala/womtool/graph/package.scala | 2 +- .../scala/womtool/input/WomGraphMaker.scala | 2 +- .../scala/womtool/wom2wdlom/WomToWdlom.scala | 10 ++-- .../scala/womtool/WomtoolValidateSpec.scala | 2 +- 338 files changed, 1082 insertions(+), 992 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 541f8740103..0aa3f5fba61 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Cromwell Change Log +## 79 Release Notes + +### Scala 2.13 + +Cromwell is now built with Scala version 2.13. This change should not be noticeable to users but may be of interest to developers of Cromwell backend implementations. 
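+
+For backend and extension developers, the upgrade mostly surfaces as the standard Scala 2.12-to-2.13 source migrations. The snippet below is an illustrative, standalone sketch of the recurring substitutions applied throughout the codebase in this release; it is not Cromwell code itself:
+
+```scala
+import scala.jdk.CollectionConverters._  // replaces scala.collection.JavaConverters._
+
+object Scala213Migration extends App {
+  // Traversable is gone from the standard library; signatures now use Iterable
+  val coercion: Iterable[String] = Vector("WomStringType")
+
+  // Map#mapValues now returns a lazy view, so materialize it explicitly
+  val doubled: Map[String, Int] = Map("a" -> 1, "b" -> 2).view.mapValues(_ * 2).toMap
+
+  // Stream is deprecated in favor of LazyList
+  val dots: String = LazyList.continually(".").take(8).mkString
+
+  // Java collection conversions work as before, but come from the new package
+  val javaList: java.util.List[Int] = List(1, 2, 3).asJava
+
+  println((coercion, doubled, dots, javaList))
+}
+```
+
+Other recurring changes of the same kind include explicit parentheses on nullary, side-effecting calls (for example `sender()` in actors) and `Either#toOption`/`Either#swap` in place of the deprecated `.right.get`/`.left.get`.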
+ ## 75 Release Notes ### New `AwaitingCloudQuota` backend status diff --git a/CromIAM/src/main/scala/cromiam/server/CromIamServer.scala b/CromIAM/src/main/scala/cromiam/server/CromIamServer.scala index ff9283155a5..9f5af038b12 100644 --- a/CromIAM/src/main/scala/cromiam/server/CromIamServer.scala +++ b/CromIAM/src/main/scala/cromiam/server/CromIamServer.scala @@ -46,7 +46,7 @@ object CromIamServer extends HttpApp with CromIamApiService with SwaggerService override val routes: Route = allRoutes ~ swaggerUiResourceRoute - override val statusService: StatusService = new StatusService(() => Map(Cromwell -> cromwellClient.subsystemStatus, Sam -> samClient.subsystemStatus)) + override val statusService: StatusService = new StatusService(() => Map(Cromwell -> cromwellClient.subsystemStatus(), Sam -> samClient.subsystemStatus())) // Override default shutdownsignal which was just "hit return/enter" override def waitForShutdownSignal(actorSystem: ActorSystem)(implicit executionContext: ExecutionContext): Future[Done] = { diff --git a/CromIAM/src/main/scala/cromiam/server/config/CromIamServerConfig.scala b/CromIAM/src/main/scala/cromiam/server/config/CromIamServerConfig.scala index 0da45549e1d..8d27c7b980d 100644 --- a/CromIAM/src/main/scala/cromiam/server/config/CromIamServerConfig.scala +++ b/CromIAM/src/main/scala/cromiam/server/config/CromIamServerConfig.scala @@ -9,7 +9,7 @@ import common.validation.Validation._ import cromiam.server.config.CromIamServerConfig._ import net.ceedubs.ficus.Ficus._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} final case class CromIamServerConfig(cromIamConfig: CromIamConfig, diff --git a/CromIAM/src/main/scala/cromiam/server/status/StatusCheckedSubsystem.scala b/CromIAM/src/main/scala/cromiam/server/status/StatusCheckedSubsystem.scala index b716c405b9e..7aa6c0af752 100644 --- a/CromIAM/src/main/scala/cromiam/server/status/StatusCheckedSubsystem.scala +++ b/CromIAM/src/main/scala/cromiam/server/status/StatusCheckedSubsystem.scala @@ -19,7 +19,7 @@ trait StatusCheckedSubsystem { * error messages, otherwise OK = false and include the response body */ def subsystemStatus()(implicit ec: ExecutionContext): Future[SubsystemStatus] = { - sttp.get(statusUri).send map { x => + sttp.get(statusUri).send() map { x => x.body match { case Right(_) => SubsystemStatus(true, None) case Left(errors) => SubsystemStatus(false, Option(List(errors))) diff --git a/CromIAM/src/main/scala/cromiam/webservice/QuerySupport.scala b/CromIAM/src/main/scala/cromiam/webservice/QuerySupport.scala index 13fc96830aa..cddabe74a57 100644 --- a/CromIAM/src/main/scala/cromiam/webservice/QuerySupport.scala +++ b/CromIAM/src/main/scala/cromiam/webservice/QuerySupport.scala @@ -1,6 +1,7 @@ package cromiam.webservice import akka.event.LoggingAdapter +import akka.http.scaladsl.model.Uri.Query import akka.http.scaladsl.model._ import akka.http.scaladsl.server.Directives._ import akka.http.scaladsl.server._ @@ -86,7 +87,7 @@ trait QuerySupport extends RequestSupport { // DO NOT REMOVE THE NEXT LINE WITHOUT READING THE SCALADOC ON ensureNoLabelOrs ensureNoLabelOrs(user, labelOrs) - val newQueryBuilder = query.newBuilder + val newQueryBuilder = Query.newBuilder newQueryBuilder ++= query val collectionLabels = userCollectionLabels(user, collections) diff --git a/CromIAM/src/main/scala/cromiam/webservice/SubmissionSupport.scala b/CromIAM/src/main/scala/cromiam/webservice/SubmissionSupport.scala index 1fb3f78b017..c1f5477475b 100644 --- 
a/CromIAM/src/main/scala/cromiam/webservice/SubmissionSupport.scala +++ b/CromIAM/src/main/scala/cromiam/webservice/SubmissionSupport.scala @@ -109,7 +109,7 @@ object SubmissionSupport { } def extractInputAux: Directive1[Map[String, String]] = { - formFieldMap.map(_.filterKeys(_.startsWith(WorkflowInputsAuxPrefix))) + formFieldMap.map(_.view.filterKeys(_.startsWith(WorkflowInputsAuxPrefix)).toMap) } // FIXME: Much like CromwellClient see if there are ways of unifying this a bit w/ the mothership diff --git a/CromIAM/src/test/scala/cromiam/webservice/SwaggerServiceSpec.scala b/CromIAM/src/test/scala/cromiam/webservice/SwaggerServiceSpec.scala index 9bc6fddc179..c0277fd92c0 100644 --- a/CromIAM/src/test/scala/cromiam/webservice/SwaggerServiceSpec.scala +++ b/CromIAM/src/test/scala/cromiam/webservice/SwaggerServiceSpec.scala @@ -14,7 +14,7 @@ import org.yaml.snakeyaml.error.YAMLException import org.yaml.snakeyaml.nodes.MappingNode import org.yaml.snakeyaml.{Yaml => SnakeYaml} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class SwaggerServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with SwaggerService with ScalatestRouteTest with Matchers diff --git a/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala b/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala index 86b40653971..72b0c24a800 100644 --- a/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala +++ b/backend/src/main/scala/cromwell/backend/BackendLifecycleActor.scala @@ -40,7 +40,7 @@ trait BackendLifecycleActor extends Actor { protected def performActionThenRespond(operation: => Future[BackendWorkflowLifecycleActorResponse], onFailure: Throwable => BackendWorkflowLifecycleActorResponse, andThen: => Unit = ()) = { - val respondTo: ActorRef = sender + val respondTo: ActorRef = sender() operation onComplete { case Success(r) => respondTo ! 
r diff --git a/backend/src/main/scala/cromwell/backend/BackendWorkflowFinalizationActor.scala b/backend/src/main/scala/cromwell/backend/BackendWorkflowFinalizationActor.scala index a3cdc23cccf..2f77c2ebc2a 100644 --- a/backend/src/main/scala/cromwell/backend/BackendWorkflowFinalizationActor.scala +++ b/backend/src/main/scala/cromwell/backend/BackendWorkflowFinalizationActor.scala @@ -28,7 +28,7 @@ object BackendWorkflowFinalizationActor { trait BackendWorkflowFinalizationActor extends BackendWorkflowLifecycleActor with ActorLogging { def receive: Receive = LoggingReceive { - case Finalize => performActionThenRespond(afterAll map { _ => FinalizationSuccess }, onFailure = FinalizationFailed) + case Finalize => performActionThenRespond(afterAll() map { _ => FinalizationSuccess }, onFailure = FinalizationFailed) } /** diff --git a/backend/src/main/scala/cromwell/backend/OutputEvaluator.scala b/backend/src/main/scala/cromwell/backend/OutputEvaluator.scala index 801238f73e2..2bf215fb0fa 100644 --- a/backend/src/main/scala/cromwell/backend/OutputEvaluator.scala +++ b/backend/src/main/scala/cromwell/backend/OutputEvaluator.scala @@ -78,7 +78,7 @@ object OutputEvaluator { case Success(Invalid(errors)) => InvalidJobOutputs(errors) case Failure(exception) => JobOutputsEvaluationException(exception) } - + /* * Because Cromwell doesn't trust anyone, if custom evaluation is provided, * still make sure that all the output ports have been filled with values @@ -90,7 +90,7 @@ object OutputEvaluator { case Nil => val errorMessagePrefix = "Error applying postMapper in short-circuit output evaluation" TryUtil.sequenceMap(outputs map { case (k, v) => (k, postMapper(v))}, errorMessagePrefix) match { - case Failure(e) => InvalidJobOutputs(NonEmptyList.of(e.getMessage, e.getStackTrace.take(5).map(_.toString):_*)) + case Failure(e) => InvalidJobOutputs(NonEmptyList.of(e.getMessage, e.getStackTrace.take(5).toIndexedSeq.map(_.toString):_*)) case Success(postMappedOutputs) => ValidJobOutputs(CallOutputs(postMappedOutputs)) } case head :: tail => InvalidJobOutputs(NonEmptyList.of(toError(head), tail.map(toError): _*)) diff --git a/backend/src/main/scala/cromwell/backend/dummy/DummyInitializationActor.scala b/backend/src/main/scala/cromwell/backend/dummy/DummyInitializationActor.scala index b0962298b18..34fd2e8bc51 100644 --- a/backend/src/main/scala/cromwell/backend/dummy/DummyInitializationActor.scala +++ b/backend/src/main/scala/cromwell/backend/dummy/DummyInitializationActor.scala @@ -17,7 +17,7 @@ class DummyInitializationActor(pipelinesParams: StandardInitializationActorParam val backendAttributeValidation: RuntimeAttributesValidation[String] = new RuntimeAttributesValidation[String] { override def key: String = "backend" - override def coercion: Traversable[WomType] = Vector(WomStringType) + override def coercion: Iterable[WomType] = Vector(WomStringType) override protected def validateValue: PartialFunction[WomValue, ErrorOr[String]] = { case WomString("Dummy") => "Dummy".validNel diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala index 96adb8c07b5..0f1e2c931a8 100644 --- a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala @@ -250,7 +250,7 @@ trait StandardAsyncExecutionActor * @param directoryFiles The directories. * @return The shell scripting. 
*/ - def directoryScripts(directoryFiles: Traversable[WomUnlistedDirectory]): String = + def directoryScripts(directoryFiles: Iterable[WomUnlistedDirectory]): String = directoryFiles map directoryScript mkString "\n" /** @@ -284,7 +284,7 @@ trait StandardAsyncExecutionActor * @param globFiles The globs. * @return The shell scripting. */ - def globScripts(globFiles: Traversable[WomGlobFile]): String = + def globScripts(globFiles: Iterable[WomGlobFile]): String = globFiles map globScript mkString "\n" /** @@ -549,6 +549,7 @@ trait StandardAsyncExecutionActor CommandSetupSideEffectFile(womValue, alternativeName) case AsLocalizedAdHocValue(LocalizedAdHocValue(AdHocValue(womValue, alternativeName, _), _)) => CommandSetupSideEffectFile(womValue, alternativeName) + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } lazy val evaluatedAdHocFiles: ErrorOr[List[AdHocValue]] = { @@ -986,7 +987,7 @@ trait StandardAsyncExecutionActor case InvalidJobOutputs(errors) => val exception = new MessageAggregation { override def exceptionContext: String = "Failed to evaluate job outputs" - override def errorMessages: Traversable[String] = errors.toList + override def errorMessages: Iterable[String] = errors.toList } FailedNonRetryableExecutionHandle(exception, kvPairsToSave = None) case JobOutputsEvaluationException(exception: Exception) if retryEvaluateOutputsAggregated(exception) => @@ -1068,7 +1069,7 @@ trait StandardAsyncExecutionActor Map(key -> nextKvPair) } - val kvsFromPreviousAttemptUpd = kvsFromPreviousAttempt.mapValues(kvPair => kvPair.copy(key = kvPair.key.copy(jobKey = nextKvJobKey))) + val kvsFromPreviousAttemptUpd = kvsFromPreviousAttempt.view.mapValues(kvPair => kvPair.copy(key = kvPair.key.copy(jobKey = nextKvJobKey))) val failedRetryCountKvPair: Map[String, KvPair] = if (incrementFailedRetryCount) getNextKvPair(FailedRetryCountKey, (previousFailedRetries + 1).toString) @@ -1081,7 +1082,7 @@ trait StandardAsyncExecutionActor val mergedKvs = kvsFromPreviousAttemptUpd ++ kvsForNextAttempt ++ failedRetryCountKvPair ++ memoryMultiplierKvPair - makeKvRequest(mergedKvs.values.map(KvPut).toSeq) map { respSeq => + makeKvRequest(mergedKvs.toMap.values.map(KvPut).toSeq) map { respSeq => val failures = respSeq.filter(_.isInstanceOf[KvFailure]) if (failures.isEmpty) { respSeq diff --git a/backend/src/main/scala/cromwell/backend/standard/callcaching/RootWorkflowFileHashCacheActor.scala b/backend/src/main/scala/cromwell/backend/standard/callcaching/RootWorkflowFileHashCacheActor.scala index c7b8f0d0093..e33625741b3 100644 --- a/backend/src/main/scala/cromwell/backend/standard/callcaching/RootWorkflowFileHashCacheActor.scala +++ b/backend/src/main/scala/cromwell/backend/standard/callcaching/RootWorkflowFileHashCacheActor.scala @@ -36,7 +36,7 @@ class RootWorkflowFileHashCacheActor private[callcaching](override val ioActor: // Hash Request case hashCommand: IoHashCommandWithContext => val key = hashCommand.fileHashContext.file - lazy val requester = FileHashRequester(sender, hashCommand.fileHashContext, hashCommand.ioHashCommand) + lazy val requester = FileHashRequester(sender(), hashCommand.fileHashContext, hashCommand.ioHashCommand) cache.get(key) match { case FileHashValueNotRequested => // The hash is not in the cache and has not been requested. Make the hash request and register this requester @@ -48,9 +48,9 @@ class RootWorkflowFileHashCacheActor private[callcaching](override val ioActor: // hash to become available. 
cache.put(key, FileHashValueRequested(requesters = requester :: requesters)) case FileHashSuccess(value) => - sender ! Tuple2(hashCommand.fileHashContext, IoSuccess(requester.ioCommand, value)) + sender() ! Tuple2(hashCommand.fileHashContext, IoSuccess(requester.ioCommand, value)) case FileHashFailure(error) => - sender ! Tuple2(hashCommand.fileHashContext, IoFailure(requester.ioCommand, new IOException(error))) + sender() ! Tuple2(hashCommand.fileHashContext, IoFailure(requester.ioCommand, new IOException(error))) } // Hash Success case (hashContext: FileHashContext, success @ IoSuccess(_, value: String)) => diff --git a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala index d3f66416e68..d46bc7a66d5 100644 --- a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala +++ b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala @@ -12,7 +12,7 @@ import scala.util.{Failure, Try} object RuntimeAttributesDefault { - def workflowOptionsDefault(options: WorkflowOptions, mapping: Map[String, Traversable[WomType]]): + def workflowOptionsDefault(options: WorkflowOptions, mapping: Map[String, Iterable[WomType]]): Try[Map[String, WomValue]] = { options.defaultRuntimeOptions flatMap { attrs => TryUtil.sequenceMap(attrs collect { @@ -32,7 +32,7 @@ object RuntimeAttributesDefault { */ def withDefaults(attrs: EvaluatedRuntimeAttributes, defaultsList: List[EvaluatedRuntimeAttributes]): EvaluatedRuntimeAttributes = { defaultsList.foldLeft(attrs)((acc, default) => { - acc ++ default.filterKeys(!acc.keySet.contains(_)) + acc ++ default.view.filterKeys(!acc.keySet.contains(_)) }) } diff --git a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala index 70eb416b427..65f2119a64b 100644 --- a/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala +++ b/backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesValidation.scala @@ -84,7 +84,7 @@ object RuntimeAttributesValidation { new RuntimeAttributesValidation[ValidatedType] { override def key: String = validation.key - override def coercion: Traversable[WomType] = validation.coercion + override def coercion: Iterable[WomType] = validation.coercion override protected def validateValue: PartialFunction[WomValue, ErrorOr[ValidatedType]] = validation.validateValuePackagePrivate @@ -108,7 +108,7 @@ object RuntimeAttributesValidation { new RuntimeAttributesValidation[ValidatedType] { override def key: String = validation.key - override def coercion: Traversable[WomType] = validation.coercion + override def coercion: Iterable[WomType] = validation.coercion override protected def validateValue: PartialFunction[WomValue, ErrorOr[ValidatedType]] = validation.validateValuePackagePrivate @@ -132,7 +132,7 @@ object RuntimeAttributesValidation { new OptionalRuntimeAttributesValidation[ValidatedType] { override def key: String = validation.key - override def coercion: Traversable[WomType] = validation.coercion + override def coercion: Iterable[WomType] = validation.coercion override protected def validateOption: PartialFunction[WomValue, ErrorOr[ValidatedType]] = validation.validateValuePackagePrivate @@ -162,7 +162,7 @@ object RuntimeAttributesValidation { val attributeOptions: Map[String, Option[Any]] = 
validatedRuntimeAttributes.attributes.safeMapValues(unpackOption) val attributes: Map[String, String] = attributeOptions collect { - case (name, Some(values: Traversable[_])) => (name, values.mkString(",")) + case (name, Some(values: Iterable[_])) => (name, values.mkString(",")) case (name, Some(value)) => (name, value.toString) } @@ -271,7 +271,7 @@ trait RuntimeAttributesValidation[ValidatedType] { * * @return traversable of wdl types */ - def coercion: Traversable[WomType] + def coercion: Iterable[WomType] /** * Validates the wdl value. diff --git a/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala b/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala index 0ac37d656cd..6e199c4c4fe 100644 --- a/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala +++ b/backend/src/main/scala/cromwell/backend/validation/ValidatedRuntimeAttributesBuilder.scala @@ -47,7 +47,7 @@ trait ValidatedRuntimeAttributesBuilder { /** * Returns a map of coercions suitable for RuntimeAttributesDefault.workflowOptionsDefault. */ - final lazy val coercionMap: Map[String, Traversable[WomType]] = { + final lazy val coercionMap: Map[String, Iterable[WomType]] = { validations.map(validation => validation.key -> validation.coercion).toMap } @@ -64,7 +64,7 @@ trait ValidatedRuntimeAttributesBuilder { case Invalid(nel) => throw new RuntimeException with MessageAggregation with NoStackTrace { override def exceptionContext: String = "Runtime attribute validation failed" - override def errorMessages: Traversable[String] = nel.toList + override def errorMessages: Iterable[String] = nel.toList } } } diff --git a/backend/src/main/scala/cromwell/backend/validation/exception/ValidationAggregatedException.scala b/backend/src/main/scala/cromwell/backend/validation/exception/ValidationAggregatedException.scala index 36825795d69..ec3644674bb 100644 --- a/backend/src/main/scala/cromwell/backend/validation/exception/ValidationAggregatedException.scala +++ b/backend/src/main/scala/cromwell/backend/validation/exception/ValidationAggregatedException.scala @@ -3,4 +3,4 @@ package cromwell.backend.validation.exception import common.exception.MessageAggregation case class ValidationAggregatedException(override val exceptionContext: String, - override val errorMessages: Traversable[String]) extends MessageAggregation + override val errorMessages: Iterable[String]) extends MessageAggregation diff --git a/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala b/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala index 1fc1f0c6b29..55772d1de90 100644 --- a/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala +++ b/backend/src/test/scala/cromwell/backend/BackendWorkflowInitializationActorSpec.scala @@ -79,7 +79,7 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val valid = ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) - valid.toEither.right.get should be(ContinueOnReturnCodeFlag(value)) + valid.toEither.toOption.get should be(ContinueOnReturnCodeFlag(value)) } forAll(booleanRows) { value => @@ -90,7 +90,7 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val valid = ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) 
valid.isValid should be(result) - valid.toEither.right.get should be(ContinueOnReturnCodeFlag(value)) + valid.toEither.toOption.get should be(ContinueOnReturnCodeFlag(value)) } forAll(booleanRows) { value => @@ -109,7 +109,7 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val valid = ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) - valid.toEither.right.get should be(ContinueOnReturnCodeSet(Set(value))) + valid.toEither.toOption.get should be(ContinueOnReturnCodeSet(Set(value))) } forAll(integerRows) { value => @@ -120,7 +120,7 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val valid = ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) - valid.toEither.right.get should be(ContinueOnReturnCodeSet(Set(value))) + valid.toEither.toOption.get should be(ContinueOnReturnCodeSet(Set(value))) } forAll(integerRows) { value => @@ -139,7 +139,7 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val valid = ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) - valid.toEither.right.get should be(ContinueOnReturnCodeSet(Set(value))) + valid.toEither.toOption.get should be(ContinueOnReturnCodeSet(Set(value))) } forAll(integerRows) { value => @@ -150,7 +150,7 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val valid = ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) - valid.toEither.right.get should be(ContinueOnReturnCodeSet(Set(value))) + valid.toEither.toOption.get should be(ContinueOnReturnCodeSet(Set(value))) } forAll(integerRows) { value => @@ -176,7 +176,7 @@ class BackendWorkflowInitializationActorSpec extends TestKitSuite val valid = ContinueOnReturnCodeValidation.default(optionalConfig).validate(Map(RuntimeAttributesKeys.ContinueOnReturnCodeKey -> womValue)) valid.isValid should be(result) - valid.toEither.left.get.toList should contain theSameElementsAs List( + valid.toEither.swap.toOption.get.toList should contain theSameElementsAs List( "Expecting continueOnReturnCode runtime attribute to be either a Boolean, a String 'true' or 'false', or an Array[Int]" ) } diff --git a/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala b/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala index 430045786ac..300c3dde750 100644 --- a/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala +++ b/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala @@ -71,7 +71,7 @@ class StandardValidatedRuntimeAttributesBuilderSpec extends AnyWordSpecLike with val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest")) var warnings = List.empty[Any] val mockLogger = mock[Logger] - mockLogger.warn(anyString).answers(warnings :+= _) + mockLogger.warn(anyString).answers((warnings :+= _): Any => Unit) assertRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes, includeDockerSupport = false, logger = mockLogger) warnings should contain theSameElementsAs List("Unrecognized runtime attribute keys: 
docker") @@ -93,7 +93,7 @@ class StandardValidatedRuntimeAttributesBuilderSpec extends AnyWordSpecLike with val runtimeAttributes = Map("docker" -> WomInteger(1)) var warnings = List.empty[Any] val mockLogger = mock[Logger] - mockLogger.warn(anyString).answers(warnings :+= _) + mockLogger.warn(anyString).answers((warnings :+= _): Any => Unit) assertRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes, includeDockerSupport = false, logger = mockLogger) warnings should contain theSameElementsAs List("Unrecognized runtime attribute keys: docker") diff --git a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala index 86c9d23166f..1752da9014b 100644 --- a/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala +++ b/backend/src/test/scala/cromwell/backend/validation/RuntimeAttributesValidationSpec.scala @@ -368,7 +368,7 @@ class RuntimeAttributesValidationSpec extends AnyWordSpecLike with CromwellTimeo |continueOnReturnCode = [0,1,2] |""".stripMargin)) - ContinueOnReturnCodeValidation.configDefaultWdlValue(optinalBackendConfig).get shouldBe WomArray(WomArrayType(WomIntegerType), Array(WomInteger(0), WomInteger(1), WomInteger(2))) + ContinueOnReturnCodeValidation.configDefaultWdlValue(optinalBackendConfig).get shouldBe WomArray(WomArrayType(WomIntegerType), List(WomInteger(0), WomInteger(1), WomInteger(2))) } "return failure when tries to validate an invalid maxRetries entry" in { diff --git a/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala b/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala index a73f26d2f19..16463742632 100644 --- a/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala +++ b/centaur/src/it/scala/centaur/reporting/BigQueryReporter.scala @@ -24,7 +24,7 @@ import net.ceedubs.ficus.Ficus._ import org.apache.commons.lang3.exception.ExceptionUtils import org.threeten.bp.Duration -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.ExecutionContext class BigQueryReporter(override val params: ErrorReporterParams) extends ErrorReporter { diff --git a/centaur/src/it/scala/centaur/reporting/ErrorReporters.scala b/centaur/src/it/scala/centaur/reporting/ErrorReporters.scala index 3030703495e..8f989e3bd1b 100644 --- a/centaur/src/it/scala/centaur/reporting/ErrorReporters.scala +++ b/centaur/src/it/scala/centaur/reporting/ErrorReporters.scala @@ -6,7 +6,7 @@ import com.typesafe.config.{Config, ConfigFactory} import com.typesafe.scalalogging.StrictLogging import net.ceedubs.ficus.Ficus._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.ExecutionContext /** @@ -29,7 +29,7 @@ class ErrorReporters(rootConfig: Config) { AggregatedIo.aggregateExceptions("Errors while creating ErrorReporters", errorReporterNames.map(getErrorReporter)) } - val errorReporters: List[ErrorReporter] = errorReportersIo.unsafeRunSync + val errorReporters: List[ErrorReporter] = errorReportersIo.unsafeRunSync() /** The number of times any test should be retried. 
*/ val retryAttempts: Int = errorReporterConfig.getOrElse("retry-attempts", 0) diff --git a/centaur/src/main/scala/centaur/CromwellConfiguration.scala b/centaur/src/main/scala/centaur/CromwellConfiguration.scala index e1d1383fa69..50c1ca031f3 100644 --- a/centaur/src/main/scala/centaur/CromwellConfiguration.scala +++ b/centaur/src/main/scala/centaur/CromwellConfiguration.scala @@ -4,7 +4,7 @@ import java.lang.ProcessBuilder.Redirect import better.files.File import com.typesafe.scalalogging.StrictLogging -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ trait CromwellProcess extends StrictLogging { def logFile: String diff --git a/centaur/src/main/scala/centaur/test/ObjectCounter.scala b/centaur/src/main/scala/centaur/test/ObjectCounter.scala index 3849c9aea83..46affc7d552 100644 --- a/centaur/src/main/scala/centaur/test/ObjectCounter.scala +++ b/centaur/src/main/scala/centaur/test/ObjectCounter.scala @@ -5,7 +5,7 @@ import com.google.cloud.storage.{Blob, Storage} import software.amazon.awssdk.services.s3.S3Client import software.amazon.awssdk.services.s3.model.ListObjectsRequest -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.implicitConversions trait ObjectCounter[A] { diff --git a/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala b/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala index 7cb8b5cef7e..9d09827027a 100644 --- a/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala +++ b/centaur/src/test/scala/centaur/test/CentaurOperationsSpec.scala @@ -33,6 +33,7 @@ class CentaurOperationsSpec extends AnyFlatSpec with Matchers { case Success(_) if !expectMatching => fail("Metadata unexpectedly matches") case Failure(e) if expectMatching => fail("Metadata unexpectedly mismatches", e) case Failure(_) if !expectMatching => // great + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } } diff --git a/centaur/test_cromwell.sh b/centaur/test_cromwell.sh index 0e3ab8c240b..2f330f1feb0 100755 --- a/centaur/test_cromwell.sh +++ b/centaur/test_cromwell.sh @@ -132,7 +132,7 @@ else fi # Add the it-classes folder to the classpath to ensure logback configuration files are picked up. -CP="${CP}:${RUN_DIR}/centaur/target/scala-2.12/it-classes" +CP="${CP}:${RUN_DIR}/centaur/target/scala-2.13/it-classes" # This is set in cromwell::private::create_centaur_variables if [ -n "${CENTAUR_CONFIG_STRING}" ]; then @@ -156,7 +156,7 @@ fi TEST_DESCRIPTION="Running Centaur with sbt test" -TEST_COMMAND="java ${CENTAUR_CONF} -cp $CP org.scalatest.tools.Runner -R centaur/target/scala-2.12/it-classes -oD -u target/test-reports -PS${TEST_THREAD_COUNT}" +TEST_COMMAND="java ${CENTAUR_CONF} -cp $CP org.scalatest.tools.Runner -R centaur/target/scala-2.13/it-classes -oD -u target/test-reports -PS${TEST_THREAD_COUNT}" if [[ -n ${EXCLUDE_TAG[*]} ]]; then TEST_DESCRIPTION=${TEST_DESCRIPTION}" excluding ${EXCLUDE_TAG[*]} tests" diff --git a/centaurCwlRunner/src/bin/centaur-cwl-runner.bash b/centaurCwlRunner/src/bin/centaur-cwl-runner.bash index 8d73f4c456e..8f39c3ec130 100755 --- a/centaurCwlRunner/src/bin/centaur-cwl-runner.bash +++ b/centaurCwlRunner/src/bin/centaur-cwl-runner.bash @@ -3,7 +3,7 @@ # `sbt assembly` must have already been run. build_root="$( dirname "${BASH_SOURCE[0]}" )/../../.." 
centaur_cwl_jar="${CENTAUR_CWL_JAR:-"$( \ - find "${build_root}/centaurCwlRunner/target/scala-2.12" -name 'centaur-cwl-runner-*.jar' -print0 \ + find "${build_root}/centaurCwlRunner/target/scala-2.13" -name 'centaur-cwl-runner-*.jar' -print0 \ | xargs -0 ls -1 -t \ | head -n 1 \ )"}" diff --git a/centaurCwlRunner/src/main/scala/centaur/cwl/CentaurCwlRunner.scala b/centaurCwlRunner/src/main/scala/centaur/cwl/CentaurCwlRunner.scala index e561934ab53..ec5dcad537e 100644 --- a/centaurCwlRunner/src/main/scala/centaur/cwl/CentaurCwlRunner.scala +++ b/centaurCwlRunner/src/main/scala/centaur/cwl/CentaurCwlRunner.scala @@ -46,15 +46,15 @@ object CentaurCwlRunner extends StrictLogging { // TODO: This would be cleaner with Enumeratum object ExitCode extends Enumeration { - protected case class Val(status: Int) extends super.Val + protected case class ExitVal(status: Int) extends super.Val implicit class ValueToVal(val exitCodeValue: Value) extends AnyVal { - def status: Int = exitCodeValue.asInstanceOf[Val].status + def status: Int = exitCodeValue.asInstanceOf[ExitVal].status } - val Success = Val(0) - val Failure = Val(1) - val NotImplemented = Val(33) + val Success = ExitVal(0) + val Failure = ExitVal(1) + val NotImplemented = ExitVal(33) } private val cwlPreProcessor = new CwlPreProcessor() diff --git a/centaurCwlRunner/src/test/scala/CloudPreprocessorSpec.scala b/centaurCwlRunner/src/test/scala/CloudPreprocessorSpec.scala index a64484abcbd..df13e49b666 100644 --- a/centaurCwlRunner/src/test/scala/CloudPreprocessorSpec.scala +++ b/centaurCwlRunner/src/test/scala/CloudPreprocessorSpec.scala @@ -9,10 +9,10 @@ class CloudPreprocessorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma behavior of "PAPIPreProcessor" val pAPIPreprocessor = new CloudPreprocessor(ConfigFactory.load(), "papi.default-input-gcs-prefix") - + def validate(result: String, expectation: String) = { - val parsedResult = YamlUtils.parse(result).right.get - val parsedExpectation = YamlUtils.parse(expectation).right.get + val parsedResult = YamlUtils.parse(result).toOption.get + val parsedExpectation = YamlUtils.parse(expectation).toOption.get // This is an actual Json comparison from circe parsedResult shouldBe parsedExpectation @@ -50,7 +50,7 @@ class CloudPreprocessorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma | } | } |} - |""".stripMargin).value.unsafeRunSync().right.get, + |""".stripMargin).value.unsafeRunSync().toOption.get, """|{ | "input": { | "null": null, diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileProvider.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileProvider.scala index a213f4c32e4..96e05702034 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileProvider.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/FtpCloudNioFileProvider.scala @@ -20,11 +20,11 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends FtpListFiles(cloudHost, cloudPath, "determine file existence") .run(client) .map(_.nonEmpty) - } unsafeRunSync() + }.unsafeRunSync() override def existsPaths(cloudHost: String, cloudPathPrefix: String): Boolean = withAutoRelease(cloudHost) { client => existsPathsWithClient(cloudHost, cloudPathPrefix, client) - } unsafeRunSync() + }.unsafeRunSync() private def existsPathsWithClient(cloudHost: String, cloudPathPrefix: String, client: FTPClient): IO[Boolean] = { val operation = FtpListDirectories(cloudHost, cloudPathPrefix, 
"determine directory existence") @@ -44,7 +44,7 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends val cleanFiles = files.map(_.getName).map(cloudPathPrefix.stripPrefix("/").ensureSlashed + _) CloudNioFileList(cleanFiles, markerOption) }) - } unsafeRunSync() + }.unsafeRunSync() override def copy(sourceCloudHost: String, sourceCloudPath: String, targetCloudHost: String, targetCloudPath: String): Unit = { if (sourceCloudHost != targetCloudHost) throw new UnsupportedOperationException(s"Cannot copy files across different ftp servers: Source host: $sourceCloudHost, Target host: $targetCloudHost") @@ -69,11 +69,11 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends override def deleteIfExists(cloudHost: String, cloudPath: String): Boolean = withAutoRelease(cloudHost) { client => FtpDeleteFile(cloudHost, cloudPath, "delete").run(client) - } unsafeRunSync() + }.unsafeRunSync() private def inputStream(cloudHost: String, cloudPath: String, offset: Long, lease: Lease[FTPClient]): IO[LeasedInputStream] = { FtpInputStream(cloudHost, cloudPath, offset) - .run(lease.get) + .run(lease.get()) // Wrap the input stream in a LeasedInputStream so that the lease can be released when the stream is closed .map(new LeasedInputStream(cloudHost, cloudPath, _, lease)) } @@ -83,7 +83,7 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends lease <- acquireLease(cloudHost) is <- inputStream(cloudHost, cloudPath, offset, lease) } yield Channels.newChannel(is) - } unsafeRunSync() + }.unsafeRunSync() private def outputStream(cloudHost: String, cloudPath: String, lease: Lease[FTPClient]): IO[LeasedOutputStream] = { FtpOutputStream(cloudHost, cloudPath) @@ -96,7 +96,7 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends lease <- acquireLease(cloudHost) os <- outputStream(cloudHost, cloudPath, lease) } yield Channels.newChannel(os) - } unsafeRunSync() + }.unsafeRunSync() override def fileAttributes(cloudHost: String, cloudPath: String): Option[CloudNioRegularFileAttributes] = withAutoRelease(cloudHost) { client => FtpListFiles(cloudHost, cloudPath, "get file attributes") @@ -106,7 +106,7 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends new FtpCloudNioRegularFileAttributes(file, cloudHost + cloudPath) } ) - } unsafeRunSync() + }.unsafeRunSync() override def createDirectory(cloudHost: String, cloudPath: String) = withAutoRelease(cloudHost) { client => val operation = FtpCreateDirectory(cloudHost, cloudPath) @@ -114,7 +114,7 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends operation.run(client) handleErrorWith { /* * Sometimes the creation fails with a cryptic error message and the exception generator did not recognize it. 
- * In that case, check after the fact if the directory does exist, and if so throw a more appropriate exception + * In that case, check after the fact if the directory does exist, and if so throw a more appropriate exception */ case e: FtpIoException => existsPathsWithClient(cloudHost, cloudPath, client) flatMap { @@ -125,7 +125,7 @@ class FtpCloudNioFileProvider(fsProvider: FtpCloudNioFileSystemProvider) extends } case other => IO.raiseError(other) } - }.void unsafeRunSync() + }.void.unsafeRunSync() private def findFileSystem(host: String): FtpCloudNioFileSystem = fsProvider.newCloudNioFileSystemFromHost(host) diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedInputStream.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedInputStream.scala index c55ed74d6b1..29d0b7c6bf7 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedInputStream.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedInputStream.scala @@ -16,7 +16,7 @@ class LeasedInputStream(cloudHost: String, cloudPath: String, inputStream: Input override def available = inputStream.available() override def close() = { inputStream.close() - autoRelease(IO.pure(lease))(FtpCompletePendingCommand(cloudHost, cloudPath, "close input steam").run).void unsafeRunSync() + autoRelease(IO.pure(lease))(FtpCompletePendingCommand(cloudHost, cloudPath, "close input stream").run).void.unsafeRunSync() } override def mark(readlimit: Int) = inputStream.mark(readlimit) override def reset() = inputStream.reset() diff --git a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedOutputStream.scala b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedOutputStream.scala index e644f077e8a..5a4f43acabb 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedOutputStream.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/main/scala/cloud/nio/impl/ftp/LeasedOutputStream.scala @@ -15,6 +15,6 @@ class LeasedOutputStream(cloudHost: String, cloudPath: String, outputStream: Out override def flush() = outputStream.flush() override def close() = { outputStream.close() - autoRelease(IO.pure(lease))(FtpCompletePendingCommand(cloudHost, cloudPath, "close input steam").run).void unsafeRunSync() + autoRelease(IO.pure(lease))(FtpCompletePendingCommand(cloudHost, cloudPath, "close output stream").run).void.unsafeRunSync() } } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/MockFtpFileSystem.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/MockFtpFileSystem.scala index 4707138f656..e3c954c8d24 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/MockFtpFileSystem.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/MockFtpFileSystem.scala @@ -7,7 +7,7 @@ import org.scalatest.{BeforeAndAfterAll, Suite} trait MockFtpFileSystem extends BeforeAndAfterAll { this: Suite => private var connectionPort: Option[Int] = None - + val fakeFtpServer = new FakeFtpServer() fakeFtpServer.setServerControlPort(0) fakeFtpServer.addUserAccount(new UserAccount("test_user", "test_password", "/")) @@ -17,18 +17,18 @@ trait MockFtpFileSystem extends BeforeAndAfterAll { this: Suite => fakeUnixFileSystem.add(new DirectoryEntry("/root")) fakeFtpServer.setFileSystem(fakeUnixFileSystem) - override def beforeAll = { + override def beforeAll() = { fakeFtpServer.start() connectionPort = Option(fakeFtpServer.getServerControlPort) } - override def afterAll 
= { + override def afterAll() = { fakeFtpServer.stop() } - + lazy val ftpFileSystemsConfiguration = FtpFileSystems.DefaultConfig.copy(connectionPort = connectionPort.getOrElse(throw new RuntimeException("Fake FTP server has not been started"))) lazy val ftpFileSystems = new FtpFileSystems(ftpFileSystemsConfiguration) - + // Do not call this before starting the server lazy val mockProvider = { new FtpCloudNioFileSystemProvider(ConfigFactory.empty, FtpAuthenticatedCredentials("test_user", "test_password", None), ftpFileSystems) diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioDirectoryStream.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioDirectoryStream.scala index fb03ea1d43c..909c1fc54db 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioDirectoryStream.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioDirectoryStream.scala @@ -2,7 +2,7 @@ package cloud.nio.spi import java.nio.file.{DirectoryStream, Path} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class CloudNioDirectoryStream( fileProvider: CloudNioFileProvider, @@ -11,14 +11,14 @@ class CloudNioDirectoryStream( filter: DirectoryStream.Filter[_ >: Path] ) extends DirectoryStream[Path] { - override def iterator(): java.util.Iterator[Path] = pathStream().filterNot(_ == prefix).toIterator.asJava + override def iterator(): java.util.Iterator[Path] = pathStream().filterNot(_ == prefix).iterator.asJava - private[this] def pathStream(markerOption: Option[String] = None): Stream[Path] = { + private[this] def pathStream(markerOption: Option[String] = None): LazyList[Path] = { listNext(markerOption) match { case CloudNioFileList(keys, Some(marker)) => - keys.toStream.map(toPath) ++ pathStream(Option(marker)) + keys.to(LazyList).map(toPath) ++ pathStream(Option(marker)) case CloudNioFileList(keys, None) => - keys.toStream.map(toPath) + keys.to(LazyList).map(toPath) } } diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystem.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystem.scala index fc92df31108..8b93419f350 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystem.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystem.scala @@ -3,7 +3,7 @@ package cloud.nio.spi import java.nio.file._ import java.nio.file.attribute.UserPrincipalLookupService -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object CloudNioFileSystem { val Separator: String = "/" @@ -67,7 +67,7 @@ class CloudNioFileSystem(override val provider: CloudNioFileSystemProvider, val } override def hashCode(): Int = { - val state = List(provider, host) + val state = List[Object](provider, host) state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b) } } diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystemProvider.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystemProvider.scala index 80da7c143dd..9b79e308afd 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystemProvider.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioFileSystemProvider.scala @@ -8,7 +8,7 @@ import java.nio.file.spi.FileSystemProvider import com.typesafe.config.{Config, ConfigFactory} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import net.ceedubs.ficus.Ficus._ /** @@ -18,7 +18,7 @@ import 
net.ceedubs.ficus.Ficus._ abstract class CloudNioFileSystemProvider extends FileSystemProvider { def config: Config - + def usePseudoDirectories: Boolean = true def fileProvider: CloudNioFileProvider @@ -94,7 +94,7 @@ abstract class CloudNioFileSystemProvider extends FileSystemProvider { cloudNioReadChannel(retry, cloudNioPath) } } - + protected def cloudNioReadChannel(retry: CloudNioRetry, cloudNioPath: CloudNioPath): CloudNioReadChannel = new CloudNioReadChannel(fileProvider, retry, cloudNioPath) protected def cloudNioWriteChannel(retry: CloudNioRetry, cloudNioPath: CloudNioPath): CloudNioWriteChannel = new CloudNioWriteChannel(fileProvider, retry, cloudNioPath) diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioPath.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioPath.scala index 7b0d16dbeff..ec0c701a9b9 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioPath.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/CloudNioPath.scala @@ -5,7 +5,7 @@ import java.net.URI import java.nio.file._ import java.util.Objects -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object CloudNioPath { @@ -191,7 +191,7 @@ class CloudNioPath(filesystem: CloudNioFileSystem, private[spi] val unixPath: Un if (unixPath.isEmpty || unixPath.isRoot) { java.util.Collections.emptyIterator() } else { - unixPath.split().toStream.map(part => newPath(UnixPath.getPath(part)).asInstanceOf[Path]).toIterator.asJava + unixPath.split().to(LazyList).map(part => newPath(UnixPath.getPath(part)).asInstanceOf[Path]).iterator.asJava } } diff --git a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/UnixPath.scala b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/UnixPath.scala index 306afa44cc7..fb3a2bf8f50 100644 --- a/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/UnixPath.scala +++ b/cloud-nio/cloud-nio-spi/src/main/scala/cloud/nio/spi/UnixPath.scala @@ -174,11 +174,11 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { breakable( while (left.hasNext && right.hasNext) { if (!(left.head == right.head)) { - break + break() } - left.next - right.next + left.next() + right.next() } ) @@ -186,11 +186,11 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { while (left.hasNext) { result.append(UnixPath.ParentDir) result.append(UnixPath.Separator) - left.next + left.next() } while (right.hasNext) { - result.append(right.next) + result.append(right.next()) result.append(UnixPath.Separator) } @@ -280,7 +280,7 @@ final private[spi] case class UnixPath(path: String) extends CharSequence { def startsWith(left: Iterator[String], right: Iterator[String]): Boolean = { while (right.hasNext) { - if (!left.hasNext || right.next != left.next) { + if (!left.hasNext || right.next() != left.next()) { return false } } diff --git a/cloud-nio/cloud-nio-spi/src/test/scala/cloud/nio/spi/HashTypeSpec.scala b/cloud-nio/cloud-nio-spi/src/test/scala/cloud/nio/spi/HashTypeSpec.scala index 014a01c120a..9946afbf6a9 100644 --- a/cloud-nio/cloud-nio-spi/src/test/scala/cloud/nio/spi/HashTypeSpec.scala +++ b/cloud-nio/cloud-nio-spi/src/test/scala/cloud/nio/spi/HashTypeSpec.scala @@ -21,13 +21,13 @@ class HashTypeSpec extends AnyFlatSpecLike with Matchers { it should "calculate an etag hash on medium data" in { val eightMB = 8 * 1024 * 1024 - val value = Stream.continually(".").take(eightMB).mkString + val value = LazyList.continually(".").take(eightMB).mkString 
HashType.S3Etag.calculateHash(value) shouldBe "f89801f68b5028d64e0238ffb5a1b8e0" } it should "calculate an etag hash on long data" in { val eightMB = 8 * 1024 * 1024 - val value = Stream.continually(".").take(eightMB + 1).mkString + val value = LazyList.continually(".").take(eightMB + 1).mkString HashType.S3Etag.calculateHash(value) shouldBe "8e224b463f4f5202c9621820f7690a01-2" } diff --git a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioFiles.scala b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioFiles.scala index 41ca06d4660..162d47d08db 100644 --- a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioFiles.scala +++ b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/CloudNioFiles.scala @@ -2,7 +2,7 @@ package cloud.nio.util import java.nio.file.{Files, Path} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ /** * Common file access utilities similar to java.nio.file.Files. diff --git a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/VersionUtil.scala b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/VersionUtil.scala index 5a81a731d0d..019dbe81caf 100644 --- a/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/VersionUtil.scala +++ b/cloud-nio/cloud-nio-util/src/main/scala/cloud/nio/util/VersionUtil.scala @@ -5,7 +5,7 @@ import java.nio.file.{Files, Paths} import com.typesafe.config.ConfigFactory import net.ceedubs.ficus.Ficus._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ /** * Retrieves the version from an SBT generated config file. diff --git a/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthMode.scala b/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthMode.scala index 3823b020d21..e850b53807a 100644 --- a/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthMode.scala +++ b/cloudSupport/src/main/scala/cromwell/cloudsupport/gcp/auth/GoogleAuthMode.scala @@ -17,7 +17,7 @@ import cromwell.cloudsupport.gcp.auth.ApplicationDefaultMode.applicationDefaultC import cromwell.cloudsupport.gcp.auth.GoogleAuthMode._ import cromwell.cloudsupport.gcp.auth.ServiceAccountMode.{CredentialFileFormat, JsonFileFormat, PemFileFormat} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} object GoogleAuthMode { diff --git a/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/AwsConfigurationSpec.scala b/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/AwsConfigurationSpec.scala index aaf8f365f96..6ead310c925 100644 --- a/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/AwsConfigurationSpec.scala +++ b/cloudSupport/src/test/scala/cromwell/cloudsupport/aws/AwsConfigurationSpec.scala @@ -100,14 +100,14 @@ class AwsConfigurationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat val assumeRoleWithId = (auths collectFirst { case a: AssumeRoleMode => a }).get assumeRoleWithId.name shouldBe "assume-role-based-on-another-with-external" assumeRoleWithId.baseAuthName shouldBe "default" - assumeRoleWithId.baseAuthentication.name shouldBe "default" + assumeRoleWithId.baseAuthentication().name shouldBe "default" assumeRoleWithId.roleArn shouldBe "my-role-arn" assumeRoleWithId.externalId shouldBe "my-external-id" val assumeRole = (auths.takeRight(1) collectFirst { case a: AssumeRoleMode => a }).get assumeRole.name shouldBe "assume-role-based-on-another" assumeRole.baseAuthName shouldBe "default" - assumeRole.baseAuthentication.name 
shouldBe "default" + assumeRole.baseAuthentication().name shouldBe "default" assumeRole.roleArn shouldBe "my-role-arn" assumeRole.externalId shouldBe "" } diff --git a/codegen_java/build.sbt b/codegen_java/build.sbt index 8f22e030116..2ba35a693c3 100644 --- a/codegen_java/build.sbt +++ b/codegen_java/build.sbt @@ -6,22 +6,26 @@ lazy val root = (project in file(".")). Seq(organization := "org.broadinstitute.cromwell", name := "cromwell-client", version := createVersion("0.1"), - scalaVersion := "2.12.15", + scalaVersion := "2.13.8", scalacOptions ++= Seq("-feature"), compile / javacOptions ++= Seq("-Xlint:deprecation"), Compile / packageDoc / publishArtifact := false, resolvers += Resolver.mavenLocal, updateOptions := updateOptions.value.withGigahorse(false), libraryDependencies ++= Seq( - "io.swagger" % "swagger-annotations" % "1.5.21", - "com.squareup.okhttp3" % "okhttp" % "3.12.1", - "com.squareup.okhttp3" % "logging-interceptor" % "3.12.1", - "com.google.code.gson" % "gson" % "2.8.5", + "io.swagger" % "swagger-annotations" % "1.6.5", + "com.squareup.okhttp3" % "okhttp" % "4.9.3", + "com.squareup.okhttp3" % "logging-interceptor" % "4.9.3", + "com.google.code.gson" % "gson" % "2.9.0", "org.apache.commons" % "commons-lang3" % "3.12.0", "org.apache.oltu.oauth2" % "org.apache.oltu.oauth2.client" % "1.0.1", - "org.threeten" % "threetenbp" % "1.3.5" % "compile", - "io.gsonfire" % "gson-fire" % "1.8.0" % "compile", - "junit" % "junit" % "4.12" % "test", - "com.novocode" % "junit-interface" % "0.10" % "test" + "javax.ws.rs" % "javax.ws.rs-api" % "2.1.1", + "javax.annotation" % "javax.annotation-api" % "1.3.2", + "com.google.code.findbugs" % "jsr305" % "3.0.2", + "org.threeten" % "threetenbp" % "1.6.0" % Compile, + "io.gsonfire" % "gson-fire" % "1.8.5" % Compile, + "junit" % "junit" % "4.13.2" % Test, + "com.novocode" % "junit-interface" % "0.11" % Test, + "org.junit.jupiter" % "junit-jupiter-api" % "5.8.2" % Test )) ++ publishSettings:_* ) diff --git a/common/src/main/scala/common/collections/EnhancedCollections.scala b/common/src/main/scala/common/collections/EnhancedCollections.scala index 6436be49077..beede1f9510 100644 --- a/common/src/main/scala/common/collections/EnhancedCollections.scala +++ b/common/src/main/scala/common/collections/EnhancedCollections.scala @@ -3,9 +3,8 @@ package common.collections import cats.data.NonEmptyList import scala.annotation.tailrec -import scala.collection.TraversableLike -import scala.collection.generic.CanBuildFrom -import scala.collection.immutable.{MapLike, Queue} +import scala.collection.IterableOps +import scala.collection.immutable.Queue import scala.reflect.ClassTag object EnhancedCollections { @@ -16,7 +15,7 @@ object EnhancedCollections { * After trying and failing to do this myself, I got this to work by copying the answer from here: * https://stackoverflow.com/questions/29886246/scala-filter-by-type */ - implicit class EnhancedTraversableLike[T2, Repr <: TraversableLike[T2, Repr], That](val traversable: TraversableLike[T2, Repr]) extends AnyVal { + implicit class EnhancedIterableOps[T2, Repr[x] <: IterableOps[x, Repr, Repr[x]]](val iterableOps: IterableOps[T2, Repr, Repr[T2]]) extends AnyVal { /** * Lets you filter a collection by type. 
* @@ -27,9 +26,9 @@ object EnhancedCollections { * val xs: Set[Object] * val strings: Set[String] = xs.filterByType[String] */ - def filterByType[T <: T2](implicit tag: ClassTag[T], bf: CanBuildFrom[Repr, T, That]): That = traversable.collect { case t: T => t } + def filterByType[T <: T2](implicit tag: ClassTag[T]): Repr[T] = iterableOps.collect { case t: T => t } - def firstByType[T <: T2](implicit tag: ClassTag[T]): Option[T] = traversable collectFirst { case t: T => t } + def firstByType[T <: T2](implicit tag: ClassTag[T]): Option[T] = iterableOps collectFirst { case t: T => t } } implicit class EnhancedQueue[A](val queue: Queue[A]) extends AnyVal { @@ -95,7 +94,7 @@ object EnhancedCollections { } } - implicit class EnhancedMapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]](val mapLike: MapLike[A, B, This]) { + implicit class EnhancedMapLike[A, +B, +This <: Map[A, B]](val mapLike: Map[A, B]) { /** * 'safe' in that unlike the implementation hiding behind `MapLike#mapValues` this is strict. i.e. this will only * evaluate the supplied function once on each value and at the time this method is called. @@ -103,7 +102,7 @@ object EnhancedCollections { def safeMapValues[C](f: B => C): Map[A, C] = mapLike map { case (k, v) => k -> f(v) } /** - * Based on scalaz's intersectWith, applies `f` to values of keys found in this `mapLike` and map` + * Based on scalaz's intersectWith, applies `f` to values of keys found in this `mapLike` and map */ def intersectWith[C, D](map: Map[A, C])(f: (B, C) => D): Map[A, D] = { mapLike collect { diff --git a/common/src/main/scala/common/exception/ExceptionAggregation.scala b/common/src/main/scala/common/exception/ExceptionAggregation.scala index 5a168525d0c..bd7f030331c 100644 --- a/common/src/main/scala/common/exception/ExceptionAggregation.scala +++ b/common/src/main/scala/common/exception/ExceptionAggregation.scala @@ -8,7 +8,7 @@ import common.exception.Aggregation._ import scala.annotation.tailrec object Aggregation { - def formatMessageWithList(message: String, list: Traversable[String]) = { + def formatMessageWithList(message: String, list: Iterable[String]) = { if (list.nonEmpty) { val messages = s"\n${list.mkString("\n")}" s"$message:$messages" @@ -25,10 +25,10 @@ object Aggregation { case classic => flattenThrowableRec(r, flattened :+ classic) } } - + flattenThrowableRec(List(throwable), List.empty) } - + implicit class EnhancedThrowable(val throwable: Throwable) extends AnyVal { def flatten = flattenThrowable(throwable) } @@ -40,7 +40,7 @@ object Aggregation { */ trait MessageAggregation extends Exception { def exceptionContext: String - def errorMessages: Traversable[String] + def errorMessages: Iterable[String] override def getMessage = formatMessageWithList(exceptionContext, errorMessages) } @@ -50,7 +50,7 @@ trait MessageAggregation extends Exception { * aggregates multiple throwables into the extended Exception. */ trait ThrowableAggregation extends MessageAggregation { - def throwables: Traversable[Throwable] + def throwables: Iterable[Throwable] throwables foreach addSuppressed @@ -70,6 +70,6 @@ trait ThrowableAggregation extends MessageAggregation { /** * Generic convenience case class for aggregated exceptions. 
*/ -case class AggregatedException(exceptionContext: String, throwables: Traversable[Throwable]) extends Exception with ThrowableAggregation -case class AggregatedMessageException(exceptionContext: String, errorMessages: Traversable[String]) extends Exception with MessageAggregation -case class CompositeException(exceptionContext: String, throwables: Traversable[Throwable], override val errorMessages: Traversable[String]) extends Exception with ThrowableAggregation +case class AggregatedException(exceptionContext: String, throwables: Iterable[Throwable]) extends Exception with ThrowableAggregation +case class AggregatedMessageException(exceptionContext: String, errorMessages: Iterable[String]) extends Exception with MessageAggregation +case class CompositeException(exceptionContext: String, throwables: Iterable[Throwable], override val errorMessages: Iterable[String]) extends Exception with ThrowableAggregation diff --git a/common/src/main/scala/common/util/TerminalUtil.scala b/common/src/main/scala/common/util/TerminalUtil.scala index f793a6e2451..dab7fb9ce17 100644 --- a/common/src/main/scala/common/util/TerminalUtil.scala +++ b/common/src/main/scala/common/util/TerminalUtil.scala @@ -1,7 +1,7 @@ package common.util object TerminalUtil { - def highlight(colorCode:Int, string:String) = s"\033[38;5;${colorCode}m$string\033[0m" + def highlight(colorCode:Int, string:String) = s"\u001B[38;5;${colorCode}m$string\u001B[0m" def mdTable(rows: Seq[Seq[String]], header: Seq[String]): String = { def maxWidth(lengths: Seq[Seq[Int]], column: Int) = lengths.map { length => length(column) }.max val widths = (rows :+ header).map { row => row.map { s => s.length } } diff --git a/common/src/main/scala/common/util/TryUtil.scala b/common/src/main/scala/common/util/TryUtil.scala index 05a1bf24ae2..e8edf01f8e5 100644 --- a/common/src/main/scala/common/util/TryUtil.scala +++ b/common/src/main/scala/common/util/TryUtil.scala @@ -19,7 +19,7 @@ object TryUtil { stringWriter.toString } - def stringifyFailures[T](possibleFailures: Traversable[Try[T]]): Traversable[String] = + def stringifyFailures[T](possibleFailures: Iterable[Try[T]]): Iterable[String] = possibleFailures.collect { case failure: Failure[T] => stringifyFailure(failure) } private def sequenceIterable[T](tries: Iterable[Try[_]], unbox: () => T, prefixErrorMessage: String): Try[T] = { @@ -33,25 +33,25 @@ object TryUtil { def sequence[T](tries: Seq[Try[T]], prefixErrorMessage: String = ""): Try[Seq[T]] = { def unbox = tries map { _.get } - sequenceIterable(tries, unbox _, prefixErrorMessage) + sequenceIterable(tries, () => unbox, prefixErrorMessage) } def sequenceOption[T](tried: Option[Try[T]], prefixErrorMessage: String = ""): Try[Option[T]] = { def unbox = tried.map(_.get) - sequenceIterable(tried.toSeq, unbox _, prefixErrorMessage) + sequenceIterable(tried.toSeq, () => unbox, prefixErrorMessage) } def sequenceMap[T, U](tries: Map[T, Try[U]], prefixErrorMessage: String = ""): Try[Map[T, U]] = { def unbox = tries safeMapValues { _.get } - sequenceIterable(tries.values, unbox _, prefixErrorMessage) + sequenceIterable(tries.values, () => unbox, prefixErrorMessage) } // NOTE: Map is invariant on the key type, so we accept _ <: Try[T] def sequenceKeyValues[T, U](tries: Map[_ <: Try[T], Try[U]], prefixErrorMessage: String = ""): Try[Map[T, U]] = { def unbox: Map[T, U] = tries map { case (tryKey, tryValue) => tryKey.get -> tryValue.get } - sequenceIterable(tries.toSeq.flatMap(Function.tupled(Seq(_, _))), unbox _, prefixErrorMessage) + 
sequenceIterable(tries.toSeq.flatMap(Function.tupled(Seq(_, _))), () => unbox, prefixErrorMessage) } def sequenceTuple[T, U](tries: (Try[T], Try[U]), prefixErrorMessage: String = ""): Try[(T, U)] = { @@ -59,6 +59,6 @@ object TryUtil { case (try1, try2) => (try1.get, try2.get) } - sequenceIterable(tries match { case (try1, try2) => Seq(try1, try2) }, unbox _, prefixErrorMessage) + sequenceIterable(tries match { case (try1, try2) => Seq(try1, try2) }, () => unbox, prefixErrorMessage) } } diff --git a/common/src/main/scala/common/util/VersionUtil.scala b/common/src/main/scala/common/util/VersionUtil.scala index 3697ebd7ed9..3ddea0750d5 100644 --- a/common/src/main/scala/common/util/VersionUtil.scala +++ b/common/src/main/scala/common/util/VersionUtil.scala @@ -5,7 +5,7 @@ import java.nio.file.{Files, Paths} import com.typesafe.config.ConfigFactory import net.ceedubs.ficus.Ficus._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ /** * Retrieves the version from an SBT generated config file. diff --git a/common/src/test/scala/common/collections/EnhancedCollectionsSpec.scala b/common/src/test/scala/common/collections/EnhancedCollectionsSpec.scala index 8bdba0768d8..12dba2e15d9 100644 --- a/common/src/test/scala/common/collections/EnhancedCollectionsSpec.scala +++ b/common/src/test/scala/common/collections/EnhancedCollectionsSpec.scala @@ -17,6 +17,16 @@ class EnhancedCollectionsSpec extends AsyncFlatSpec with Matchers { stringList should be(List("hello", "world")) } + it should "work with non-Anys" in { + class A + class B extends A + class C extends B + + val abcs = List(new A(), new B(), new C(), new B(), new A()) + val cs: List[C] = abcs.filterByType[C] + cs shouldEqual abcs.collect { case c: C => c } + } + it should "filter a Set by type and return a Set" in { val objectSet = Set("hello", 3, None, "world") val intSet: Set[Int] = objectSet.filterByType[Int] diff --git a/common/src/test/scala/common/exception/ExceptionAggregationSpec.scala b/common/src/test/scala/common/exception/ExceptionAggregationSpec.scala index 6afc045d686..cd425b626f0 100644 --- a/common/src/test/scala/common/exception/ExceptionAggregationSpec.scala +++ b/common/src/test/scala/common/exception/ExceptionAggregationSpec.scala @@ -13,7 +13,7 @@ class ExceptionAggregationSpec extends AnyFlatSpecLike with CromwellTimeoutSpec "MessageAggregation" should "aggregate messages" in { val aggregatedException = new Exception with MessageAggregation { override def exceptionContext: String = "Bouuhhh" - override def errorMessages: Traversable[String] = List("Didn't work", "didn't work either") + override def errorMessages: Iterable[String] = List("Didn't work", "didn't work either") } aggregatedException.getMessage shouldBe @@ -23,7 +23,7 @@ class ExceptionAggregationSpec extends AnyFlatSpecLike with CromwellTimeoutSpec } "AggregatedMessageException" should "aggregate empty messages" in { - val aggregatedMessageException = AggregatedMessageException("Bouuhhh", Traversable.empty) + val aggregatedMessageException = AggregatedMessageException("Bouuhhh", Iterable.empty) aggregatedMessageException.getMessage shouldBe "Bouuhhh" } @@ -37,7 +37,7 @@ class ExceptionAggregationSpec extends AnyFlatSpecLike with CromwellTimeoutSpec val throwableAggregation = new Exception with ThrowableAggregation { override def exceptionContext: String = "Clearly not working" - override def throwables: Traversable[Throwable] = List(exception1, exception2) + override def throwables: Iterable[Throwable] = List(exception1, 
exception2) } val aggregatedException = AggregatedException("Clearly not working", List(exception1, exception2)) @@ -51,14 +51,14 @@ class ExceptionAggregationSpec extends AnyFlatSpecLike with CromwellTimeoutSpec e.getSuppressed should contain theSameElementsAs List(exception1, exception2) } } - + "ThrowableAggregation" should "aggregate throwable aggregations recursively" in { val exception1 = new RuntimeException("Nope") val exception2 = new RuntimeException("Still nope") val subAggregatedException = AggregatedException("Nope exception", List(exception1, exception2)) val exception3 = new RuntimeException("Yep Exception") val aggregatedException = AggregatedException("This is why nothing works", List(subAggregatedException, exception3)) - + aggregatedException.getMessage shouldBe """This is why nothing works: |Nope exception: | Nope @@ -87,13 +87,13 @@ class ExceptionAggregationSpec extends AnyFlatSpecLike with CromwellTimeoutSpec "ThrowableAggregation" should "flatten throwables" in { import Aggregation._ - + val exception1 = new RuntimeException("Nope") val exception2 = new RuntimeException("Still nope") val subAggregatedException = AggregatedException("Nope exception", List(exception1, exception2)) val exception3 = new RuntimeException("Yep Exception") val aggregatedException = AggregatedException("This is why nothing works", List(subAggregatedException, exception3)) - + aggregatedException.flatten.toSet shouldBe Set(exception1, exception2, exception3) } } diff --git a/common/src/test/scala/common/util/StringUtilSpec.scala b/common/src/test/scala/common/util/StringUtilSpec.scala index 7d5a3604330..3d7b8db92d1 100644 --- a/common/src/test/scala/common/util/StringUtilSpec.scala +++ b/common/src/test/scala/common/util/StringUtilSpec.scala @@ -18,8 +18,8 @@ class StringUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers // With the elided string, we stop processing early and are able to produce a nice, short string without ever // touching the later elements: fooOfBars.toPrettyElidedString(1000) should be("""Foo( - | "long long list", - | List( + | bar = "long long list", + | list = List( | "blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0blah0", | "blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah1blah...""".stripMargin) @@ -128,7 +128,7 @@ object StringUtilSpec { final case class Foo(bar: String, list: List[Bar]) final class Bar(index: Int) { - private def longLine(i: Int) = '"' + s"blah$i" * 100 + '"' + private def longLine(i: Int) = "\"" + s"blah$i" * 100 + "\"" override def toString: String = if (index < 2) { longLine(index) } else { diff --git a/common/src/test/scala/common/util/TryUtilSpec.scala b/common/src/test/scala/common/util/TryUtilSpec.scala index 72bf4ac6acb..d7b8739337b 100644 --- 
a/common/src/test/scala/common/util/TryUtilSpec.scala +++ b/common/src/test/scala/common/util/TryUtilSpec.scala @@ -6,7 +6,6 @@ import common.util.TryUtil._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import scala.compat.Platform.EOL import scala.util.{Failure, Success, Try} import org.scalatest.enablers.Emptiness._ @@ -16,14 +15,14 @@ class TryUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "TryUtil" it should "not stringify successes" in { - val result: Traversable[String] = stringifyFailures(Traversable(Success("success"))) + val result: Iterable[String] = stringifyFailures(Iterable(Success("success"))) result should be(empty) } it should "stringify failures" in { - val result: Traversable[String] = stringifyFailures(Traversable(Failure(new RuntimeException("failed")))) + val result: Iterable[String] = stringifyFailures(Iterable(Failure(new RuntimeException("failed")))) result should have size 1 - result.head should startWith(s"java.lang.RuntimeException: failed$EOL") + result.head should startWith(s"java.lang.RuntimeException: failed${java.lang.System.lineSeparator()}") } it should "sequence successful seqs" in { diff --git a/common/src/test/scala/common/validation/ValidationSpec.scala b/common/src/test/scala/common/validation/ValidationSpec.scala index b9f9fd16a47..071fbe69fe6 100644 --- a/common/src/test/scala/common/validation/ValidationSpec.scala +++ b/common/src/test/scala/common/validation/ValidationSpec.scala @@ -21,7 +21,7 @@ class ValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers it should "warn unrecognized keys" in { var warnings = List.empty[Any] val mockLogger = mock[Logger] - mockLogger.warn(anyString).answers(warnings :+= _) + mockLogger.warn(anyString).answers((warnings :+= _): Any => Unit) val keys = Set("hello") val reference = Set("world") val context = "warnings" @@ -32,7 +32,7 @@ class ValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers it should "not warn recognized keys" in { var warnings = List.empty[Any] val mockLogger = mock[Logger] - mockLogger.warn(anyString).answers(warnings :+= _) + mockLogger.warn(anyString).answers((warnings :+= _): Any => Unit) val keys = Set("hello") val reference = Set("hello", "world") val context = "warnings" diff --git a/core/src/main/scala/cromwell/core/ConfigUtil.scala b/core/src/main/scala/cromwell/core/ConfigUtil.scala index 40dae478494..0fd5002ffa8 100644 --- a/core/src/main/scala/cromwell/core/ConfigUtil.scala +++ b/core/src/main/scala/cromwell/core/ConfigUtil.scala @@ -7,7 +7,7 @@ import cats.syntax.validated._ import com.typesafe.config.{Config, ConfigException, ConfigValue} import org.slf4j.LoggerFactory -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.{ClassTag, classTag} object ConfigUtil { diff --git a/core/src/main/scala/cromwell/core/WorkflowOptions.scala b/core/src/main/scala/cromwell/core/WorkflowOptions.scala index 37a41d38cc1..010300b2d8b 100644 --- a/core/src/main/scala/cromwell/core/WorkflowOptions.scala +++ b/core/src/main/scala/cromwell/core/WorkflowOptions.scala @@ -6,7 +6,7 @@ import common.util.TryUtil import common.validation.ErrorOr.ErrorOr import spray.json._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} /** @@ -64,7 +64,7 @@ object WorkflowOptions { case object MemoryRetryMultiplier extends WorkflowOption("memory_retry_multiplier") private lazy val 
WorkflowOptionsConf = ConfigFactory.load.getConfig("workflow-options") - private lazy val EncryptedFields: Seq[String] = WorkflowOptionsConf.getStringList("encrypted-fields").asScala + private lazy val EncryptedFields: Seq[String] = WorkflowOptionsConf.getStringList("encrypted-fields").asScala.toList private lazy val EncryptionKey: String = WorkflowOptionsConf.getString("base64-encryption-key") private lazy val defaultRuntimeOptionKey: String = DefaultRuntimeOptions.name private lazy val validObjectKeys: Set[String] = Set(DefaultRuntimeOptions.name, "google_labels") diff --git a/core/src/main/scala/cromwell/core/actor/BatchActor.scala b/core/src/main/scala/cromwell/core/actor/BatchActor.scala index e61c2c2bb3d..5988dd822d2 100644 --- a/core/src/main/scala/cromwell/core/actor/BatchActor.scala +++ b/core/src/main/scala/cromwell/core/actor/BatchActor.scala @@ -85,9 +85,9 @@ abstract class BatchActor[C](val flushRate: FiniteDuration, when(WaitingToProcess) { // On a regular event, only process if the batch size has been reached. - case Event(command, data) if commandToData(sender).isDefinedAt(command) => + case Event(command, data) if commandToData(sender()).isDefinedAt(command) => recentArrivalThreshold foreach { _ => mostRecentArrival = Option(OffsetDateTime.now()) } - processIfBatchSizeReached(data.enqueue(commandToData(sender)(command))) + processIfBatchSizeReached(data.enqueue(commandToData(sender())(command))) // On a scheduled process, always process case Event(ScheduledProcessAction, data) => if (suitableIntervalSinceLastArrival()) { @@ -104,17 +104,17 @@ abstract class BatchActor[C](val flushRate: FiniteDuration, when(Processing) { // Already processing, enqueue the command - case Event(command, data) if commandToData(sender).isDefinedAt(command) => - stay() using data.enqueue(commandToData(sender)(command)) + case Event(command, data) if commandToData(sender()).isDefinedAt(command) => + stay() using data.enqueue(commandToData(sender())(command)) // Already processing, can only do one at a time - case Event(ScheduledProcessAction, data) => + case Event(ScheduledProcessAction, data) => gossip(QueueWeight(data.weight)) stay() // Process is complete and we're shutting down so process even if we're under the batch size. 
case Event(ProcessingComplete, data) if shuttingDown => logger.info(s"{} Shutting down: processing ${data.weight} queued messages", self.path.name) processHead(data) - // Processing is complete, re-process only if needed + // Processing is complete, re-process only if needed case Event(ProcessingComplete, data) if !shuttingDown => processIfBatchSizeReached(data) case Event(ShutdownCommand, _) => @@ -150,7 +150,7 @@ abstract class BatchActor[C](val flushRate: FiniteDuration, } head.headOption match { - case Some(headOfHead) => + case Some(headOfHead) => processNonEmptyHead(NonEmptyVector(headOfHead, head.tail)) goto(Processing) using newQueue case None => diff --git a/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala b/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala index 7a231832a0f..920702280b0 100644 --- a/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala +++ b/core/src/main/scala/cromwell/core/callcaching/HashResultMessage.scala @@ -7,7 +7,7 @@ object HashKey { private val KeySeparator = ": " def apply(keyComponents: String*) = new HashKey(true, keyComponents.toList) def apply(checkForHitOrMiss: Boolean, keyComponents: String*) = new HashKey(checkForHitOrMiss, keyComponents.toList) - def deserialize(serializedKey: String) = HashKey(true, serializedKey.split(KeySeparator).map(_.trim): _*) + def deserialize(serializedKey: String) = HashKey(true, serializedKey.split(KeySeparator).map(_.trim).toList) } case class HashKey(checkForHitOrMiss: Boolean, keyComponents: List[String]) { diff --git a/core/src/main/scala/cromwell/core/filesystem/CromwellFileSystems.scala b/core/src/main/scala/cromwell/core/filesystem/CromwellFileSystems.scala index 5a264ee74c0..caeb56509fd 100644 --- a/core/src/main/scala/cromwell/core/filesystem/CromwellFileSystems.scala +++ b/core/src/main/scala/cromwell/core/filesystem/CromwellFileSystems.scala @@ -15,7 +15,7 @@ import cromwell.core.path.{DefaultPathBuilderFactory, PathBuilderFactory} import net.ceedubs.ficus.Ficus._ import shapeless.syntax.typeable._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.{existentials, postfixOps} import scala.util.{Failure, Try} diff --git a/core/src/main/scala/cromwell/core/io/AsyncIo.scala b/core/src/main/scala/cromwell/core/io/AsyncIo.scala index 557586cb283..435058d942c 100644 --- a/core/src/main/scala/cromwell/core/io/AsyncIo.scala +++ b/core/src/main/scala/cromwell/core/io/AsyncIo.scala @@ -33,7 +33,7 @@ class AsyncIo(ioEndpoint: ActorRef, ioCommandBuilder: IoCommandBuilder) { commandWithPromise.promise.future } } - + /** * IMPORTANT: This loads the entire content of the file into memory ! * Only use for small files ! 
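Editor's note: the recurring pattern across this patch series is the Scala 2.12 -> 2.13 collections migration: `Traversable` no longer exists, so public signatures move to `Iterable`, and `CanBuildFrom`-based helpers are re-expressed against `IterableOps`, whose `collect` already rebuilds the source collection type. A minimal, self-contained sketch of the pattern as applied in `EnhancedCollections` above (the wrapper and object names here are illustrative, not Cromwell APIs):

    import scala.collection.IterableOps
    import scala.reflect.ClassTag

    object Scala213MigrationSketch {
      // Type-preserving filter-by-type without CanBuildFrom:
      // Set in => Set out, List in => List out.
      implicit class FilterByType[T2, CC[x] <: IterableOps[x, CC, CC[x]]](
        val ops: IterableOps[T2, CC, CC[T2]]) extends AnyVal {
        // The ClassTag makes the `case t: T` type pattern checkable at runtime.
        def filterByType[T <: T2](implicit tag: ClassTag[T]): CC[T] =
          ops.collect { case t: T => t }
      }

      def main(args: Array[String]): Unit = {
        val xs: Set[Any] = Set("a", 1, "b", 2.0)
        val strings: Set[String] = xs.filterByType[String] // Set("a", "b")
        println(strings)
      }
    }
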
@@ -62,10 +62,10 @@ class AsyncIo(ioEndpoint: ActorRef, ioCommandBuilder: IoCommandBuilder) { asyncCommand(ioCommandBuilder.existsCommand(path)) } - def readLinesAsync(path: Path): Future[Traversable[String]] = { + def readLinesAsync(path: Path): Future[Iterable[String]] = { asyncCommand(ioCommandBuilder.readLines(path)) } - + def isDirectory(path: Path): Future[Boolean] = { asyncCommand(ioCommandBuilder.isDirectoryCommand(path)) } diff --git a/core/src/main/scala/cromwell/core/io/IoCommand.scala b/core/src/main/scala/cromwell/core/io/IoCommand.scala index aea79d03254..a21b6f8cebd 100644 --- a/core/src/main/scala/cromwell/core/io/IoCommand.scala +++ b/core/src/main/scala/cromwell/core/io/IoCommand.scala @@ -26,9 +26,9 @@ object IoCommand { .setRandomizationFactor(0.2D) .setMaxElapsedTimeMillis((10 minutes).toMillis.toInt) .build() - + def defaultBackoff: Backoff = SimpleExponentialBackoff(defaultGoogleBackoff) - + type RetryCommand[T] = (FiniteDuration, IoCommand[T]) } @@ -66,7 +66,7 @@ trait IoCommand[+T] { logIOMsgOverLimit(s"IOCommand.success '$value'") IoSuccess(this, value) } - + /** * Fail the command with an exception */ @@ -98,13 +98,13 @@ abstract class IoCopyCommand(val source: Path, val destination: Path) extends Io override def toString = s"copy ${source.pathAsString} to ${destination.pathAsString}" override lazy val name = "copy" } - + object IoContentAsStringCommand { /** * Options to customize reading of a file. * @param maxBytes If specified, only reads up to maxBytes Bytes from the file - * @param failOnOverflow If this is true, maxBytes is specified, and the file is larger than maxBytes, fail the command. + * @param failOnOverflow If this is true, maxBytes is specified, and the file is larger than maxBytes, fail the command. */ case class IoReadOptions(maxBytes: Option[Int], failOnOverflow: Boolean) } @@ -172,7 +172,7 @@ abstract class IoExistsCommand(val file: Path) extends SingleFileIoCommand[Boole /** * Return the lines of a file in a collection */ -abstract class IoReadLinesCommand(val file: Path) extends SingleFileIoCommand[Traversable[String]] { +abstract class IoReadLinesCommand(val file: Path) extends SingleFileIoCommand[Iterable[String]] { override def toString = s"read lines of ${file.pathAsString}" override lazy val name = "read lines" } diff --git a/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala b/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala index 4265bd43fb5..43a6f5864b0 100644 --- a/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala +++ b/core/src/main/scala/cromwell/core/io/IoCommandBuilder.scala @@ -41,7 +41,7 @@ object IoCommandBuilder { * One might want to create different I/O commands to allow for optimizations when the commands are processed by the I/O actor. * Currently the only other command builder is the GcsBatchCommandBuilder that overrides some of the operations * to return GcsBatchCommands instead that will be optimized by the IoActor. - * + * * This always defaults to building a DefaultIoCommand. 
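 * Illustrative (an editor's sketch, not part of this change): partial builders are
 * consulted in order, with the default as the fallback, so
 *   val builder = new IoCommandBuilder(List(GcsBatchCommandBuilder))
 *   builder.sizeCommand(path) // Try of a GCS batch size command for GCS paths,
 *                             // DefaultIoSizeCommand otherwise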
* @param partialBuilders list of PartialIoCommandBuilder to try */ @@ -50,40 +50,40 @@ class IoCommandBuilder(partialBuilders: List[PartialIoCommandBuilder] = List.emp private def buildOrDefault[A, B](builder: PartialIoCommandBuilder => PartialFunction[A, Try[B]], params: A, default: => B): Try[B] = { - partialBuilders.toStream.map(builder(_).lift(params)).collectFirst({ + partialBuilders.to(LazyList).map(builder(_).lift(params)).collectFirst({ case Some(command) => command }).getOrElse(Try(default)) } - + def contentAsStringCommand(path: Path, maxBytes: Option[Int], failOnOverflow: Boolean): Try[IoContentAsStringCommand] = { buildOrDefault(_.contentAsStringCommand, (path, maxBytes, failOnOverflow), DefaultIoContentAsStringCommand(path, IoReadOptions(maxBytes, failOnOverflow))) } - + def writeCommand(path: Path, content: String, options: OpenOptions, compressPayload: Boolean = false): Try[IoWriteCommand] = { buildOrDefault(_.writeCommand, (path, content, options, compressPayload), DefaultIoWriteCommand(path, content, options, compressPayload)) } - + def sizeCommand(path: Path): Try[IoSizeCommand] = { buildOrDefault(_.sizeCommand, path, DefaultIoSizeCommand(path)) - } - + } + def deleteCommand(path: Path, swallowIoExceptions: Boolean = true): Try[IoDeleteCommand] = { buildOrDefault(_.deleteCommand, (path, swallowIoExceptions), DefaultIoDeleteCommand(path, swallowIoExceptions)) } - + def copyCommand(src: Path, dest: Path): Try[IoCopyCommand] = { buildOrDefault(_.copyCommand, (src, dest), DefaultIoCopyCommand(src, dest)) } - + def hashCommand(file: Path): Try[IoHashCommand] = { buildOrDefault(_.hashCommand, file, DefaultIoHashCommand(file)) } - + def touchCommand(file: Path): Try[IoTouchCommand] = { buildOrDefault(_.touchCommand, file, DefaultIoTouchCommand(file)) } diff --git a/core/src/main/scala/cromwell/core/io/IoPromiseProxyActor.scala b/core/src/main/scala/cromwell/core/io/IoPromiseProxyActor.scala index e6ed28df665..15097a92a99 100644 --- a/core/src/main/scala/cromwell/core/io/IoPromiseProxyActor.scala +++ b/core/src/main/scala/cromwell/core/io/IoPromiseProxyActor.scala @@ -9,7 +9,7 @@ import scala.concurrent.duration.FiniteDuration object IoPromiseProxyActor { case class IoCommandWithPromise[A](ioCommand: IoCommand[A], timeout: FiniteDuration = defaultTimeout) { - val promise = Promise[A] + val promise = Promise[A]() } def props(ioActor: ActorRef) = Props(new IoPromiseProxyActor(ioActor)) } @@ -22,9 +22,9 @@ object IoPromiseProxyActor { */ class IoPromiseProxyActor(override val ioActor: ActorRef) extends Actor with ActorLogging with IoClientHelper { override def receive = ioReceive orElse actorReceive - + def actorReceive: Receive = { - case withPromise: IoCommandWithPromise[_] => + case withPromise: IoCommandWithPromise[_] => sendIoCommandWithContext(withPromise.ioCommand, withPromise.promise, withPromise.timeout) } diff --git a/core/src/main/scala/cromwell/core/labels/Labels.scala b/core/src/main/scala/cromwell/core/labels/Labels.scala index 9c3b6af1106..5499fa5b2e8 100644 --- a/core/src/main/scala/cromwell/core/labels/Labels.scala +++ b/core/src/main/scala/cromwell/core/labels/Labels.scala @@ -1,12 +1,11 @@ package cromwell.core.labels import cats.data.Validated._ -import cats.syntax.traverse._ import cats.instances.vector._ -import common.validation.ErrorOr +import cats.syntax.traverse._ import common.validation.ErrorOr.ErrorOr -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ case class Labels(value: Vector[Label]) { diff --git 
a/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala b/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala index 550f2a89050..f0a6464f12e 100644 --- a/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala +++ b/core/src/main/scala/cromwell/core/path/BetterFileMethods.scala @@ -12,7 +12,7 @@ import java.util.zip.Deflater import better.files.{Dispose, DisposeableExtensions, StringSplitter} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.io.{BufferedSource, Codec, Source} /** @@ -106,7 +106,7 @@ trait BetterFileMethods { final def chars(implicit charset: Charset = DefaultCharset): Iterator[Char] = betterFile.chars(charset) - final def lines(implicit charset: Charset = DefaultCharset): Traversable[String] = betterFile.lines(charset) + final def lines(implicit charset: Charset = DefaultCharset): Iterable[String] = betterFile.lines(charset) final def lineIterator(implicit charset: Charset= DefaultCharset): Iterator[String] = betterFile.lineIterator(charset) @@ -147,7 +147,7 @@ trait BetterFileMethods { betterFile.append(text)(charset) this } - + final def appendText(text: String)(implicit charset: Charset = DefaultCharset): this.type = { betterFile.appendText(text)(charset) this @@ -178,7 +178,7 @@ trait BetterFileMethods { betterFile.writeText(text)(openOptions, charset) this } - + final def write(text: String) (implicit openOptions: OpenOptions = OpenOptions.default, charset: Charset = DefaultCharset): this.type = { @@ -574,7 +574,7 @@ object BetterFileMethods { def zip(files: better.files.File*)(destination: better.files.File, compressionLevel: Int = Deflater.DEFAULT_COMPRESSION) (implicit charset: Charset = DefaultCharset): destination.type = { - destination.zipIn(files.toIterator, compressionLevel)(charset) + destination.zipIn(files.iterator, compressionLevel)(charset) } } diff --git a/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala b/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala index 53624d56048..ffe862c2df3 100644 --- a/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala +++ b/core/src/main/scala/cromwell/core/path/EvenBetterPathMethods.scala @@ -8,7 +8,7 @@ import java.util.zip.GZIPOutputStream import better.files.File.OpenOptions import cromwell.util.TryWithResource.tryWithResource -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.ExecutionContext import scala.io.Codec import scala.util.{Failure, Try} diff --git a/core/src/main/scala/cromwell/core/path/NioPathMethods.scala b/core/src/main/scala/cromwell/core/path/NioPathMethods.scala index b3fc695d265..f5791e50c5a 100644 --- a/core/src/main/scala/cromwell/core/path/NioPathMethods.scala +++ b/core/src/main/scala/cromwell/core/path/NioPathMethods.scala @@ -3,7 +3,7 @@ package cromwell.core.path import java.nio.file.WatchEvent.{Kind, Modifier} import java.nio.file.{LinkOption, WatchKey, WatchService} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ /** * Implements methods with the same names and signatures as java.nio.Path diff --git a/core/src/main/scala/cromwell/core/path/PathCopier.scala b/core/src/main/scala/cromwell/core/path/PathCopier.scala index f603cf71b70..b9352cb2082 100644 --- a/core/src/main/scala/cromwell/core/path/PathCopier.scala +++ b/core/src/main/scala/cromwell/core/path/PathCopier.scala @@ -41,7 +41,7 @@ object PathCopier { */ def copy(sourceFilePath: Path, destinationFilePath: Path): Try[Unit] = 
{ Try { - Option(destinationFilePath.parent).foreach(_.createDirectories) + Option(destinationFilePath.parent).foreach(_.createDirectories()) sourceFilePath.copyTo(destinationFilePath, overwrite = true) () } recoverWith { diff --git a/core/src/main/scala/cromwell/core/simpleton/WomValueBuilder.scala b/core/src/main/scala/cromwell/core/simpleton/WomValueBuilder.scala index a5876737022..bfae7232d64 100644 --- a/core/src/main/scala/cromwell/core/simpleton/WomValueBuilder.scala +++ b/core/src/main/scala/cromwell/core/simpleton/WomValueBuilder.scala @@ -63,7 +63,7 @@ object WomValueBuilder { private val MapElementPattern = raw"^:((?:\\[]\[:]|[^]\[:])+)(.*)".r // Group tuples by key using a Map with key type `K`. - private def group[K](tuples: Traversable[(K, SimpletonComponent)]): Map[K, Traversable[SimpletonComponent]] = { + private def group[K](tuples: Iterable[(K, SimpletonComponent)]): Map[K, Iterable[SimpletonComponent]] = { tuples groupBy { case (i, _) => i } map { case (k, v) => k -> (v map { case (_, s) => s}) } } @@ -83,14 +83,14 @@ object WomValueBuilder { component.path match { case MapElementPattern(key, more) => key.unescapeMeta -> component.copy(path = more)} } - private implicit class EnhancedSimpletonComponents(val components: Traversable[SimpletonComponent]) extends AnyVal { - def asArray: List[Traversable[SimpletonComponent]] = group(components map descendIntoArray).toList.sortBy(_._1).map(_._2) - def asMap: Map[String, Traversable[SimpletonComponent]] = group(components map descendIntoMap) + private implicit class EnhancedSimpletonComponents(val components: Iterable[SimpletonComponent]) extends AnyVal { + def asArray: List[Iterable[SimpletonComponent]] = group(components map descendIntoArray).toList.sortBy(_._1).map(_._2) + def asMap: Map[String, Iterable[SimpletonComponent]] = group(components map descendIntoMap) def asPrimitive: WomValue = components.head.value def asString: String = asPrimitive.valueString } - private def toWomValue(outputType: WomType, components: Traversable[SimpletonComponent]): WomValue = { + private def toWomValue(outputType: WomType, components: Iterable[SimpletonComponent]): WomValue = { @@ -106,19 +106,19 @@ object WomValueBuilder { case MapElementPattern("right", more) => PairRight -> component.copy(path = more) } } - - def toWomFile(components: Traversable[SimpletonComponent]) = { + + def toWomFile(components: Iterable[SimpletonComponent]) = { // If there's just one simpleton, it's a primitive (file or directory) if (components.size == 1) components.asPrimitive else { // Otherwise make a map of the components and detect the type of file from the class field val groupedListing = components.asMap - + def isClass(className: String) = { groupedListing.get(ClassKey) /* If the class field is in an array it will be prefixed with a ':', so check for that as well. 
- * e.g: secondaryFiles[0]:class -> "File" - * secondaryFiles[0]:value -> "file/path" + * e.g: secondaryFiles[0]:class -> "File" + * secondaryFiles[0]:value -> "file/path" * would produce a Map( * ":class" -> List(Simpleton("File")), * ":value" -> List(Simpleton("file/path")) @@ -127,8 +127,8 @@ object WomValueBuilder { .orElse(groupedListing.get(s":$ClassKey")) .map(_.asPrimitive.valueString) .contains(className) - } - + } + def isDirectory = isClass(WomValueSimpleton.DirectoryClass) def isFile = isClass(WomValueSimpleton.FileClass) @@ -137,7 +137,7 @@ object WomValueBuilder { else throw new IllegalArgumentException(s"There is no WomFile that can be built from simpletons: ${groupedListing.toList.mkString(", ")}") } } - + outputType match { case _: WomPrimitiveType => components.asPrimitive @@ -153,18 +153,18 @@ object WomValueBuilder { // map keys are guaranteed by WOM to be primitives, so the "coerceRawValue(..).get" is safe. WomMap(mapType, components.asMap map { case (k, ss) => mapType.keyType.coerceRawValue(k).get -> toWomValue(mapType.valueType, ss) }) case pairType: WomPairType => - val groupedByLeftOrRight: Map[PairLeftOrRight, Traversable[SimpletonComponent]] = group(components map descendIntoPair) + val groupedByLeftOrRight: Map[PairLeftOrRight, Iterable[SimpletonComponent]] = group(components map descendIntoPair) WomPair(toWomValue(pairType.leftType, groupedByLeftOrRight(PairLeft)), toWomValue(pairType.rightType, groupedByLeftOrRight(PairRight))) case WomObjectType => // map keys are guaranteed by WOM to be primitives, so the "coerceRawValue(..).get" is safe. val map: Map[String, WomValue] = components.asMap map { case (k, ss) => k -> toWomValue(WomAnyType, ss) } WomObject(map) case composite: WomCompositeType => - val map: Map[String, WomValue] = components.asMap map { case (k, ss) => + val map: Map[String, WomValue] = components.asMap map { case (k, ss) => val valueType = composite .typeMap .getOrElse(k, throw new RuntimeException(s"Field $k is not a declared field of composite type $composite. 
Cannot build a WomValue from the simpletons.")) - k -> toWomValue(valueType, ss) + k -> toWomValue(valueType, ss) } WomObject.withTypeUnsafe(map, composite) case WomMaybeListedDirectoryType => @@ -183,7 +183,7 @@ object WomValueBuilder { val size = populatedValues.get("size").map(_.asString.toLong) val format = populatedValues.get("format").map(_.asString) val contents = populatedValues.get("contents").map(_.asString) - val secondaryFiles = populatedValues.get("secondaryFiles").toList.flatMap({ + val secondaryFiles = populatedValues.get("secondaryFiles").toList.flatMap({ _.asArray.map(toWomFile).collect({ case womFile: WomFile => womFile }) }) @@ -234,11 +234,11 @@ object WomValueBuilder { */ private case class SimpletonComponent(path: String, value: WomValue) - def toJobOutputs(taskOutputs: Traversable[OutputPort], simpletons: Traversable[WomValueSimpleton]): CallOutputs = { + def toJobOutputs(taskOutputs: Iterable[OutputPort], simpletons: Iterable[WomValueSimpleton]): CallOutputs = { CallOutputs(toWomValues(taskOutputs, simpletons)) } - def toWomValues(taskOutputs: Traversable[OutputPort], simpletons: Traversable[WomValueSimpleton]): Map[OutputPort, WomValue] = { + def toWomValues(taskOutputs: Iterable[OutputPort], simpletons: Iterable[WomValueSimpleton]): Map[OutputPort, WomValue] = { def simpletonToComponent(name: String)(simpleton: WomValueSimpleton): SimpletonComponent = { SimpletonComponent(simpleton.simpletonKey.drop(name.length), simpleton.simpletonValue) @@ -248,7 +248,7 @@ object WomValueBuilder { // "dehydrated" to WomValueSimpletons correctly. This code is not robust to corrupt input whatsoever. val types = taskOutputs map { o => o -> o.womType } toMap val simpletonsByOutputName = simpletons groupBy { _.simpletonKey match { case IdentifierAndPathPattern(i, _) => i } } - val simpletonComponentsByOutputName: Map[String, Traversable[SimpletonComponent]] = + val simpletonComponentsByOutputName: Map[String, Iterable[SimpletonComponent]] = simpletonsByOutputName map { case (name, ss) => name -> (ss map simpletonToComponent(name)) } types map { case (outputPort, outputType) => outputPort -> toWomValue(outputType, simpletonComponentsByOutputName.getOrElse(outputPort.internalName, Seq.empty))} } diff --git a/core/src/main/scala/cromwell/util/JsonFormatting/WomValueJsonFormatter.scala b/core/src/main/scala/cromwell/util/JsonFormatting/WomValueJsonFormatter.scala index 5363973a0e2..811f9dce61f 100644 --- a/core/src/main/scala/cromwell/util/JsonFormatting/WomValueJsonFormatter.scala +++ b/core/src/main/scala/cromwell/util/JsonFormatting/WomValueJsonFormatter.scala @@ -38,7 +38,7 @@ object WomValueJsonFormatter extends DefaultJsonProtocol { case JsString(str) => WomString(str) case JsBoolean(bool) => WomBoolean(bool) case JsNumber(decimal) if decimal.isValidInt => WomInteger(decimal.toIntExact) - case JsNumber(decimal) => WomFloat(decimal.doubleValue()) + case JsNumber(decimal) => WomFloat(decimal.doubleValue) case unsupported => throw new UnsupportedOperationException(s"Cannot deserialize $unsupported to a WomValue") } } diff --git a/core/src/test/scala/cromwell/core/actor/BatchActorSpec.scala b/core/src/test/scala/cromwell/core/actor/BatchActorSpec.scala index 83774cb5748..e426a349448 100644 --- a/core/src/test/scala/cromwell/core/actor/BatchActorSpec.scala +++ b/core/src/test/scala/cromwell/core/actor/BatchActorSpec.scala @@ -18,7 +18,7 @@ import scala.util.control.NoStackTrace class BatchActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Eventually { behavior of 
"BatchingDbWriter" - + override val patienceConfig = PatienceConfig(timeout = scaled(5.seconds), interval = scaled(1.second)) implicit val patience = patienceConfig @@ -41,7 +41,7 @@ class BatchActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers wit batch.underlyingActor.processed shouldBe Vector.empty // With this message the weight goes over 10 batch ! "bonjour" - + batch.underlyingActor.processed shouldBe Vector("hello", "hola") batch.stateData.weight shouldBe 7 @@ -134,7 +134,7 @@ class BatchActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers wit val watcher = TestProbe() watcher.watch(batch) batch ! ShutdownCommand - + eventually { watcher.expectTerminated(batch) } @@ -163,9 +163,9 @@ class BatchActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers wit watcher.watch(batch) batch ! "hola" batch ! "hello" - + batch ! ShutdownCommand - + batch ! ProcessingComplete eventually { @@ -198,7 +198,7 @@ class BatchActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers wit override protected def process(data: NonEmptyVector[String]) = { if (processingTime != Duration.Zero) { processed = processed ++ data.toVector - val promise = Promise[Int] + val promise = Promise[Int]() system.scheduler.scheduleOnce(processingTime) { promise.success(data.map(weightFunction).toVector.sum) } promise.future } else if (!fail) { diff --git a/core/src/test/scala/cromwell/core/filesystem/CromwellFileSystemsSpec.scala b/core/src/test/scala/cromwell/core/filesystem/CromwellFileSystemsSpec.scala index 5f0a57e9e3d..8e958cce518 100644 --- a/core/src/test/scala/cromwell/core/filesystem/CromwellFileSystemsSpec.scala +++ b/core/src/test/scala/cromwell/core/filesystem/CromwellFileSystemsSpec.scala @@ -25,10 +25,10 @@ class CromwellFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with """.stripMargin) val cromwellFileSystems = new CromwellFileSystems(globalConfig) - + it should "build factory builders and factories for valid configuration" in { cromwellFileSystems.factoryBuilders.keySet shouldBe Set("fs1", "fs2", "fs3") - + val factoriesConfig = ConfigFactory.parseString( """ |filesystems { @@ -39,14 +39,14 @@ class CromwellFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with val pathFactories = cromwellFileSystems.factoriesFromConfig(factoriesConfig) pathFactories.isRight shouldBe true - val fs1 = pathFactories.right.get("fs1") - val fs2 = pathFactories.right.get("fs2") + val fs1 = pathFactories.toOption.get("fs1") + val fs2 = pathFactories.toOption.get("fs2") fs1 shouldBe a[MockPathBuilderFactory] fs2 shouldBe a[MockPathBuilderFactory] fs1.asInstanceOf[MockPathBuilderFactory].instanceConfig.getString("somekey") shouldBe "somevalue" fs2.asInstanceOf[MockPathBuilderFactory].instanceConfig.getString("someotherkey") shouldBe "someothervalue" } - + it should "build singleton instance if specified" in { val rootConf = ConfigFactory.parseString( """ @@ -69,8 +69,8 @@ class CromwellFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with val factory2 = cromwellFileSystems.buildFactory("fs1", ConfigFactory.empty) // The singleton configs should be the same for different factories - assert(factory1.right.get.asInstanceOf[MockPathBuilderFactoryCustomSingletonConfig].singletonConfig == - factory2.right.get.asInstanceOf[MockPathBuilderFactoryCustomSingletonConfig].singletonConfig) + assert(factory1.toOption.get.asInstanceOf[MockPathBuilderFactoryCustomSingletonConfig].singletonConfig == + 
factory2.toOption.get.asInstanceOf[MockPathBuilderFactoryCustomSingletonConfig].singletonConfig) } List( @@ -82,10 +82,10 @@ class CromwellFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with it should s"fail to build factories $description" in { val result = cromwellFileSystems.factoriesFromConfig(ConfigFactory.parseString(config)) result.isLeft shouldBe true - result.left.get shouldBe expected + result.swap.toOption.get shouldBe expected } } - + val classNotFoundException = AggregatedMessageException( "Failed to initialize Cromwell filesystems", List("Class do.not.exists for filesystem fs1 cannot be found in the class path.") @@ -105,7 +105,7 @@ class CromwellFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with "Failed to initialize Cromwell filesystems", List("Filesystem configuration fs1 doesn't have a class field") ) - + List( ("is invalid", "filesystems.gcs = true", invalidConfigException), ("is missing class fields", "filesystems.fs1.notclass = hello", missingClassFieldException), diff --git a/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala b/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala index 2f95d231c99..b025c7ff921 100644 --- a/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala +++ b/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala @@ -23,9 +23,9 @@ class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers val delegateProbe = TestProbe() val backoff = SimpleExponentialBackoff(100 seconds, 10.hours, 2D, 0D) val noResponseTimeout = 3 seconds - - val testActor = TestActorRef(new IoClientHelperTestActor(ioActorProbe.ref, delegateProbe.ref, backoff, noResponseTimeout)) - + + val testActor = TestActorRef(new IoClientHelperTestActor(ioActorProbe.ref, delegateProbe.ref, backoff, noResponseTimeout)) + val command = DefaultIoSizeCommand(mock[Path]) val response = IoSuccess(command, 5) @@ -34,13 +34,13 @@ class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers // Io actor receives the command ioActorProbe.expectMsg(command) - + // Io actor replies ioActorProbe.reply(response) - + // delegate should receive the response delegateProbe.expectMsg(response) - + // And nothing else, meaning the timeout timer has been cancelled delegateProbe.expectNoMessage() @@ -82,7 +82,7 @@ class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers } private case object ServiceUnreachable - + private class IoClientHelperTestActor(override val ioActor: ActorRef, delegateTo: ActorRef, backoff: Backoff, @@ -90,8 +90,8 @@ class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers implicit val ioCommandBuilder = DefaultIoCommandBuilder - override protected def initialBackoff = backoff - + override protected def initialBackoff(): Backoff = backoff + context.become(ioReceive orElse receive) override def receive: Receive = { diff --git a/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala b/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala index 7dd303ff29e..764d461d52d 100644 --- a/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala +++ b/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala @@ -38,7 +38,7 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche ), ( "log error with one arg", - _.error("Hello {} {} {} {}", "arg1"), + _.error("Hello {} {} {} {}", arg = "arg1"), List(Slf4jMessage(Level.ERROR, List("tag: Hello {} {} {} {}", "arg1"))), 
List(AkkaMessage(Logging.ErrorLevel, "tag: Hello arg1 {} {} {}")) ), @@ -93,7 +93,7 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche ), ( "log warn with one arg", - _.warn("Hello {} {} {} {}", "arg1"), + _.warn("Hello {} {} {} {}", argument = "arg1"), List(Slf4jMessage(Level.WARN, List("tag: Hello {} {} {} {}", "arg1"))), List(AkkaMessage(Logging.WarningLevel, "tag: Hello arg1 {} {} {}")) ), @@ -136,7 +136,7 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche ), ( "log info with one arg", - _.info("Hello {} {} {} {}", "arg1"), + _.info("Hello {} {} {} {}", arg ="arg1"), List(Slf4jMessage(Level.INFO, List("tag: Hello {} {} {} {}", "arg1"))), List(AkkaMessage(Logging.InfoLevel, "tag: Hello arg1 {} {} {}")) ), @@ -179,7 +179,7 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche ), ( "log debug with one arg", - _.debug("Hello {} {} {} {}", "arg1"), + _.debug("Hello {} {} {} {}", argument ="arg1"), List(Slf4jMessage(Level.DEBUG, List("tag: Hello {} {} {} {}", "arg1"))), List(AkkaMessage(Logging.DebugLevel, "tag: Hello arg1 {} {} {}")) ), @@ -222,7 +222,7 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche ), ( "log trace with one arg", - _.trace("Hello {} {} {} {}", "arg1"), + _.trace("Hello {} {} {} {}", arg = "arg1"), List(Slf4jMessage(Level.TRACE, List("tag: Hello {} {} {} {}", "arg1"))), Nil ), @@ -298,31 +298,31 @@ class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche mockLogger.error(anyString).answers(updateSlf4jMessages(Level.ERROR, _)) mockLogger.error(anyString, any[Any]()).answers(updateSlf4jMessages(Level.ERROR, _)) mockLogger.error(anyString, any[Any](), any[Any]()).answers(updateSlf4jMessages(Level.ERROR, _)) - mockLogger.error(anyString, anyVarArg[AnyRef]).answers(updateSlf4jMessages(Level.ERROR, _)) + mockLogger.error(anyString, any[Array[Object]](): _*).answers(updateSlf4jMessages(Level.ERROR, _)) mockLogger.error(anyString, any[Throwable]()).answers(updateSlf4jMessages(Level.ERROR, _)) mockLogger.warn(anyString).answers(updateSlf4jMessages(Level.WARN, _)) mockLogger.warn(anyString, any[Any]()).answers(updateSlf4jMessages(Level.WARN, _)) mockLogger.warn(anyString, any[Any](), any[Any]()).answers(updateSlf4jMessages(Level.WARN, _)) - mockLogger.warn(anyString, anyVarArg[AnyRef]).answers(updateSlf4jMessages(Level.WARN, _)) + mockLogger.warn(anyString, any[Array[Object]](): _*).answers(updateSlf4jMessages(Level.WARN, _)) mockLogger.warn(anyString, any[Throwable]()).answers(updateSlf4jMessages(Level.WARN, _)) mockLogger.info(anyString).answers(updateSlf4jMessages(Level.INFO, _)) mockLogger.info(anyString, any[Any]()).answers(updateSlf4jMessages(Level.INFO, _)) mockLogger.info(anyString, any[Any](), any[Any]()).answers(updateSlf4jMessages(Level.INFO, _)) - mockLogger.info(anyString, anyVarArg[AnyRef]).answers(updateSlf4jMessages(Level.INFO, _)) + mockLogger.info(anyString, any[Array[Object]](): _*).answers(updateSlf4jMessages(Level.INFO, _)) mockLogger.info(anyString, any[Throwable]()).answers(updateSlf4jMessages(Level.INFO, _)) mockLogger.debug(anyString).answers(updateSlf4jMessages(Level.DEBUG, _)) mockLogger.debug(anyString, any[Any]()).answers(updateSlf4jMessages(Level.DEBUG, _)) mockLogger.debug(anyString, any[Any](), any[Any]()).answers(updateSlf4jMessages(Level.DEBUG, _)) - mockLogger.debug(anyString, anyVarArg[AnyRef]).answers(updateSlf4jMessages(Level.DEBUG, _)) + mockLogger.debug(anyString, any[Array[Object]](): 
_*).answers(updateSlf4jMessages(Level.DEBUG, _)) mockLogger.debug(anyString, any[Throwable]()).answers(updateSlf4jMessages(Level.DEBUG, _)) mockLogger.trace(anyString).answers(updateSlf4jMessages(Level.TRACE, _)) mockLogger.trace(anyString, any[Any]()).answers(updateSlf4jMessages(Level.TRACE, _)) mockLogger.trace(anyString, any[Any](), any[Any]()).answers(updateSlf4jMessages(Level.TRACE, _)) - mockLogger.trace(anyString, anyVarArg[AnyRef]).answers(updateSlf4jMessages(Level.TRACE, _)) + mockLogger.trace(anyString, any[Array[Object]](): _*).answers(updateSlf4jMessages(Level.TRACE, _)) mockLogger.trace(anyString, any[Throwable]()).answers(updateSlf4jMessages(Level.TRACE, _)) val mockLoggingAdapter: LoggingAdapter = new LoggingAdapter { diff --git a/core/src/test/scala/cromwell/core/retry/BackoffSpec.scala b/core/src/test/scala/cromwell/core/retry/BackoffSpec.scala index 9d84408c01f..58fa597dc03 100644 --- a/core/src/test/scala/cromwell/core/retry/BackoffSpec.scala +++ b/core/src/test/scala/cromwell/core/retry/BackoffSpec.scala @@ -6,7 +6,7 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ class BackoffSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { @@ -61,7 +61,7 @@ class BackoffSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { "randomization-factor" -> 0D ).asJava ) - + val backoff = SimpleExponentialBackoff(config) backoff.googleBackoff.getCurrentIntervalMillis shouldBe 5.seconds.toMillis.toInt backoff.googleBackoff.getMaxIntervalMillis shouldBe 30.seconds.toMillis.toInt diff --git a/core/src/test/scala/cromwell/util/AkkaTestUtil.scala b/core/src/test/scala/cromwell/util/AkkaTestUtil.scala index 24f2748c435..ef1183e9a07 100644 --- a/core/src/test/scala/cromwell/util/AkkaTestUtil.scala +++ b/core/src/test/scala/cromwell/util/AkkaTestUtil.scala @@ -11,7 +11,7 @@ object AkkaTestUtil { // Get a 'props' handle which will return a (inbound-only!!) wrapper around this test probe when constructed def props = Props(new Actor with ActorLogging { def receive = { - case outbound @ _ if sender == probe.ref => + case outbound @ _ if sender() == probe.ref => val msg = "Unexpected outbound message from Probe. You're doing something wrong!" 
log.error(msg) throw new RuntimeException(msg) diff --git a/cromwell-drs-localizer/src/main/scala/drs/localizer/CommandLineParser.scala b/cromwell-drs-localizer/src/main/scala/drs/localizer/CommandLineParser.scala index cc4dc1adda9..df0f7c2eba6 100644 --- a/cromwell-drs-localizer/src/main/scala/drs/localizer/CommandLineParser.scala +++ b/cromwell-drs-localizer/src/main/scala/drs/localizer/CommandLineParser.scala @@ -45,6 +45,7 @@ class CommandLineParser extends scopt.OptionParser[CommandLineArguments](Usage) case Some(Google) if List(c.azureSecretName, c.azureVaultName, c.azureIdentityClientId).forall(_.isEmpty) => Right(()) case Some(Google) => Left(s"One or more specified options are only valid with access token strategy '$Azure'") case Some(huh) => Left(s"Unrecognized access token strategy '$huh'") + case None => Left("Programmer error, access token strategy should not be None") } ) } diff --git a/cromwell-drs-localizer/src/main/scala/drs/localizer/accesstokens/GoogleAccessTokenStrategy.scala b/cromwell-drs-localizer/src/main/scala/drs/localizer/accesstokens/GoogleAccessTokenStrategy.scala index 513340d471d..5af2b8af441 100644 --- a/cromwell-drs-localizer/src/main/scala/drs/localizer/accesstokens/GoogleAccessTokenStrategy.scala +++ b/cromwell-drs-localizer/src/main/scala/drs/localizer/accesstokens/GoogleAccessTokenStrategy.scala @@ -4,7 +4,7 @@ import cats.syntax.validated._ import com.google.auth.oauth2.GoogleCredentials import common.validation.ErrorOr.ErrorOr -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} /** diff --git a/cromwellApiClient/src/main/scala/cromwell/api/model/ShardIndex.scala b/cromwellApiClient/src/main/scala/cromwell/api/model/ShardIndex.scala index 405305b8acf..7be28e95852 100644 --- a/cromwellApiClient/src/main/scala/cromwell/api/model/ShardIndex.scala +++ b/cromwellApiClient/src/main/scala/cromwell/api/model/ShardIndex.scala @@ -10,8 +10,8 @@ object ShardIndexFormatter extends DefaultJsonProtocol { implicit object ShardIndexJsonFormat extends RootJsonFormat[ShardIndex] { def write(si: ShardIndex) = JsNumber(si.index.getOrElse(-1)) def read(value: JsValue) = value match { - case JsNumber(i) if i.equals(-1) => ShardIndex(None) - case JsNumber(i) if i.isValidInt && i.intValue > 0 => ShardIndex(Option(i.intValue())) + case JsNumber(i) if i == -1 => ShardIndex(None) + case JsNumber(i) if i.isValidInt && i.intValue > 0 => ShardIndex(Option(i.intValue)) case other => throw new UnsupportedOperationException(s"Cannot deserialize $other into a ShardIndex") } } diff --git a/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala b/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala index d382588eab5..1bfecebc0d5 100644 --- a/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala +++ b/cromwellApiClient/src/test/scala/cromwell/api/CromwellClientSpec.scala @@ -25,7 +25,7 @@ class CromwellClientSpec extends AsyncFlatSpec with BeforeAndAfterAll with Match tempFile.delete(swallowIOExceptions = true) super.afterAll() } - + it should "build an uri with query arguments" in { val id = WorkflowId.randomId() val args = Option( @@ -141,14 +141,14 @@ class CromwellClientSpec extends AsyncFlatSpec with BeforeAndAfterAll with Match |$chunkValue |""".stripMargin.replace("\n", "\r\n").trim } - val expectedFileChunks = expectedFiles map { + val expectedFileChunks = expectedFiles.iterator map { case (chunkKey, chunkFile) => s"""|--$boundary |Content-Type: 
application/zip |Content-Disposition: form-data; filename="${chunkFile.name}"; name="$chunkKey" |""".stripMargin.replace("\n", "\r\n").trim } - val expectedFileContents = expectedFiles map { + val expectedFileContents = expectedFiles.iterator map { case (_, chunkFile) => chunkFile.contentAsString } val boundaryEnd = s"--$boundary--" diff --git a/cwl/src/main/scala/cwl/CommandLineTool.scala b/cwl/src/main/scala/cwl/CommandLineTool.scala index 4e90d46b4f2..49a2de8c861 100644 --- a/cwl/src/main/scala/cwl/CommandLineTool.scala +++ b/cwl/src/main/scala/cwl/CommandLineTool.scala @@ -311,10 +311,29 @@ object CommandLineTool { case (StringOrInt.Int(_), StringOrInt.String(_)) => true // String > Int case (StringOrInt.String(_), StringOrInt.Int(_)) => false + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") }) + // https://github.com/scala/bug/issues/4097#issuecomment-292388627 + implicit def IterableSubclass[CC[X] <: Iterable[X], T: Ordering] : Ordering[CC[T]] = { + new Ordering[CC[T]] { + val ord = implicitly[Ordering[T]] + def compare(x: CC[T], y: CC[T]): Int = { + val xe = x.iterator + val ye = y.iterator + + while (xe.hasNext && ye.hasNext) { + val res = ord.compare(xe.next(), ye.next()) + if (res != 0) return res + } + + Ordering.Boolean.compare(xe.hasNext, ye.hasNext) + } + } + } + // Ordering for a CommandBindingSortingKey - implicit val SortingKeyOrdering: Ordering[CommandBindingSortingKey] = Ordering.by(_.value.toIterable) + implicit val SortingKeyOrdering: Ordering[CommandBindingSortingKey] = Ordering.by(_.value) // Ordering for a CommandPartSortMapping: order by sorting key implicit val SortKeyAndCommandPartOrdering: Ordering[SortKeyAndCommandPart] = Ordering.by(_.sortingKey) diff --git a/cwl/src/main/scala/cwl/CommandOutputBinding.scala b/cwl/src/main/scala/cwl/CommandOutputBinding.scala index 73487eaf4ee..bab616f2995 100644 --- a/cwl/src/main/scala/cwl/CommandOutputBinding.scala +++ b/cwl/src/main/scala/cwl/CommandOutputBinding.scala @@ -129,6 +129,7 @@ object CommandOutputBinding { ExpressionEvaluator.eval(expression, outputEvalParameterContext) case None => womFilesArray.valid + case oh => throw new Exception(s"Programmer Error! 
Unexpected case match: $oh") } } @@ -185,7 +186,7 @@ object CommandOutputBinding { // Make globbed files absolute paths by prefixing them with the output dir if necessary absolutePaths = primaryAsDirectoryOrFiles.map(_.mapFile(ioFunctionSet.pathFunctions.relativeToHostCallRoot)) - + // Load file size withFileSizes <- absolutePaths.parTraverse[IOChecked, WomFile](_.withSize(ioFunctionSet).to[IOChecked]) @@ -243,7 +244,7 @@ object CommandOutputBinding { "stdout", "stdout.background", ) - val globs: IOChecked[Seq[String]] = + val globs: IOChecked[Seq[String]] = ioFunctionSet.glob(cwlPath).toIOChecked .map({ _ @@ -254,7 +255,7 @@ object CommandOutputBinding { globs.flatMap({ files => files.toList.parTraverse[IOChecked, WomFile](v => loadFileWithContents(ioFunctionSet, commandOutputBinding)(v).to[IOChecked]) - }) + }) case other => s"Program error: $other type was not expected".invalidIOChecked } } diff --git a/cwl/src/main/scala/cwl/CwlType.scala b/cwl/src/main/scala/cwl/CwlType.scala index 1fba33d3980..7ec171d97fb 100644 --- a/cwl/src/main/scala/cwl/CwlType.scala +++ b/cwl/src/main/scala/cwl/CwlType.scala @@ -1,12 +1,11 @@ package cwl -import cats.syntax.traverse._ -import cats.syntax.functor._ -import cats.syntax.validated._ import cats.instances.list._ import cats.instances.option._ +import cats.syntax.functor._ +import cats.syntax.traverse._ +import cats.syntax.validated._ import common.validation.ErrorOr._ -import common.validation.IOChecked import common.validation.IOChecked._ import eu.timepit.refined._ import mouse.all._ @@ -252,7 +251,7 @@ case class Directory private val initializeFunction: WomMaybeListedDirectory => IoFunctionSet => IOChecked[WomValue] = { dir =>ioFunctionSet => ioFunctionSet.createTemporaryDirectory(basename).toIOChecked(ioFunctionSet.cs) map { tempDir => dir.copy(valueOption = Option(tempDir)) - } + } } new WomMaybeListedDirectory(None, listingOption, basename, initializeFunction = initializeFunction).valid } diff --git a/cwl/src/main/scala/cwl/CwlWomExpression.scala b/cwl/src/main/scala/cwl/CwlWomExpression.scala index cca40f787ce..5d77370ba49 100644 --- a/cwl/src/main/scala/cwl/CwlWomExpression.scala +++ b/cwl/src/main/scala/cwl/CwlWomExpression.scala @@ -1,13 +1,12 @@ package cwl -import cats.syntax.validated._ +import cats.instances.list._ import cats.syntax.functor._ import cats.syntax.traverse._ -import cats.instances.list._ +import cats.syntax.validated._ import common.validation.ErrorOr.{ErrorOr, ShortCircuitingFlatMap} -import common.validation.IOChecked -import common.validation.Validation._ import common.validation.IOChecked._ +import common.validation.Validation._ import cwl.ExpressionEvaluator.{ECMAScriptExpression, ECMAScriptFunction} import cwl.InitialWorkDirFileGeneratorExpression._ import cwl.InitialWorkDirRequirement.IwdrListingArrayEntry @@ -43,6 +42,7 @@ case class ECMAScriptWomExpression(expression: Expression, override def sourceString = expression match { case Expression.ECMAScriptExpression(s) => s.value case Expression.ECMAScriptFunction(s) => s.value + case oh => throw new Exception(s"Programmer Error! 
Unexpected case match: $oh") } override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet) = { @@ -66,7 +66,7 @@ final case class InitialWorkDirFileGeneratorExpression(entry: IwdrListingArrayEn } } yield WomMaybeListedDirectory(Option(directory), Option(fileListing)) } - + inputValues.toList.traverse[IOChecked, (String, WomValue)]({ case (k, v: WomMaybeListedDirectory) => val absolutePathString = ioFunctionSet.pathFunctions.relativeToHostCallRoot(v.value) @@ -163,7 +163,7 @@ object InitialWorkDirFileGeneratorExpression { val expressionEvaluation = ExpressionEvaluator.eval(expression, unmappedParameterContext) expressionEvaluation flatMap { - case array: WomArray if array.value.forall(_.isInstanceOf[WomFile]) => + case array: WomArray if array.value.forall(_.isInstanceOf[WomFile]) => array.value.toList.map(_.asInstanceOf[WomFile]).map(AdHocValue(_, alternativeName = None, inputName = None)).validNel case file: WomFile => List(AdHocValue(file, alternativeName = None, inputName = None)).validNel diff --git a/cwl/src/main/scala/cwl/ExpressionInterpolator.scala b/cwl/src/main/scala/cwl/ExpressionInterpolator.scala index 18e5cf6ce80..9a40ee8e06f 100644 --- a/cwl/src/main/scala/cwl/ExpressionInterpolator.scala +++ b/cwl/src/main/scala/cwl/ExpressionInterpolator.scala @@ -7,7 +7,7 @@ import common.validation.ErrorOr._ import wom.types.WomNothingType import wom.values._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ /** * Partial copy-port of cwltool's expression.py. diff --git a/cwl/src/main/scala/cwl/ExpressionTool.scala b/cwl/src/main/scala/cwl/ExpressionTool.scala index 4bde4cc6c3b..53c8a675a4c 100644 --- a/cwl/src/main/scala/cwl/ExpressionTool.scala +++ b/cwl/src/main/scala/cwl/ExpressionTool.scala @@ -56,9 +56,10 @@ case class ExpressionTool( val womExpression = expression match { case StringOrExpression.String(str) => ValueAsAnExpression(WomString(str)) case StringOrExpression.Expression(expr) => ECMAScriptWomExpression(expr, inputNames, expressionLib) + case oh => throw new Exception(s"Programmer Error! 
Unexpected case match: $oh") } - // If we expect a certain type for + // If we expect a certain type for def coerce(womValue: WomValue, womType: WomType): Checked[WomValue] = womType.coerceRawValue(womValue).toChecked /* diff --git a/cwl/src/main/scala/cwl/MyriadInputTypeToSecondaryFiles.scala b/cwl/src/main/scala/cwl/MyriadInputTypeToSecondaryFiles.scala index 910fa2727a8..4ddc09625d3 100644 --- a/cwl/src/main/scala/cwl/MyriadInputTypeToSecondaryFiles.scala +++ b/cwl/src/main/scala/cwl/MyriadInputTypeToSecondaryFiles.scala @@ -10,7 +10,7 @@ object MyriadInputTypeToSecondaryFiles extends Poly1 { } implicit val caseArrayMyriadInputInnerType: Case.Aux[Array[MyriadInputInnerType], Option[SecondaryFiles]] = at { - _.toStream.flatMap(_.fold(MyriadInputInnerTypeToSecondaryFiles)).headOption + _.to(LazyList).flatMap(_.fold(MyriadInputInnerTypeToSecondaryFiles)).headOption } } diff --git a/cwl/src/main/scala/cwl/Workflow.scala b/cwl/src/main/scala/cwl/Workflow.scala index b9c50827808..22b30019ff1 100644 --- a/cwl/src/main/scala/cwl/Workflow.scala +++ b/cwl/src/main/scala/cwl/Workflow.scala @@ -139,11 +139,11 @@ case class Workflow private( val womType: WomType = tpe.fold(MyriadOutputTypeToWomType).apply(allRequirements.schemaDefRequirement) val parsedWorkflowOutput = FileAndId(id) - val parsedOutputSource = FullyQualifiedName(outputSource) + val parsedOutputSource = cwl.FullyQualifiedName(outputSource) // Try to find an output port for this cwl output in the set of available nodes - def lookupOutputSource(fqn: FullyQualifiedName): Checked[OutputPort] = { - def isRightOutputPort(op: GraphNodePort.OutputPort) = FullyQualifiedName.maybeApply(op.name) match { + def lookupOutputSource(fqn: cwl.FullyQualifiedName): Checked[OutputPort] = { + def isRightOutputPort(op: GraphNodePort.OutputPort) = cwl.FullyQualifiedName.maybeApply(op.name) match { case Some(f) => f.id == fqn.id case None => op.internalName == fqn.id } diff --git a/cwl/src/main/scala/cwl/WorkflowStep.scala b/cwl/src/main/scala/cwl/WorkflowStep.scala index 086a9df38fe..cf39f261bae 100644 --- a/cwl/src/main/scala/cwl/WorkflowStep.scala +++ b/cwl/src/main/scala/cwl/WorkflowStep.scala @@ -26,6 +26,7 @@ import wom.graph._ import wom.graph.expression.ExpressionNode import wom.types.WomType import wom.values.WomValue +import wom.graph.{FullyQualifiedName => WomFullyQualifiedName} /** * An individual job to run. 
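The `IterableSubclass` ordering introduced in `CommandLineTool` above restores the lexicographic comparison of collections that Scala 2.13 no longer derives implicitly for arbitrary `Iterable` subclasses (scala/bug#4097). A minimal standalone sketch of the same idea, with illustrative names and the ordering passed explicitly to sidestep anything still in implicit scope:

```scala
object LexicographicOrderingSketch extends App {
  // Lexicographic Ordering for any Iterable subclass, mirroring the
  // workaround above: compare element-wise, then prefer the shorter
  // sequence when one is a strict prefix of the other.
  def iterableOrdering[CC[X] <: Iterable[X], T](implicit ord: Ordering[T]): Ordering[CC[T]] =
    new Ordering[CC[T]] {
      def compare(x: CC[T], y: CC[T]): Int = {
        val xe = x.iterator
        val ye = y.iterator
        while (xe.hasNext && ye.hasNext) {
          val res = ord.compare(xe.next(), ye.next())
          if (res != 0) return res
        }
        Ordering.Boolean.compare(xe.hasNext, ye.hasNext)
      }
    }

  val keys = List(List(1, 2), List(1), List(1, 2, 0))
  // List(1) sorts first because it is a strict prefix of the others.
  println(keys.sorted(iterableOrdering[List, Int]))
}
```

Prefix sequences sorting first is the behaviour `SortingKeyOrdering` relies on when ordering `CommandBindingSortingKey` values by `_.value`.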
@@ -59,9 +60,9 @@ case class WorkflowStep( lazy val allRequirements = RequirementsAndHints(requirements.toList.flatten ++ parentWorkflow.allRequirements.list) - lazy val womFqn: wom.graph.FullyQualifiedName = { + lazy val womFqn: WomFullyQualifiedName = { implicit val parentName = parentWorkflow.explicitWorkflowName - val localFqn = FullyQualifiedName.maybeApply(id).map(_.id).getOrElse(id) + val localFqn = cwl.FullyQualifiedName.maybeApply(id).map(_.id).getOrElse(id) parentWorkflow.womFqn.map(_.combine(localFqn)).getOrElse(wom.graph.FullyQualifiedName(localFqn)) } @@ -83,12 +84,12 @@ case class WorkflowStep( // Find the type of the outputs of the run section val runOutputTypes = run.fold(RunOutputsToTypeMap).apply(allRequirements.schemaDefRequirement) .map({ - case (runOutputId, womType) => FullyQualifiedName(runOutputId).id -> womType + case (runOutputId, womType) => cwl.FullyQualifiedName(runOutputId).id -> womType }) // Use them to find get the final type of the workflow outputs, and only the workflow outputs out.map({ stepOutput => val stepOutputValue = stepOutput.select[WorkflowStepOutput].map(_.id).getOrElse(stepOutput.select[String].get) - val stepOutputId = FullyQualifiedName(stepOutputValue) + val stepOutputId = cwl.FullyQualifiedName(stepOutputValue) stepOutputValue -> scatterTypeFunction(runOutputTypes(stepOutputId.id)) }).toMap } @@ -185,12 +186,12 @@ case class WorkflowStep( val scatterLookupSet = scatter.toList. flatMap(_.fold(StringOrStringArrayToStringList)). - map(id => FullyQualifiedName(id).id) + map(id => cwl.FullyQualifiedName(id).id) def isStepScattered(workflowStepInputId: String) = scatterLookupSet.contains(workflowStepInputId) val unqualifiedStepId: WomIdentifier = { - FullyQualifiedName.maybeApply(id).map({ fqn => + cwl.FullyQualifiedName.maybeApply(id).map({ fqn => WomIdentifier(LocalName(fqn.id), womFqn) }).getOrElse(WomIdentifier(id)) } @@ -216,6 +217,7 @@ case class WorkflowStep( case Run.CommandLineTool(clt) => clt.buildTaskDefinition(validator, expressionLib) case Run.Workflow(wf) => wf.womDefinition(validator, expressionLib) case Run.ExpressionTool(et) => et.buildTaskDefinition(validator, expressionLib) + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } val callNodeBuilder = new CallNode.CallNodeBuilder() @@ -254,7 +256,7 @@ case class WorkflowStep( def buildUpstreamNodes(upstreamStepId: String, accumulatedNodes: Set[GraphNode]): Checked[Set[GraphNode]] = // Find the step corresponding to this upstreamStepId in the set of all the steps of this workflow for { - step <- workflow.steps.find { step => FullyQualifiedName(step.id).id == upstreamStepId }. + step <- workflow.steps.find { step => cwl.FullyQualifiedName(step.id).id == upstreamStepId }. 
toRight(NonEmptyList.one(s"no step of id $upstreamStepId found in ${workflow.steps.map(_.id).toList}")) call <- step.callWithInputs(typeMap, workflow, accumulatedNodes, workflowInputs, validator, expressionLib) } yield call @@ -278,7 +280,7 @@ case class WorkflowStep( } } - lazy val workflowStepInputId = FullyQualifiedName(workflowStepInput.id).id + lazy val workflowStepInputId = cwl.FullyQualifiedName(workflowStepInput.id).id def updateFold(sourceMappings: Map[String, OutputPort], newNodes: Set[GraphNode]): Checked[WorkflowStepInputFold] = { val typeExpectedByRunInput: Option[cwl.MyriadInputType] = typedRunInputs.get(workflowStepInputId).flatten @@ -313,7 +315,7 @@ case class WorkflowStep( * - points to a workflow input * - points to an upstream step */ - FullyQualifiedName(inputSource) match { + cwl.FullyQualifiedName(inputSource) match { // The source points to a workflow input, which means it should be in the workflowInputs map case FileAndId(_, _, inputId) => fromWorkflowInput(inputId).map(newMap => (sourceMappings ++ newMap, graphNodes)) // The source points to an output from a different step @@ -353,6 +355,7 @@ case class WorkflowStep( InputDefinitionFold( mappings = List(optional -> Coproduct[InputDefinitionPointer](optional.womType.none: WomValue)) ).validNel + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } } diff --git a/cwl/src/main/scala/cwl/WorkflowStepInputExpression.scala b/cwl/src/main/scala/cwl/WorkflowStepInputExpression.scala index e8006ebeef5..476d7f44ef5 100644 --- a/cwl/src/main/scala/cwl/WorkflowStepInputExpression.scala +++ b/cwl/src/main/scala/cwl/WorkflowStepInputExpression.scala @@ -40,6 +40,7 @@ final case class WorkflowStepInputExpression(inputName: String, val parameterContext = ParameterContext(ioFunctionSet, expressionLib, inputValues, selfValue) expression.fold(EvaluateExpression).apply(parameterContext) + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } } diff --git a/cwl/src/main/scala/cwl/WorkflowStepInputMergeExpression.scala b/cwl/src/main/scala/cwl/WorkflowStepInputMergeExpression.scala index 89e9d274215..bc7d4f43d75 100644 --- a/cwl/src/main/scala/cwl/WorkflowStepInputMergeExpression.scala +++ b/cwl/src/main/scala/cwl/WorkflowStepInputMergeExpression.scala @@ -66,6 +66,7 @@ final case class WorkflowStepInputMergeExpression(input: WorkflowStepInput, flattenedValidatedSourceValues.map(list => WomArray(list)).toValidated case (List(id), _, _) => lookupValue(id) + case oh => throw new Exception(s"Programmer Error! 
Unexpected case match: $oh") } } diff --git a/cwl/src/main/scala/cwl/internal/CwlEcmaScriptDecoder.scala b/cwl/src/main/scala/cwl/internal/CwlEcmaScriptDecoder.scala index 5cf4e217ebb..f7f54d3db1c 100644 --- a/cwl/src/main/scala/cwl/internal/CwlEcmaScriptDecoder.scala +++ b/cwl/src/main/scala/cwl/internal/CwlEcmaScriptDecoder.scala @@ -13,7 +13,7 @@ import shapeless.Coproduct import wom.types.WomNothingType import wom.values._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class CwlEcmaScriptDecoder { diff --git a/cwl/src/main/scala/cwl/internal/EnhancedRhinoSandbox.scala b/cwl/src/main/scala/cwl/internal/EnhancedRhinoSandbox.scala index c22f4fffff5..405c04fdaa2 100644 --- a/cwl/src/main/scala/cwl/internal/EnhancedRhinoSandbox.scala +++ b/cwl/src/main/scala/cwl/internal/EnhancedRhinoSandbox.scala @@ -4,7 +4,7 @@ import cwl.internal.EnhancedRhinoSandbox._ import delight.rhinosandox.internal._ import org.mozilla.javascript._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect._ /** diff --git a/cwl/src/main/scala/cwl/ontology/Schema.scala b/cwl/src/main/scala/cwl/ontology/Schema.scala index b92a9314273..a2ed13d7bf3 100644 --- a/cwl/src/main/scala/cwl/ontology/Schema.scala +++ b/cwl/src/main/scala/cwl/ontology/Schema.scala @@ -23,7 +23,7 @@ import org.semanticweb.owlapi.reasoner.{OWLReasoner, OWLReasonerFactory} import org.semanticweb.owlapi.util.OWLAPIStreamUtils import org.slf4j.LoggerFactory -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.ExecutionContext import scala.util.Try diff --git a/cwl/src/main/scala/cwl/package.scala b/cwl/src/main/scala/cwl/package.scala index 5ebe620dfa7..96796712f1e 100644 --- a/cwl/src/main/scala/cwl/package.scala +++ b/cwl/src/main/scala/cwl/package.scala @@ -38,7 +38,7 @@ package object cwl extends TypeAliases { type CwlFile = Array[Cwl] :+: Cwl :+: CNil type Cwl = Workflow :+: CommandLineTool :+: ExpressionTool :+: CNil - + object Cwl { object Workflow { def unapply(cwl: Cwl): Option[Workflow] = cwl.select[Workflow] } object CommandLineTool { def unapply(cwl: Cwl): Option[CommandLineTool] = cwl.select[CommandLineTool] } @@ -94,16 +94,18 @@ package object cwl extends TypeAliases { case Cwl.Workflow(w) => w.womExecutable(validator, inputsFile, ioFunctions, strictValidation) case Cwl.CommandLineTool(clt) => clt.womExecutable(validator, inputsFile, ioFunctions, strictValidation) case Cwl.ExpressionTool(et) => et.womExecutable(validator, inputsFile, ioFunctions, strictValidation) + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } Try(executable) match { case Success(s) => s case Failure(f) => f.getMessage.invalidNelCheck + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } } def requiredInputs: Map[String, WomType] = { implicit val parent = ParentName.empty - + cwl match { case Cwl.Workflow(w) => selectWomTypeInputs(w.inputs collect { case i if i.`type`.isDefined => FullyQualifiedName(i.id).id -> i.`type`.get @@ -114,6 +116,7 @@ package object cwl extends TypeAliases { case Cwl.ExpressionTool(et) => selectWomTypeInputs(et.inputs collect { case i if i.`type`.isDefined => FullyQualifiedName(i.id).id -> i.`type`.get }) + case oh => throw new Exception(s"Programmer Error! 
Unexpected case match: $oh") } } @@ -139,7 +142,7 @@ package object cwl extends TypeAliases { private def getSchema(schemasOption: Option[Array[String]], namespacesOption: Option[Map[String, String]]): Option[Schema] = { - schemasOption.map(Schema(_, namespacesOption getOrElse Map.empty)) + schemasOption.map(a => Schema(a.toIndexedSeq, namespacesOption getOrElse Map.empty)) } } diff --git a/cwl/src/main/scala/cwl/preprocessor/CwlPreProcessor.scala b/cwl/src/main/scala/cwl/preprocessor/CwlPreProcessor.scala index 1a0c48ab1f7..9855ee73953 100644 --- a/cwl/src/main/scala/cwl/preprocessor/CwlPreProcessor.scala +++ b/cwl/src/main/scala/cwl/preprocessor/CwlPreProcessor.scala @@ -29,7 +29,7 @@ class CwlPreProcessor(saladFunction: SaladFunction = saladCwlFile) { private val ec: ExecutionContext = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(5)) private implicit val cs = IO.contextShift(ec) - + /** * This is THE main entry point into the CWL pre-processor. Takes a CWL reference and * returns a canonical JSON version with all references resolved. @@ -194,9 +194,9 @@ object CwlPreProcessor { private [preprocessor] def mapNumbers(json: Json): Json = { // Circumvent Circe's scientific format for numbers: convert to a JSON String without exponential notation. def nonScientificNumberFormatting(jsonNumber: JsonNumber): Json = { - val conversions = Stream[JsonNumber => Option[Any]]( - _.toBigInt.map(_.longValue()), - _.toBigDecimal.map(_.doubleValue()), + val conversions = LazyList[JsonNumber => Option[Any]]( + _.toBigInt.map(_.longValue), + _.toBigDecimal.map(_.doubleValue), Function.const(Option("null"))) // The `get` is safe because `Option("null")` guarantees a match even if the other two Stream elements diff --git a/cwl/src/test/scala/cwl/CwlDecoderSpec.scala b/cwl/src/test/scala/cwl/CwlDecoderSpec.scala index bbc366ee681..c206c116a84 100644 --- a/cwl/src/test/scala/cwl/CwlDecoderSpec.scala +++ b/cwl/src/test/scala/cwl/CwlDecoderSpec.scala @@ -16,7 +16,7 @@ class CwlDecoderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers it should "read nested workflow" in { decodeCwlFile(rootPath / "nestedworkflows.cwl"). value. - unsafeRunSync match { + unsafeRunSync() match { case Right(cwl) => val wf = cwl.select[Workflow].get wf.steps.flatMap(_.run.select[String].toList).length shouldBe 0 @@ -27,19 +27,19 @@ class CwlDecoderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers it should "fail to parse broken linked cwl" in { val result: Either[NonEmptyList[String], Cwl] = decodeCwlFile(rootPath / "brokenlinks.cwl"). value. - unsafeRunSync + unsafeRunSync() result.isLeft shouldBe true - withClue(s"Actual message: ${result.left.get.head}") { - "Field `run` contains undefined reference to `file://.+/wrong.cwl`".r.findAllIn(result.left.get.head).size shouldBe 1 - "Field `run` contains undefined reference to `file://.+/wrong2.cwl`".r.findAllIn(result.left.get.head).size shouldBe 1 + withClue(s"Actual message: ${result.swap.toOption.get.head}") { + "Field `run` contains undefined reference to `file://.+/wrong.cwl`".r.findAllIn(result.swap.toOption.get.head).size shouldBe 1 + "Field `run` contains undefined reference to `file://.+/wrong2.cwl`".r.findAllIn(result.swap.toOption.get.head).size shouldBe 1 } } it should "fail to parse invalid linked cwl" in { decodeCwlFile(rootPath/"links_dont_parse.cwl"). value. 
- unsafeRunSync match { + unsafeRunSync() match { case Left(errors) => errors.filter(_.contains("bad.cwl")).size + errors.filter(_.contains("bad2.cwl")).size shouldBe 1 case Right(_) => fail("should not have passed!") diff --git a/cwl/src/test/scala/cwl/CwlEcmaScriptEncoderSpec.scala b/cwl/src/test/scala/cwl/CwlEcmaScriptEncoderSpec.scala index 1d34e31336e..9119f75c421 100644 --- a/cwl/src/test/scala/cwl/CwlEcmaScriptEncoderSpec.scala +++ b/cwl/src/test/scala/cwl/CwlEcmaScriptEncoderSpec.scala @@ -27,7 +27,7 @@ class CwlEcmaScriptEncoderSpec extends AnyFlatSpec with CromwellTimeoutSpec with ) val result: EcmaScriptUtil.ECMAScriptVariable = encoder.encode(file) val resultMap = result.asInstanceOf[ESObject].fields - resultMap.filterKeys(_ != "secondaryFiles").toList should contain theSameElementsAs expected + resultMap.view.filterKeys(_ != "secondaryFiles").toList should contain theSameElementsAs expected resultMap("secondaryFiles") should be(a[ESArray]) resultMap("secondaryFiles").asInstanceOf[ESArray].array should be(empty) } diff --git a/cwl/src/test/scala/cwl/CwlInputValidationSpec.scala b/cwl/src/test/scala/cwl/CwlInputValidationSpec.scala index 7054263b612..c9d5272b07b 100644 --- a/cwl/src/test/scala/cwl/CwlInputValidationSpec.scala +++ b/cwl/src/test/scala/cwl/CwlInputValidationSpec.scala @@ -20,7 +20,7 @@ class CwlInputValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with M var cwlFile: BFile = _ var inputTempFile: BFile = _ - + override def beforeAll(): Unit = { inputTempFile = BFile.newTemporaryFile() cwlFile = BFile.newTemporaryFile().write( @@ -46,7 +46,7 @@ class CwlInputValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with M | items: | type: array | items: string - | # enable this when InputRecordSchemas are enabled + | # enable this when InputRecordSchemas are enabled | #w9: | # type: | # name: w9 @@ -54,16 +54,16 @@ class CwlInputValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with M | # fields: | # - name: w9a | # type: record - | # fields: + | # fields: | # - name: w9aa - | # type: string + | # type: string | w10: Directory - |steps: [] + |steps: [] |outputs: [] """.stripMargin ) } - + override def afterAll(): Unit = { cwlFile.delete() inputTempFile.delete() @@ -72,7 +72,7 @@ class CwlInputValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with M lazy val cwlWorkflow = decodeCwlFile(cwlFile).map { _.select[Workflow].get - }.value.unsafeRunSync.fold(error => throw new RuntimeException(s"broken parse! msg was $error"), identity) + }.value.unsafeRunSync().fold(error => throw new RuntimeException(s"broken parse! 
msg was $error"), identity) lazy val graph = cwlWorkflow.womDefinition(AcceptAllRequirements, Vector.empty) match { case Left(errors) => fail(s"Failed to build wom definition: ${errors.toList.mkString(", ")}") @@ -80,7 +80,7 @@ class CwlInputValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with M } def getOutputPort(n: Int) = graph.inputNodes.find(_.localName == s"w$n").getOrElse(fail(s"Failed to find an input node for w$n")).singleOutputPort - + lazy val w0OutputPort = getOutputPort(0) lazy val w1OutputPort = getOutputPort(1) lazy val w2OutputPort = getOutputPort(2) @@ -92,7 +92,7 @@ class CwlInputValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with M lazy val w8OutputPort = getOutputPort(8) // lazy val w9OutputPort = getOutputPort(9) lazy val w10OutputPort = getOutputPort(10) - + def validate(inputFile: String): Map[GraphNodePort.OutputPort, ResolvedExecutableInput] = { cwlWorkflow.womExecutable(AcceptAllRequirements, Option(inputFile), LocalIoFunctionSet, strictValidation = false) match { case Left(errors) => fail(s"Failed to build a wom executable: ${errors.toList.mkString(", ")}") diff --git a/cwl/src/test/scala/cwl/CwlWorkflowWomSpec.scala b/cwl/src/test/scala/cwl/CwlWorkflowWomSpec.scala index 692151139fa..52f511fd9b7 100644 --- a/cwl/src/test/scala/cwl/CwlWorkflowWomSpec.scala +++ b/cwl/src/test/scala/cwl/CwlWorkflowWomSpec.scala @@ -22,12 +22,12 @@ class CwlWorkflowWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match import TestSetup._ implicit val parentName = ParentName.empty - + "A Cwl object for 1st-tool" should "convert to WOM" in { def validateWom(callable: Callable): Unit = callable match { case taskDefinition: CommandTaskDefinition => - val inputDef = taskDefinition.inputs.head - inputDef.name shouldBe "message" + val inputDef = taskDefinition.inputs.head + inputDef.name shouldBe "message" inputDef.womType shouldBe WomStringType () @@ -39,7 +39,7 @@ class CwlWorkflowWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match clt <- decodeCwlFile(rootPath/"1st-tool.cwl"). map(_.select[CommandLineTool].get). value. - unsafeRunSync + unsafeRunSync() taskDef <- clt.buildTaskDefinition(_.validNel, Vector.empty) } yield validateWom(taskDef)).leftMap(e => throw new RuntimeException(s"error! $e")) } @@ -48,7 +48,7 @@ class CwlWorkflowWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match (for { wf <- decodeCwlFile(rootPath/"1st-workflow.cwl"). value. - unsafeRunSync. + unsafeRunSync(). map(_.select[Workflow].get) womDefinition <- wf.womDefinition(AcceptAllRequirements, Vector.empty) @@ -128,12 +128,13 @@ class CwlWorkflowWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match // can't collapse this to a single "case StringOrExpression.Expression(e) => e.value": case StringOrExpression.ECMAScriptExpression(e) => e.value case StringOrExpression.ECMAScriptFunction(f) => f.value + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } private lazy val commandLineTool: CommandLineTool = { val wf = decodeCwlFile(rootPath / "three_step.cwl").map { wf => wf.select[Workflow].get - }.value.unsafeRunSync.fold(error => throw new RuntimeException(s"broken parse! msg was ${error.toList.mkString(", ")}"), identity) + }.value.unsafeRunSync().fold(error => throw new RuntimeException(s"broken parse! 
msg was ${error.toList.mkString(", ")}"), identity) wf.id should include("three_step") @@ -159,7 +160,7 @@ class CwlWorkflowWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match val wf = decodeCwlFile(rootPath/"three_step.cwl").map { _.select[Workflow].get - }.value.unsafeRunSync.fold(error => throw new RuntimeException(s"broken parse: $error"), identity) + }.value.unsafeRunSync().fold(error => throw new RuntimeException(s"broken parse: $error"), identity) wf.id should include("three_step") diff --git a/cwl/src/test/scala/cwl/DirectorySpec.scala b/cwl/src/test/scala/cwl/DirectorySpec.scala index ced1eb518b3..1e7db881753 100644 --- a/cwl/src/test/scala/cwl/DirectorySpec.scala +++ b/cwl/src/test/scala/cwl/DirectorySpec.scala @@ -20,8 +20,8 @@ class DirectorySpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "Directory" it should "dir_example" in { - val cwl = decodeCwlFile(rootPath / "dir_example.cwl").value.unsafeRunSync.right.get - val executable = cwl.womExecutable(AcceptAllRequirements, None, NoIoFunctionSet, strictValidation = false).right.get + val cwl = decodeCwlFile(rootPath / "dir_example.cwl").value.unsafeRunSync().toOption.get + val executable = cwl.womExecutable(AcceptAllRequirements, None, NoIoFunctionSet, strictValidation = false).toOption.get val call = executable.graph.calls.head val runtimeEnvironment = RuntimeEnvironment("output/path", "temp/path",refineMV[Positive](1), 2e10, 100, 100) val defaultCallInputs = executable.graph.nodes.collect({ diff --git a/cwl/src/test/scala/cwl/FileSpec.scala b/cwl/src/test/scala/cwl/FileSpec.scala index a9601df6772..814c164bc16 100644 --- a/cwl/src/test/scala/cwl/FileSpec.scala +++ b/cwl/src/test/scala/cwl/FileSpec.scala @@ -27,8 +27,8 @@ class FileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with T forAll(fileTests) { (description, filePath, ioFunctionSet, expectedCommand) => it should description in { - val cwl = decodeCwlFile(rootPath / filePath).value.unsafeRunSync.right.get - val executable = cwl.womExecutable(AcceptAllRequirements, None, ioFunctionSet, strictValidation = false).right.get + val cwl = decodeCwlFile(rootPath / filePath).value.unsafeRunSync().toOption.get + val executable = cwl.womExecutable(AcceptAllRequirements, None, ioFunctionSet, strictValidation = false).toOption.get val call = executable.graph.calls.head val runtimeEnvironment = RuntimeEnvironment("output/path", "temp/path", refineMV[Positive](1), 2e10, 100, 100) val defaultCallInputs = executable.graph.nodes.collect({ @@ -39,14 +39,14 @@ class FileSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with T val value: WomValue = key .valueMapper(ioFunctionSet)(oginwd.default.evaluateValue(Map.empty, ioFunctionSet).toTry.get) .value.unsafeRunSync() - .right.get + .toOption.get key -> value }).toMap val commandEither = call.callable.asInstanceOf[CallableTaskDefinition].instantiateCommand( defaultCallInputs, ioFunctionSet, identity, runtimeEnvironment ).toEither - val command = commandEither.right.get.commandString + val command = commandEither.toOption.get.commandString command should be(expectedCommand) } } diff --git a/cwl/src/test/scala/cwl/LocalIoFunctionSet.scala b/cwl/src/test/scala/cwl/LocalIoFunctionSet.scala index 396731ce609..b3ab33a5809 100644 --- a/cwl/src/test/scala/cwl/LocalIoFunctionSet.scala +++ b/cwl/src/test/scala/cwl/LocalIoFunctionSet.scala @@ -5,7 +5,7 @@ import java.util.concurrent.Executors import wom.expression.EmptyIoFunctionSet -import scala.collection.JavaConverters._ 
+import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} import scala.util.Try @@ -14,7 +14,7 @@ import scala.util.Try */ object LocalIoFunctionSet extends EmptyIoFunctionSet { override implicit def ec: ExecutionContext = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(1)) - + private def stripLocalPrefix(path: String): String = { path.stripPrefix("file://") } diff --git a/cwl/src/test/scala/cwl/ParseBigThreeSpec.scala b/cwl/src/test/scala/cwl/ParseBigThreeSpec.scala index 74e9c7aae6d..e5ac02e80f4 100644 --- a/cwl/src/test/scala/cwl/ParseBigThreeSpec.scala +++ b/cwl/src/test/scala/cwl/ParseBigThreeSpec.scala @@ -14,21 +14,21 @@ class ParseBigThreeSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche decodeCwlFile(rootPath/"1st-tool.cwl"). value. - unsafeRunSync. + unsafeRunSync(). isRight shouldBe true } it should "parse first workflow" in { decodeCwlFile(rootPath/"1st-workflow.cwl"). value. - unsafeRunSync. + unsafeRunSync(). isRight shouldBe true } it should "parse env cwl" in { decodeCwlFile(rootPath/"env.cwl"). value. - unsafeRunSync. + unsafeRunSync(). isRight shouldBe true } } diff --git a/database/migration/src/main/scala/cromwell/database/migration/liquibase/DiffResultFilter.scala b/database/migration/src/main/scala/cromwell/database/migration/liquibase/DiffResultFilter.scala index 01842ad5f07..2365dcf1e4b 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/liquibase/DiffResultFilter.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/liquibase/DiffResultFilter.scala @@ -5,7 +5,7 @@ import liquibase.diff.{DiffResult, Difference, ObjectDifferences} import liquibase.structure.DatabaseObject import liquibase.structure.core._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ /** * Filters liquibase results. diff --git a/database/migration/src/main/scala/cromwell/database/migration/liquibase/LiquibaseUtils.scala b/database/migration/src/main/scala/cromwell/database/migration/liquibase/LiquibaseUtils.scala index e0147bc4a32..20a77f58c02 100644 --- a/database/migration/src/main/scala/cromwell/database/migration/liquibase/LiquibaseUtils.scala +++ b/database/migration/src/main/scala/cromwell/database/migration/liquibase/LiquibaseUtils.scala @@ -13,7 +13,7 @@ import liquibase.snapshot.{DatabaseSnapshot, SnapshotControl, SnapshotGeneratorF import liquibase.{Contexts, LabelExpression, Liquibase} import org.hsqldb.persist.HsqlDatabaseProperties -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object LiquibaseUtils { // Paranoia: Create our own mutex. 
https://stackoverflow.com/questions/442564/avoid-synchronizedthis-in-java @@ -160,7 +160,7 @@ object LiquibaseUtils { */ def getChangeSets(settings: LiquibaseSettings): Seq[ChangeSet] = { mutex.synchronized { - getChangeLog(settings).getChangeSets.asScala + getChangeLog(settings).getChangeSets.asScala.toList } } diff --git a/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala index f95e3d1a7ae..459c705480e 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala @@ -193,7 +193,7 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend //database.run(action) <-- See comment above private val actionThreadPool Future { try { - if (timeout.isFinite()) { + if (timeout.isFinite) { // https://stackoverflow.com/a/52569275/818054 Await.result(database.run(action.withStatementParameters(statementInit = _.setQueryTimeout(timeout.toSeconds.toInt))), Duration.Inf) } else { @@ -254,7 +254,7 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend if (failures.isEmpty) DBIO.successful(()) else { val valueList = values.toList - val failedRequests = failures.map(valueList(_)) + val failedRequests = failures.toList.map(valueList(_)) DBIO.failed(new RuntimeException( s"$description failed to upsert the following rows: ${failedRequests.mkString(", ")}" )) diff --git a/database/sql/src/main/scala/cromwell/database/sql/MetadataSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/MetadataSqlDatabase.scala index 28bc99310e2..30534818f85 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/MetadataSqlDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/MetadataSqlDatabase.scala @@ -161,7 +161,7 @@ trait MetadataSqlDatabase extends SqlDatabase { page: Option[Int], pageSize: Option[Int], newestFirst: Boolean) - (implicit ec: ExecutionContext): Future[Traversable[WorkflowMetadataSummaryEntry]] + (implicit ec: ExecutionContext): Future[Iterable[WorkflowMetadataSummaryEntry]] def countWorkflowSummaries(parentIdWorkflowMetadataKey: String, workflowStatuses: Set[String], workflowNames: Set[String], diff --git a/dockerHashing/src/main/scala/cromwell/docker/DockerImageIdentifier.scala b/dockerHashing/src/main/scala/cromwell/docker/DockerImageIdentifier.scala index f4635db9b16..9fbd173303b 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/DockerImageIdentifier.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/DockerImageIdentifier.scala @@ -7,7 +7,7 @@ sealed trait DockerImageIdentifier { def repository: Option[String] def image: String def reference: String - + def swapReference(newReference: String): DockerImageIdentifier // The name of the image with a repository prefix iff a repository was explicitly specified. @@ -32,42 +32,42 @@ case class DockerImageIdentifierWithHash(host: Option[String], repository: Optio object DockerImageIdentifier { private val DefaultDockerTag = "latest" - + private val DockerStringRegex = s""" (?x) # Turn on comments and whitespace insensitivity - + ( # Begin capturing group for name [a-z0-9]+(?:[._-][a-z0-9]+)* # API v2 name component regex - see https://docs.docker.com/registry/spec/api/#/overview (?::[0-9]+)? # Optional port (?:/[a-z0-9]+(?:[._-][a-z0-9]+)*)* # Optional additional name components separated by / ) # End capturing group for name - - (?: + + (?: : # Tag separator. 
':' is followed by a tag - - ( # Begin capturing group for reference + + ( # Begin capturing group for reference [A-Za-z0-9]+(?:[-.:_A-Za-z0-9]+)* # Reference - ) # End capturing group for reference + ) # End capturing group for reference )? - (?: + (?: @ # Tag separator '@' is followed by a digest - - ( # Begin capturing group for reference + + ( # Begin capturing group for reference [A-Za-z0-9]+(?:[-.:_A-Za-z0-9]+)* # Reference - ) # End capturing group for reference + ) # End capturing group for reference )? """.trim.r - + def fromString(dockerString: String): Try[DockerImageIdentifier] = { dockerString.trim match { case DockerStringRegex(name, tag, hash) => buildId(name, Option(tag), Option(hash)) case _ => Failure(new IllegalArgumentException(s"Docker image $dockerString has an invalid syntax.")) } } - + private def isRegistryHostName(str: String) = str.contains('.') || str.startsWith("localhost") - + private def buildId(name: String, tag: Option[String], hash: Option[String]) = { val (dockerHost, dockerRepo, dockerImage): (Option[String], Option[String], String) = name.split('/').toList match { // If just one component (e.g ubuntu) @@ -84,8 +84,9 @@ object DockerImageIdentifier { case host :: rest if isRegistryHostName(host) => val repo = rest.init.mkString("/") (Option(host), Option(repo), rest.last) + case oh => throw new RuntimeException(s"Programmer Error! Unexpected case match: $oh") } - + (tag, hash) match { case (None, None) => Success(DockerImageIdentifierWithoutHash(dockerHost, dockerRepo, dockerImage, DefaultDockerTag)) case (Some(t), None) => Success(DockerImageIdentifierWithoutHash(dockerHost, dockerRepo, dockerImage, t)) diff --git a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/alibabacloudcr/AlibabaCloudCRRegistry.scala b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/alibabacloudcr/AlibabaCloudCRRegistry.scala index bb62c2d3abc..7d974053317 100644 --- a/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/alibabacloudcr/AlibabaCloudCRRegistry.scala +++ b/dockerHashing/src/main/scala/cromwell/docker/registryv2/flows/alibabacloudcr/AlibabaCloudCRRegistry.scala @@ -86,11 +86,9 @@ class AlibabaCloudCRRegistry(config: DockerRegistryConfig) extends DockerRegistr request.setRepoName(dockerImageID.image) dockerImageID.repository foreach { repository => request.setRepoNamespace(repository) } - manifestResponseHandler(client, request, context) - .getOrElse(new Exception(s"handle response fail, please make sure the image id is correct: ${context.dockerImageID}")) match { - case succ: DockerInfoSuccessResponse => succ - case fail: DockerInfoFailedResponse => fail - case ex: Exception => throw new Exception(s"Get AliyunCr manifest failed, ${ex.getMessage}") + manifestResponseHandler(client, request, context) match { + case Success(response) => response // may be DockerInfoSuccessResponse or DockerInfoFailedResponse + case Failure(ex) => throw new Exception(s"Get AliyunCr manifest failed for ${context.dockerImageID}", ex) } } diff --git a/dockerHashing/src/test/scala/cromwell/docker/registryv2/DockerRegistryV2AbstractSpec.scala b/dockerHashing/src/test/scala/cromwell/docker/registryv2/DockerRegistryV2AbstractSpec.scala index 1183a823557..e567aa540cd 100644 --- a/dockerHashing/src/test/scala/cromwell/docker/registryv2/DockerRegistryV2AbstractSpec.scala +++ b/dockerHashing/src/test/scala/cromwell/docker/registryv2/DockerRegistryV2AbstractSpec.scala @@ -12,22 +12,22 @@ import org.scalatest.matchers.should.Matchers class 
DockerRegistryV2AbstractSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "DockerRegistryV2Abstract" - + it should "handle gracefully if a response cannot be parsed" in { val registry = new DockerRegistryV2Abstract(DockerRegistryConfig.default) { override protected def registryHostName(dockerImageIdentifier: DockerImageIdentifier) = "N/A" override protected def authorizationServerHostName(dockerImageIdentifier: DockerImageIdentifier) = "N/A" override protected def buildTokenRequestHeaders(dockerInfoContext: DockerInfoActor.DockerInfoContext) = List.empty } - - val mediaType = MediaType.parse(DockerRegistryV2Abstract.ManifestV2MediaType).right.get + + val mediaType = MediaType.parse(DockerRegistryV2Abstract.ManifestV2MediaType).toOption.get val contentType: Header = `Content-Type`(mediaType) val mockClient = Client({ _: Request[IO] => // This response will have an empty body, so we need to be explicit about the typing: Resource.pure[IO, Response[IO]](Response(headers = Headers.of(contentType))) : Resource[IO, Response[IO]] }) - + val dockerImageIdentifier = DockerImageIdentifier.fromString("ubuntu").get val dockerInfoRequest = DockerInfoRequest(dockerImageIdentifier) val context = DockerInfoContext(dockerInfoRequest, null) diff --git a/docs/WOMtool.md b/docs/WOMtool.md index 751e19b3a17..fc8c16c7897 100644 --- a/docs/WOMtool.md +++ b/docs/WOMtool.md @@ -4,14 +4,14 @@ Command line utilities for interacting with the Workflow Object Model (WOM). You The following is the toolchain used for development of womtool. Other versions may work, but these are recommended. -* [Scala 2.12](http://www.scala-lang.org/) +* [Scala 2.13](http://www.scala-lang.org/) * [SBT 1.x](https://www.scala-sbt.org/) * [AdoptOpenJDK 11 HotSpot](https://adoptopenjdk.net/) * [Git](https://git-scm.com/) ## Building -`sbt assembly` will build a runnable JAR in `womtool/target/scala-2.12/` +`sbt assembly` will build a runnable JAR in `womtool/target/scala-2.13/` Tests are run via `sbt test`. Note that the tests do require Docker to be running. To test this out while downloading the Ubuntu image that is required for tests, run `docker pull ubuntu:latest` prior to running `sbt test` diff --git a/docs/developers/Building.md b/docs/developers/Building.md index 9322f160120..5c5cc21eaf7 100644 --- a/docs/developers/Building.md +++ b/docs/developers/Building.md @@ -3,7 +3,7 @@ Most users should not need to build Cromwell and can use pre-built Cromwell [rel If for some reason you require a non-release version of Cromwell or are developing new Cromwell features or fixes, the following are required to build Cromwell from source: -* [Scala 2.12](http://www.scala-lang.org/) +* [Scala 2.13](http://www.scala-lang.org/) * [SBT 1.x](https://www.scala-sbt.org/) * [AdoptOpenJDK 11 HotSpot](https://adoptopenjdk.net/) * [Git](https://git-scm.com/) @@ -38,7 +38,7 @@ $ sbt assembly NOTE: This command will run for a long time the first time. NOTE: Compiling will not succeed on directories encrypted with ecryptfs (ubuntu encrypted home dirs for example), due to long file paths. -`sbt assembly` will build the runnable Cromwell JAR in `server/target/scala-2.12/` with a name like `cromwell-.jar`. It will also build a runnable Womtool JAR in `womtool/target/scala-2.12/` with a name like `womtool-.jar`. +`sbt assembly` will build the runnable Cromwell JAR in `server/target/scala-2.13/` with a name like `cromwell-.jar`. It will also build a runnable Womtool JAR in `womtool/target/scala-2.13/` with a name like `womtool-.jar`. 
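The documentation bumps from Scala 2.12 to 2.13 above track the source-wide migration visible throughout this patch, including the move from the deprecated `scala.collection.JavaConverters` to `scala.jdk.CollectionConverters`. A minimal sketch of the 2.13-style conversions (the object name is illustrative):

```scala
import scala.jdk.CollectionConverters._

object ConvertersSketch extends App {
  val javaList = new java.util.ArrayList[String]()
  javaList.add("hello")
  javaList.add("world")

  // .asScala and .asJava keep their old names; only the import moved
  // out of the deprecated scala.collection.JavaConverters package.
  val scalaSeq: Seq[String] = javaList.asScala.toSeq
  println(scalaSeq.mkString(", "))
  println(scalaSeq.asJava.size())
}
```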
To build a [Docker](https://www.docker.com/) image, run: diff --git a/docs/tutorials/HPCSlurmWithLocalScratch.md b/docs/tutorials/HPCSlurmWithLocalScratch.md index 39e85586557..85bed57e531 100644 --- a/docs/tutorials/HPCSlurmWithLocalScratch.md +++ b/docs/tutorials/HPCSlurmWithLocalScratch.md @@ -129,7 +129,7 @@ sbt assembly ### 5. When the build was successful, we can move the new jar file into the cromwell directory ```hocon -cp server/target/scala-2.12/cromwell-52-*-SNAP.jar \ +cp server/target/scala-2.13/cromwell-52-*-SNAP.jar \ cromwell/cromwell-52-fix.jar ``` diff --git a/engine/src/main/scala/cromwell/engine/backend/BackendConfiguration.scala b/engine/src/main/scala/cromwell/engine/backend/BackendConfiguration.scala index 1dff0efa3ab..410a6261b5c 100644 --- a/engine/src/main/scala/cromwell/engine/backend/BackendConfiguration.scala +++ b/engine/src/main/scala/cromwell/engine/backend/BackendConfiguration.scala @@ -3,7 +3,7 @@ package cromwell.engine.backend import com.typesafe.config.{Config, ConfigFactory} import cromwell.backend.{BackendConfigurationDescriptor, BackendLifecycleActorFactory} import net.ceedubs.ficus.Ficus._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} case class BackendConfigurationEntry(name: String, lifecycleActorFactoryClass: String, config: Config) { diff --git a/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchCommandContext.scala b/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchCommandContext.scala index 47031c79560..d41b0539dc6 100644 --- a/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchCommandContext.scala +++ b/engine/src/main/scala/cromwell/engine/io/gcs/GcsBatchCommandContext.scala @@ -47,7 +47,7 @@ final case class GcsBatchCommandContext[T, U](request: GcsBatchIoCommand[T, U], override val clientContext: Option[Any] = None, backoff: Backoff = GcsBatchCommandContext.defaultBackoff, currentAttempt: Int = 1, - promise: Promise[BatchResponse] = Promise[BatchResponse] + promise: Promise[BatchResponse] = Promise[BatchResponse]() ) extends IoCommandContext[T] with StrictLogging { @@ -84,14 +84,14 @@ final case class GcsBatchCommandContext[T, U](request: GcsBatchIoCommand[T, U], * Increment backoff time and attempt count */ lazy val next: GcsBatchCommandContext[T, U] = { - this.copy(backoff = backoff.next, currentAttempt = currentAttempt + 1, promise = Promise[BatchResponse]) + this.copy(backoff = backoff.next, currentAttempt = currentAttempt + 1, promise = Promise[BatchResponse]()) } /** * Only increment backoff. 
To be used for failures that should be retried infinitely */ lazy val nextTransient: GcsBatchCommandContext[T, U] = { - this.copy(backoff = backoff.next, promise = Promise[BatchResponse]) + this.copy(backoff = backoff.next, promise = Promise[BatchResponse]()) } /** @@ -117,7 +117,7 @@ final case class GcsBatchCommandContext[T, U](request: GcsBatchIoCommand[T, U], // Left means the command is complete, so just create the corresponding IoSuccess with the value case Left(responseValue) => Left(success(responseValue)) // Right means there is a subsequent request to be executed, clone this context with the new request and a new promise - case Right(nextCommand) => Right(this.copy(request = nextCommand, promise = Promise[BatchResponse])) + case Right(nextCommand) => Right(this.copy(request = nextCommand, promise = Promise[BatchResponse]())) } promise.trySuccess(promiseResponse) diff --git a/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala b/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala index 30a179995c7..a67df146abe 100644 --- a/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala +++ b/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala @@ -183,7 +183,7 @@ class NioFlow(parallelism: Int, private def readLines(exists: IoReadLinesCommand) = IO { exists.file.withReader { reader => - Stream.continually(reader.readLine()).takeWhile(_ != null).toList + LazyList.continually(reader.readLine()).takeWhile(_ != null).toList } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/WorkflowActor.scala b/engine/src/main/scala/cromwell/engine/workflow/WorkflowActor.scala index 64697491b23..ba82bab3e96 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/WorkflowActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/WorkflowActor.scala @@ -473,7 +473,7 @@ class WorkflowActor(workflowToStart: WorkflowToStart, case Event(msg @ EngineStatsActor.JobCountQuery, data) => data.currentLifecycleStateActor match { case Some(a) => a forward msg - case None => sender ! EngineStatsActor.NoJobs // This should be impossible, but if somehow here it's technically correct + case None => sender() ! EngineStatsActor.NoJobs // This should be impossible, but if somehow here it's technically correct } stay() case Event(AwaitMetadataIntegrity, data) => diff --git a/engine/src/main/scala/cromwell/engine/workflow/WorkflowDockerLookupActor.scala b/engine/src/main/scala/cromwell/engine/workflow/WorkflowDockerLookupActor.scala index 260cc9fbe46..32889f7f849 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/WorkflowDockerLookupActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/WorkflowDockerLookupActor.scala @@ -66,19 +66,19 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, case Event(DockerHashStoreLoadingSuccess(dockerHashEntries), data) => loadCacheAndHandleHashRequests(dockerHashEntries, data) case Event(request: DockerInfoRequest, data) => - stay using data.addHashRequest(request, sender()) + stay() using data.addHashRequest(request, sender()) } // This is the normal operational mode. when(Running) { // This tag has already been looked up and its hash is in the mappings cache. case Event(request: DockerInfoRequest, data) if data.mappings.contains(request.dockerImageID) => - sender ! DockerInfoSuccessResponse(data.mappings(request.dockerImageID), request) + sender() ! 
DockerInfoSuccessResponse(data.mappings(request.dockerImageID), request) stay() // A request for the hash for this tag has already been made to the hashing actor. Don't request the hash again, // just add this sender to the list of replyTos for when the hash arrives. case Event(request: DockerInfoRequest, data) if data.hashRequests.contains(request.dockerImageID) => - stay using data.addHashRequest(request, sender()) + stay() using data.addHashRequest(request, sender()) // This tag has not (successfully) been looked up before, so look it up now. case Event(request: DockerInfoRequest, data) => requestDockerHash(request, data) @@ -172,7 +172,7 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, sendDockerCommand(request) val replyTo = sender() val updatedData = data.copy(hashRequests = data.hashRequests + (request.dockerImageID -> NonEmptyList.of(RequestAndReplyTo(request, replyTo)))) - stay using updatedData + stay() using updatedData } private def recordMappingAndRespond(response: DockerInfoSuccessResponse, data: WorkflowDockerLookupActorData): State = { @@ -183,7 +183,7 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, case None => fail(new Exception(s"Could not find the actors associated with $request. Available requests are ${data.hashRequests.keys.mkString(", ")}") with NoStackTrace) } val updatedData = data.copy(hashRequests = data.hashRequests - request.dockerImageID, mappings = data.mappings + (request.dockerImageID -> response.dockerInformation)) - stay using updatedData + stay() using updatedData } private def respondToAllRequests(reason: Throwable, @@ -225,7 +225,7 @@ class WorkflowDockerLookupActor private[workflow](workflowId: WorkflowId, case Some(requestAndReplyTos) => requestAndReplyTos foreach { case RequestAndReplyTo(_, replyTo) => replyTo ! failureResponse } val updatedData = data.copy(hashRequests = data.hashRequests - request.dockerImageID) - stay using updatedData + stay() using updatedData case None => log.debug(s"Unable to find requesters for failed lookup of Docker image '${request.dockerImageID}'. " + s"Most likely reason is that requesters have already been cleaned out earlier by the timeout.") diff --git a/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala b/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala index 4f929105bf0..15d0c4321a7 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/WorkflowManagerActor.scala @@ -211,7 +211,7 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) data.idFromActor(workflowActor) foreach { workflowId => params.jobStoreActor ! RegisterWorkflowCompleted(workflowId) } - stay using data.without(workflowActor) + stay() using data.without(workflowActor) } val scheduleNextNewWorkflowPollStateFunction: StateFunction = { @@ -257,12 +257,12 @@ class WorkflowManagerActor(params: WorkflowManagerActorParams) case Event(SubWorkflowStart(actorRef), data) => // Watch for this subworkflow to expire to remove it from the Set of running subworkflows. context.watch(actorRef) - stay using data.copy(subWorkflows = data.subWorkflows + actorRef) + stay() using data.copy(subWorkflows = data.subWorkflows + actorRef) case Event(Terminated(actorRef), data) => // This is looking only for subworkflow actor terminations. 
If for some reason we see a termination for a // different type of actor this should be a noop since the ActorRef element being removed from the set of // subworkflows would not have been in the set in the first place. - stay using data.copy(subWorkflows = data.subWorkflows - actorRef) + stay() using data.copy(subWorkflows = data.subWorkflows - actorRef) // Uninteresting transition and current state notifications. case Event(Transition(_, _, _) | CurrentState(_, _), _) => stay() case Event(JobStoreWriteSuccess(_), _) => stay() // Snoozefest diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/WorkflowLifecycleActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/WorkflowLifecycleActor.scala index c63b5771177..c5657e9a60a 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/WorkflowLifecycleActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/WorkflowLifecycleActor.scala @@ -58,7 +58,7 @@ trait AbortableWorkflowLifecycleActor[S <: WorkflowLifecycleActorState] extends goto(abortedState) using newData } else super.checkForDoneAndTransition(newData) } else { - stay using newData + stay() using newData } } } @@ -81,7 +81,7 @@ trait WorkflowLifecycleActor[S <: WorkflowLifecycleActorState] extends LoggingFS whenUnhandled { case unhandledMessage => workflowLogger.warn(s"received an unhandled message: $unhandledMessage") - stay + stay() } onTransition { @@ -102,7 +102,7 @@ trait WorkflowLifecycleActor[S <: WorkflowLifecycleActorState] extends LoggingFS goto(failureState) using newData } } else { - stay using newData + stay() using newData } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala index 05053103262..7a1a4625751 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala @@ -49,7 +49,7 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, when(Pending) { case Event(ioReceivable, _) if ioReceive.isDefinedAt(ioReceivable) => ioReceive.apply(ioReceivable) - stay + stay() case Event(StartWorkflowFilesDeletion, NoData) => val intermediateOutputs = gatherIntermediateOutputFiles(workflowAllOutputs, workflowFinalOutputs) if (intermediateOutputs.nonEmpty) { @@ -65,7 +65,7 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, when(DeleteIntermediateFiles) { case Event(ioReceivable, _) if ioReceive.isDefinedAt(ioReceivable) => ioReceive.apply(ioReceivable) - stay + stay() case Event(DeleteFiles, DeletingIntermediateFilesData(intermediateFiles)) => // update deletion status in metadata val deletionInProgressEvent = metadataEventForDeletionStatus(InProgress) @@ -90,7 +90,7 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, when(WaitingForIoResponses) { case Event(ioReceivable, _) if ioReceive.isDefinedAt(ioReceivable) => ioReceive.apply(ioReceivable) - stay + stay() case Event(IoSuccess(command: IoDeleteCommand, _), data: WaitingForIoResponsesData) => val (newData: WaitingForIoResponsesData, commandState) = data.commandComplete(command) commandState match { @@ -127,7 +127,7 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, when(InvalidatingCallCache) { case Event(ioReceivable, _) if ioReceive.isDefinedAt(ioReceivable) => ioReceive.apply(ioReceivable) - stay + stay() 
case Event(InvalidateCallCache, _) => fetchCallCacheEntries(callCache) onComplete { case Failure(throwable) => self ! FailedRetrieveCallCacheIds(throwable) @@ -154,7 +154,7 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, when(WaitingForInvalidateCCResponses) { case Event(ioReceivable, _) if ioReceive.isDefinedAt(ioReceivable) => ioReceive.apply(ioReceivable) - stay + stay() case Event(CallCacheInvalidatedSuccess(cacheId, _), data: WaitingForInvalidateCCResponsesData) => val (newData: WaitingForInvalidateCCResponsesData, invalidateState) = data.commandComplete(cacheId.id) invalidateState match { @@ -173,7 +173,7 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, whenUnhandled { case Event(ioReceivable, _) if ioReceive.isDefinedAt(ioReceivable) => ioReceive.apply(ioReceivable) - stay + stay() case Event(ShutdownCommand, _) => stopSelf() case other => log.error(s"Programmer Error: Unexpected message to ${getClass.getSimpleName} ${self.path.name} in state $stateName with $stateData: ${other.toPrettyElidedString(1000)}") diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/CallMetadataHelper.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/CallMetadataHelper.scala index b2517ac5e50..0a7e3760a67 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/CallMetadataHelper.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/CallMetadataHelper.scala @@ -14,7 +14,7 @@ import wom.values.{WomEvaluatedCallInputs, WomValue} import scala.util.Random trait CallMetadataHelper { - + def workflowIdForCallMetadata: WorkflowId def serviceRegistryActor: ActorRef @@ -92,7 +92,7 @@ trait CallMetadataHelper { case _ => throwableToMetadataEvents(metadataKeyForCall(jobKey, s"${CallMetadataKeys.Failures}"), failure).+:(retryableFailureEvent) } - + serviceRegistryActor ! PutMetadataAction(completionEvents ++ failureEvents) } @@ -108,7 +108,7 @@ trait CallMetadataHelper { val metadataKey = metadataKeyForCall(jobKey, k) MetadataEvent(metadataKey, metadataValue) } - + val sortedEvents = eventList.sortBy(_.offsetDateTime) sortedEvents.headOption foreach { firstEvent => @@ -127,7 +127,7 @@ trait CallMetadataHelper { ) ++ (eventCurrent.grouping map { g => metadataEvent(s"$eventKey:grouping", g) }) } - serviceRegistryActor ! PutMetadataAction(events.toIterable) + serviceRegistryActor ! 
PutMetadataAction(events.toList) } } @@ -141,8 +141,8 @@ trait CallMetadataHelper { MetadataEvent(metadataKeyForCall(jobKey, CallMetadataKeys.End), MetadataValue(OffsetDateTime.now)) ) ++ returnCodeEvent.getOrElse(List.empty) } - - private def randomNumberString: String = Random.nextInt.toString.stripPrefix("-") + + private def randomNumberString: String = Random.nextInt().toString.stripPrefix("-") def metadataKeyForCall(jobKey: JobKey, myKey: String) = MetadataKey(workflowIdForCallMetadata, Option(MetadataJobKey(jobKey.node.fullyQualifiedName, jobKey.index, jobKey.attempt)), myKey) } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActor.scala index 0dc1f953a3d..dd6f0a42835 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActor.scala @@ -369,7 +369,7 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) def handleWorkflowOutputsFailure(errors: NonEmptyList[String]) = { val exception = new MessageAggregation { override def exceptionContext: String = "Workflow output evaluation failed" - override def errorMessages: Traversable[String] = errors.toList + override def errorMessages: Iterable[String] = errors.toList } context.parent ! WorkflowExecutionFailedResponse(data.jobExecutionMap, exception) goto(WorkflowExecutionFailedState) @@ -547,7 +547,7 @@ case class WorkflowExecutionActor(params: WorkflowExecutionActorParams) .map({ case (node, keys) => val tag = node.fullyQualifiedName - val shardCount = keys.map(_.index).distinct.size + val shardCount = keys.map(_.index).toList.distinct.size if (shardCount == 1) tag else s"$tag ($shardCount shards)" }) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorData.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorData.scala index c317817e373..ce00523a6e9 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorData.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/WorkflowExecutionActorData.scala @@ -126,14 +126,14 @@ case class WorkflowExecutionActorData(workflowDescriptor: EngineWorkflowDescript ) } - def mergeExecutionDiffs(diffs: Traversable[WorkflowExecutionDiff]): WorkflowExecutionActorData = { + def mergeExecutionDiffs(diffs: Iterable[WorkflowExecutionDiff]): WorkflowExecutionActorData = { diffs.foldLeft(this)((newData, diff) => newData.mergeExecutionDiff(diff)) } def jobExecutionMap: JobExecutionMap = { downstreamExecutionMap updated (workflowDescriptor.backendDescriptor, executionStore.startedJobs) } - + def executionStoreUpdate: DataStoreUpdate = { val update = executionStore.update DataStoreUpdate(update.runnableKeys, update.statusChanges, this.copy(executionStore = update.updatedStore)) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala index 93645a87c0f..22a1612c672 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheDiffActor.scala @@ 
-229,7 +229,7 @@ object CallCacheDiffActor { def extractAttemptAndObject(value: JsValue): ErrorOr[(Int, JsObject)] = for { asObject <- value.mapToJsObject attempt <- asObject.fieldAsNumber("attempt") - } yield (attempt.value.intValue(), asObject) + } yield (attempt.value.intValue, asObject) def foldFunction(accumulator: ErrorOr[(Int, JsObject)], nextElement: JsValue): ErrorOr[(Int, JsObject)] = { (accumulator, extractAttemptAndObject(nextElement)) mapN { case ((previousHighestAttempt, previousJsObject), (nextAttempt, nextJsObject)) => diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/EngineJobExecutionActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/EngineJobExecutionActor.scala index 891a5b6dd26..3bbdb5eb1f2 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/EngineJobExecutionActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/EngineJobExecutionActor.scala @@ -262,7 +262,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, log.info(template, jobTag, data.failedCopyAttempts, callCachingParameters.maxFailedCopyAttempts, data.aggregatedHashString) } else { log.info(s"BT-322 {} cache hit copying nomatch: could not find a suitable cache hit.", jobTag) - workflowLogger.info("Could not copy a suitable cache hit for {}. No copy attempts were made.", jobTag) + workflowLogger.info("Could not copy a suitable cache hit for {}. No copy attempts were made.", arg = jobTag) } runJob(data) @@ -300,7 +300,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, case Event(HashError(t), data: ResponsePendingData) => disableCacheWrite(t) // Can't write hashes for this job, but continue to wait for the lookup response. - stay using data.copy(hashes = Option(Failure(t))) + stay() using data.copy(hashes = Option(Failure(t))) } when(BackendIsCopyingCachedOutputs) { @@ -314,7 +314,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, case Event(response: JobSucceededResponse, data: ResponsePendingData) if effectiveCallCachingMode.writeToCache && data.hashes.isEmpty => logCacheHitSuccessAndNotifyMetadata(data) // Wait for the CallCacheHashes - stay using data.withSuccessResponse(response) + stay() using data.withSuccessResponse(response) case Event(response: JobSucceededResponse, data: ResponsePendingData) => // bad hashes or cache write off logCacheHitSuccessAndNotifyMetadata(data) saveJobCompletionToJobStore(data.withSuccessResponse(response)) @@ -337,7 +337,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, case Event(HashError(t), data: ResponsePendingData) => disableCacheWrite(t) // Can't write hashes for this job, but continue to wait for the copy response. - stay using data.copy(hashes = Option(Failure(t))) + stay() using data.copy(hashes = Option(Failure(t))) } when(InvalidatingCacheEntry) { @@ -352,7 +352,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, case Event(HashError(t), data: ResponsePendingData) => disableCacheWrite(t) // Can't write hashes for this job, but continue to wait for the copy response. 
- stay using data.copy(hashes = Option(Failure(t))) + stay() using data.copy(hashes = Option(Failure(t))) } // Handles JobSucceededResponse messages @@ -370,7 +370,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, // Hashes are still missing and we want them (writeToCache is true) - wait for them case Event(response: JobSucceededResponse, data: ResponsePendingData) if effectiveCallCachingMode.writeToCache && data.hashes.isEmpty => eventList ++= response.executionEvents - stay using data.withSuccessResponse(response) + stay() using data.withSuccessResponse(response) // Hashes are missing but writeToCache is OFF - complete the job case Event(response: JobSucceededResponse, data: ResponsePendingData) => eventList ++= response.executionEvents @@ -387,7 +387,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, saveJobCompletionToJobStore(data.withFailedResponse(response)) // Hashes are still missing and we want them (writeToCache is true) - wait for them case Event(response: BackendJobFailedResponse, data: ResponsePendingData) if effectiveCallCachingMode.writeToCache && data.hashes.isEmpty => - stay using data.withFailedResponse(response) + stay() using data.withFailedResponse(response) // Hashes are missing but writeToCache is OFF - complete the job case Event(response: BackendJobFailedResponse, data: ResponsePendingData) => saveJobCompletionToJobStore(data.withFailedResponse(response)) @@ -445,7 +445,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, // We're getting hash errors and the job is still running, disable call caching and stay here to wait for the job to finish case Event(HashError(t), data: ResponsePendingData) => disableCallCaching(Option(t)) - stay using data.copy(hashes = Option(Failure(t))) + stay() using data.copy(hashes = Option(Failure(t))) } when(RunningJob)(jobSuccessHandler.orElse(jobFailedHandler).orElse(jobAbortedHandler).orElse(hashSuccessResponseHandler).orElse(hashFailureResponseHandler)) @@ -479,7 +479,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, whenUnhandled { case Event(EngineStatsActor.JobCountQuery, _) => - sender ! EngineStatsActor.JobCount(1) + sender() ! EngineStatsActor.JobCount(1) stay() case Event(e: ActorInitializationException, _) => respondAndStop(JobFailedNonRetryableResponse(jobDescriptorKey, e, None)) @@ -502,8 +502,8 @@ class EngineJobExecutionActor(replyTo: ActorRef, // due to timeouts). That's ok, we just ignore this message in any other situation: stay() case Event(msg, _) => - log.error("Bad message from {} to EngineJobExecutionActor in state {}(with data {}): {}", sender, stateName, stateData, msg) - stay + log.error("Bad message from {} to EngineJobExecutionActor in state {}(with data {}): {}", sender(), stateName, stateData, msg) + stay() } private def publishHashesToMetadata(maybeHashes: Option[Try[CallCacheHashes]]) = publishHashResultsToMetadata(maybeHashes.map(_.map(_.hashes))) @@ -731,7 +731,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, data.ejha match { case Some(ejha) if data.failedCopyAttempts < callCachingParameters.maxFailedCopyAttempts => - workflowLogger.debug("Trying to use another cache hit for job: {}", jobDescriptorKey) + workflowLogger.debug("Trying to use another cache hit for job: {}", argument = jobDescriptorKey) ejha ! 
NextHit goto(CheckingCallCache) using data case Some(_) => @@ -907,7 +907,7 @@ class EngineJobExecutionActor(replyTo: ActorRef, case responseData: ResponseData => responseData.withHashes(Option(Success(hashes))) case _ => data } - stay using updatedData + stay() using updatedData } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActor.scala index 190389eabfb..8540717a073 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActor.scala @@ -63,7 +63,7 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, private[preparation] lazy val expressionLanguageFunctions = { val ioFunctionSet: IoFunctionSet = factory.expressionLanguageFunctions(workflowDescriptor.backendDescriptor, jobKey, initializationData, ioActor, ioEc) - ioFunctionSet.makeInputSpecificFunctions + ioFunctionSet.makeInputSpecificFunctions() } private[preparation] lazy val dockerHashCredentials = factory.dockerHashCredentials(workflowDescriptor.backendDescriptor, initializationData) @@ -78,7 +78,7 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, case Valid((inputs, attributes)) => fetchDockerHashesIfNecessary(inputs, attributes) case Invalid(failure) => sendFailureAndStop(new MessageAggregation with NoStackTrace { override def exceptionContext: String = s"Call input and runtime attributes evaluation failed for ${jobKey.call.localName}" - override def errorMessages: Traversable[String] = failure.toList + override def errorMessages: Iterable[String] = failure.toList }) } } @@ -96,7 +96,7 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, when(FetchingKeyValueStoreEntries) { case Event(kvResponse: KvResponse, data @ JobPreparationKeyLookupData(keyLookups, maybeCallCachingEligible, dockerSize, inputs, attributes)) => keyLookups.withResponse(kvResponse.key, kvResponse) match { - case newPartialLookup: PartialKeyValueLookups => stay using data.copy(keyLookups = newPartialLookup) + case newPartialLookup: PartialKeyValueLookups => stay() using data.copy(keyLookups = newPartialLookup) case finished: KeyValueLookupResults => sendResponseAndStop(prepareBackendDescriptor(inputs, attributes, maybeCallCachingEligible, finished.unscoped, dockerSize)) } @@ -152,6 +152,8 @@ class JobPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, sendDockerRequest(dockerImageId) case Failure(failure) => sendFailureAndStop(failure) + + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } attributes.get(RuntimeAttributesKeys.DockerKey) match { diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/SubWorkflowPreparationActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/SubWorkflowPreparationActor.scala index 1e38a05b0d4..f14c00965d7 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/SubWorkflowPreparationActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/SubWorkflowPreparationActor.scala @@ -73,7 +73,7 @@ class SubWorkflowPreparationActor(workflowDescriptor: EngineWorkflowDescriptor, case Valid(response) => context.parent ! 
response
      case Invalid(f) => context.parent ! CallPreparationFailed(callKey, new MessageAggregation {
        override def exceptionContext: String = "Failed to evaluate inputs for sub workflow"
-        override def errorMessages: Traversable[String] = f.toList
+        override def errorMessages: Iterable[String] = f.toList
      })
    }
    context stop self
diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStore.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStore.scala
index daa3ff0e92a..32d9aa2ca4c 100644
--- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStore.scala
+++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/stores/ExecutionStore.scala
@@ -230,7 +230,7 @@ sealed abstract class ExecutionStore private[stores](statusStore: Map[JobKey, Ex
    * which is equivalent to none of them being in a non-terminal status and faster to verify
    */
   def isDone: Boolean = {
-    NonTerminalStatuses.map(keysWithStatus).forall(_.isEmpty)
+    NonTerminalStatuses.toList.map(keysWithStatus).forall(_.isEmpty)
   }
 
   def isStalled: Boolean = {
@@ -284,7 +284,7 @@ sealed abstract class ExecutionStore private[stores](statusStore: Map[JobKey, Ex
     }
 
     // Filter for unstarted keys:
-    val readyToStart = keysWithStatus(NotStarted).toStream.filter(filterFunction)
+    val readyToStart = keysWithStatus(NotStarted).to(LazyList).filter(filterFunction)
 
     // Compute the first ExecutionStore.MaxJobsToStartPerTick + 1 runnable keys
     val keysToStartPlusOne = readyToStart.take(MaxJobsToStartPerTick + 1).toList
diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowOutputsActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowOutputsActor.scala
index 0fdeafc06a1..b6fa6b285a6 100644
--- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowOutputsActor.scala
+++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/CopyWorkflowOutputsActor.scala
@@ -39,7 +39,7 @@ class CopyWorkflowOutputsActor(workflowId: WorkflowId, override val ioActor: Act
   private def performActionThenRespond(operation: => Future[BackendWorkflowLifecycleActorResponse],
                                        onFailure: (Throwable) => BackendWorkflowLifecycleActorResponse)
                                       (implicit ec: ExecutionContext) = {
-    val respondTo: ActorRef = sender
+    val respondTo: ActorRef = sender()
     operation onComplete {
       case Success(r) => respondTo ! r
       case Failure(t) => respondTo !
onFailure(t) @@ -70,7 +70,7 @@ class CopyWorkflowOutputsActor(workflowId: WorkflowId, override val ioActor: Act val copies = outputFilePaths map { case (srcPath, dstPath) => asyncIo.copyAsync(srcPath, dstPath) } - + Future.sequence(copies) } @@ -81,7 +81,7 @@ class CopyWorkflowOutputsActor(workflowId: WorkflowId, override val ioActor: Act } } } - + private def getOutputFilePaths(workflowOutputsPath: Path): List[(Path, Path)] = { val useRelativeOutputPaths: Boolean = workflowDescriptor.getWorkflowOption(UseRelativeOutputPaths).contains("true") @@ -102,7 +102,7 @@ class CopyWorkflowOutputsActor(workflowId: WorkflowId, override val ioActor: Act lazy val truncateRegex = ".*/call-[^/]*/(shard-[0-9]+/)?(cacheCopy/)?(attempt-[0-9]+/)?(execution/)?".r val outputFileDestinations = rootAndFiles flatMap { case (workflowRoot, outputs) => - outputs map { output => + outputs map { output => val outputPath = PathFactory.buildPath(output, pathBuilders) outputPath -> { if (useRelativeOutputPaths) { diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowFinalizationActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowFinalizationActor.scala index 735adb72d6c..8731367cfb7 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowFinalizationActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/finalization/WorkflowFinalizationActor.scala @@ -80,7 +80,7 @@ case class WorkflowFinalizationActor(workflowDescriptor: EngineWorkflowDescripto // If an engine or backend finalization actor (children of this actor) dies, send ourselves the failure and stop the child actor override def supervisorStrategy = OneForOneStrategy() { - case failure => + case failure => self.tell(FinalizationFailed(failure), sender()) Stop } @@ -108,10 +108,10 @@ case class WorkflowFinalizationActor(workflowDescriptor: EngineWorkflowDescripto allActors match { case Failure(ex) => - sender ! WorkflowFinalizationFailedResponse(Seq(ex)) + sender() ! WorkflowFinalizationFailedResponse(Seq(ex)) goto(WorkflowFinalizationFailedState) case Success(actors) if actors.isEmpty => - sender ! WorkflowFinalizationSucceededResponse + sender() ! 
WorkflowFinalizationSucceededResponse goto(FinalizationSucceededState) case Success(actors) => val actorSet = actors.toSet @@ -121,7 +121,7 @@ case class WorkflowFinalizationActor(workflowDescriptor: EngineWorkflowDescripto goto(WorkflowFinalizationFailedState) } } - + // Only send to each backend the jobs that it executed private def filterJobExecutionsForBackend(calls: Set[CommandCallNode]): JobExecutionMap = { jobExecutionMap map { @@ -132,8 +132,8 @@ case class WorkflowFinalizationActor(workflowDescriptor: EngineWorkflowDescripto } when(FinalizationInProgressState) { - case Event(FinalizationSuccess, stateData) => checkForDoneAndTransition(stateData.withSuccess(sender)) - case Event(FinalizationFailed(reason), stateData) => checkForDoneAndTransition(stateData.withFailure(sender, reason)) + case Event(FinalizationSuccess, stateData) => checkForDoneAndTransition(stateData.withSuccess(sender())) + case Event(FinalizationFailed(reason), stateData) => checkForDoneAndTransition(stateData.withFailure(sender(), reason)) } when(FinalizationSucceededState) { FSM.NullFunction } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/initialization/WorkflowInitializationActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/initialization/WorkflowInitializationActor.scala index 317151600ed..2c0e8cb5fd3 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/initialization/WorkflowInitializationActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/initialization/WorkflowInitializationActor.scala @@ -89,7 +89,7 @@ case class WorkflowInitializationActor(workflowIdForLogging: PossiblyNotRootWork override def failureResponse(reasons: Seq[Throwable]) = WorkflowInitializationFailedResponse(reasons) override val abortedResponse = WorkflowInitializationAbortedResponse - private var backendActorsAndBackends: Traversable[BackendActorAndBackend] = _ + private var backendActorsAndBackends: Iterable[BackendActorAndBackend] = _ when(InitializationPendingState) { case Event(StartInitializationCommand, _) => @@ -105,11 +105,11 @@ case class WorkflowInitializationActor(workflowIdForLogging: PossiblyNotRootWork backendInitializationActors match { case Failure(ex) => - sender ! WorkflowInitializationFailedResponse(Seq(ex)) + sender() ! WorkflowInitializationFailedResponse(Seq(ex)) goto(InitializationFailedState) case Success(actors) if actors.isEmpty => backendActorsAndBackends = List.empty - sender ! WorkflowInitializationSucceededResponse(AllBackendInitializationData.empty) + sender() ! WorkflowInitializationSucceededResponse(AllBackendInitializationData.empty) goto(InitializationSucceededState) case Success(actors) => backendActorsAndBackends = actors @@ -124,17 +124,17 @@ case class WorkflowInitializationActor(workflowIdForLogging: PossiblyNotRootWork } when(InitializationInProgressState) { - case Event(InitializationSuccess(initData), stateData) => checkForDoneAndTransition(stateData.withSuccess(sender, initData)) - case Event(InitializationFailed(reason), stateData) => checkForDoneAndTransition(stateData.withFailure(sender, reason)) + case Event(InitializationSuccess(initData), stateData) => checkForDoneAndTransition(stateData.withSuccess(sender(), initData)) + case Event(InitializationFailed(reason), stateData) => checkForDoneAndTransition(stateData.withFailure(sender(), reason)) case Event(EngineLifecycleActorAbortCommand, stateData) => stateData.actors foreach { _ ! 
BackendWorkflowInitializationActor.Abort } goto(InitializationAbortingState) } when(InitializationAbortingState) { - case Event(InitializationSuccess(initData), stateData) => checkForDoneAndTransition(stateData.withSuccess(sender, initData)) - case Event(InitializationFailed(reason), stateData) => checkForDoneAndTransition(stateData.withFailure(sender, reason)) - case Event(BackendActorAbortedResponse, stateData) => checkForDoneAndTransition(stateData.withAborted(sender)) + case Event(InitializationSuccess(initData), stateData) => checkForDoneAndTransition(stateData.withSuccess(sender(), initData)) + case Event(InitializationFailed(reason), stateData) => checkForDoneAndTransition(stateData.withFailure(sender(), reason)) + case Event(BackendActorAbortedResponse, stateData) => checkForDoneAndTransition(stateData.withAborted(sender())) } when(InitializationSucceededState) { FSM.NullFunction } diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/materialization/MaterializeWorkflowDescriptorActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/materialization/MaterializeWorkflowDescriptorActor.scala index 030ae88b559..70365819fe5 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/materialization/MaterializeWorkflowDescriptorActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/materialization/MaterializeWorkflowDescriptorActor.scala @@ -227,7 +227,7 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, workflowInitializationFailed(error, sender()) goto(MaterializationFailedState) case Event(Status.Failure(failure), _) => - workflowInitializationFailed(NonEmptyList.of(failure.getMessage, failure.getStackTrace.map(_.toString):_*), sender()) + workflowInitializationFailed(NonEmptyList.of(failure.getMessage, failure.getStackTrace.toList.map(_.toString):_*), sender()) goto(MaterializationFailedState) } @@ -249,7 +249,7 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, goto(MaterializationAbortedState) case unhandledMessage => workflowLogger.warn(s"received an unhandled message $unhandledMessage in state $stateName") - stay + stay() } private def workflowInitializationFailed(errors: NonEmptyList[String], replyTo: ActorRef) = { @@ -283,7 +283,7 @@ class MaterializeWorkflowDescriptorActor(serviceRegistryActor: ActorRef, workflowLogger.info(s"Parsing workflow as ${validFactory.languageName} ${validFactory.languageVersionName}") pushLanguageToMetadata(validFactory.languageName, validFactory.languageVersionName) } - + factory } diff --git a/engine/src/main/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActor.scala b/engine/src/main/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActor.scala index 05bb9c8c94a..e97ae9974f4 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActor.scala @@ -93,14 +93,14 @@ class JobTokenDispenserActor(override val serviceRegistryActor: ActorRef, override def receive: Actor.Receive = tokenDispensingReceive.orElse(rateReceive).orElse(instrumentationReceive(instrumentationAction)) private def tokenDispensingReceive: Receive = { - case JobTokenRequest(hogGroup, tokenType) => enqueue(sender, hogGroup.value, tokenType) - case JobTokenReturn => release(sender) + case JobTokenRequest(hogGroup, tokenType) => enqueue(sender(), hogGroup.value, tokenType) + case JobTokenReturn => release(sender()) case TokensAvailable(n) => 
emitHeartbeatMetrics() dispense(n) case Terminated(terminee) => onTerminate(terminee) case LogJobTokenAllocation(nextInterval) => logTokenAllocation(nextInterval) - case FetchLimitedGroups => sender ! tokenExhaustedGroups + case FetchLimitedGroups => sender() ! tokenExhaustedGroups case ShutdownCommand => context stop self } diff --git a/engine/src/main/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIterator.scala b/engine/src/main/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIterator.scala index aaf681affb8..36e555be98b 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIterator.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/tokens/RoundRobinQueueIterator.scala @@ -42,7 +42,7 @@ final class RoundRobinQueueIterator(initialTokenQueue: List[TokenQueue], initial // and don't keep emptying the same queue as long as it has elements // For instance, if we have 5 queues and pointer is 2, we want to try indices (2, 3, 4, 0, 1) - val indexStream = ((pointer until numberOfQueues) ++ (0 until pointer)).toStream + val indexStream = ((pointer until numberOfQueues) ++ (0 until pointer)).to(LazyList) val dequeuedTokenStream = indexStream.map(index => tokenQueues(index).dequeue -> index) val firstLeasedActor = dequeuedTokenStream.collectFirst({ diff --git a/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenEventLogger.scala b/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenEventLogger.scala index 27d1a10a421..7ee35f86cd5 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenEventLogger.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/tokens/TokenEventLogger.scala @@ -32,7 +32,7 @@ class CachingTokenEventLogger(cacheEntryTTL: FiniteDuration) extends TokenEventL } override def tokenExhaustedGroups: Set[String] = { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ groupCache.asMap().keySet().asScala.toSet } @@ -47,7 +47,7 @@ class CachingTokenEventLogger(cacheEntryTTL: FiniteDuration) extends TokenEventL } override def tokenExhaustedBackends: Set[String] = { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ backendCache.asMap().keySet().asScala.toSet } diff --git a/engine/src/main/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPool.scala b/engine/src/main/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPool.scala index 5216c857fd0..761748abb49 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPool.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/tokens/UnhoggableTokenPool.scala @@ -33,10 +33,10 @@ final class UnhoggableTokenPool(val tokenType: JobTokenType) extends SimplePool[ def available(hogGroup: String): UnhoggableTokenPoolAvailability = { hogLimitOption match { - case None if leased < capacity => TokensAvailable + case None if leased() < capacity => TokensAvailable case None => TokenTypeExhausted case Some(hogLimit) => - if (leased < capacity) { + if (leased() < capacity) { synchronized { if (hogGroupAssignments.get(hogGroup).forall(_.size < hogLimit)) { TokensAvailable @@ -59,12 +59,12 @@ final class UnhoggableTokenPool(val tokenType: JobTokenType) extends SimplePool[ super.tryAcquire() match { case Some(lease) => val hoggingLease = new TokenHoggingLease(lease, hogGroup, this) - hogGroupAssignments += hogGroup -> (thisHogSet + hoggingLease.get) + hogGroupAssignments += hogGroup -> (thisHogSet + hoggingLease.get()) hoggingLease case None => TokenTypeExhausted } } else { - if 
(leased == capacity) TokenTypeExhausted else HogLimitExceeded + if (leased() == capacity) TokenTypeExhausted else HogLimitExceeded } } case None => @@ -80,7 +80,7 @@ final class UnhoggableTokenPool(val tokenType: JobTokenType) extends SimplePool[ def unhog(hogGroup: String, lease: Lease[JobToken]): Unit = { hogLimitOption foreach { _ => synchronized { - val newAssignment = hogGroupAssignments.getOrElse(hogGroup, HashSet.empty) - lease.get + val newAssignment = hogGroupAssignments.getOrElse(hogGroup, HashSet.empty) - lease.get() if (newAssignment.isEmpty) { hogGroupAssignments -= hogGroup @@ -103,7 +103,7 @@ final class UnhoggableTokenPool(val tokenType: JobTokenType) extends SimplePool[ case None => (None, None) } - TokenPoolState(hogGroupUsages, hogLimitValue, capacity, leased, leased < capacity) + TokenPoolState(hogGroupUsages, hogLimitValue, capacity, leased(), leased() < capacity) } } @@ -115,7 +115,7 @@ object UnhoggableTokenPool { final class TokenHoggingLease(lease: Lease[JobToken], hogGroup: String, pool: UnhoggableTokenPool) extends Lease[JobToken] with UnhoggableTokenPoolResult { private[this] val dirty = new AtomicBoolean(false) - override protected[this] def a: JobToken = lease.get + override protected[this] def a: JobToken = lease.get() override protected[this] def handleRelease(): Unit = { if (dirty.compareAndSet(false, true)) { diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/AbortRequestScanningActor.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/AbortRequestScanningActor.scala index e208dcd4734..1e565886941 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/AbortRequestScanningActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/AbortRequestScanningActor.scala @@ -7,7 +7,7 @@ import cromwell.engine.workflow.WorkflowManagerActor import cromwell.engine.workflow.workflowstore.AbortRequestScanningActor.{AbortConfig, RunScan} import cromwell.engine.workflow.workflowstore.WorkflowStoreActor.{FindWorkflowsWithAbortRequested, FindWorkflowsWithAbortRequestedFailure, FindWorkflowsWithAbortRequestedSuccess} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreActor.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreActor.scala index 31a07bca87f..4bb31db4a6d 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreActor.scala @@ -54,7 +54,7 @@ final case class WorkflowStoreActor private( case GetWorkflowStoreStats => // Retrieve the workflow store stats, convert the WorkflowStoreStates to WorkflowStates val stats = workflowStore.stats.map(m => m.map(e => WorkflowState.withName(e._1.toString) -> e._2)) - stats pipeTo sender + stats pipeTo sender() () } } diff --git a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala index 5f066feeb76..44ecc09f1c8 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/workflowstore/WorkflowStoreEngineActor.scala @@ -58,9 +58,9 @@ final case class WorkflowStoreEngineActor private(store: WorkflowStore, log.debug("Workflow store 
initialization successful") } addWorkCompletionHooks(InitializerCommand, work) - goto(Working) using stateData.withCurrentCommand(InitializerCommand, sender) + goto(Working) using stateData.withCurrentCommand(InitializerCommand, sender()) case Event(x: WorkflowStoreActorEngineCommand, _) => - stay using stateData.withPendingCommand(x, sender) + stay() using stateData.withPendingCommand(x, sender()) } when(Idle) { @@ -68,7 +68,7 @@ final case class WorkflowStoreEngineActor private(store: WorkflowStore, if (stateData.currentOperation.nonEmpty || stateData.pendingOperations.nonEmpty) { log.error("Non-empty WorkflowStoreActorData when in Idle state: {}", stateData) } - startNewWork(cmd, sender, stateData.withCurrentCommand(cmd, sender)) + startNewWork(cmd, sender(), stateData.withCurrentCommand(cmd, sender())) } when(Working) { @@ -78,7 +78,7 @@ final case class WorkflowStoreEngineActor private(store: WorkflowStore, case None => goto(Idle) using newData case Some(WorkflowStoreActorCommandWithSender(cmd, sndr)) => startNewWork(cmd, sndr, newData) } - case Event(cmd: WorkflowStoreActorEngineCommand, data) => stay using data.withPendingCommand(cmd, sender) + case Event(cmd: WorkflowStoreActorEngineCommand, data) => stay() using data.withPendingCommand(cmd, sender()) } whenUnhandled { @@ -90,7 +90,7 @@ final case class WorkflowStoreEngineActor private(store: WorkflowStore, stay() case Event(msg, _) => log.warning("Unexpected message to WorkflowStoreActor in state {} with data {}: {}", stateName, stateData, msg) - stay + stay() } onTransition { diff --git a/engine/src/main/scala/cromwell/jobstore/EmptyJobStoreActor.scala b/engine/src/main/scala/cromwell/jobstore/EmptyJobStoreActor.scala index 7a07b5da59f..f047e6d38e9 100644 --- a/engine/src/main/scala/cromwell/jobstore/EmptyJobStoreActor.scala +++ b/engine/src/main/scala/cromwell/jobstore/EmptyJobStoreActor.scala @@ -7,8 +7,8 @@ import cromwell.util.GracefulShutdownHelper.ShutdownCommand class EmptyJobStoreActor extends Actor { override def receive: Receive = { - case w: JobStoreWriterCommand => sender ! JobStoreWriteSuccess(w) - case _: QueryJobCompletion => sender ! JobNotComplete + case w: JobStoreWriterCommand => sender() ! JobStoreWriteSuccess(w) + case _: QueryJobCompletion => sender() ! 
JobNotComplete
     case ShutdownCommand => context stop self
   }
 }
diff --git a/engine/src/main/scala/cromwell/server/CromwellRootActor.scala b/engine/src/main/scala/cromwell/server/CromwellRootActor.scala
index 49a99b20cfd..df4fc5b1207 100644
--- a/engine/src/main/scala/cromwell/server/CromwellRootActor.scala
+++ b/engine/src/main/scala/cromwell/server/CromwellRootActor.scala
@@ -61,7 +61,9 @@ abstract class CromwellRootActor(terminator: CromwellTerminator,
   import CromwellRootActor._
 
   // Make sure the filesystems are initialized at startup
-  val _ = CromwellFileSystems.instance
+  locally {
+    val _ = CromwellFileSystems.instance
+  }
 
   private val logger = Logging(context.system, this)
 
diff --git a/engine/src/main/scala/cromwell/webservice/EngineStatsActor.scala b/engine/src/main/scala/cromwell/webservice/EngineStatsActor.scala
index 7a07357ff9d..ce123db4d20 100644
--- a/engine/src/main/scala/cromwell/webservice/EngineStatsActor.scala
+++ b/engine/src/main/scala/cromwell/webservice/EngineStatsActor.scala
@@ -19,8 +19,8 @@ final case class EngineStatsActor(workflowActors: List[ActorRef], replyTo: Actor
   private var jobCounts = Map.empty[ActorRef, Int]
 
   /*
-   * FIXME 
-   * Because of sub workflows there is currently no reliable way to know if we received responses from all running WEAs. 
+   * FIXME
+   * Because of sub workflows there is currently no reliable way to know if we received responses from all running WEAs.
    * For now, we always wait for the timeout duration before responding to give a chance to all WEAs to respond (even nested ones).
    * This could be improved by having WEAs wait for their sub WEAs before sending back the response.
    */
@@ -31,7 +31,7 @@ final case class EngineStatsActor(workflowActors: List[ActorRef], replyTo: Actor
 
   override def receive = {
     case JobCount(count) =>
-      jobCounts += (sender -> count)
+      jobCounts += (sender() -> count)
     case ShutItDown =>
       reportStats()
     case wompWomp => log.error("Unexpected message to EngineStatsActor: {}", wompWomp)
diff --git a/engine/src/main/scala/cromwell/webservice/WebServiceUtils.scala b/engine/src/main/scala/cromwell/webservice/WebServiceUtils.scala
index 15607237fb0..7483b300f2b 100644
--- a/engine/src/main/scala/cromwell/webservice/WebServiceUtils.scala
+++ b/engine/src/main/scala/cromwell/webservice/WebServiceUtils.scala
@@ -32,6 +32,15 @@ trait WebServiceUtils {
     complete((statusCode, warningHeaders(warnings), value))
   }
 
+  // Added for the Scala 2.13 upgrade; it is unclear how the baseline version compiled without this overload.
+  /**
+    * Completes a response of a List of Product (typically a case class), using an implicit marshaller (typically a JSON encoder).
+    */
+  def completeResponse[A <: Product](statusCode: StatusCode, values: List[A], warnings: Seq[String])
+                                    (implicit mt: ToEntityMarshaller[List[A]]): Route = {
+    complete((statusCode, warningHeaders(warnings), values))
+  }
+
   /**
     * Completes a response of string with the supplied content type.
* diff --git a/engine/src/main/scala/cromwell/webservice/routes/MetadataRouteSupport.scala b/engine/src/main/scala/cromwell/webservice/routes/MetadataRouteSupport.scala index 2ccde72f88f..237e21a2edd 100644 --- a/engine/src/main/scala/cromwell/webservice/routes/MetadataRouteSupport.scala +++ b/engine/src/main/scala/cromwell/webservice/routes/MetadataRouteSupport.scala @@ -72,7 +72,7 @@ trait MetadataRouteSupport extends HttpInstrumentation { encodeResponse { path("workflows" / Segment / Segment / "metadata") { (_, possibleWorkflowId) => instrumentRequest { - parameters(('includeKey.*, 'excludeKey.*, 'expandSubWorkflows.as[Boolean].?)) { (includeKeys, excludeKeys, expandSubWorkflowsOption) => + parameters((Symbol("includeKey").*, Symbol("excludeKey").*, Symbol("expandSubWorkflows").as[Boolean].?)) { (includeKeys, excludeKeys, expandSubWorkflowsOption) => val includeKeysOption = NonEmptyList.fromList(includeKeys.toList) val excludeKeysOption = NonEmptyList.fromList(excludeKeys.toList) val expandSubWorkflows = expandSubWorkflowsOption.getOrElse(false) diff --git a/engine/src/test/scala/cromwell/MetadataWatchActor.scala b/engine/src/test/scala/cromwell/MetadataWatchActor.scala index 3c8da786b32..2c43ff74463 100644 --- a/engine/src/test/scala/cromwell/MetadataWatchActor.scala +++ b/engine/src/test/scala/cromwell/MetadataWatchActor.scala @@ -32,8 +32,8 @@ final case class MetadataWatchActor(promise: Promise[Unit], matchers: Matcher*) if (unsatisfiedMatchers.nonEmpty) tryMatchingEvents(events) replyTo ! MetadataWriteSuccess(events) // Because the MetadataWatchActor is sometimes used in place of the ServiceRegistryActor, this allows WFs to continue: - case kvGet: KvGet => sender ! KvKeyLookupFailed(kvGet) - case kvPut: KvPut => sender ! KvPutSuccess(kvPut) + case kvGet: KvGet => sender() ! KvKeyLookupFailed(kvGet) + case kvPut: KvPut => sender() ! 
KvPutSuccess(kvPut) case _: InstrumentationServiceMessage => // No-op, just ignore case other => log.error(s"Invalid message to MetadataWatchActor: $other") } @@ -45,11 +45,11 @@ object MetadataWatchActor { trait Matcher { private var _fullEventList: List[MetadataEvent] = List.empty - final def matches(events: Traversable[MetadataEvent]): Boolean = { + final def matches(events: Iterable[MetadataEvent]): Boolean = { _fullEventList ++= events _matches(events) } - def _matches(events: Traversable[MetadataEvent]): Boolean + def _matches(events: Iterable[MetadataEvent]): Boolean private var _nearMisses: List[String] = List.empty private def addNearMissInfo(miss: String) = _nearMisses :+= miss def nearMissInformation = _nearMisses @@ -68,14 +68,14 @@ object MetadataWatchActor { } final case class JobKeyMetadataKeyAndValueContainStringMatcher(jobKeyCheck: Option[MetadataJobKey] => Boolean, key: String, value: String) extends Matcher { - def _matches(events: Traversable[MetadataEvent]): Boolean = { + def _matches(events: Iterable[MetadataEvent]): Boolean = { events.exists(e => e.key.key.contains(key) && jobKeyCheck(e.key.jobKey) && e.value.exists { v => v.valueType == MetadataString && checkMetadataValueContains(e.key.key, v, value) }) } } abstract class KeyMatchesRegexAndValueContainsStringMatcher(keyTemplate: String, value: String) extends Matcher { val templateRegex = keyTemplate.r - def _matches(events: Traversable[MetadataEvent]): Boolean = { + def _matches(events: Iterable[MetadataEvent]): Boolean = { events.exists(e => templateRegex.findFirstIn(e.key.key).isDefined && e.value.exists { v => checkMetadataValueContains(e.key.key, v, value) }) } diff --git a/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala b/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala index 18e86642760..d51c9f45375 100644 --- a/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala +++ b/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala @@ -72,7 +72,7 @@ class GcsBatchFlowSpec extends TestKitSuite with AnyFlatSpecLike with CromwellTi projectId = anyString, ) val gcsBatchCommandContext = GcsBatchCommandContext(GcsBatchCrc32Command.forPath(mockGcsPath).get, TestProbe().ref, 5) - val recoverCommandPrivateMethod = PrivateMethod[PartialFunction[Throwable, Future[GcsBatchResponse[_]]]]('recoverCommand) + val recoverCommandPrivateMethod = PrivateMethod[PartialFunction[Throwable, Future[GcsBatchResponse[_]]]](Symbol("recoverCommand")) val partialFuncAcceptingThrowable = gcsBatchFlow invokePrivate recoverCommandPrivateMethod(gcsBatchCommandContext) val futureRes = partialFuncAcceptingThrowable(new NullPointerException(null)) // no unhandled exceptions should be thrown here diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ExecutionStoreBenchmark.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ExecutionStoreBenchmark.scala index b3936e8b40e..6feb8338bab 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ExecutionStoreBenchmark.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ExecutionStoreBenchmark.scala @@ -1,8 +1,8 @@ package cromwell.engine.workflow.lifecycle.execution import cromwell.backend.BackendJobDescriptorKey +import cromwell.core.ExecutionStatus import cromwell.core.ExecutionStatus.{apply => _, _} -import cromwell.core.{ExecutionStatus, JobKey} import cromwell.engine.workflow.lifecycle.execution.keys.ScatterCollectorKey import 
cromwell.engine.workflow.lifecycle.execution.stores.ActiveExecutionStore import cromwell.util.SampleWdl @@ -12,9 +12,9 @@ import org.scalameter.reporting.RegressionReporter.Historian.Window import org.scalameter.reporting.RegressionReporter.Tester.Accepter import spray.json.DefaultJsonProtocol import wdl.draft2.model.WdlNamespaceWithWorkflow -import wom.graph.{CommandCallNode, ScatterNode} import wdl.transforms.draft2.wdlom2wom.WdlDraft2WomExecutableMakers._ import wom.expression.NoIoFunctionSet +import wom.graph.{CommandCallNode, ScatterNode} import wom.transforms.WomExecutableMaker.ops._ /** @@ -31,14 +31,14 @@ object ExecutionStoreBenchmark extends Bench[Double] with DefaultJsonProtocol { lazy val executor = LocalExecutor(new Executor.Warmer.Default, Aggregator.average, measurer) lazy val reporter = new RegressionReporter[Double](Accepter(), Window(0)) lazy val persistor = Persistor.None - + val inputJson = Option(SampleWdl.PrepareScatterGatherWdl().rawInputs.toJson.compactPrint) val namespace = WdlNamespaceWithWorkflow.load(SampleWdl.PrepareScatterGatherWdl().workflowSource(), Seq.empty).get val graph = namespace.toWomExecutable(inputJson, NoIoFunctionSet, strictValidation = true).getOrElse(throw new Exception("Failed to build womExecutable")).graph val prepareCall: CommandCallNode = graph.calls.find(_.localName == "do_prepare").get.asInstanceOf[CommandCallNode] val scatterCall: CommandCallNode = graph.allNodes.find(_.localName == "do_scatter").get.asInstanceOf[CommandCallNode] val scatter: ScatterNode = graph.scatters.head - + private def makeKey(call: CommandCallNode, executionStatus: ExecutionStatus)(index: Int) = { BackendJobDescriptorKey(call, Option(index), 1) -> executionStatus } @@ -53,13 +53,13 @@ object ExecutionStoreBenchmark extends Bench[Double] with DefaultJsonProtocol { doneMap = (0 until size map makeKey(prepareCall, ExecutionStatus.Done)).toMap collectorKeys = scatter.outputMapping.map(om => ScatterCollectorKey(om, size, ScatterNode.DefaultScatterCollectionFunction) -> ExecutionStatus.NotStarted).toMap notStartedMap = (0 until size map makeKey(scatterCall, ExecutionStatus.NotStarted)).toMap ++ collectorKeys - finalMap: Map[JobKey, ExecutionStatus] = doneMap ++ notStartedMap - } yield ActiveExecutionStore(finalMap, needsUpdate = true) + finalMap = doneMap ++ notStartedMap + } yield ActiveExecutionStore(finalMap.toMap, needsUpdate = true) } - + performance of "ExecutionStore" in { // Measures how fast the execution store can find runnable calls with lots of "Done" calls and "NotStarted" calls. - // Other "shapes" would be valuable to get a better sense of how this method behaves in various situations (with Collector Keys etc...) + // Other "shapes" would be valuable to get a better sense of how this method behaves in various situations (with Collector Keys etc...) 
measure method "update" in { val sizes: Gen[Int] = Gen.range("size")(from = 1000, upto = 10000, hop = 1000) using(stores(sizes)) in { es => diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala index 14ae6afa586..2181726b708 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala @@ -141,7 +141,7 @@ class CallCachingSlickDatabaseSpec } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop } + containerOpt.foreach { _.stop() } } } } diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala index 14762cbefb7..00b611aff52 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala @@ -188,7 +188,7 @@ class JobPreparationActorSpec val prefetchedKey = "MemoryMultiplier" val retryFactor = 1.1 val taskMemory = 1.0 - val attributes = Map ("memory" -> WomString(taskMemory + " GB")) + val attributes = Map ("memory" -> WomString(s"$taskMemory GB")) val inputsAndAttributes = (inputs, attributes).validNel var previousMultiplier = 1.0 @@ -217,7 +217,7 @@ class JobPreparationActorSpec expectMsgPF(5 seconds) { case success: BackendJobPreparationSucceeded => success.jobDescriptor.prefetchedKvStoreEntries should be(Map(prefetchedKey -> prefetchedVal)) - success.jobDescriptor.runtimeAttributes(RuntimeAttributesKeys.MemoryKey) shouldBe WomString((taskMemory * nextMultiplier) + " GB") + success.jobDescriptor.runtimeAttributes(RuntimeAttributesKeys.MemoryKey) shouldBe WomString(s"${taskMemory * nextMultiplier} GB") } } } diff --git a/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala b/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala index 839a8a82f8c..da6c22c7d47 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala @@ -1,7 +1,6 @@ package cromwell.engine.workflow.mocks import org.specs2.mock.Mockito -import wdl.draft2.model.WdlExpression import wdl.draft2.model.{Declaration, WdlExpression} import wom.types.WomType diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActorSpec.scala index 33b0a12b9f8..adaadb0fef7 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/JobTokenDispenserActorSpec.scala @@ -337,20 +337,22 @@ class JobTokenDispenserActorSpec extends TestKitSuite jobExecutionTokenDispenserActorName = "skip-dead-repeatedly", ) val grabberSupervisor = TestActorRef(new StoppingSupervisor(), "skip-dead-repeatedly-supervisor") - // The first 5 get a token and the 6th and 7h one are queued + // The first 5 get a token and the 6th and 7th one are 
queued val tokenGrabbingActors = (0 until 1000).toVector.map { i => TestActorRef[TestTokenGrabbingActor](TestTokenGrabbingActor.props(actorRefUnderTest, LimitedTo5Tokens), grabberSupervisor, s"grabber_" + i) } - val actorIterator = tokenGrabbingActors.toIterator + // Create a sliding window of 10 actors, skipping by 10 so the windows do not overlap. + val actorIterator = tokenGrabbingActors.sliding(10, 10) while (actorIterator.hasNext) { // We won't actually dispense 100, this is simulating the "steady drip" message // so that we don't have to wait 4 seconds per drip for the test case... actorRefUnderTest ! TokensAvailable(100) - val withTokens = actorIterator.take(5).toList - val nextInLine = actorIterator.take(5).toList + val window = actorIterator.next() + val withTokens = window.take(5) // First 5 in the window + val nextInLine = window.drop(5) // Last 5 in the window eventually { withTokens.foreach(_.underlyingActor.hasToken should be(true)) diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/MultipleTokenUsingActor.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/MultipleTokenUsingActor.scala index 96466119f38..79bf6951948 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/MultipleTokenUsingActor.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/MultipleTokenUsingActor.scala @@ -38,13 +38,13 @@ class MultipleTokenUsingActor(tokenDispenser: ActorRef, tokenType: JobTokenType, startedJobs = totalJobs case ImBusy(msInQueue) => runningJobs += 1 - globalRunningJobCounter.increment + globalRunningJobCounter.increment() queueWaits :+= msInQueue maximumConcurrency = math.max(runningJobs, maximumConcurrency) case AllDone => completedJobs += 1 runningJobs -= 1 - globalRunningJobCounter.decrement + globalRunningJobCounter.decrement() if (completedJobs == startedJobs) { starter ! 
TokenUsingActorCompletion(queueWaits, maximumConcurrency, errors) context.stop(self) diff --git a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/PatientTokenNeedingActor.scala b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/PatientTokenNeedingActor.scala index 5c1f93f3fea..debe5fe2537 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/tokens/large/PatientTokenNeedingActor.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/tokens/large/PatientTokenNeedingActor.scala @@ -31,7 +31,7 @@ class PatientTokenNeedingActor(tokenDispenser: ActorRef, tokenType: JobTokenType */ override def receive = { case Begin => - starter = sender + starter = sender() val requestDelay = Random.nextInt(30) + 1 context.system.scheduler.scheduleOnce(requestDelay.millis, self, RequestToken)(context.dispatcher) () diff --git a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala index 4433046fe4a..b6670788a65 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala @@ -488,7 +488,7 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat it should "stop container if required" taggedAs DbmsTest in { containerOpt.foreach { - _.stop + _.stop() } } } diff --git a/engine/src/test/scala/cromwell/webservice/EngineStatsActorSpec.scala b/engine/src/test/scala/cromwell/webservice/EngineStatsActorSpec.scala index 2e36d9ee175..9f7567e8eee 100644 --- a/engine/src/test/scala/cromwell/webservice/EngineStatsActorSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/EngineStatsActorSpec.scala @@ -50,7 +50,7 @@ class EngineStatsActorSpec extends TestKitSuite with AnyFlatSpecLike with Matche object EngineStatsActorSpec { final case class FakeWorkflowActor(jobs: Int) extends Actor { override def receive = { - case JobCountQuery => sender ! JobCount(jobs) + case JobCountQuery => sender() ! JobCount(jobs) } } } diff --git a/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala b/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala index f68189ea66e..ed4e588cfd1 100644 --- a/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/SwaggerServiceSpec.scala @@ -13,7 +13,7 @@ import org.yaml.snakeyaml.error.YAMLException import org.yaml.snakeyaml.nodes.MappingNode import org.yaml.snakeyaml.{Yaml => SnakeYaml} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class SwaggerServiceSpec extends AnyFlatSpec with CromwellTimeoutSpec with SwaggerService with ScalatestRouteTest with Matchers with TableDrivenPropertyChecks { diff --git a/engine/src/test/scala/cromwell/webservice/routes/CromwellApiServiceSpec.scala b/engine/src/test/scala/cromwell/webservice/routes/CromwellApiServiceSpec.scala index c91017ec5a8..2a3b11a41c5 100644 --- a/engine/src/test/scala/cromwell/webservice/routes/CromwellApiServiceSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/routes/CromwellApiServiceSpec.scala @@ -600,21 +600,21 @@ object CromwellApiServiceSpec { val response = WorkflowQuerySuccess(WorkflowQueryResponse(List(WorkflowQueryResult(ExistingWorkflowId.toString, None, Some(WorkflowSucceeded.toString), None, None, None, labels, Option("pid"), Option("rid"), Unarchived)), 1), None) - sender ! 
response + sender() ! response case ValidateWorkflowIdInMetadata(id) => - if (RecognizedWorkflowIds.contains(id)) sender ! MetadataService.RecognizedWorkflowId - else sender ! MetadataService.UnrecognizedWorkflowId + if (RecognizedWorkflowIds.contains(id)) sender() ! MetadataService.RecognizedWorkflowId + else sender() ! MetadataService.UnrecognizedWorkflowId case ValidateWorkflowIdInMetadataSummaries(id) => - if (SummarizedWorkflowIds.contains(id)) sender ! MetadataService.RecognizedWorkflowId - else sender ! MetadataService.UnrecognizedWorkflowId + if (SummarizedWorkflowIds.contains(id)) sender() ! MetadataService.RecognizedWorkflowId + else sender() ! MetadataService.UnrecognizedWorkflowId case FetchWorkflowMetadataArchiveStatusAndEndTime(id) => id match { - case ArchivedAndDeletedWorkflowId => sender ! WorkflowMetadataArchivedStatusAndEndTime(ArchivedAndDeleted, Option(OffsetDateTime.now)) - case ArchivedWorkflowId => sender ! WorkflowMetadataArchivedStatusAndEndTime(Archived, Option(OffsetDateTime.now)) - case _ => sender ! WorkflowMetadataArchivedStatusAndEndTime(Unarchived, Option(OffsetDateTime.now)) + case ArchivedAndDeletedWorkflowId => sender() ! WorkflowMetadataArchivedStatusAndEndTime(ArchivedAndDeleted, Option(OffsetDateTime.now)) + case ArchivedWorkflowId => sender() ! WorkflowMetadataArchivedStatusAndEndTime(Archived, Option(OffsetDateTime.now)) + case _ => sender() ! WorkflowMetadataArchivedStatusAndEndTime(Unarchived, Option(OffsetDateTime.now)) } case GetCurrentStatus => - sender ! StatusCheckResponse( + sender() ! StatusCheckResponse( ok = true, systems = Map( "Engine Database" -> SubsystemStatus(ok = true, messages = None))) @@ -628,29 +628,30 @@ object CromwellApiServiceSpec { case FailedWorkflowId => WorkflowFailed case _ => WorkflowSubmitted } - sender ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.processStatusResponse(id, status)) + sender() ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.processStatusResponse(id, status)) case request @ GetLabels(id) => - sender ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.processLabelsResponse(id, Map("key1" -> "label1", "key2" -> "label2"))) + sender() ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.processLabelsResponse(id, Map("key1" -> "label1", "key2" -> "label2"))) case request @ WorkflowOutputs(id) => val event = Vector(MetadataEvent(MetadataKey(id, None, "outputs:test.hello.salutation"), MetadataValue("Hello foo!", MetadataString))) - sender ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.processOutputsResponse(id, event)) + sender() ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.processOutputsResponse(id, event)) case request @ GetLogs(id) => - sender ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.workflowMetadataResponse(id, logsEvents(id), includeCallsIfEmpty = false, Map.empty)) + sender() ! SuccessfulMetadataJsonResponse(request, MetadataBuilderActor.workflowMetadataResponse(id, logsEvents(id), includeCallsIfEmpty = false, Map.empty)) case request @ GetMetadataAction(MetadataQuery(id, _, _, withKeys, withoutKeys, _), _) => val withKeysList = withKeys.map(_.toList).getOrElse(List.empty) val withoutKeysList = withoutKeys.map(_.toList).getOrElse(List.empty) - sender ! SuccessfulMetadataJsonResponse(request, responseMetadataValues(id, withKeysList, withoutKeysList)) + sender() ! 
SuccessfulMetadataJsonResponse(request, responseMetadataValues(id, withKeysList, withoutKeysList)) case PutMetadataActionAndRespond(events, _, _) => events.head.key.workflowId match { - case CromwellApiServiceSpec.ExistingWorkflowId => sender ! MetadataWriteSuccess(events) - case CromwellApiServiceSpec.SummarizedWorkflowId => sender ! MetadataWriteSuccess(events) - case CromwellApiServiceSpec.AbortedWorkflowId => sender ! MetadataWriteFailure(new Exception("mock exception of db failure"), events) + case CromwellApiServiceSpec.ExistingWorkflowId => sender() ! MetadataWriteSuccess(events) + case CromwellApiServiceSpec.SummarizedWorkflowId => sender() ! MetadataWriteSuccess(events) + case CromwellApiServiceSpec.AbortedWorkflowId => sender() ! MetadataWriteFailure(new Exception("mock exception of db failure"), events) case WorkflowId(_) => throw new Exception("Something untoward happened, this situation is not believed to be possible at this time") + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } case DescribeRequest(sourceFiles) => sourceFiles.workflowSource match { case Some("fail to describe") => - sender ! DescribeFailure("as requested, failing to describe") + sender() ! DescribeFailure("as requested, failing to describe") case Some("actor asplode") => throw new Exception("asploding now!") case _ => @@ -663,7 +664,7 @@ object CromwellApiServiceSpec { s"[reading back DescribeRequest contents] version: ${sourceFiles.workflowTypeVersion}" ) - sender ! DescribeSuccess(description = WorkflowDescription(valid = true, errors = readBack, validWorkflow = true)) + sender() ! DescribeSuccess(description = WorkflowDescription(valid = true, errors = readBack, validWorkflow = true)) } case _: InstrumentationServiceMessage => // Do nothing. case m => logger.error("Unexpected message received by MockServiceRegistryActor: {}", m) @@ -673,22 +674,24 @@ object CromwellApiServiceSpec { class MockWorkflowStoreActor extends Actor { override def receive = { case command: WorkflowOnHoldToSubmittedCommand if command.id == ExistingWorkflowId => - sender ! WorkflowOnHoldToSubmittedSuccess(command.id) + sender() ! WorkflowOnHoldToSubmittedSuccess(command.id) case command: WorkflowOnHoldToSubmittedCommand if command.id == UnrecognizedWorkflowId => - sender ! WorkflowOnHoldToSubmittedFailure(command.id, new Exception("Cannot switch to submitted")) - case SubmitWorkflow(_) => sender ! WorkflowSubmittedToStore(ExistingWorkflowId, WorkflowSubmitted) + sender() ! WorkflowOnHoldToSubmittedFailure(command.id, new Exception("Cannot switch to submitted")) + case SubmitWorkflow(_) => sender() ! WorkflowSubmittedToStore(ExistingWorkflowId, WorkflowSubmitted) case BatchSubmitWorkflows(sources) => val response = WorkflowsBatchSubmittedToStore(sources map { _ => ExistingWorkflowId }, WorkflowSubmitted) - sender ! response + sender() ! response case AbortWorkflowCommand(id) => val message = id match { case AbortingWorkflowId => WorkflowAbortRequestedResponse(id) case OnHoldWorkflowId | SubmittedWorkflowId => WorkflowAbortedResponse(id) case UnrecognizedWorkflowId => WorkflowAbortFailureResponse(id, new WorkflowNotFoundException(s"Couldn't abort $id because no workflow with that ID is in progress")) case WorkflowId(_) => throw new Exception("Something untoward happened") + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } - sender ! message - case GetWorkflowStoreStats => sender ! Map(WorkflowRunning -> 5, WorkflowSubmitted -> 3, WorkflowAborting -> 2) + sender() ! 
message + case GetWorkflowStoreStats => sender() ! Map(WorkflowRunning -> 5, WorkflowSubmitted -> 3, WorkflowAborting -> 2) + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } } @@ -696,11 +699,11 @@ object CromwellApiServiceSpec { override def receive: Receive = { case WorkflowManagerActor.EngineStatsCommand => val response = EngineStatsActor.EngineStats(1, 23) - sender ! response + sender() ! response case unexpected => val sndr = sender() log.error(s"Unexpected message {} from {}", unexpected, sndr) - sender ! s"Unexpected message received: $unexpected" + sender() ! s"Unexpected message received: $unexpected" } } } diff --git a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderFactorySpec.scala b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderFactorySpec.scala index b5205e99a32..1d941b5f6b2 100644 --- a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderFactorySpec.scala +++ b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsPathBuilderFactorySpec.scala @@ -35,7 +35,7 @@ class DrsPathBuilderFactorySpec extends AnyFlatSpec with CromwellTimeoutSpec wit |""".stripMargin ) - val fileSystems = new CromwellFileSystems(globalFileSystemConfig).factoriesFromConfig(fileSystemConfig).right.get + val fileSystems = new CromwellFileSystems(globalFileSystemConfig).factoriesFromConfig(fileSystemConfig).toOption.get fileSystems.keys should contain theSameElementsAs List("drs") fileSystems("drs") should be(a[DrsPathBuilderFactory]) } diff --git a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsResolverSpec.scala b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsResolverSpec.scala index 910bbb4e6c3..98d4140029b 100644 --- a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsResolverSpec.scala +++ b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsResolverSpec.scala @@ -6,7 +6,7 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class DrsResolverSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { diff --git a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommand.scala b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommand.scala index cbb87e2b8b3..ed07fdb3605 100644 --- a/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommand.scala +++ b/filesystems/gcs/src/main/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommand.scala @@ -12,7 +12,7 @@ import cromwell.core.io._ import cromwell.filesystems.gcs._ import mouse.all._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Try /** diff --git a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommandSpec.scala b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommandSpec.scala index 306471b333c..d54662f3535 100644 --- a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommandSpec.scala +++ b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/batch/GcsBatchIoCommandSpec.scala @@ -10,7 +10,7 @@ import org.scalatest.BeforeAndAfterAll import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class GcsBatchIoCommandSpec extends AnyFlatSpec 
with Matchers with BeforeAndAfterAll { behavior of "GcsBatchIoCommand" @@ -34,7 +34,7 @@ class GcsBatchIoCommandSpec extends AnyFlatSpec with Matchers with BeforeAndAfte command.mapGoogleResponse(null) - command.onSuccess(null, new HttpHeaders()).toEither.right.get.left.get + command.onSuccess(null, new HttpHeaders()).toEither.toOption.get.swap.toOption.get command.onFailure(new GoogleJsonError(), new HttpHeaders()) should be(None) } @@ -59,7 +59,7 @@ class GcsBatchIoCommandSpec extends AnyFlatSpec with Matchers with BeforeAndAfte response.setSize(BigInt(139).bigInteger) command.mapGoogleResponse(response) should be(Valid(139L)) - command.onSuccess(response, new HttpHeaders()).toEither.right.get.left.get should be(139L) + command.onSuccess(response, new HttpHeaders()).toEither.toOption.get.swap.toOption.get should be(139L) command.onFailure(new GoogleJsonError(), new HttpHeaders()) should be(None) } @@ -82,7 +82,7 @@ class GcsBatchIoCommandSpec extends AnyFlatSpec with Matchers with BeforeAndAfte command.mapGoogleResponse(response) should be(Valid("aeiouy")) - command.onSuccess(response, new HttpHeaders()).toEither.right.get.left.get should be("aeiouy") + command.onSuccess(response, new HttpHeaders()).toEither.toOption.get.swap.toOption.get should be("aeiouy") command.onFailure(new GoogleJsonError(), new HttpHeaders()) should be(None) } @@ -103,7 +103,7 @@ class GcsBatchIoCommandSpec extends AnyFlatSpec with Matchers with BeforeAndAfte val response = new StorageObject() command.mapGoogleResponse(response) - command.onSuccess(response, new HttpHeaders()).toEither.right.get.left.get + command.onSuccess(response, new HttpHeaders()).toEither.toOption.get.swap.toOption.get command.onFailure(new GoogleJsonError(), new HttpHeaders()) should be(None) } @@ -124,7 +124,7 @@ class GcsBatchIoCommandSpec extends AnyFlatSpec with Matchers with BeforeAndAfte val response = new StorageObject() command.mapGoogleResponse(response) should be(Valid(true)) - command.onSuccess(response, new HttpHeaders()).toEither.right.get.left.get should be(true) + command.onSuccess(response, new HttpHeaders()).toEither.toOption.get.swap.toOption.get should be(true) val error = new GoogleJsonError() command.onFailure(error, new HttpHeaders()) should be(None) @@ -152,7 +152,7 @@ class GcsBatchIoCommandSpec extends AnyFlatSpec with Matchers with BeforeAndAfte response.setItems(List(new StorageObject()).asJava) command.mapGoogleResponse(response) should be(Valid(true)) - command.onSuccess(response, new HttpHeaders()).toEither.right.get.left.get should be(true) + command.onSuccess(response, new HttpHeaders()).toEither.toOption.get.swap.toOption.get should be(true) command.onFailure(new GoogleJsonError(), new HttpHeaders()) should be(None) } @@ -178,11 +178,11 @@ class GcsBatchIoCommandSpec extends AnyFlatSpec with Matchers with BeforeAndAfte command.mapGoogleResponse(response) should be(Valid(())) response.setDone(true) - command.onSuccess(response, new HttpHeaders()).toEither.right.get.left.get should be(()) + command.onSuccess(response, new HttpHeaders()).toEither.toOption.get.swap.toOption.get should be(()) response.setDone(false) response.setRewriteToken("token") - command.onSuccess(response, new HttpHeaders()).toEither.right.get.right.get.rewriteToken should be(Option("token")) + command.onSuccess(response, new HttpHeaders()).toEither.toOption.get.toOption.get.rewriteToken should be(Option("token")) command.onFailure(new GoogleJsonError(), new HttpHeaders()) should be(None) } diff --git 
a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystem.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystem.scala index aa704561156..3d27cecf296 100644 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystem.scala +++ b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystem.scala @@ -10,7 +10,7 @@ import com.aliyun.oss.common.auth.DefaultCredentialProvider import com.aliyun.oss.{ClientConfiguration, OSSClient} import cromwell.filesystems.oss.nio.OssStorageConfiguration.{ACCESS_ID_KEY, ACCESS_KEY_KEY, ENDPOINT_KEY, SECURITY_TOKEN_KEY} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object OssStorageFileSystem { diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProvider.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProvider.scala index 0f416b0f920..c820e66f411 100644 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProvider.scala +++ b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProvider.scala @@ -12,7 +12,7 @@ import com.aliyun.oss.OSSClient import com.aliyun.oss.model.{GenericRequest, ListObjectsRequest} import com.google.common.collect.AbstractIterator -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.immutable.Set import collection.mutable.ArrayBuffer @@ -32,6 +32,7 @@ final case class OssStorageFileSystemProvider(config: OssStorageConfiguration) e case Some(marker: String) if !marker.isEmpty => iterator = listNext(marker) case Some(marker: String) if marker.isEmpty => iterator = Iterator() case Some(null) => iterator = Iterator() + case oh => throw new RuntimeException(s"Programmer Error! 
Unexpected case match: $oh") } } diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributes.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributes.scala index ccda3ddbfcb..376aca7f2f8 100644 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributes.scala +++ b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributes.scala @@ -4,7 +4,7 @@ import java.nio.file.attribute.FileTime import com.aliyun.oss.model.ObjectMetadata -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable.Map import scala.util.Try diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStoragePath.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStoragePath.scala index dd9212f685f..02cd06b090e 100644 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStoragePath.scala +++ b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStoragePath.scala @@ -196,7 +196,7 @@ final case class OssStoragePathImpl(filesystem: OssStorageFileSystem, override v override def register(watcher: WatchService, events: Array[WatchEvent.Kind[_]], modifiers: WatchEvent.Modifier*): WatchKey = throw new UnsupportedOperationException override def iterator(): util.Iterator[Path] = { - if (path.isEmpty || path.isRoot) { + if (path.isEmpty() || path.isRoot) { return util.Collections.emptyIterator() } diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageRetry.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageRetry.scala index ea4ab520aec..105c5458b5d 100644 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageRetry.scala +++ b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageRetry.scala @@ -27,6 +27,7 @@ object OssStorageRetry { } else { throw e } + case oh => throw new RuntimeException(s"Programmer Error! 
Unexpected case match: $oh") } } diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/UnixPath.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/UnixPath.scala index 6d49b3adae7..fc640680a26 100644 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/UnixPath.scala +++ b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/UnixPath.scala @@ -17,11 +17,11 @@ object UnixPath { val ROOT_PATH: UnixPath = new UnixPath("/") private def isRoot(path: String) = path.length() == 1 && path.charAt(0) == SEPARATOR - private def isAbsolute(path: String) = !path.isEmpty && path.charAt(0) == SEPARATOR - private def hasTrailingSeparator(path: String) = !path.isEmpty && path.charAt(path.length - 1) == SEPARATOR + private def isAbsolute(path: String) = !path.isEmpty() && path.charAt(0) == SEPARATOR + private def hasTrailingSeparator(path: String) = !path.isEmpty() && path.charAt(path.length - 1) == SEPARATOR def getPath(path: String): UnixPath = { - if (path.isEmpty) { + if (path.isEmpty()) { return EMPTY_PATH } else if (isRoot(path)) { return ROOT_PATH @@ -37,7 +37,7 @@ object UnixPath { val builder = new StringBuilder(first) for ((part, index) <- more.view.zipWithIndex) { - if (part.isEmpty) { + if (part.isEmpty()) { // do nothing } else if (isAbsolute(part)) { if (index == more.length - 1) { @@ -66,14 +66,14 @@ final case class UnixPath(path: String) extends CharSequence def isAbsolute = UnixPath.isAbsolute(path) - def isEmpty = path.isEmpty + def isEmpty() = path.isEmpty() def hasTrailingSeparator = UnixPath.hasTrailingSeparator(path) - def seemsLikeDirectory() = path.isEmpty || hasTrailingSeparator || path.endsWith(".") && (length == 1 || path.charAt(length - 2) == UnixPath.SEPARATOR) || path.endsWith("..") && (length == 2 || path.charAt(length - 3) == UnixPath.SEPARATOR) + def seemsLikeDirectory() = path.isEmpty() || hasTrailingSeparator || path.endsWith(".") && (length == 1 || path.charAt(length - 2) == UnixPath.SEPARATOR) || path.endsWith("..") && (length == 2 || path.charAt(length - 3) == UnixPath.SEPARATOR) def getFileName: Option[UnixPath] = { - if (path.isEmpty || isRoot) { + if (path.isEmpty() || isRoot) { None } else { if (parts.size == 1 && parts.last == path) { @@ -85,7 +85,7 @@ final case class UnixPath(path: String) extends CharSequence } def getParent: Option[UnixPath] = { - if (path.isEmpty || isRoot) { + if (path.isEmpty() || isRoot) { return None } @@ -99,7 +99,7 @@ final case class UnixPath(path: String) extends CharSequence def getRoot: Option[UnixPath] = if (isAbsolute) Some(UnixPath.ROOT_PATH) else None def subPath(beginIndex: Int, endIndex: Int): Try[UnixPath] = { - if (path.isEmpty && beginIndex == 0 && endIndex == 1) { + if (path.isEmpty() && beginIndex == 0 && endIndex == 1) { return Success(this) } @@ -111,7 +111,7 @@ final case class UnixPath(path: String) extends CharSequence } def getNameCount: Int = { - if (path.isEmpty) { + if (path.isEmpty()) { 1 } else if (isRoot) { 0 @@ -121,7 +121,7 @@ final case class UnixPath(path: String) extends CharSequence } def getName(index: Int): Try[UnixPath] = { - if (path.isEmpty){ + if (path.isEmpty()){ return Failure(new IllegalArgumentException("can not get name from a empty path")) } @@ -133,7 +133,7 @@ final case class UnixPath(path: String) extends CharSequence } def resolve(other: UnixPath): UnixPath = { - if (other.path.isEmpty){ + if (other.path.isEmpty()){ this } else if (other.isAbsolute) { other @@ -153,7 +153,7 @@ final case class UnixPath(path: String) extends 
CharSequence
   }
 
   def relativize(other: UnixPath): UnixPath = {
-    if (path.isEmpty){
+    if (path.isEmpty()){
       return other
     }
 
@@ -162,11 +162,11 @@ final case class UnixPath(path: String) extends CharSequence
     breakable(
       while (left.hasNext && right.hasNext){
         if (!(left.head == right.head)){
-          break
+          break()
         }
 
-        left.next
-        right.next
+        left.next()
+        right.next()
       }
     )
 
@@ -174,11 +174,11 @@ final case class UnixPath(path: String) extends CharSequence
     while (left.hasNext){
       result.append(UnixPath.PARENT_DIR)
       result.append(UnixPath.SEPARATOR)
-      left.next
+      left.next()
     }
 
     while (right.hasNext) {
-      result.append(right.next)
+      result.append(right.next())
       result.append(UnixPath.SEPARATOR)
     }
 
@@ -241,7 +241,7 @@ final case class UnixPath(path: String) extends CharSequence
     if (hasTrailingSeparator) this else new UnixPath(path + UnixPath.SEPARATOR)
   }
 
-  def removeTrailingSeparator: UnixPath = {
+  def removeTrailingSeparator(): UnixPath = {
     if (!isRoot && hasTrailingSeparator) {
       new UnixPath(path.substring(0, length -1))
     } else {
@@ -250,23 +250,23 @@ final case class UnixPath(path: String) extends CharSequence
   def startsWith(other: UnixPath): Boolean = {
-    val me = removeTrailingSeparator
-    val oth = other.removeTrailingSeparator
+    val me = removeTrailingSeparator()
+    val oth = other.removeTrailingSeparator()
 
     if (oth.path.length > me.path.length) {
       return false
     } else if (me.isAbsolute != oth.isAbsolute) {
       return false
-    } else if (!me.path.isEmpty && oth.path.isEmpty) {
+    } else if (!me.path.isEmpty() && oth.path.isEmpty()) {
       return false
     }
 
-    return startsWith(split, other.split)
+    return startsWith(split(), other.split())
   }
 
   def startsWith(left: Iterator[String], right: Iterator[String]): Boolean = {
     while (right.hasNext){
-      if (!left.hasNext || right.next != left.next) {
+      if (!left.hasNext || right.next() != left.next()) {
         return false
       }
     }
@@ -274,18 +274,18 @@ final case class UnixPath(path: String) extends CharSequence
   }
 
   def endsWith(other: UnixPath): Boolean = {
-    val me = removeTrailingSeparator
-    val oth = other.removeTrailingSeparator
+    val me = removeTrailingSeparator()
+    val oth = other.removeTrailingSeparator()
 
     if (oth.path.length > me.path.length) {
       return false
-    } else if (!me.path.isEmpty && oth.path.isEmpty) {
+    } else if (!me.path.isEmpty() && oth.path.isEmpty()) {
       return false
     } else if (oth.isAbsolute) {
       return me.isAbsolute && me.path == other.path
     }
 
-    startsWith(me.splitReverse, other.splitReverse)
+    startsWith(me.splitReverse(), other.splitReverse())
   }
 
   def toAbsolutePath(currentWorkingDirectory: UnixPath): Try[UnixPath] = {
@@ -334,7 +334,7 @@ final case class UnixPath(path: String) extends CharSequence
   }
 
   def initParts(): Array[String] = {
-    if (path.isEmpty) {
+    if (path.isEmpty()) {
       Array.empty[String]
     } else {
       if (path.charAt(0) == UnixPath.SEPARATOR){
@@ -344,4 +344,4 @@ final case class UnixPath(path: String) extends CharSequence
       }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssFileReadChannelSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssFileReadChannelSpec.scala
index a19a281e7d0..1c57460ef51 100644
--- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssFileReadChannelSpec.scala
+++ b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssFileReadChannelSpec.scala
@@ -12,8 +12,8 @@ object OssFileReadChannelSpec {
   val FILENAME = "/test-oss-read-file"
   val CONTENT = "Hello World!"
- implicit class Crossable[X](xs: Traversable[X]) { - def cross[Y](ys: Traversable[Y]) = for { x <- xs; y <- ys } yield (x, y) + implicit class Crossable[X](xs: Iterable[X]) { + def cross[Y](ys: Iterable[Y]) = for { x <- xs; y <- ys } yield (x, y) } } diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProviderSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProviderSpec.scala index 90394af8478..59638e36878 100644 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProviderSpec.scala +++ b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProviderSpec.scala @@ -7,7 +7,7 @@ import java.nio.file.{DirectoryStream, NoSuchFileException, Path, StandardOpenOp import cromwell.core.TestKitSuite import org.scalatest.BeforeAndAfter -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable.ArrayBuffer import scala.util.control.Breaks diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributesSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributesSpec.scala index 26021efc395..0c2174350af 100644 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributesSpec.scala +++ b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributesSpec.scala @@ -71,7 +71,7 @@ class OssStorageObjectAttributesSpec extends TestKitSuite with OssNioUtilSpec { attr.creationTime shouldEqual attr.lastModifiedTime() attr.lastAccessTime shouldEqual FileTime.fromMillis(0) - attr.cacheControl shouldBe empty + attr.cacheControl() shouldBe empty attr.contentDisposition shouldBe empty attr.contentEncoding shouldBe empty attr.etag shouldBe Some(DEFAULT_ETAG) @@ -85,7 +85,7 @@ class OssStorageObjectAttributesSpec extends TestKitSuite with OssNioUtilSpec { attr.creationTime shouldEqual attr.lastModifiedTime() attr.lastAccessTime shouldEqual FileTime.fromMillis(0) - attr.cacheControl shouldBe empty + attr.cacheControl() shouldBe empty attr.contentDisposition shouldBe empty attr.contentEncoding shouldBe empty attr.etag shouldBe empty diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStoragePathSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStoragePathSpec.scala index ce892bc41fd..74008bdeb27 100644 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStoragePathSpec.scala +++ b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStoragePathSpec.scala @@ -1,7 +1,7 @@ package cromwell.filesystems.oss.nio import cromwell.core.TestKitSuite -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class OssStoragePathSpec extends TestKitSuite with OssNioUtilSpec { diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/UnixPathSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/UnixPathSpec.scala index 8470da38532..de4ccf3bb17 100644 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/UnixPathSpec.scala +++ b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/UnixPathSpec.scala @@ -115,7 +115,7 @@ class UnixPathSpec extends TestKitSuite with OssNioUtilSpec { it should "match expected seemsLikeDirectory" in withClue(clue) { - unixPath.seemsLikeDirectory shouldBe path.likeDir + unixPath.seemsLikeDirectory() shouldBe path.likeDir } } diff 
--git a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/LanguageConfiguration.scala b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/LanguageConfiguration.scala index ab30f0a363f..2cb09dd8a3d 100644 --- a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/LanguageConfiguration.scala +++ b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/config/LanguageConfiguration.scala @@ -5,7 +5,7 @@ import java.util.Map.Entry import com.typesafe.config.{Config, ConfigFactory} import cromwell.languages.config.CromwellLanguages.{CromwellLanguageName, CromwellLanguageVersion} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ final case class LanguagesConfiguration(languages: List[LanguageVersionConfigurationEntry], default: Option[String]) final case class LanguageVersionConfigurationEntry(name: CromwellLanguageName, versions: Map[CromwellLanguageVersion, LanguageVersionConfig], default: Option[String]) @@ -21,9 +21,10 @@ object LanguageConfiguration { val languages = LanguageNames.toList map { languageName => val languageConfig = LanguagesConfig.getConfig(languageName) - val defaultVersionName: Option[String] = if (LanguagesConfig.hasPath("default")) { Option(LanguagesConfig.getString("default")) } else None val versionSet = languageConfig.getConfig("versions") - val languageVersionNames: Set[String] = versionSet.entrySet().asScala.map(findFirstKey).filterNot(_ == "default").toSet + val allLanguageVersionNames: Set[String] = versionSet.entrySet().asScala.map(findFirstKey).toSet + val (defaultVersionKey, languageVersionNames) = allLanguageVersionNames.partition(_ == "default") + val defaultVersionName: Option[String] = defaultVersionKey.headOption map { _ => versionSet.getString("default") } val versions = (languageVersionNames.toList map { languageVersionName => val configEntry = versionSet.getConfig(s""""$languageVersionName"""") diff --git a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ImportResolver.scala b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ImportResolver.scala index fd34e402a39..01c5db23084 100644 --- a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ImportResolver.scala +++ b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/ImportResolver.scala @@ -2,10 +2,9 @@ package cromwell.languages.util import java.net.{URI, URL} import java.nio.file.Paths - import better.files.File import cats.data.NonEmptyList -import cats.effect.IO +import cats.effect.{ContextShift, IO} import cats.syntax.either._ import cats.syntax.validated._ import com.softwaremill.sttp._ @@ -16,16 +15,16 @@ import common.validation.ErrorOr._ import common.validation.Checked._ import common.validation.Validation._ import cromwell.core.path.{DefaultPathBuilder, Path} + import java.nio.file.{Path => NioPath} import java.security.MessageDigest - import cromwell.core.WorkflowId import wom.ResolvedImportRecord import wom.core.WorkflowSource import wom.values._ import scala.concurrent.duration._ -import scala.concurrent.Await +import scala.concurrent.{Await, ExecutionContext} import scala.util.{Failure, Success, Try} object ImportResolver { @@ -192,7 +191,7 @@ object ImportResolver { // temporary situation to get functionality working before // starting in on async-ifying the entire WdlNamespace flow - val result: Checked[String] = 
Await.result(responseIO.unsafeToFuture, 15.seconds).body.leftMap { e => NonEmptyList(e.toString.trim, List.empty) }
+      val result: Checked[String] = Await.result(responseIO.unsafeToFuture(), 15.seconds).body.leftMap { e => NonEmptyList(e.toString.trim, List.empty) }
 
       result map {
         ResolvedImportBundle(_, newResolverList(toLookup), ResolvedImportRecord(toLookup))
@@ -215,6 +214,15 @@ object ImportResolver {
     import common.util.IntrospectableLazy._
 
     val sttpBackend: IntrospectableLazy[SttpBackend[IO, Nothing]] = lazily {
+      // 2.13: Beginning with sttp 1.6.x, a `ContextShift` parameter is now required to construct an
+      // `AsyncHttpClientCatsBackend` [1]. There may be a more appropriate choice for backing this than the global
+      // execution context, but even that appears to be a better option than the status quo in 1.5.x [2].
+      import scala.concurrent.ExecutionContext.Implicits.global
+      val ec: ExecutionContext = implicitly[ExecutionContext]
+      implicit val cs: ContextShift[IO] = IO.contextShift(ec)
+
+      // [1] https://github.com/softwaremill/sttp/releases/tag/v1.6.0
+      // [2] https://github.com/softwaremill/sttp/issues/217#issuecomment-499874267
       AsyncHttpClientCatsBackend[IO]()
     }
diff --git a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/LanguageFactoryUtil.scala b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/LanguageFactoryUtil.scala
index fd731893479..b7e87034861 100644
--- a/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/LanguageFactoryUtil.scala
+++ b/languageFactories/language-factory-core/src/main/scala/cromwell/languages/util/LanguageFactoryUtil.scala
@@ -35,7 +35,7 @@ object LanguageFactoryUtil {
       DefaultPathBuilder.createTempFile(s"imports_workflow_${workflowId}_", ".zip").writeByteArray(zipContents)(OpenOptions.default)
     }
 
-    def unZipFile(f: Path) = Try(f.unzip)
+    def unZipFile(f: Path) = Try(f.unzip())
 
     val importsFile = for {
       zipFile <- makeZipFile
diff --git a/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/NamespaceCacheSpec.scala b/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/NamespaceCacheSpec.scala
index eed726c00d3..df87f2fa644 100644
--- a/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/NamespaceCacheSpec.scala
+++ b/languageFactories/wdl-draft2/src/test/scala/languages.wdl.draft2/NamespaceCacheSpec.scala
@@ -77,7 +77,7 @@ class NamespaceCacheSpec extends AnyFlatSpec with CromwellTimeoutSpec with Befor
       workflowIdForLogging = WorkflowId.randomId(),
       ioFunctions = NoIoFunctionSet,
       importResolvers = List(countingResolver)).value.unsafeToFuture()
-    Await.result(futureNamespace, Duration.Inf).right.get
+    Await.result(futureNamespace, Duration.Inf).toOption.get
   }
 
   expectations foreach { e =>
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index b98b4d4ccd9..facd760e8d2 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -87,7 +87,6 @@ object Dependencies {
   private val mysqlV = "8.0.28"
   private val nettyV = "4.1.72.Final"
   private val owlApiV = "5.1.19"
-  private val paradiseV = "2.1.1"
   private val pegdownV = "1.6.0"
   private val postgresV = "42.3.3"
   private val pprintV = "0.7.1"
@@ -119,12 +118,15 @@
    * and manually uploaded to the Broad Institute artifactory at https://broadinstitute.jfrog.io/broadinstitute/.
* Consider updating to the official newer Slick version once they fix issue #2076
    * Related Slick PR: https://github.com/slick/slick/pull/2101
+   *
+   * Update 2022-03-23: PR #2201 cherry-picks Greg's PR #2101 above and claims to fix the issue:
+   * https://github.com/slick/slick/pull/2201
    */
-  private val slickV = "3.3.2-2076hotfix" // scala-steward:off (CROM-6620)
+  private val slickV = "3.4.0-M1"
   private val snakeyamlV = "1.30"
   private val specs2MockV = "4.13.3"
   private val sprayJsonV = "1.3.6"
-  private val sttpV = "1.5.19" // scala-steward:off (CROM-6564)
+  private val sttpV = "1.7.2"
   private val swaggerParserV = "1.0.56"
   private val swaggerUiV = "4.5.0"
   private val testContainersScalaV = "0.40.2"
@@ -535,7 +537,6 @@
   ) ++ slf4jBindingDependencies // During testing, add an slf4j binding for _all_ libraries.
 
   val kindProjectorPlugin = "org.typelevel" % "kind-projector" % kindProjectorV cross CrossVersion.full
-  val paradisePlugin = "org.scalamacros" % "paradise" % paradiseV cross CrossVersion.full
 
   // Version of the swagger UI to write into config files
   val swaggerUiVersion: String = swaggerUiV
diff --git a/project/Publishing.scala b/project/Publishing.scala
index 2b70cd0e559..150c30ca293 100644
--- a/project/Publishing.scala
+++ b/project/Publishing.scala
@@ -10,7 +10,7 @@ import sbtdocker.DockerPlugin.autoImport._
 import sbtdocker.Instruction
 
 import java.io.FileNotFoundException
-import scala.collection.JavaConverters._
+import scala.jdk.CollectionConverters._
 import scala.sys.process._
 
 object Publishing {
diff --git a/project/Settings.scala b/project/Settings.scala
index a18b4c382c2..79242733583 100644
--- a/project/Settings.scala
+++ b/project/Settings.scala
@@ -13,14 +13,8 @@ object Settings {
 
-  /* The reason why -Xmax-classfile-name is set is because this will fail
-     to build on Docker otherwise. The reason why it's 200 is because it
-     fails if the value is too close to 256 (even 254 fails). For more info:
-
-     https://github.com/sbt/sbt-assembly/issues/69
-     https://github.com/scala/pickling/issues/10
-
-     Other fancy flags from https://tpolecat.github.io/2017/04/25/scalac-flags.html.
+  /*
+    Fancy flags from https://tpolecat.github.io/2017/04/25/scalac-flags.html.
 
     Per JG's work in Cromwell, the following can't be turned on without causing piles of errors in wdl4s. Many of
     the constructs that are flagged look suspicious and probably warrant further scrutiny, but no time for that now.
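Note: the `paradisePlugin` removed from Dependencies.scala above and the `-Ymacro-annotations` flag added in the hunk below are two halves of the same change. Scala 2.13 builds macro-annotation support directly into the compiler, so the macro-paradise compiler plugin is no longer needed. A minimal sbt sketch of the before and after, reusing the plugin coordinates and flag values that appear in this commit:

    // Scala 2.12: macro annotations come from the macro-paradise compiler plugin.
    // addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full)

    // Scala 2.13: the compiler supports macro annotations natively behind a flag,
    // so the plugin dependency is dropped and a scalac option is added instead.
    scalacOptions += "-Ymacro-annotations"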
@@ -32,27 +26,23 @@ object Settings { "-deprecation", "-feature", "-explaintypes", - "-Xmax-classfile-name", "200", - // the backend runs bytecode serialization, classfile writing and method-local // optimizations (-opt:l:method) in parallel on N threads "-Ybackend-parallelism", "3", "-Ycache-plugin-class-loader:last-modified", "-Ycache-macro-class-loader:last-modified", - "-encoding", "UTF-8" + "-encoding", "UTF-8", + "-Ymacro-annotations" ) val warningSettings = List( - "-Xfuture", "-Xlint:adapted-args", - "-Xlint:by-name-right-associative", "-Xlint:constant", "-Xlint:delayedinit-select", "-Xlint:doc-detached", "-Xlint:inaccessible", "-Xlint:infer-any", "-Xlint:missing-interpolator", - "-Xlint:nullary-override", "-Xlint:nullary-unit", "-Xlint:option-implicit", "-Xlint:package-object-classes", @@ -60,16 +50,12 @@ object Settings { "-Xlint:private-shadow", "-Xlint:stars-align", "-Xlint:type-parameter-shadow", - "-Xlint:unsound-match", - "-Yno-adapted-args", "-Ywarn-dead-code", "-Ywarn-numeric-widen", "-Ywarn-value-discard", - "-Ywarn-inaccessible", "-Ywarn-unused:implicits", "-Ywarn-unused:privates", "-Ywarn-unused:locals", - "-Ypartial-unification", "-Ywarn-unused:patvars" ) @@ -84,8 +70,8 @@ object Settings { assembly / assemblyMergeStrategy := customMergeStrategy.value, ) - val Scala2_12Version = "2.12.15" - private val ScalaVersion: String = Scala2_12Version + val Scala2_13Version = "2.13.8" + private val ScalaVersion: String = Scala2_13Version private val sharedSettings: Seq[Setting[_]] = cromwellVersionWithGit ++ publishingSettings ++ List( organization := "org.broadinstitute", @@ -106,9 +92,7 @@ object Settings { // No console-hostile options, otherwise the console is effectively unusable. // https://github.com/sbt/sbt/issues/1815 Compile / console / scalacOptions --= consoleHostileSettings, - addCompilerPlugin(paradisePlugin), excludeDependencies ++= List( - "org.typelevel" % "simulacrum-scalafix-annotations_2.12", "org.typelevel" % "simulacrum-scalafix-annotations_2.13" ) ) diff --git a/publish/publish_workflow.wdl b/publish/publish_workflow.wdl index 4d927c64a63..1ce6cccc05f 100644 --- a/publish/publish_workflow.wdl +++ b/publish/publish_workflow.wdl @@ -80,8 +80,8 @@ task doMajorRelease { } FIN - java -jar server/target/scala-2.12/cromwell-~{releaseVersion}.jar run --inputs hello.inputs hello.wdl - java -jar womtool/target/scala-2.12/womtool-~{releaseVersion}.jar validate --inputs hello.inputs hello.wdl + java -jar server/target/scala-2.13/cromwell-~{releaseVersion}.jar run --inputs hello.inputs hello.wdl + java -jar womtool/target/scala-2.13/womtool-~{releaseVersion}.jar validate --inputs hello.inputs hello.wdl echo 'Create the hotfix branch' git checkout -b ~{releaseVersion}_hotfix @@ -98,8 +98,8 @@ task doMajorRelease { >>> output { - File cromwellJar = "cromwell/server/target/scala-2.12/cromwell-~{releaseVersion}.jar" - File womtoolJar = "cromwell/womtool/target/scala-2.12/womtool-~{releaseVersion}.jar" + File cromwellJar = "cromwell/server/target/scala-2.13/cromwell-~{releaseVersion}.jar" + File womtoolJar = "cromwell/womtool/target/scala-2.13/womtool-~{releaseVersion}.jar" } runtime { @@ -180,16 +180,16 @@ task doMinorRelease { } FIN - java -jar server/target/scala-2.12/cromwell-~{releaseVersion}.jar run --inputs hello.inputs hello.wdl - java -jar womtool/target/scala-2.12/womtool-~{releaseVersion}.jar validate --inputs hello.inputs hello.wdl + java -jar server/target/scala-2.13/cromwell-~{releaseVersion}.jar run --inputs hello.inputs hello.wdl + java -jar 
womtool/target/scala-2.13/womtool-~{releaseVersion}.jar validate --inputs hello.inputs hello.wdl echo 'Push the tags' git push origin ~{releaseVersion} >>> output { - File cromwellJar = "cromwell/server/target/scala-2.12/cromwell-~{releaseVersion}.jar" - File womtoolJar = "cromwell/womtool/target/scala-2.12/womtool-~{releaseVersion}.jar" + File cromwellJar = "cromwell/server/target/scala-2.13/cromwell-~{releaseVersion}.jar" + File womtoolJar = "cromwell/womtool/target/scala-2.13/womtool-~{releaseVersion}.jar" } runtime { diff --git a/scripts/docker-develop/Dockerfile b/scripts/docker-develop/Dockerfile index 5c89ae046fe..340d7bc00c4 100644 --- a/scripts/docker-develop/Dockerfile +++ b/scripts/docker-develop/Dockerfile @@ -11,13 +11,13 @@ RUN apt-get update -q && \ # Development environment for Cromwell that includes: # -# Scala 2.12 +# Scala 2.13 # SBT 1.x # Java 11 # Git # Env variables -ENV SCALA_VERSION 2.12.14 +ENV SCALA_VERSION 2.13.8 ENV SBT_VERSION 1.5.5 # diff --git a/scripts/docker-develop/README.md b/scripts/docker-develop/README.md index 6342f473115..20bd1a8203f 100644 --- a/scripts/docker-develop/README.md +++ b/scripts/docker-develop/README.md @@ -53,7 +53,7 @@ will be `server/target`. Remember, here we are inside the container with scala a $ sbt assembly ``` -`sbt assembly` will build the runnable Cromwell JAR in `server/target/scala-2.12/` with a name like `cromwell-.jar`. +`sbt assembly` will build the runnable Cromwell JAR in `server/target/scala-2.13/` with a name like `cromwell-.jar`. You can then interact with it in the container, or on your host if you like. Remember that you have Java already in the container, so it makes sense to develop there. diff --git a/scripts/gen_java_client.sh b/scripts/gen_java_client.sh index 68aa5297a17..409af51a83f 100755 --- a/scripts/gen_java_client.sh +++ b/scripts/gen_java_client.sh @@ -5,8 +5,8 @@ BASE_PACKAGE="cromwell.client" ORIGINAL_API_YAML=engine/src/main/resources/swagger/cromwell.yaml API_YAML=codegen_java/cromwell.nofile.yaml -# Cromwell doesn't specify the OAuth configuration in it's swagger, and -# without it the client doesn't support authentication. +# Cromwell doesn't specify the OAuth configuration in its swagger, and +# without it the client doesn't support authentication. cat << EOF > $API_YAML security: - googleoauth: @@ -23,16 +23,20 @@ securityDefinitions: openid: open id authorization email: email authorization profile: profile authorization - + EOF # Swagger autogenerates clients that match the input types, and for File -# that is less than useful because clients need to supply their inputs as +# that is less than useful because clients need to supply their inputs as # File, which means actually making a file. Replacing with 'string' has nearly -# the same HTTP semantics, except for suppling the name of the client-side file +# the same HTTP semantics, except for supplying the name of the client-side file # itself, but is much more usable to a client. cat $ORIGINAL_API_YAML | sed s/type:\ file/type:\ string/g >> $API_YAML +# The OpenAPI generator clobbers our build.sbt with its own. Hide ours away, run the generator, then replace the +# generator's build.sbt with ours. +# 1. 
Hide our build.sbt +mv codegen_java/build.sbt codegen_java/build.sbt.bak docker run --rm -v ${PWD}:/local openapitools/openapi-generator-cli generate \ -i /local/$API_YAML \ -g java \ @@ -41,6 +45,11 @@ docker run --rm -v ${PWD}:/local openapitools/openapi-generator-cli generate \ --api-package ${BASE_PACKAGE}.api \ --model-package ${BASE_PACKAGE}.model +# 2. Remove the generator's build.sbt +rm codegen_java/build.sbt +# 3. Restore our build.sbt +mv codegen_java/build.sbt.bak codegen_java/build.sbt + cd codegen_java sbt --warn test diff --git a/scripts/publish-client.sh b/scripts/publish-client.sh index cfc9a80561d..fac3cccceb4 100755 --- a/scripts/publish-client.sh +++ b/scripts/publish-client.sh @@ -1,13 +1,11 @@ #!/usr/bin/env bash - set -e +set -e - # sbt publish publishes libs to Artifactory for the scala version sbt is running as. -# sbt +publish publishes libs to Artifactory for all scala versions listed in crossScalaVersions. -# We only do sbt publish here because Travis runs against 2.11 and 2.12 in separate jobs, so each one publishes its version to Artifactory. +# sbt publish publishes libs to Artifactory for the scala version sbt is running as. cd codegen_java if [[ "$TRAVIS_PULL_REQUEST" == "false" && "$TRAVIS_BRANCH" == "develop" ]]; then sbt --warn -Dproject.isSnapshot=false "+ publish" else sbt --warn -Dproject.isSnapshot=true "+ publish" -fi +fi diff --git a/server/src/main/scala/cromwell/CromwellEntryPoint.scala b/server/src/main/scala/cromwell/CromwellEntryPoint.scala index 57777915b8d..5acddd75ce9 100644 --- a/server/src/main/scala/cromwell/CromwellEntryPoint.scala +++ b/server/src/main/scala/cromwell/CromwellEntryPoint.scala @@ -24,7 +24,7 @@ import cromwell.server.{CromwellServer, CromwellShutdown, CromwellSystem} import net.ceedubs.ficus.Ficus._ import org.slf4j.LoggerFactory -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ import scala.concurrent.{Await, ExecutionContext, Future, TimeoutException} import scala.language.postfixOps @@ -268,7 +268,7 @@ object CromwellEntryPoint extends GracefulStopSupport { def validOrFailSubmission[A](validation: ErrorOr[A]): A = { validation.valueOr(errors => throw new RuntimeException with MessageAggregation { override def exceptionContext: String = "ERROR: Unable to submit workflow to Cromwell:" - override def errorMessages: Traversable[String] = errors.toList + override def errorMessages: Iterable[String] = errors.toList }) } diff --git a/server/src/test/scala/cromwell/CromwellTestKitSpec.scala b/server/src/test/scala/cromwell/CromwellTestKitSpec.scala index 1d0ce5db516..319b417e861 100644 --- a/server/src/test/scala/cromwell/CromwellTestKitSpec.scala +++ b/server/src/test/scala/cromwell/CromwellTestKitSpec.scala @@ -168,7 +168,7 @@ object CromwellTestKitSpec { } def replaceVariables(value: String, workflowId: WorkflowId): String = { - val variables = Map("PWD" -> Cmds.pwd, "UUID" -> workflowId) + val variables = Map("PWD" -> Cmds.pwd.toString, "UUID" -> workflowId.toString) variables.foldLeft(value) { case (result, (variableName, variableValue)) => result.replace(s"<<$variableName>>", s"$variableValue") } @@ -436,13 +436,14 @@ abstract class CromwellTestKitSpec extends TestKitSuite val valuesMap: Map[WomValue, WomValue] = map.fields.map { case (fieldName, fieldValue) => (WomString(fieldName), jsValueToWdlValue(fieldValue)) } if (valuesMap.isEmpty) WomMap(WomMapType(WomStringType, WomStringType), Map.empty) else WomMap(WomMapType(WomStringType, 
valuesMap.head._2.womType), valuesMap) + case oh => throw new RuntimeException(s"Programmer Error! Unexpected case match: $oh") } } } class AlwaysHappyJobStoreActor extends Actor { override def receive: Receive = { - case x: JobStoreWriterCommand => sender ! JobStoreWriteSuccess(x) + case x: JobStoreWriterCommand => sender() ! JobStoreWriteSuccess(x) } } @@ -456,13 +457,13 @@ object AlwaysHappyJobStoreActor { class EmptyCallCacheReadActor extends Actor { override def receive: Receive = { - case _: CacheLookupRequest => sender ! CacheLookupNoHit + case _: CacheLookupRequest => sender() ! CacheLookupNoHit } } class EmptyCallCacheWriteActor extends Actor { override def receive: Receive = { - case SaveCallCacheHashes => sender ! CallCacheWriteSuccess + case SaveCallCacheHashes => sender() ! CallCacheWriteSuccess } } @@ -476,7 +477,7 @@ object EmptyCallCacheWriteActor { class EmptyDockerHashActor extends Actor { override def receive: Receive = { - case request: DockerInfoRequest => sender ! DockerInfoSuccessResponse(DockerInformation(DockerHashResult("alg", "hash"), None), request) + case request: DockerInfoRequest => sender() ! DockerInfoSuccessResponse(DockerInformation(DockerHashResult("alg", "hash"), None), request) } } diff --git a/server/src/test/scala/cromwell/ReferenceConfSpec.scala b/server/src/test/scala/cromwell/ReferenceConfSpec.scala index ff10ce82c3a..993ba4da0e6 100644 --- a/server/src/test/scala/cromwell/ReferenceConfSpec.scala +++ b/server/src/test/scala/cromwell/ReferenceConfSpec.scala @@ -7,7 +7,7 @@ import org.scalatest.Suite import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class ReferenceConfSpec extends Suite with AnyFlatSpecLike with Matchers { diff --git a/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala b/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala index da3021942ac..be662e5044e 100644 --- a/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala +++ b/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala @@ -41,7 +41,7 @@ class WorkflowStoreActorSpec extends CromwellTestKitWordSpec with CoordinatedWor /** * Fold down a list of WorkflowToStart's, checking that their IDs are all unique */ - private def checkDistinctIds(list: Traversable[WorkflowToStart]): Boolean = { + private def checkDistinctIds(list: Iterable[WorkflowToStart]): Boolean = { def folderFunction(knownDistinct: (List[WorkflowToStart], Boolean), next: WorkflowToStart) = { val (list, distinct) = knownDistinct if (!distinct) { diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRunningJobSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRunningJobSpec.scala index 3f3c8e0f5c1..774c334d098 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRunningJobSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaRunningJobSpec.scala @@ -17,8 +17,8 @@ class EjeaRunningJobSpec extends EngineJobExecutionActorSpec with Eventually wit val hashError = HashError(new Exception("ARGH!!!") with NoStackTrace) val failureCases = List( - ("FailedRetryableResponse", failureRetryableResponse _, true), - ("FailedNonRetryableResponse", failureNonRetryableResponse _, false) + ("FailedRetryableResponse", () => failureRetryableResponse, true), + ("FailedNonRetryableResponse", () => 
failureNonRetryableResponse, false) ) "A 'RunningJob' EJEA" should { diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingJobStoreSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingJobStoreSpec.scala index d0a3b0f6fa9..61cb2eb693e 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingJobStoreSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaUpdatingJobStoreSpec.scala @@ -15,12 +15,12 @@ class EjeaUpdatingJobStoreSpec extends EngineJobExecutionActorSpec with HasJobSu "An EJEA in UpdatingJobStoreSpec" should { List( - ("SucceededResponse", successResponse _, true), - ("FailedRetryableResponse", failureRetryableResponse _, true), - ("FailedNonRetryableResponse", failureNonRetryableResponse _, false) + ("SucceededResponse", () => successResponse, true), + ("FailedRetryableResponse", () => failureRetryableResponse, true), + ("FailedNonRetryableResponse", () => failureNonRetryableResponse, false) ) foreach { case (name, responseMaker, retryable @ _) => s"Forward a saved $name response on and shut down, if the JobStore write is successful" in { - val response = responseMaker.apply + val response = responseMaker.apply() ejea = ejeaInUpdatingJobStoreState(response) ejea ! JobStoreWriteSuccess(null) // This value's not read... helper.replyToProbe.expectMsg(awaitTimeout, response) diff --git a/services/src/main/scala/cromwell/services/IoActorRequester.scala b/services/src/main/scala/cromwell/services/IoActorRequester.scala index 550d8f8a7df..42c9d8eefb6 100644 --- a/services/src/main/scala/cromwell/services/IoActorRequester.scala +++ b/services/src/main/scala/cromwell/services/IoActorRequester.scala @@ -19,7 +19,7 @@ trait IoActorRequester extends StrictLogging { this: Actor => def requestIoActor(backoffInterval: FiniteDuration = 1.minute): Future[ActorRef] = _ioActorPromise match { case Some(promise) => promise.future case None => - val newPromise = Promise[ActorRef] + val newPromise = Promise[ActorRef]() _ioActorPromise = Option(newPromise) requestIoActorInner(newPromise, backoffInterval) newPromise.future diff --git a/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala b/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala index c7056e1eef6..0c613f307ba 100644 --- a/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala +++ b/services/src/main/scala/cromwell/services/ServiceRegistryActor.scala @@ -10,7 +10,7 @@ import cromwell.util.GracefulShutdownHelper import cromwell.util.GracefulShutdownHelper.ShutdownCommand import net.ceedubs.ficus.Ficus._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object ServiceRegistryActor { case object NoopMessage @@ -73,7 +73,7 @@ class ServiceRegistryActor(globalConfig: Config) extends Actor with ActorLogging val services: Map[String, ActorRef] = serviceProps map { case (name, props) => name -> context.actorOf(props, name) } - + private def transform(message: Any, from: ActorRef): Any = message match { case _: ListenToMessage => Listen(from) case _ => message @@ -82,15 +82,15 @@ class ServiceRegistryActor(globalConfig: Config) extends Actor with ActorLogging def receive = { case msg: ServiceRegistryMessage => services.get(msg.serviceName) match { - case Some(ref) => ref.tell(transform(msg, sender), sender) + case Some(ref) => ref.tell(transform(msg, sender()), sender()) case None => log.error("Received 
ServiceRegistryMessage requesting service '{}' for which no service is configured. Message: {}", msg.serviceName, msg) - sender ! ServiceRegistryFailure(msg.serviceName) + sender() ! ServiceRegistryFailure(msg.serviceName) } case meta: ServiceRegistryMetaRequest => meta match { case RequestIoActorRef => ioActor match { - case Some(ref) => sender ! IoActorRef(ref) - case None => sender ! NoIoActorRefAvailable + case Some(ref) => sender() ! IoActorRef(ref) + case None => sender() ! NoIoActorRefAvailable } case IoActorRef(ref) => if (ioActor.isEmpty) { ioActor = Option(ref) } @@ -104,7 +104,7 @@ class ServiceRegistryActor(globalConfig: Config) extends Actor with ActorLogging case NoopMessage => // Nothing to do - useful for streams that use this actor as a sink and want to send a message on completion case fool => log.error("Received message which is not a ServiceRegistryMessage: {}", fool) - sender ! ServiceRegistryFailure("Message is not a ServiceRegistryMessage: " + fool) + sender() ! ServiceRegistryFailure("Message is not a ServiceRegistryMessage: " + fool) } /** diff --git a/services/src/main/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActor.scala b/services/src/main/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActor.scala index 9062f7046d2..4d4a12e77d0 100644 --- a/services/src/main/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActor.scala +++ b/services/src/main/scala/cromwell/services/healthmonitor/ProtoHealthMonitorServiceActor.scala @@ -76,7 +76,7 @@ trait ProtoHealthMonitorServiceActor extends Actor with LazyLogging with Timers case Store(subsystem, status) => store(subsystem, status) scheduleSweepCheck(subsystem) - case GetCurrentStatus => sender ! getCurrentStatus + case GetCurrentStatus => sender() ! getCurrentStatus case ShutdownCommand => context.stop(self) // Not necessary but service registry requires it. 
See #2575 case Status.Failure(f) => logger.error("Unexpected Status.Failure received", f) case e => logger.error("Unexpected Status.Failure received: {}", e.toString) diff --git a/services/src/main/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActor.scala b/services/src/main/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActor.scala index b42d7eb23ac..a079e1e7fb0 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActor.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/AsynchronousThrottlingGaugeMetricActor.scala @@ -44,7 +44,7 @@ class AsynchronousThrottlingGaugeMetricActor(metricPath: NonEmptyList[String], whenUnhandled { case Event(unexpected, _) => - log.warning(s"Programmer error: this actor should not receive message $unexpected from ${sender.path} while in state $stateName") + log.warning(s"Programmer error: this actor should not receive message $unexpected from ${sender().path} while in state $stateName") stay() } diff --git a/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActor.scala b/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActor.scala index 9de1e035c91..02dce2b78ad 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActor.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/impl/stackdriver/StackdriverInstrumentationServiceActor.scala @@ -16,7 +16,7 @@ import cromwell.services.instrumentation.impl.stackdriver.StackdriverConfig._ import cromwell.services.instrumentation.impl.stackdriver.StackdriverInstrumentationServiceActor._ import cromwell.util.GracefulShutdownHelper.ShutdownCommand -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ import scala.util.Try @@ -51,6 +51,7 @@ class StackdriverInstrumentationServiceActor(serviceConfig: Config, globalConfig case CromwellGauge(bucket, value) => updateMetricMap(bucket, value.toDouble, StackdriverGauge) case CromwellCount(bucket, value, _) => updateMetricMap(bucket, value.toDouble, StackdriverCumulative) case CromwellIncrement(bucket) => updateMetricMap(bucket, metricValue = 1D, metricKind = StackdriverCumulative) + case oh => throw new Exception(s"Programmer Error! 
Unexpected case match: $oh") } case ShutdownCommand => // flush out metrics (if any) before shut down diff --git a/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActor.scala b/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActor.scala index 4c886c80d30..a90c7f66a8c 100644 --- a/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActor.scala +++ b/services/src/main/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActor.scala @@ -12,7 +12,7 @@ import cromwell.util.GracefulShutdownHelper.ShutdownCommand import nl.grons.metrics4.scala.{DefaultInstrumented, Meter, MetricName} import net.ceedubs.ficus.Ficus._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ object StatsDInstrumentationServiceActor { @@ -68,6 +68,7 @@ class StatsDInstrumentationServiceActor(serviceConfig: Config, globalConfig: Con case CromwellCount(bucket, value, _) => updateCounter(bucket, value) case CromwellGauge(bucket, value) => updateGauge(bucket, value) case CromwellTiming(bucket, value, _) => updateTiming(bucket, value) + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } case ShutdownCommand => context stop self } diff --git a/services/src/main/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActor.scala b/services/src/main/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActor.scala index f478f8c6981..8dba38e7e99 100644 --- a/services/src/main/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActor.scala +++ b/services/src/main/scala/cromwell/services/loadcontroller/impl/LoadControllerServiceActor.scala @@ -37,13 +37,13 @@ class LoadControllerServiceActor(serviceConfig: Config, private [impl] var loadLevel: LoadLevel = NormalLoad private [impl] var monitoredActors: Set[ActorRef] = Set.empty private [impl] var loadMetrics: Map[ActorAndMetric, LoadLevel] = Map.empty - + override def receive = listenerManagement.orElse(controlReceive) override def preStart() = { - if (controlFrequency.isFinite()) + if (controlFrequency.isFinite) timers.startPeriodicTimer(LoadControlTimerKey, LoadControlTimerAction, controlFrequency.asInstanceOf[FiniteDuration]) - else + else log.info("Load control disabled") super.preStart() } @@ -80,14 +80,14 @@ class LoadControllerServiceActor(serviceConfig: Config, loadLevel = newLoadLevel sendGauge(NonEmptyList.one("global"), loadLevel.level.toLong, LoadInstrumentationPrefix) } - + private def handleTerminated(terminee: ActorRef) = { monitoredActors = monitoredActors - terminee - loadMetrics = loadMetrics.filterKeys({ + loadMetrics = loadMetrics.view.filterKeys({ case ActorAndMetric(actor, _) => actor != terminee - }) + }).toMap } - + private def highLoadMetricsForLogging = { loadMetrics.collect({ case (ActorAndMetric(_, metricPath), HighLoad) => metricPath.head diff --git a/services/src/main/scala/cromwell/services/metadata/MetadataService.scala b/services/src/main/scala/cromwell/services/metadata/MetadataService.scala index e992006dc4a..61acd8052c8 100644 --- a/services/src/main/scala/cromwell/services/metadata/MetadataService.scala +++ b/services/src/main/scala/cromwell/services/metadata/MetadataService.scala @@ -175,7 +175,7 @@ object MetadataService { final case class WorkflowQuerySuccess(response: WorkflowQueryResponse, meta: Option[QueryMetadata]) extends 
MetadataQueryResponse final case class WorkflowQueryFailure(reason: Throwable) extends MetadataQueryResponse - private implicit class EnhancedWomTraversable(val womValues: Traversable[WomValue]) extends AnyVal { + private implicit class EnhancedWomTraversable(val womValues: Iterable[WomValue]) extends AnyVal { def toEvents(metadataKey: MetadataKey): List[MetadataEvent] = if (womValues.isEmpty) { List(MetadataEvent.empty(metadataKey.copy(key = s"${metadataKey.key}[]"))) } else { diff --git a/services/src/main/scala/cromwell/services/metadata/WorkflowQueryKey.scala b/services/src/main/scala/cromwell/services/metadata/WorkflowQueryKey.scala index 2268c529897..726361f2cfc 100644 --- a/services/src/main/scala/cromwell/services/metadata/WorkflowQueryKey.scala +++ b/services/src/main/scala/cromwell/services/metadata/WorkflowQueryKey.scala @@ -193,6 +193,7 @@ sealed trait DateTimeWorkflowQueryKey extends WorkflowQueryKey[Option[OffsetDate case Success(dt) => Option(dt).validNel[String] case _ => s"Value given for $displayName does not parse as a datetime: $v".invalidNel[Option[OffsetDateTime]] } + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } } def displayName: String @@ -219,6 +220,7 @@ sealed trait IntWorkflowQueryKey extends WorkflowQueryKey[Option[Int]] { case Success(intVal) => if (intVal > 0) Option(intVal).validNel else s"Integer value not greater than 0".invalidNel[Option[Int]] case _ => s"Value given for $displayName does not parse as a integer: $v".invalidNel[Option[Int]] } + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } } def displayName: String @@ -235,6 +237,7 @@ sealed trait BooleanWorkflowQueryKey extends WorkflowQueryKey[Boolean] { case _ => s"Value given for $displayName does not parse as a boolean: $v".invalidNel[Boolean] } } + case oh => throw new Exception(s"Programmer Error! 
Unexpected case match: $oh") } } def displayName: String diff --git a/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala b/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala index bfbe31ffc40..271251bcdd7 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/MetadataDatabaseAccess.scala @@ -304,7 +304,7 @@ trait MetadataDatabaseAccess { } } - def summariesToQueryResults(workflows: Traversable[WorkflowMetadataSummaryEntry]): Future[List[MetadataService.WorkflowQueryResult]] = { + def summariesToQueryResults(workflows: Iterable[WorkflowMetadataSummaryEntry]): Future[List[MetadataService.WorkflowQueryResult]] = { workflows.toList.traverse(summaryToQueryResult) } diff --git a/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala index 33005882f8f..d320adb42ae 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/MetadataServiceActor.scala @@ -55,7 +55,7 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config, ser private val metadataSummaryRefreshInterval: Option[FiniteDuration] = { val duration = serviceConfig.getOrElse[Duration]("metadata-summary-refresh-interval", default = 1 second) - if (duration.isFinite()) Option(duration.asInstanceOf[FiniteDuration]) else None + if (duration.isFinite) Option(duration.asInstanceOf[FiniteDuration]) else None } private val metadataSummaryRefreshLimit = serviceConfig.getOrElse("metadata-summary-refresh-limit", default = 5000) @@ -180,7 +180,7 @@ case class MetadataServiceActor(serviceConfig: Config, globalConfig: Config, ser } private def sendMetadataTableSizeMetrics(): Unit = { - getMetadataTableSizeInformation onComplete { + getMetadataTableSizeInformation() onComplete { case Success(v) => v foreach { d => sendGauge(dataLengthMetricsPath, d.dataLength) diff --git a/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataRegulatorActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataRegulatorActor.scala index 4dc7bcfffd0..1c50cb00404 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataRegulatorActor.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/ReadMetadataRegulatorActor.scala @@ -59,7 +59,7 @@ class ReadMetadataRegulatorActor(metadataBuilderActorProps: PropsMaker, readMeta case response @ (_: MetadataJsonResponse | _: MetadataQueryResponse | _: RootAndSubworkflowLabelsLookupResponse | _: MetadataLookupStreamSuccess) => handleResponseFromMetadataWorker(response) } - case other => log.error(s"Programmer Error: Unexpected message $other received from $sender") + case other => log.error(s"Programmer Error: Unexpected message $other received from ${sender()}") } def handleResponseFromMetadataWorker(response: Any): Unit = { diff --git a/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataSchedulerActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataSchedulerActor.scala index e31fd09351e..1f5ac1f4460 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataSchedulerActor.scala +++ 
b/services/src/main/scala/cromwell/services/metadata/impl/archiver/ArchiveMetadataSchedulerActor.scala @@ -104,7 +104,7 @@ class ArchiveMetadataSchedulerActor(archiveMetadataConfig: ArchiveMetadataConfig scheduleNextWorkflowToArchive() }) case ShutdownCommand => context.stop(self) // TODO: cancel any streaming that might be happening? - case other => log.info(s"Programmer Error! The ArchiveMetadataSchedulerActor received unexpected message! ($sender sent ${other.toPrettyElidedString(1000)}})") + case other => log.info(s"Programmer Error! The ArchiveMetadataSchedulerActor received unexpected message! (${sender()} sent ${other.toPrettyElidedString(1000)}})") } def workflowsLeftToArchiveMetric(): Unit = { diff --git a/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataBuilderActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataBuilderActor.scala index 12fe06cafff..cc77522811f 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataBuilderActor.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataBuilderActor.scala @@ -106,7 +106,7 @@ object MetadataBuilderActor { * 1 -> Seq[Events], * 2 -> Seq[Events], * ... - * ), + * ), * ... * ), * ... @@ -309,11 +309,11 @@ class MetadataBuilderActor(readMetadataWorkerMaker: () => Props, metadataReadRow case Event(message, HasWorkData(target, _)) => log.error(s"Received unexpected message $message in state $stateName with target: $target") self ! PoisonPill - stay + stay() case Event(message, MetadataBuilderActor.HasReceivedEventsData(target, _, _, _, _, _)) => log.error(s"Received unexpected message $message in state $stateName with target: $target") self ! PoisonPill - stay + stay() } def processSubWorkflowMetadata(metadataResponse: MetadataJsonResponse, data: HasReceivedEventsData) = { diff --git a/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala b/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala index 14cf230e882..8f2697fe26a 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala @@ -51,12 +51,12 @@ object MetadataComponent { /* ******************************* */ /* *** Metadata Events Parsing *** */ /* ******************************* */ - + private val KeySeparator = MetadataKey.KeySeparator // Split on every unescaped KeySeparator val KeySplitter = s"(?<!\\\\)$KeySeparator" val ExecutionStatusOrdering: Ordering[MetadataPrimitive] = Ordering.by { primitive: MetadataPrimitive => ExecutionStatus.withName(primitive.v.value) - } + }(ExecutionStatus.ExecutionStatusOrdering) val WorkflowStateOrdering: Ordering[MetadataPrimitive] = Ordering.by { primitive: MetadataPrimitive => WorkflowState.withName(primitive.v.value) diff --git a/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataActor.scala b/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataActor.scala index 116456f9fc3..92b1fff49ee 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataActor.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/deleter/DeleteMetadataActor.scala @@ -79,7 +79,7 @@ class DeleteMetadataActor(deleteMetadataConfig: DeleteMetadataConfig, } case ShutdownCommand => context.stop(self) // TODO: cancel any deletion action that might be happening? - case other => log.info(s"Programmer Error! The DeleteMetadataSchedulerActor received unexpected message! 
($sender sent ${other.toPrettyElidedString(1000)}})") + case other => log.info(s"Programmer Error! The DeleteMetadataSchedulerActor received unexpected message! (${sender()} sent ${other.toPrettyElidedString(1000)}})") } def workflowsLeftToDeleteMetric(): Unit = { diff --git a/services/src/test/scala/cromwell/services/database/LiquibaseChangeSetSpec.scala b/services/src/test/scala/cromwell/services/database/LiquibaseChangeSetSpec.scala index 76d72001cc8..3943fb65423 100644 --- a/services/src/test/scala/cromwell/services/database/LiquibaseChangeSetSpec.scala +++ b/services/src/test/scala/cromwell/services/database/LiquibaseChangeSetSpec.scala @@ -8,7 +8,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.enablers.Emptiness._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.ExecutionContext class LiquibaseChangeSetSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { diff --git a/services/src/test/scala/cromwell/services/database/LiquibaseComparisonSpec.scala b/services/src/test/scala/cromwell/services/database/LiquibaseComparisonSpec.scala index 2b292c871ac..255237b396f 100644 --- a/services/src/test/scala/cromwell/services/database/LiquibaseComparisonSpec.scala +++ b/services/src/test/scala/cromwell/services/database/LiquibaseComparisonSpec.scala @@ -15,7 +15,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import slick.jdbc.GetResult -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.ExecutionContext import scala.concurrent.duration._ import scala.reflect._ @@ -225,7 +225,7 @@ class LiquibaseComparisonSpec extends AnyFlatSpec with CromwellTimeoutSpec with } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop } + containerOpt.foreach { _.stop() } } } } diff --git a/services/src/test/scala/cromwell/services/database/LobSpec.scala b/services/src/test/scala/cromwell/services/database/LobSpec.scala index 042d901eab2..eee252d31eb 100644 --- a/services/src/test/scala/cromwell/services/database/LobSpec.scala +++ b/services/src/test/scala/cromwell/services/database/LobSpec.scala @@ -201,7 +201,7 @@ class LobSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Sc } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop } + containerOpt.foreach { _.stop() } } } } diff --git a/services/src/test/scala/cromwell/services/database/MetadataSlickDatabaseSpec.scala b/services/src/test/scala/cromwell/services/database/MetadataSlickDatabaseSpec.scala index b1e4e6d77a2..3a8a320e1d2 100644 --- a/services/src/test/scala/cromwell/services/database/MetadataSlickDatabaseSpec.scala +++ b/services/src/test/scala/cromwell/services/database/MetadataSlickDatabaseSpec.scala @@ -207,7 +207,7 @@ class MetadataSlickDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec wit } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop } + containerOpt.foreach { _.stop() } } } diff --git a/services/src/test/scala/cromwell/services/database/QueryTimeoutSpec.scala b/services/src/test/scala/cromwell/services/database/QueryTimeoutSpec.scala index d6716ad46b9..621b14c8441 100644 --- a/services/src/test/scala/cromwell/services/database/QueryTimeoutSpec.scala +++ b/services/src/test/scala/cromwell/services/database/QueryTimeoutSpec.scala @@ -37,7 +37,7 @@ class 
QueryTimeoutSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop } + containerOpt.foreach { _.stop() } } } } diff --git a/services/src/test/scala/cromwell/services/database/RootAndSubworkflowLabelsSpec.scala b/services/src/test/scala/cromwell/services/database/RootAndSubworkflowLabelsSpec.scala index 19bffea6930..88fceef7063 100644 --- a/services/src/test/scala/cromwell/services/database/RootAndSubworkflowLabelsSpec.scala +++ b/services/src/test/scala/cromwell/services/database/RootAndSubworkflowLabelsSpec.scala @@ -80,7 +80,7 @@ class RootAndSubworkflowLabelsSpec extends AnyFlatSpec with CromwellTimeoutSpec } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop } + containerOpt.foreach { _.stop() } } } } diff --git a/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/TestStackdriverInstrumentationServiceActor.scala b/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/TestStackdriverInstrumentationServiceActor.scala index e804b3593ac..e1b708b6a72 100644 --- a/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/TestStackdriverInstrumentationServiceActor.scala +++ b/services/src/test/scala/cromwell/services/instrumentation/impl/stackdriver/TestStackdriverInstrumentationServiceActor.scala @@ -3,7 +3,7 @@ package cromwell.services.instrumentation.impl.stackdriver import akka.actor.ActorRef import com.google.monitoring.v3.CreateTimeSeriesRequest import com.typesafe.config.Config -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class TestStackdriverInstrumentationServiceActor(serviceConfig: Config, globalConfig: Config, serviceRegistryActor: ActorRef) extends StackdriverInstrumentationServiceActor(serviceConfig, globalConfig, serviceRegistryActor) { diff --git a/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorBenchmarkSpec.scala b/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorBenchmarkSpec.scala index d9e405a1b60..82b3183faef 100644 --- a/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorBenchmarkSpec.scala +++ b/services/src/test/scala/cromwell/services/instrumentation/impl/statsd/StatsDInstrumentationServiceActorBenchmarkSpec.scala @@ -12,7 +12,7 @@ import org.scalatest.concurrent.Eventually import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ class StatsDInstrumentationServiceActorBenchmarkSpec extends TestKitSuite with AnyFlatSpecLike with BeforeAndAfterAll with Matchers with Eventually { @@ -35,7 +35,7 @@ class StatsDInstrumentationServiceActorBenchmarkSpec extends TestKitSuite with A it should "have good throughput for gauges" in { val instrumentationActor = TestActorRef(new StatsDInstrumentationServiceActor(config, ConfigFactory.load(), registryProbe)) val gaugeName = instrumentationActor.underlyingActor.metricBaseName.append(testBucket.toStatsDString()).name - Stream.range(0, 1 * 1000 * 1000, 1).foreach({ i => + LazyList.range(0, 1 * 1000 * 1000, 1).foreach({ i => instrumentationActor ! 
InstrumentationServiceMessage(CromwellGauge(testBucket, i.toLong)) }) eventually { diff --git a/services/src/test/scala/cromwell/services/keyvalue/InMemoryKvServiceActor.scala b/services/src/test/scala/cromwell/services/keyvalue/InMemoryKvServiceActor.scala index 8670bcc6612..c4f0c6177c0 100644 --- a/services/src/test/scala/cromwell/services/keyvalue/InMemoryKvServiceActor.scala +++ b/services/src/test/scala/cromwell/services/keyvalue/InMemoryKvServiceActor.scala @@ -12,8 +12,8 @@ final class InMemoryKvServiceActor extends KeyValueServiceActor { var kvStore = Map.empty[ScopedKey, String] override def receive = { - case get: KvGet => respond(sender, get, doGet(get)) - case put: KvPut => respond(sender, put, doPut(put)) + case get: KvGet => respond(sender(), get, doGet(get)) + case put: KvPut => respond(sender(), put, doPut(put)) } def doGet(get: KvGet): Future[KvResponse] = kvStore.get(get.key) match { diff --git a/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala b/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala index 813079480dc..e673b6f012f 100644 --- a/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala +++ b/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala @@ -116,7 +116,7 @@ class KeyValueDatabaseSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat } it should "stop container" taggedAs DbmsTest in { - containerOpt.foreach { _.stop } + containerOpt.foreach { _.stop() } } } } diff --git a/services/src/test/scala/cromwell/services/metadata/MetadataQuerySpec.scala b/services/src/test/scala/cromwell/services/metadata/MetadataQuerySpec.scala index 95faf42f74c..5a908aaf092 100644 --- a/services/src/test/scala/cromwell/services/metadata/MetadataQuerySpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/MetadataQuerySpec.scala @@ -40,7 +40,7 @@ object MetadataQuerySpec { final class MetadataServiceActor_CustomizeRead(config: Config, serviceRegistryActor: ActorRef, readWorkerMaker: () => Props) extends MetadataServiceActor(MetadataServiceActorSpec.globalConfigToMetadataServiceConfig(config), config, serviceRegistryActor) { - override def readMetadataWorkerActorProps(): Props = readWorkerMaker.apply.withDispatcher(cromwell.core.Dispatcher.ServiceDispatcher) + override def readMetadataWorkerActorProps(): Props = readWorkerMaker.apply().withDispatcher(cromwell.core.Dispatcher.ServiceDispatcher) } object MetadataServiceActor_CustomizeRead { @@ -53,7 +53,7 @@ object MetadataQuerySpec { final class CannedResponseReadMetadataWorker(cannedResponses: Map[BuildMetadataJsonAction, MetadataServiceResponse]) extends Actor { override def receive: Receive = { - case msg: BuildMetadataJsonAction => sender ! cannedResponses.getOrElse(msg, throw new Exception(s"Unexpected inbound message: $msg")) + case msg: BuildMetadataJsonAction => sender() ! 
cannedResponses.getOrElse(msg, throw new Exception(s"Unexpected inbound message: $msg")) } } } diff --git a/services/src/test/scala/cromwell/services/metadata/QueryForWorkflowsMatchingParametersSpec.scala b/services/src/test/scala/cromwell/services/metadata/QueryForWorkflowsMatchingParametersSpec.scala index 80874fed155..344b01ef410 100644 --- a/services/src/test/scala/cromwell/services/metadata/QueryForWorkflowsMatchingParametersSpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/QueryForWorkflowsMatchingParametersSpec.scala @@ -21,15 +21,15 @@ class QueryForWorkflowsMatchingParametersSpec extends AnyWordSpec with CromwellT val result = WorkflowQueryParameters.runValidation(Seq.empty) result match { case Valid(r) => - r.startDate should be('empty) - r.endDate should be('empty) - r.names should be('empty) - r.statuses should be('empty) - r.labelsAnd should be ('empty) - r.labelsOr should be ('empty) - r.excludeLabelsAnd should be ('empty) - r.excludeLabelsOr should be ('empty) - r.submissionTime should be('empty) + r.startDate should be(Symbol("empty")) + r.endDate should be(Symbol("empty")) + r.names should be(Symbol("empty")) + r.statuses should be(Symbol("empty")) + r.labelsAnd should be (Symbol("empty")) + r.labelsOr should be (Symbol("empty")) + r.excludeLabelsAnd should be (Symbol("empty")) + r.excludeLabelsOr should be (Symbol("empty")) + r.submissionTime should be(Symbol("empty")) case Invalid(fs) => throw new RuntimeException(fs.toList.mkString(", ")) } @@ -221,7 +221,7 @@ class QueryForWorkflowsMatchingParametersSpec extends AnyWordSpec with CromwellT fs.toList.head should include("Label values do not match allowed pattern label-key:label-value") } } - + "valid labels with invalid format for OR" in { val goodLabelKey = "0-label-key" val rawParameters = Seq( @@ -297,9 +297,9 @@ class QueryForWorkflowsMatchingParametersSpec extends AnyWordSpec with CromwellT throw new RuntimeException(s"Unexpected success: $r") case Invalid(fs) => fs.toList should have size 3 - fs.toList find { _ == "Unrecognized status values: Moseying" } getOrElse fail - fs.toList find { _ contains "does not parse as a datetime" } getOrElse fail - fs.toList find { _ contains "Name values do not match allowed workflow naming pattern" } getOrElse fail + fs.toList find { _ == "Unrecognized status values: Moseying" } getOrElse fail() + fs.toList find { _ contains "does not parse as a datetime" } getOrElse fail() + fs.toList find { _ contains "Name values do not match allowed workflow naming pattern" } getOrElse fail() } } } diff --git a/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala b/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala index 5d5e87cc961..b5653ec4c5c 100644 --- a/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala @@ -597,7 +597,7 @@ class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec wi } it should "stop container if required" taggedAs DbmsTest in { - containerOpt.foreach { _.stop } + containerOpt.foreach { _.stop() } } } } diff --git a/services/src/test/scala/cromwell/services/metadata/impl/MetadataServiceActorSpec.scala b/services/src/test/scala/cromwell/services/metadata/impl/MetadataServiceActorSpec.scala index eed3dacf79b..22ff8c78d5e 100644 --- a/services/src/test/scala/cromwell/services/metadata/impl/MetadataServiceActorSpec.scala +++ 
b/services/src/test/scala/cromwell/services/metadata/impl/MetadataServiceActorSpec.scala @@ -53,7 +53,7 @@ class MetadataServiceActorSpec extends ServicesSpec { val event3_1: MetadataEvent = MetadataEvent(key3, Option(MetadataValue("value3")), moment.plusSeconds(4)) val event3_2: MetadataEvent = MetadataEvent(key3, None, moment.plusSeconds(5)) - override def beforeAll: Unit = { + override def beforeAll(): Unit = { // Even though event1_1 arrives second, the older timestamp should mean it does not replace event1_2: val putAction2 = PutMetadataAction(event1_2) diff --git a/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorBenchmark.scala b/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorBenchmark.scala index aa1ab48d9d0..96418047723 100644 --- a/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorBenchmark.scala +++ b/services/src/test/scala/cromwell/services/metadata/impl/WriteMetadataActorBenchmark.scala @@ -67,6 +67,6 @@ class WriteMetadataActorBenchmark extends TestKitSuite with AnyFlatSpecLike with } it should "stop container if required" taggedAs IntegrationTest in { - containerOpt.foreach { _.stop } + containerOpt.foreach { _.stop() } } } diff --git a/services/src/test/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActorSpec.scala b/services/src/test/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActorSpec.scala index ad5c46527f3..3caee16eb11 100644 --- a/services/src/test/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActorSpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/impl/pubsub/PubSubMetadataServiceActorSpec.scala @@ -20,7 +20,7 @@ class PubSubMetadataServiceActorSpec extends ServicesSpec { import PubSubMetadataServiceActorSpec._ val registryProbe: ActorRef = TestProbe("registryProbe").ref - + "A PubSubMetadataActor with an empty serviceConfig" should { "fail to build" in { EventFilter[ActorInitializationException](occurrences = 1) intercept { @@ -203,22 +203,22 @@ object PubSubMetadataServiceActorSpec { override def createTopic(topicName: String): Future[Boolean] override def createSubscription(topicName: String, subscriptionName: String): Future[Boolean] - override def publishMessages(topicName: String, messages: Seq[String]): Future[Unit] + override def publishMessages(topicName: String, messages: scala.collection.Seq[String]): Future[Unit] // The following aren't used so leaving them empty override def deleteTopic(topicName: String): Future[Boolean] = throw new UnsupportedOperationException override def getTopic(topicName: String)(implicit executionContext: ExecutionContext): Future[Option[Topic]] = throw new UnsupportedOperationException override def deleteSubscription(subscriptionName: String): Future[Boolean] = throw new UnsupportedOperationException - override def acknowledgeMessages(subscriptionName: String, messages: Seq[PubSubMessage]): Future[Unit] = throw new UnsupportedOperationException - override def acknowledgeMessagesById(subscriptionName: String, ackIds: Seq[String]): Future[Unit] = throw new UnsupportedOperationException - override def pullMessages(subscriptionName: String, maxMessages: Int): Future[Seq[PubSubMessage]] = throw new UnsupportedOperationException + override def acknowledgeMessages(subscriptionName: String, messages: scala.collection.Seq[PubSubMessage]): Future[Unit] = throw new UnsupportedOperationException + override def acknowledgeMessagesById(subscriptionName: String, ackIds: 
scala.collection.Seq[String]): Future[Unit] = throw new UnsupportedOperationException + override def pullMessages(subscriptionName: String, maxMessages: Int): Future[scala.collection.Seq[PubSubMessage]] = throw new UnsupportedOperationException override def setTopicIamPermissions(topicName: String, permissions: Map[WorkbenchEmail, String]): Future[Unit] = throw new UnsupportedOperationException } class SuccessfulMockGooglePubSubDao extends MockGooglePubSubDao { override def createTopic(topicName: String): Future[Boolean] = Future.successful(true) override def createSubscription(topicName: String, subscriptionName: String): Future[Boolean] = Future.successful(true) - override def publishMessages(topicName: String, messages: Seq[String]): Future[Unit] = Future.successful(()) + override def publishMessages(topicName: String, messages: scala.collection.Seq[String]): Future[Unit] = Future.successful(()) } class FailingToCreateTopicMockGooglePubSubDao extends SuccessfulMockGooglePubSubDao { @@ -226,7 +226,7 @@ object PubSubMetadataServiceActorSpec { } class FailToPublishMockGooglePubSubDao extends SuccessfulMockGooglePubSubDao { - override def publishMessages(topicName: String, messages: Seq[String]): Future[Unit] = Future.failed(new RuntimeException("sorry charlie")) + override def publishMessages(topicName: String, messages: scala.collection.Seq[String]): Future[Unit] = Future.failed(new RuntimeException("sorry charlie")) } // This doesn't include a project so should be a failure diff --git a/services/src/test/scala/cromwell/services/womtool/DescriberSpec.scala b/services/src/test/scala/cromwell/services/womtool/DescriberSpec.scala index 467650684e0..95a0f64fd96 100644 --- a/services/src/test/scala/cromwell/services/womtool/DescriberSpec.scala +++ b/services/src/test/scala/cromwell/services/womtool/DescriberSpec.scala @@ -56,7 +56,7 @@ class DescriberSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { case UrlAndDescription(url, _) => interimWsfc.copy(workflowUrl = Option(url)) } - check(wsfc, parse(testCase.expectedDescription).right.get) + check(wsfc, parse(testCase.expectedDescription).toOption.get) } } } diff --git a/src/ci/bin/test.inc.sh b/src/ci/bin/test.inc.sh index b1caa9d5654..eb52ab58ad1 100644 --- a/src/ci/bin/test.inc.sh +++ b/src/ci/bin/test.inc.sh @@ -51,10 +51,30 @@ cromwell::private::set_variable_if_only_some_files_changed() { if [[ "${TRAVIS_EVENT_TYPE:-unset}" != "pull_request" ]]; then export "${variable_to_set}=false" - elif git diff --name-only "origin/${TRAVIS_BRANCH}" 2>&1 | grep -E -q --invert-match "${files_changed_regex}"; then - export "${variable_to_set}=false" else + # Large changesets seem to trigger the situation described in [1] where a `git diff` pipelined to `grep` can cause + # `grep` to exit 0 on the first match while `git diff` is still writing to the pipe. When this happens `git diff` + # is killed with a SIGPIPE and exits with code 141. With `set -o pipefail` [2] this causes the entire pipeline to exit + # with code 141, which sets `$variable_to_set` to `true` when it probably should have been set to `false`. + # + # Instead of composing these commands into a pipeline, write to a temporary file. 
+ # + # [1] https://stackoverflow.com/a/19120674 + # [2] https://gist.github.com/mohanpedala/1e2ff5661761d3abd0385e8223e16425#set--o-pipefail + + files_changed_temporary_file=$(mktemp) + + git diff --name-only "origin/${TRAVIS_BRANCH}" > "${files_changed_temporary_file}" 2>&1 && \ + grep -E -q --invert-match "${files_changed_regex}" "${files_changed_temporary_file}" + RESULT=$? + + if [[ $RESULT -eq 0 ]]; then + export "${variable_to_set}=false" + else export "${variable_to_set}=true" + fi + + rm "${files_changed_temporary_file}" fi } @@ -1132,7 +1152,7 @@ cromwell::private::make_build_directories() { cromwell::private::find_cromwell_jar() { CROMWELL_BUILD_CROMWELL_JAR="$( \ - find "${CROMWELL_BUILD_ROOT_DIRECTORY}/server/target/scala-2.12" -name "cromwell-*.jar" -print0 \ + find "${CROMWELL_BUILD_ROOT_DIRECTORY}/server/target/scala-2.13" -name "cromwell-*.jar" -print0 \ | xargs -0 ls -1 -t \ | head -n 1 \ 2> /dev/null \ diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActor.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActor.scala index 4fdba3b568c..fed3b2c09e0 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActor.scala @@ -163,7 +163,7 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar */ lazy val cmdScript = configuration.fileSystem match { - case AWSBatchStorageSystems.s3 => commandScriptContents.toEither.right.get + case AWSBatchStorageSystems.s3 => commandScriptContents.toEither.toOption.get case _ => execScript } @@ -428,7 +428,7 @@ class AwsBatchAsyncBackendJobExecutionActor(override val standardParams: Standar for { //upload the command script _ <- uploadScriptFile() - completionPromise = Promise[SubmitJobResponse] + completionPromise = Promise[SubmitJobResponse]() //send a message to the Actor requesting a job submission _ = backendSingletonActor ! 
SubmitAwsJobRequest(batchJob, attributes, completionPromise) //the future response of the submit job request diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAttributes.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAttributes.scala index 26f69c4e79a..e0dd8951550 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAttributes.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchAttributes.scala @@ -31,28 +31,26 @@ package cromwell.backend.impl.aws -import java.net.{URI, URL} - import cats.data.Validated._ -import cats.syntax.either._ import cats.syntax.apply._ +import cats.syntax.either._ import com.typesafe.config.{Config, ConfigValue} -import cromwell.cloudsupport.aws.auth.AwsAuthMode -import cromwell.backend.impl.aws.callcaching.{AwsBatchCacheHitDuplicationStrategy, CopyCachedOutputs, UseOriginalCachedOutputs} -import cromwell.cloudsupport.aws.AwsConfiguration import common.exception.MessageAggregation import common.validation.ErrorOr._ import common.validation.Validation._ import cromwell.backend.CommonBackendConfigurationAttributes -import eu.timepit.refined.api.Refined -import eu.timepit.refined.api._ +import cromwell.backend.impl.aws.callcaching.{AwsBatchCacheHitDuplicationStrategy, CopyCachedOutputs, UseOriginalCachedOutputs} +import cromwell.cloudsupport.aws.AwsConfiguration +import cromwell.cloudsupport.aws.auth.AwsAuthMode import eu.timepit.refined._ +import eu.timepit.refined.api.Refined import eu.timepit.refined.numeric._ import net.ceedubs.ficus.Ficus._ import net.ceedubs.ficus.readers.{StringReader, ValueReader} import org.slf4j.{Logger, LoggerFactory} -import scala.collection.JavaConverters._ +import java.net.{URI, URL} +import scala.jdk.CollectionConverters._ case class AwsBatchAttributes(fileSystem: String, auth: AwsAuthMode, diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchInitializationActor.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchInitializationActor.scala index 93f06cd3477..fe3668f9b00 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchInitializationActor.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchInitializationActor.scala @@ -71,12 +71,12 @@ class AwsBatchInitializationActor(params: AwsBatchInitializationActorParams) override def beforeAll(): Future[Option[BackendInitializationData]] = { configuration.fileSystem match { - case AWSBatchStorageSystems.s3 => super.beforeAll - case _ => { + case AWSBatchStorageSystems.s3 => super.beforeAll() + case _ => { initializationData map { data => publishWorkflowRoot(data.workflowPaths.workflowRoot.pathAsString) Option(data) - } + } } } } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJob.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJob.scala index a22cf25f307..de4617ad786 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJob.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJob.scala @@ -53,9 +53,8 @@ import software.amazon.awssdk.services.s3.S3Client import software.amazon.awssdk.services.s3.model.{GetObjectRequest, HeadObjectRequest, NoSuchKeyException, PutObjectRequest} import wdl4s.parser.MemoryUnit -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ 
-import scala.language.higherKinds import scala.util.{Random, Try} /** @@ -121,7 +120,7 @@ final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL //internal to the container, therefore not mounted val workDir = "/tmp/scratch" //working in a mount will cause collisions in long running workers - val replaced = commandScript.replaceAllLiterally(AwsBatchWorkingDisk.MountPoint.pathAsString, workDir) + val replaced = commandScript.replace(AwsBatchWorkingDisk.MountPoint.pathAsString, workDir) val insertionPoint = replaced.indexOf("\n", replaced.indexOf("#!")) +1 //just after the new line after the shebang! /* generate a series of s3 copy statements to copy any s3 files into the container. We randomize the order @@ -137,12 +136,12 @@ final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL case input: AwsBatchFileInput if input.s3key.startsWith("s3://") => s"$s3Cmd cp --no-progress ${input.s3key} ${input.mount.mountPoint.pathAsString}/${input.local}" - .replaceAllLiterally(AwsBatchWorkingDisk.MountPoint.pathAsString, workDir) + .replace(AwsBatchWorkingDisk.MountPoint.pathAsString, workDir) case input: AwsBatchFileInput => //here we don't need a copy command but the centaurTests expect us to verify the existence of the file val filePath = s"${input.mount.mountPoint.pathAsString}/${input.local.pathAsString}" - .replaceAllLiterally(AwsBatchWorkingDisk.MountPoint.pathAsString, workDir) + .replace(AwsBatchWorkingDisk.MountPoint.pathAsString, workDir) s"test -e $filePath || echo 'input file: $filePath does not exist' && exit 1" @@ -466,7 +465,7 @@ final case class AwsBatchJob(jobDescriptor: BackendJobDescriptor, // WDL/CWL .logGroupName("/aws/batch/job") .logStreamName(detail.container.logStreamName) .startFromHead(true) - .build).events.asScala + .build).events.asScala.toList val eventMessages = for ( event <- events ) yield event.message eventMessages mkString "\n" } diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobDefinition.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobDefinition.scala index ae356b89772..549d3c65185 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobDefinition.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchJobDefinition.scala @@ -37,7 +37,7 @@ import cromwell.backend.io.JobPaths import software.amazon.awssdk.services.batch.model.{ContainerProperties, Host, KeyValuePair, MountPoint, ResourceRequirement, ResourceType, Volume} import cromwell.backend.impl.aws.io.AwsBatchVolume -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import java.security.MessageDigest import org.apache.commons.lang3.builder.{ToStringBuilder, ToStringStyle} import org.slf4j.{Logger, LoggerFactory} @@ -188,6 +188,7 @@ trait AwsBatchJobDefinitionBuilder { rc += shell rc += options rc += packedCommand + rc.toList } def build(context: AwsBatchJobDefinitionContext): AwsBatchJobDefinition diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributes.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributes.scala index c6fc2a5f51f..ca0eeb9b10c 100755 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributes.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributes.scala @@ -317,7 +317,7 @@ class ArnValidation(override val key: String) extends 
StringRuntimeAttributesVal object ZonesValidation extends RuntimeAttributesValidation[Vector[String]] { override def key: String = AwsBatchRuntimeAttributes.ZonesKey - override def coercion: Traversable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) + override def coercion: Iterable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) override protected def validateValue: PartialFunction[WomValue, ErrorOr[Vector[String]]] = { case WomString(s) => s.split("\\s+").toVector.validNel @@ -332,7 +332,7 @@ object ZonesValidation extends RuntimeAttributesValidation[Vector[String]] { object DisksValidation extends RuntimeAttributesValidation[Seq[AwsBatchVolume]] { override def key: String = AwsBatchRuntimeAttributes.DisksKey - override def coercion: Traversable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) + override def coercion: Iterable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) override protected def validateValue: PartialFunction[WomValue, ErrorOr[Seq[AwsBatchVolume]]] = { case WomString(value) => validateLocalDisks(value.split(",\\s*").toSeq) diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/OccasionalStatusPollingActor.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/OccasionalStatusPollingActor.scala index f7281a313f2..152130745f5 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/OccasionalStatusPollingActor.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/OccasionalStatusPollingActor.scala @@ -9,7 +9,7 @@ import software.amazon.awssdk.services.batch.BatchClient import software.amazon.awssdk.services.batch.model.ListJobsRequest import scala.annotation.tailrec -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success, Try} @@ -45,7 +45,7 @@ class OccasionalStatusPollingActor(configRegion: Option[Region], optAwsAuthMode: override def receive = { case WhatsMyStatus(queueArn, jobId) => queuesToMonitor += queueArn // Set addition so expectation is a no-op almost every time - sender ! NotifyOfStatus(queueArn, jobId, statuses.get(jobId)) + sender() ! 
NotifyOfStatus(queueArn, jobId, statuses.get(jobId)) case UpdateStatuses => Future { diff --git a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/package.scala b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/package.scala index 48d970e2b67..9b171741471 100644 --- a/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/package.scala +++ b/supportedBackends/aws/src/main/scala/cromwell/backend/impl/aws/package.scala @@ -1,8 +1,5 @@ package cromwell.backend.impl -import java.io.ByteArrayOutputStream -import java.util.zip.GZIPOutputStream - import cats.data.ReaderT import com.google.common.io.BaseEncoding import cromwell.cloudsupport.aws.auth.AwsAuthMode @@ -10,7 +7,8 @@ import software.amazon.awssdk.awscore.client.builder.AwsClientBuilder import software.amazon.awssdk.regions.Region import software.amazon.awssdk.services.batch.model.KeyValuePair -import scala.language.higherKinds +import java.io.ByteArrayOutputStream +import java.util.zip.GZIPOutputStream package object aws { diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala index 5037fc21051..82e0babae80 100644 --- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala +++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala @@ -145,7 +145,7 @@ class AwsBatchJobSpec extends TestKitSuite with AnyFlatSpecLike with Matchers wi it should "generate appropriate KV pairs for the container environment for S3" in { val job = generateBasicJob - val generateEnvironmentKVPairs = PrivateMethod[List[KeyValuePair]]('generateEnvironmentKVPairs) + val generateEnvironmentKVPairs = PrivateMethod[List[KeyValuePair]](Symbol("generateEnvironmentKVPairs")) // testing a private method see https://www.scalatest.org/user_guide/using_PrivateMethodTester val kvPairs = job invokePrivate generateEnvironmentKVPairs("script-bucket", "prefix-", "key") @@ -158,7 +158,7 @@ class AwsBatchJobSpec extends TestKitSuite with AnyFlatSpecLike with Matchers wi it should "generate appropriate KV pairs for the container environment for Local FS" in { val job = generateBasicJobForLocalFS - val generateEnvironmentKVPairs = PrivateMethod[List[KeyValuePair]]('generateEnvironmentKVPairs) + val generateEnvironmentKVPairs = PrivateMethod[List[KeyValuePair]](Symbol("generateEnvironmentKVPairs")) // testing a private method see https://www.scalatest.org/user_guide/using_PrivateMethodTester val kvPairs = job invokePrivate generateEnvironmentKVPairs("script-bucket", "prefix-", "key") diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala index 09e1ee94351..ace177ea007 100644 --- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala +++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala @@ -150,13 +150,13 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout } "validate a valid continueOnReturnCode array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), Array(WomInteger(1), WomInteger(2)))) + val runtimeAttributes = Map("docker" -> 
WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2)))) val expectedRuntimeAttributes = expectedDefaults.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "coerce then validate a valid continueOnReturnCode array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), Array(WomString("1"), WomString("2")))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), List(WomString("1"), WomString("2")))) val expectedRuntimeAttributes = expectedDefaults.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } @@ -206,13 +206,13 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout } "validate a valid array zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "zones" -> WomArray(WomArrayType(WomStringType), Array(WomString("us-east-1a"), WomString("us-east-1b")))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "zones" -> WomArray(WomArrayType(WomStringType), List(WomString("us-east-1a"), WomString("us-east-1b")))) val expectedRuntimeAttributes = expectedDefaults.copy(zones = Vector("us-east-1a", "us-east-1b")) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid array zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "zones" -> WomArray(WomArrayType(WomIntegerType), Array(WomInteger(1), WomInteger(2)))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "zones" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2)))) assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]") } @@ -228,14 +228,14 @@ class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeout } "validate a valid disks array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "disks" -> WomArray(WomArrayType(WomStringType), Array(WomString("local-disk"), WomString("local-disk")))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "scriptBucketName" -> WomString("my-stuff"), "disks" -> WomArray(WomArrayType(WomStringType), List(WomString("local-disk"), WomString("local-disk")))) val expectedRuntimeAttributes = expectedDefaults.copy(disks = Seq(AwsBatchVolume.parse("local-disk").get, AwsBatchVolume.parse("local-disk").get)) assertAwsBatchRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } // TODO: This is working ok (appropriate error messages), though test is throwing due to message inconsistency // "fail to validate a valid disks array entry" in { - // val runtimeAttributes = Map("docker" 
-> WomString("ubuntu:latest"), "disks" -> WomArray(WomArrayType(WomStringType), Array(WomString("blah"), WomString("blah blah")))) + // val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomArray(WomArrayType(WomStringType), List(WomString("blah"), WomString("blah blah")))) // assertAwsBatchRuntimeAttributesFailedCreation(runtimeAttributes, "Disk strings should be of the format 'local-disk' or '/mount/point' but got 'blah blah'") // } diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsAsyncBackendJobExecutionActor.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsAsyncBackendJobExecutionActor.scala index 379f553010f..73d622eaecd 100644 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsAsyncBackendJobExecutionActor.scala +++ b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsAsyncBackendJobExecutionActor.scala @@ -21,6 +21,7 @@ import wom.types.WomSingleFileType import wom.values._ import mouse.all._ +import scala.annotation.nowarn import scala.concurrent.Future import scala.concurrent.duration._ import scala.util.{Success, Try} @@ -311,6 +312,7 @@ final class BcsAsyncBackendJobExecutionActor(override val standardParams: Standa bcsJobPaths.workerPath } + @nowarn("msg=a type was inferred to be `Object`; this may indicate a programming error.") override def executeAsync(): Future[ExecutionHandle] = { commandScriptContents.fold( errors => Future.failed(new RuntimeException(errors.toList.mkString(", "))), diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJob.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJob.scala index 39aede7233d..c19e1228864 100644 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJob.scala +++ b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJob.scala @@ -6,7 +6,7 @@ import com.aliyuncs.batchcompute.pojo.v20151111._ import cromwell.core.ExecutionEvent import cromwell.core.path.Path -import collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} object BcsJob{ diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsRuntimeAttributes.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsRuntimeAttributes.scala index 44f10a1c2fe..da73c3ef747 100644 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsRuntimeAttributes.scala +++ b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsRuntimeAttributes.scala @@ -176,7 +176,7 @@ object MountsValidation { class MountsValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[Seq[BcsMount]] with OptionalWithDefault[Seq[BcsMount]] { override def key: String = BcsRuntimeAttributes.MountsKey - override def coercion: Traversable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) + override def coercion: Iterable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) override protected def validateValue: PartialFunction[WomValue, ErrorOr[Seq[BcsMount]]] = { case WomString(value) => validateMounts(value.split(",\\s*").toSeq) @@ -218,7 +218,7 @@ class UserDataValidation(override val config: Option[Config]) extends RuntimeAtt override def usedInCallCaching: Boolean = true - override def coercion: Traversable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) + override def coercion: Iterable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) 
override protected def validateValue: PartialFunction[WomValue, ErrorOr[Seq[BcsUserData]]] = { case WomString(value) => validateUserData(value.split(",\\s*").toSeq) @@ -284,7 +284,7 @@ class ClusterValidation(override val config: Option[Config]) extends RuntimeAttr { override def key: String = "cluster" - override def coercion: Traversable[WomType] = Set(WomStringType) + override def coercion: Iterable[WomType] = Set(WomStringType) override def validateValue: PartialFunction[WomValue, ErrorOr[BcsClusterIdOrConfiguration]] = { case WomString(s) => BcsClusterIdOrConfiguration.parse(s.toString) match { @@ -301,7 +301,7 @@ object SystemDiskValidation { class SystemDiskValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[BcsSystemDisk] with OptionalWithDefault[BcsSystemDisk] { override def key: String = "systemDisk" - override def coercion: Traversable[WomType] = Set(WomStringType) + override def coercion: Iterable[WomType] = Set(WomStringType) override def validateValue: PartialFunction[WomValue, ErrorOr[BcsSystemDisk]] = { case WomString(s) => BcsDisk.parse(s.toString) match { case Success(disk: BcsSystemDisk) => disk.validNel @@ -317,7 +317,7 @@ object DataDiskValidation { class DataDiskValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[BcsDataDisk] with OptionalWithDefault[BcsDataDisk] { override def key: String = "dataDisk" - override def coercion: Traversable[WomType] = Set(WomStringType) + override def coercion: Iterable[WomType] = Set(WomStringType) override def validateValue: PartialFunction[WomValue, ErrorOr[BcsDataDisk]] = { case WomString(s) => BcsDisk.parse(s.toString) match { case Success(disk: BcsDataDisk) => disk.validNel @@ -333,7 +333,7 @@ object DockerTagValidation { class DockerTagValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[BcsDocker] with OptionalWithDefault[BcsDocker] { override def key: String = "dockerTag" - override def coercion: Traversable[WomType] = Set(WomStringType) + override def coercion: Iterable[WomType] = Set(WomStringType) override def validateValue: PartialFunction[WomValue, ErrorOr[BcsDocker]] = { case WomString(s) => BcsDocker.parse(s.toString) match { case Success(docker: BcsDocker) => docker.validNel @@ -359,7 +359,7 @@ object VpcValidation { class VpcValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[BcsVpcConfiguration] with OptionalWithDefault[BcsVpcConfiguration] { override def key: String = "vpc" - override def coercion: Traversable[WomType] = Set(WomStringType) + override def coercion: Iterable[WomType] = Set(WomStringType) override def validateValue: PartialFunction[WomValue, ErrorOr[BcsVpcConfiguration]] = { case WomString(s) => BcsVpcConfiguration.parse(s.toString) match { case Success(vpc: BcsVpcConfiguration) => vpc.validNel diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobSpec.scala index 13ed05bbb0c..84ed4c4aa58 100644 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobSpec.scala +++ b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobSpec.scala @@ -59,7 +59,7 @@ class BcsJobSpec extends BcsTestUtilSpec { it should "have correct docker option" in { val dockerImage = "ubuntu/latest" - val dockerPath = "oss://bcs-reg/ubuntu/"toLowerCase() + val dockerPath = "oss://bcs-reg/ubuntu/".toLowerCase() val runtime = Map("dockerTag" -> WomString(s"$dockerImage 
$dockerPath")) taskWithRuntime(runtime).getParameters.getCommand.getEnvVars.get(BcsJob.BcsDockerImageEnvKey) shouldEqual null taskWithRuntime(runtime).getParameters.getCommand.getEnvVars.get(BcsJob.BcsDockerPathEnvKey) shouldEqual null diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuTypeValidation.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuTypeValidation.scala index b9a3ba17d68..540888f9180 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuTypeValidation.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/GpuTypeValidation.scala @@ -16,7 +16,7 @@ object GpuTypeValidation { class GpuTypeValidation extends RuntimeAttributesValidation[GpuType] { override def key = RuntimeAttributesKeys.GpuTypeKey - override def coercion: Traversable[WomType] = Set(WomStringType) + override def coercion: Iterable[WomType] = Set(WomStringType) override def validateValue: PartialFunction[WomValue, ErrorOr[GpuType]] = { case WomString(s) => GpuType(s).validNel case other => s"Invalid '$key': String value required but got ${other.womType.friendlyName}. See ${GpuType.MoreDetailsURL} for a list of options".invalidNel diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributes.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributes.scala index 21ca513570d..adfaa54c1d5 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributes.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributes.scala @@ -24,7 +24,7 @@ import net.ceedubs.ficus.Ficus._ import org.slf4j.{Logger, LoggerFactory} import java.net.URL -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ import scala.util.matching.Regex import scala.util.{Failure, Success, Try} @@ -194,7 +194,7 @@ object PipelinesApiConfigurationAttributes val pipelineTimeout: FiniteDuration = backendConfig.getOrElse("pipeline-timeout", 7.days) val logFlushPeriod: Option[FiniteDuration] = backendConfig.as[Option[FiniteDuration]]("log-flush-period") match { - case Some(duration) if duration.isFinite() => Option(duration) + case Some(duration) if duration.isFinite => Option(duration) // "Inf" disables upload case Some(_) => None // Defaults to 1 minute diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperations.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperations.scala index f1b1cd92829..d190325f0cd 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperations.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiReferenceFilesMappingOperations.scala @@ -52,7 +52,7 @@ trait PipelinesApiReferenceFilesMappingOperations { def getReferenceDisksToMount(referenceFileToDiskImageMapping: 
Map[String, PipelinesApiReferenceFilesDisk], inputFilePaths: Set[String]): List[PipelinesApiReferenceFilesDisk] = { - referenceFileToDiskImageMapping.filterKeys(key => inputFilePaths.contains(s"gs://$key")).values.toList.distinct + referenceFileToDiskImageMapping.view.filterKeys(key => inputFilePaths.contains(s"gs://$key")).values.toList.distinct } private def getReferenceFileToValidatedGcsPathMap(referenceFiles: Set[ReferenceFile]): IO[Map[ReferenceFile, ValidFullGcsPath]] = { @@ -106,16 +106,16 @@ trait PipelinesApiReferenceFilesMappingOperations { for { referenceFilesWithValidPaths <- getReferenceFileToValidatedGcsPathMap(allReferenceFilesFromManifestMap.keySet) filesWithValidatedCrc32cs <- bulkValidateCrc32cs(gcsClient, referenceFilesWithValidPaths) - } yield allReferenceFilesFromManifestMap.filterKeys(key => filesWithValidatedCrc32cs.getOrElse(key, false)) + } yield allReferenceFilesFromManifestMap.view.filterKeys(key => filesWithValidatedCrc32cs.getOrElse(key, false)) validReferenceFilesFromManifestMapIo map { validReferenceFilesFromManifestMap => val invalidReferenceFiles = allReferenceFilesFromManifestMap.keySet -- validReferenceFilesFromManifestMap.keySet if (invalidReferenceFiles.nonEmpty) { logger.warn(s"The following files listed in references manifest have checksum mismatch with actual files in GCS: ${invalidReferenceFiles.mkString(",")}") } - validReferenceFilesFromManifestMap map { + validReferenceFilesFromManifestMap.map { case (refFile, disk) => (refFile.path, disk) - } + }.toMap } } } diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala index 83682f42c6f..f1a8767ee5b 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributes.scala @@ -239,7 +239,7 @@ object PipelinesApiRuntimeAttributes { object ZonesValidation extends RuntimeAttributesValidation[Vector[String]] { override def key: String = PipelinesApiRuntimeAttributes.ZonesKey - override def coercion: Traversable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) + override def coercion: Iterable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) override protected def validateValue: PartialFunction[WomValue, ErrorOr[Vector[String]]] = { case WomString(s) => s.split("\\s+").toVector.validNel @@ -254,7 +254,7 @@ object ZonesValidation extends RuntimeAttributesValidation[Vector[String]] { object DisksValidation extends RuntimeAttributesValidation[Seq[PipelinesApiAttachedDisk]] { override def key: String = PipelinesApiRuntimeAttributes.DisksKey - override def coercion: Traversable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) + override def coercion: Iterable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) override protected def validateValue: PartialFunction[WomValue, ErrorOr[Seq[PipelinesApiAttachedDisk]]] = { case WomString(value) => validateLocalDisks(value.split(",\\s*").toSeq) diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PreviousRetryReasons.scala 
b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PreviousRetryReasons.scala index 8e7a0fd7cd1..01819ce0082 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PreviousRetryReasons.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/PreviousRetryReasons.scala @@ -1,14 +1,12 @@ package cromwell.google.pipelines.common -import cromwell.services.keyvalue.KeyValueServiceActor._ -import common.validation.ErrorOr.ErrorOr -import cats.syntax.validated._ import cats.syntax.apply._ +import cats.syntax.validated._ +import common.validation.ErrorOr.ErrorOr +import cromwell.backend.google.pipelines.common.PipelinesApiBackendLifecycleActorFactory.{preemptionCountKey, unexpectedRetryCountKey} +import cromwell.services.keyvalue.KeyValueServiceActor._ import scala.util.{Failure, Success, Try} -import cromwell.backend.google.pipelines.common.PipelinesApiBackendLifecycleActorFactory.preemptionCountKey -import cromwell.backend.google.pipelines.common.PipelinesApiBackendLifecycleActorFactory.unexpectedRetryCountKey -import common.validation.ErrorOr case class PreviousRetryReasons(preempted: Int, unexpectedRetry: Int) diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala index c9c6c03dcc2..02950e8bec8 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestManager.scala @@ -39,29 +39,29 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: /* * Context: the batch.execute() method throws an IOException("insufficient data written") in certain conditions. * Here is what we know about it and how this attempts to address the issue. - * + * * It was determined empirically that errors start to be thrown when the batch request approaches 15MB. * Looking more closely at timing it appears that the exception takes almost exactly 60 seconds to be thrown * from the batch.execute method, which suggests that this might be time related rather than byte size related and that * the 15MB limit is just an artifact of how much data can be sent / received in 60 seconds by the client / server. - * + * * In an attempt to provide a fix for this issue, the total size of the batch size is limited to 14MB, which is a rather * arbitrary value only supported by local testing. - * + * * Result of further investigation on the cause: * IOException("insufficient data written") is being thrown because the http request attempts to close() its output stream. * The close() method throws the "insufficient data written" exception because it still had data to send. * The close() method was called as part of a finally, because an exception was thrown earlier when attempting to write to the * stream. This exception is however swallowed by the one thrown in the close(). 
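A self-contained sketch of the guard this comment motivates (the real check lives in this actor's receive handler and replies to the requester with requestTooLargeException; the names below are simplified stand-ins):

    final case class RunRequest(contentLength: Long)

    val maxBatchRequestSize: Long = 14L * 1024L * 1024L   // the deliberately conservative 14MB cap described above
    var workQueue: Vector[RunRequest] = Vector.empty

    // Fail fast on any request that would push a batch past the empirically safe size,
    // rather than letting batch.execute() fail ~60 seconds in with "insufficient data written".
    def accept(request: RunRequest): Unit =
      if (request.contentLength > maxBatchRequestSize)
        println(s"rejected: ${request.contentLength} bytes exceeds cap")   // real code: requester ! ...QueryFailed(...)
      else workQueue :+= request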
- * This commit https://github.com/google/google-http-java-client/pull/333 fixes the swallowing issue so the original + * This commit https://github.com/google/google-http-java-client/pull/333 fixes the swallowing issue so the original * exception is thrown instead: IOException(“Error writing request body to server”). * Tracing back why this exception is being thrown, it appears that at some point the socket gets closed externally * (maybe the google server closes it ?) * which results in a SocketException("broken pipe") being thrown and eventually bubbles up to the IOExceptions above. - * + * * see sun.net.www.protocol.http.HttpURLConnection * and com.google.api.client.http.javanet.NetHttpRequest - * + * */ private val maxBatchRequestSize: Long = 14L * 1024L * 1024L private val requestTooLargeException = new UserPAPIApiException( @@ -110,7 +110,7 @@ class PipelinesApiRequestManager(val qps: Int Refined Positive, requestWorkers: create.requester ! PipelinesApiRunCreationQueryFailed(create, requestTooLargeException) } else workQueue :+= create case abort: PAPIAbortRequest => workQueue :+= abort - case PipelinesWorkerRequestWork(maxBatchSize) => handleWorkerAskingForWork(sender, maxBatchSize) + case PipelinesWorkerRequestWork(maxBatchSize) => handleWorkerAskingForWork(sender(), maxBatchSize) case failure: PAPIApiRequestFailed => handleQueryFailure(failure) case Terminated(actorRef) => onFailure(actorRef, new RuntimeException("PipelinesApiRequestHandler actor termination caught by manager") with NoStackTrace) diff --git a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/package.scala b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/package.scala index 67b4fd98de0..01bbc913e87 100644 --- a/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/package.scala +++ b/supportedBackends/google/pipelines/common/src/main/scala/cromwell/backend/google/pipelines/common/io/package.scala @@ -8,11 +8,11 @@ package object io { implicit class PathEnhanced(val path: Path) extends AnyVal { def writeAsJson(content: String): Path = { - path.writeBytes(content.getBytes.toIterator)(Seq(CloudStorageOptions.withMimeType("application/json"))) + path.writeBytes(content.getBytes.iterator)(Seq(CloudStorageOptions.withMimeType("application/json"))) } def writeAsText(content: String): Path = { - path.writeBytes(content.getBytes.toIterator)(Seq(CloudStorageOptions.withMimeType("text/plain"))) + path.writeBytes(content.getBytes.iterator)(Seq(CloudStorageOptions.withMimeType("text/plain"))) } } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactorySpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactorySpec.scala index 19b2e35bbad..d997bedbd1c 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactorySpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiBackendLifecycleActorFactorySpec.scala @@ -23,9 +23,9 @@ class PipelinesApiBackendLifecycleActorFactorySpec extends AnyFlatSpecLike with endpointUrl = null, location = "location", maxPollingInterval = 0, - qps = refineV[Positive](1).right.get, + qps = 
refineV[Positive](1).toOption.get, cacheHitDuplicationStrategy = null, - requestWorkers = refineV[Positive](1).right.get, + requestWorkers = refineV[Positive](1).toOption.get, pipelineTimeout = 1 second, logFlushPeriod = Option(1 second), gcsTransferConfiguration = null, diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala index d84bbc35541..68233bc1a14 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala @@ -25,7 +25,7 @@ class PipelinesApiCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with M inputFileAsJson = Option(JsObject(SampleWdl.HelloWorld.rawInputs.safeMapValues(JsString.apply)).compactPrint) ) val jobDescriptorKey = firstJobDescriptorKey(workflowDescriptor) - val workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders, PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + val workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders(), PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) val callPaths = PipelinesApiJobPaths(workflowPaths, jobDescriptorKey) @@ -41,7 +41,7 @@ class PipelinesApiCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with M inputFileAsJson = Option(JsObject(SampleWdl.HelloWorld.rawInputs.safeMapValues(JsString.apply)).compactPrint) ) val jobDescriptorKey = firstJobDescriptorKey(workflowDescriptor) - val workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders, PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + val workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders(), PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) val callPaths = PipelinesApiJobPaths(workflowPaths, jobDescriptorKey) @@ -61,7 +61,7 @@ class PipelinesApiCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with M inputFileAsJson = Option(JsObject(SampleWdl.HelloWorld.rawInputs.safeMapValues(JsString.apply)).compactPrint) ) val jobDescriptorKey = firstJobDescriptorKey(workflowDescriptor) - val workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders, PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + val workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders(), PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) val callPaths = PipelinesApiJobPaths(workflowPaths, jobDescriptorKey) diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributesSpec.scala 
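Two more 2.13 deprecations show up in these specs: Either's projections (.right.get becomes .toOption.get, Either having been right-biased since 2.12) and single-quoted symbol literals ('foo becomes Symbol("foo")). A minimal sketch:

    import eu.timepit.refined.api.Refined
    import eu.timepit.refined.numeric.Positive
    import eu.timepit.refined.refineV

    val qps: Either[String, Int Refined Positive] = refineV[Positive](1)
    val value: Int Refined Positive = qps.toOption.get   // was qps.right.get

    val privateMethodName: Symbol = Symbol("readDockerImageCacheManifestFileFromGCS")   // was a 'symbol literal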
b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributesSpec.scala index 72546a04168..59625afa4a0 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributesSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiConfigurationAttributesSpec.scala @@ -345,7 +345,7 @@ class PipelinesApiConfigurationAttributesSpec extends AnyFlatSpec with CromwellT |""".stripMargin val backendConfig = ConfigFactory.parseString(configString(manifestConfig)) val validation = PipelinesApiConfigurationAttributes.validateReferenceDiskManifestConfigs(backendConfig, "papi") - val manifests: List[ManifestFile] = validation.toEither.right.get.get + val manifests: List[ManifestFile] = validation.toEither.toOption.get.get manifests shouldBe List( ManifestFile( diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala index 5598d50496f..22cf68e1057 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala @@ -60,7 +60,7 @@ class PipelinesApiDockerCacheMappingOperationsSpec mockClient } - val readFileFromGcsPrivateMethod = PrivateMethod[IO[DockerImageCacheManifest]]('readDockerImageCacheManifestFileFromGCS) + val readFileFromGcsPrivateMethod = PrivateMethod[IO[DockerImageCacheManifest]](Symbol("readDockerImageCacheManifestFileFromGCS")) val parsedJsonAsManifestIO = pipelinesApiDockerCacheMappingOperationsMock invokePrivate readFileFromGcsPrivateMethod(mockGcsClient, testJsonGcsPath) val parsedJsonAsManifest = parsedJsonAsManifestIO.unsafeRunSync() diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala index 998c1c686e3..4492d7f0607 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala @@ -74,13 +74,13 @@ final class PipelinesApiRuntimeAttributesSpec } "validate a valid continueOnReturnCode array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), Array(WomInteger(1), WomInteger(2)))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2)))) val expectedRuntimeAttributes = expectedDefaults.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "coerce then validate a valid continueOnReturnCode array entry" in { - 
val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), Array(WomString("1"), WomString("2")))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), List(WomString("1"), WomString("2")))) val expectedRuntimeAttributes = expectedDefaults.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } @@ -119,13 +119,13 @@ final class PipelinesApiRuntimeAttributesSpec } "validate a valid array zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "zones" -> WomArray(WomArrayType(WomStringType), Array(WomString("us-central1-y"), WomString("us-central1-z")))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "zones" -> WomArray(WomArrayType(WomStringType), List(WomString("us-central1-y"), WomString("us-central1-z")))) val expectedRuntimeAttributes = expectedDefaults.copy(zones = Vector("us-central1-y", "us-central1-z")) assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate an invalid array zones entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "zones" -> WomArray(WomArrayType(WomIntegerType), Array(WomInteger(1), WomInteger(2)))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "zones" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2)))) assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, "Expecting zones runtime attribute to be either a whitespace separated String or an Array[String]") } @@ -164,13 +164,13 @@ final class PipelinesApiRuntimeAttributesSpec } "validate a valid disks array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomArray(WomArrayType(WomStringType), Array(WomString("local-disk 20 SSD"), WomString("local-disk 30 SSD")))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomArray(WomArrayType(WomStringType), List(WomString("local-disk 20 SSD"), WomString("local-disk 30 SSD")))) val expectedRuntimeAttributes = expectedDefaults.copy(disks = Seq(PipelinesApiAttachedDisk.parse("local-disk 20 SSD").get, PipelinesApiAttachedDisk.parse("local-disk 30 SSD").get)) assertPapiRuntimeAttributesSuccessfulCreation(runtimeAttributes, expectedRuntimeAttributes) } "fail to validate a valid disks array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomArray(WomArrayType(WomStringType), Array(WomString("blah"), WomString("blah blah")))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "disks" -> WomArray(WomArrayType(WomStringType), List(WomString("blah"), WomString("blah blah")))) assertPapiRuntimeAttributesFailedCreation(runtimeAttributes, "Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE'") } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala index f53cb467008..750a7e177a2 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala +++ 
b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala @@ -20,12 +20,12 @@ class PipelinesApiWorkflowPathsSpec extends TestKitSuite with AnyFlatSpecLike wi var workflowDescriptor: BackendWorkflowDescriptor = _ var workflowPaths: PipelinesApiWorkflowPaths = _ - override def beforeAll: Unit = { + override def beforeAll(): Unit = { workflowDescriptor = buildWdlWorkflowDescriptor( SampleWdl.HelloWorld.workflowSource(), inputFileAsJson = Option(JsObject(SampleWdl.HelloWorld.rawInputs.safeMapValues(JsString.apply)).compactPrint) ) - workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders, PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) + workflowPaths = PipelinesApiWorkflowPaths(workflowDescriptor, NoCredentials.getInstance(), NoCredentials.getInstance(), papiConfiguration, pathBuilders(), PipelinesApiInitializationActor.defaultStandardStreamNameToFileNameMetadataMapper) } it should "map the correct paths" in { diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/GenomicsFactory.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/GenomicsFactory.scala index 5c3db8068cc..09fb0713de0 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/GenomicsFactory.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/GenomicsFactory.scala @@ -24,7 +24,7 @@ import mouse.all._ import wdl4s.parser.MemoryUnit import wom.format.MemorySize -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ case class GenomicsFactory(applicationName: String, authMode: GoogleAuthMode, endpointUrl: URL)(implicit gcsTransferConfiguration: GcsTransferConfiguration) extends PipelinesApiFactoryInterface with ContainerSetup @@ -187,7 +187,7 @@ case class GenomicsFactory(applicationName: String, authMode: GoogleAuthMode, en .setResources(resources) .setActions(sortedActions.asJava) .setEnvironment(environment) - .setTimeout(createPipelineParameters.pipelineTimeout.toSeconds + "s") + .setTimeout(createPipelineParameters.pipelineTimeout.toSeconds.toString + "s") val pipelineRequest = new RunPipelineRequest() .setPipeline(pipeline) diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesParameterConversions.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesParameterConversions.scala index 270e640618f..b445f4786f8 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesParameterConversions.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesParameterConversions.scala @@ -29,7 +29,7 @@ trait PipelinesParameterConversions { fileInput.cloudPath match { case drsPath: DrsPath => - import collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val drsFileSystemProvider = drsPath.drsPath.getFileSystem.provider.asInstanceOf[DrsCloudNioFileSystemProvider] diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilder.scala 
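The .toSeconds.toString + "s" and beforeAll() / pathBuilders() changes in these hunks are both 2.13 strictness fixes: the any2stringadd implicit (which let a Long be concatenated directly with + "s") is deprecated, as is auto-application of methods declared with an empty parameter list. A minimal sketch:

    import scala.concurrent.duration._

    val pipelineTimeout: FiniteDuration = 7.days
    val timeoutField: String = pipelineTimeout.toSeconds.toString + "s"   // bare `toSeconds + "s"` now warns

    class Paths { def pathBuilders(): List[String] = List("gs://") }
    val paths = new Paths
    paths.pathBuilders()   // calling this as `paths.pathBuilders` (auto-application) is deprecated in 2.13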
b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilder.scala index e4d7b304e70..0d38d9f2398 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilder.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilder.scala @@ -13,7 +13,7 @@ import cromwell.docker.DockerImageIdentifier import cromwell.docker.registryv2.flows.dockerhub.DockerHub import mouse.all._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ /** @@ -51,7 +51,7 @@ object ActionBuilder { def withMounts(mounts: List[Mount]): Action = action.setMounts(mounts.asJava) def withLabels(labels: Map[String, String]): Action = action.setLabels(labels.asJava) def withTimeout(timeout: Duration): Action = timeout match { - case fd: FiniteDuration => action.setTimeout(fd.toSeconds + "s") + case fd: FiniteDuration => action.setTimeout(fd.toSeconds.toString + "s") case _ => action } diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Delocalization.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Delocalization.scala index b04fdb6ed72..efb3b017f89 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Delocalization.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Delocalization.scala @@ -18,7 +18,7 @@ import cromwell.backend.google.pipelines.v2alpha1.RuntimeOutputMapping import cromwell.core.path.{DefaultPathBuilder, Path} import wom.runtime.WomOutputRuntimeExtractor -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ object Delocalization { diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Deserialization.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Deserialization.scala index f7f9a719149..f2883b6b285 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Deserialization.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/Deserialization.scala @@ -12,7 +12,7 @@ import common.validation.Validation._ import cromwell.backend.google.pipelines.v2alpha1.api.request.ErrorReporter._ import mouse.all._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.{Failure, Success, Try} @@ -29,12 +29,12 @@ private [api] object Deserialization { def findEvent[T <: GenericJson](events: List[Event], filter: T => Boolean = Function.const(true)(_: T)) (implicit tag: ClassTag[T]): Option[RequestContextReader[Option[T]]] = - events.toStream + events.to(LazyList) .map(_.details(tag)) .collectFirst({ case Some(event) if event.map(filter).getOrElse(false) => event.toErrorOr.fallBack }) - + implicit class EventDeserialization(val event: Event) extends AnyVal { /** * Attempts to deserialize the details map to T @@ -131,7 +131,7 @@ private [api] object Deserialization { // If the value can be assigned directly to the field, just do 
that case (Some(f), _) if f.getType.isAssignableFrom(value.getClass) => newT.set(key, value) - // If it can't be assigned and the value is a map, it is very likely that the field "key" of T is of some type U + // If it can't be assigned and the value is a map, it is very likely that the field "key" of T is of some type U // but has been deserialized to a Map[String, Object]. In this case we retrieve the type U from the field and recurse // to deserialize properly case (Some(f), map: java.util.Map[String, Object] @unchecked) if classOf[GenericJson].isAssignableFrom(f.getType) => diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/SSHAccessAction.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/SSHAccessAction.scala index 9d4dcef7872..a1fd56bf1d1 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/SSHAccessAction.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/SSHAccessAction.scala @@ -4,7 +4,7 @@ import com.google.api.services.genomics.v2alpha1.model.{Action, Mount} import cromwell.backend.google.pipelines.common.action.ActionUtils import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory.CreatePipelineParameters -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ trait SSHAccessAction { diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/ErrorReporter.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/ErrorReporter.scala index 4298b5bce8d..9b88a2f54bd 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/ErrorReporter.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/ErrorReporter.scala @@ -14,7 +14,7 @@ import cromwell.core.{ExecutionEvent, WorkflowId} import io.grpc.{Status => GStatus} import mouse.all._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object ErrorReporter { type RequestContext = (WorkflowId, Operation) diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandler.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandler.scala index c56ee152a71..9a1c239fbbe 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandler.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/GetRequestHandler.scala @@ -19,14 +19,14 @@ import cromwell.core.ExecutionEvent import io.grpc.Status import org.apache.commons.lang3.exception.ExceptionUtils -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} import scala.language.postfixOps import scala.util.{Failure, Try, Success => TrySuccess} trait GetRequestHandler { this: RequestHandler => // the Genomics batch endpoint doesn't seem to be able to handle get requests on V2 operations at the moment - // For now, 
don't batch the request and execute it on its own + // For now, don't batch the request and execute it on its own def handleRequest(pollingRequest: PAPIStatusPollRequest, batch: BatchRequest, pollingManager: ActorRef)(implicit ec: ExecutionContext): Future[Try[Unit]] = Future(pollingRequest.httpRequest.execute()) map { case response if response.isSuccessStatusCode => val operation = response.parseAs(classOf[Operation]) diff --git a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RequestHandler.scala b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RequestHandler.scala index 5176e1ae510..6cb2b148a58 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RequestHandler.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/main/scala/cromwell/backend/google/pipelines/v2alpha1/api/request/RequestHandler.scala @@ -11,7 +11,7 @@ import cromwell.cloudsupport.gcp.auth.GoogleAuthMode import org.slf4j.{Logger, LoggerFactory} import java.net.URL -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} import scala.util.Try diff --git a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesConversionsSpec.scala b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesConversionsSpec.scala index 9ed91ee2a67..3535f1db8d8 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesConversionsSpec.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesConversionsSpec.scala @@ -15,7 +15,7 @@ import eu.timepit.refined.refineMV import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration.DurationInt class PipelinesConversionsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { diff --git a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilderSpec.scala b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilderSpec.scala index 8fad7d5fbfc..83d1be04e74 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilderSpec.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionBuilderSpec.scala @@ -9,7 +9,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class ActionBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { diff --git a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/DeserializationSpec.scala b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/DeserializationSpec.scala index 40b85504009..1c936e76317 100644 --- 
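Stream is deprecated in 2.13 in favor of LazyList, so xs.toStream becomes xs.to(LazyList). In findEvent above, the laziness is the point: event details stop being deserialized at the first match. A minimal sketch of that shape:

    def parse(raw: String): Option[Int] = {
      println(s"parsing $raw")   // shows that evaluation stops at the first hit
      raw.toIntOption
    }

    val events = List("a", "2", "3")
    val firstParsed: Option[Int] =
      events.to(LazyList).map(parse).collectFirst { case Some(n) => n }   // parses "a" and "2"; never touches "3"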
a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/DeserializationSpec.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/DeserializationSpec.scala @@ -9,7 +9,7 @@ import cromwell.backend.google.pipelines.v2alpha1.api.Deserialization._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "Deserialization" @@ -50,7 +50,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val event1 = deserializedEvents.head event1.getDescription shouldBe "event 1 description" event1.getTimestamp shouldBe "2018-04-20T14:38:25+00:00" - // Event1 details are of type WorkerAssignedEvent, so it should not be defined for something else + // Event1 details are of type WorkerAssignedEvent, so it should not be defined for something else event1.details[ContainerStartedEvent] should not be defined val event1Details = event1.details[WorkerAssignedEvent].map(_.get) @@ -140,7 +140,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc it should "be able to say if the operation has started" in { val operation = new Operation() - + def makeMetadata(details: Map[String, Object]) = Map[String, AnyRef]( "events" -> new util.ArrayList( List[java.util.Map[String, Object]]( @@ -152,7 +152,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc ).asJava ) ).asJava - + val metadataMapStarted = makeMetadata(Map[String, Object]( "@type" -> "WorkerAssignedEvent", "zone" -> "event 1 Zone", @@ -162,7 +162,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val metadataMapNotStarted2 = makeMetadata(Map[String, Object]( "@type" -> "ContainerStartedEvent" )) - + operation.setMetadata(metadataMapStarted) operation.hasStarted shouldBe true operation.setMetadata(metadataMapNotStarted) @@ -170,7 +170,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc operation.setMetadata(metadataMapNotStarted2) operation.hasStarted shouldBe false } - + it should "deserialize big decimals correctly" in { val valueMap = Map[String, Object]( "integerValue" -> BigDecimal(5), @@ -178,7 +178,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc "floatValue" -> BigDecimal.decimal(7F), "longValue" -> BigDecimal.decimal(8L) ).asJava - + val deserialized = Deserialization.deserializeTo[DeserializationTestClass](valueMap) deserialized.isSuccess shouldBe true val deserializedSuccess = deserialized.get @@ -187,5 +187,5 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc deserializedSuccess.floatValue shouldBe 7F deserializedSuccess.longValue shouldBe 8L } - + } diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/LifeSciencesFactory.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/LifeSciencesFactory.scala index 5e29694003e..e4c95dcb1e0 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/LifeSciencesFactory.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/LifeSciencesFactory.scala @@ 
-24,7 +24,7 @@ import mouse.all._ import wdl4s.parser.MemoryUnit import wom.format.MemorySize -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ case class LifeSciencesFactory(applicationName: String, authMode: GoogleAuthMode, endpointUrl: URL, location: String)(implicit gcsTransferConfiguration: GcsTransferConfiguration) extends PipelinesApiFactoryInterface with ContainerSetup @@ -185,7 +185,7 @@ case class LifeSciencesFactory(applicationName: String, authMode: GoogleAuthMode .setResources(resources) .setActions(sortedActions.asJava) .setEnvironment(environment) - .setTimeout(createPipelineParameters.pipelineTimeout.toSeconds + "s") + .setTimeout(createPipelineParameters.pipelineTimeout.toSeconds.toString + "s") val pipelineRequest = new RunPipelineRequest() .setPipeline(pipeline) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesParameterConversions.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesParameterConversions.scala index c6787cb12f8..505c23edb9c 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesParameterConversions.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/PipelinesParameterConversions.scala @@ -29,7 +29,7 @@ trait PipelinesParameterConversions { fileInput.cloudPath match { case drsPath: DrsPath => - import collection.JavaConverters._ + import scala.jdk.CollectionConverters._ val drsFileSystemProvider = drsPath.drsPath.getFileSystem.provider.asInstanceOf[DrsCloudNioFileSystemProvider] diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilder.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilder.scala index 69ab6f356c6..8a1a3bfc97e 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilder.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilder.scala @@ -12,7 +12,7 @@ import cromwell.docker.DockerImageIdentifier import cromwell.docker.registryv2.flows.dockerhub.DockerHub import mouse.all._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ /** @@ -47,7 +47,7 @@ object ActionBuilder { def withMounts(mounts: List[Mount]): Action = action.setMounts(mounts.asJava) def withLabels(labels: Map[String, String]): Action = action.setLabels(labels.asJava) def withTimeout(timeout: Duration): Action = timeout match { - case fd: FiniteDuration => action.setTimeout(fd.toSeconds + "s") + case fd: FiniteDuration => action.setTimeout(fd.toSeconds.toString + "s") case _ => action } def withIgnoreExitStatus(ignoreExitStatus: Boolean): Action = action.setIgnoreExitStatus(ignoreExitStatus) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Delocalization.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Delocalization.scala index d361b911770..8fc0d81cff1 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Delocalization.scala +++ 
b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Delocalization.scala @@ -18,7 +18,7 @@ import cromwell.backend.google.pipelines.v2beta.RuntimeOutputMapping import cromwell.core.path.{DefaultPathBuilder, Path} import wom.runtime.WomOutputRuntimeExtractor -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration._ trait Delocalization { diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Deserialization.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Deserialization.scala index f8c7c779c04..54de52e74be 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Deserialization.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/Deserialization.scala @@ -12,7 +12,7 @@ import common.validation.ErrorOr._ import common.validation.Validation._ import mouse.all._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag import scala.util.{Failure, Success, Try} diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/SSHAccessAction.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/SSHAccessAction.scala index f289d5f7557..342b5701847 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/SSHAccessAction.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/SSHAccessAction.scala @@ -5,7 +5,7 @@ import cromwell.backend.google.pipelines.common.action.ActionUtils import cromwell.backend.google.pipelines.common.api.PipelinesApiRequestFactory.CreatePipelineParameters import cromwell.backend.google.pipelines.v2beta.api.ActionBuilder._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ trait SSHAccessAction { diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/ErrorReporter.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/ErrorReporter.scala index 038db8c998e..1c57c3ba2ce 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/ErrorReporter.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/ErrorReporter.scala @@ -12,7 +12,7 @@ import cromwell.core.{ExecutionEvent, WorkflowId} import io.grpc.{Status => GStatus} import mouse.all._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object ErrorReporter { type RequestContext = (WorkflowId, Operation) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandler.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandler.scala index 320cc89b1f9..5bcb9c812f4 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandler.scala +++ 
b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/GetRequestHandler.scala @@ -19,14 +19,14 @@ import cromwell.core.ExecutionEvent import io.grpc.Status import org.apache.commons.lang3.exception.ExceptionUtils -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} import scala.language.postfixOps import scala.util.{Failure, Try, Success => TrySuccess} trait GetRequestHandler { this: RequestHandler => // the Genomics batch endpoint doesn't seem to be able to handle get requests on V2 operations at the moment - // For now, don't batch the request and execute it on its own + // For now, don't batch the request and execute it on its own def handleRequest(pollingRequest: PAPIStatusPollRequest, batch: BatchRequest, pollingManager: ActorRef)(implicit ec: ExecutionContext): Future[Try[Unit]] = Future(pollingRequest.httpRequest.execute()) map { case response if response.isSuccessStatusCode => val operation = response.parseAs(classOf[Operation]) diff --git a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RequestHandler.scala b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RequestHandler.scala index da72170fda9..523556e378d 100644 --- a/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RequestHandler.scala +++ b/supportedBackends/google/pipelines/v2beta/src/main/scala/cromwell/backend/google/pipelines/v2beta/api/request/RequestHandler.scala @@ -11,7 +11,7 @@ import cromwell.cloudsupport.gcp.auth.GoogleAuthMode import org.slf4j.{Logger, LoggerFactory} import java.net.URL -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.{ExecutionContext, Future} import scala.util.Try diff --git a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesConversionsSpec.scala b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesConversionsSpec.scala index 6bbdad8940e..c3ed9d27b02 100644 --- a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesConversionsSpec.scala +++ b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesConversionsSpec.scala @@ -15,7 +15,7 @@ import eu.timepit.refined.refineMV import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.duration.DurationInt class PipelinesConversionsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { diff --git a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilderSpec.scala b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilderSpec.scala index d75230cdd0b..44abde7862c 100644 --- a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilderSpec.scala +++ b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionBuilderSpec.scala @@ -9,7 +9,7 @@ import org.scalatest.flatspec.AnyFlatSpec import 
org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class ActionBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks { diff --git a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/DeserializationSpec.scala b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/DeserializationSpec.scala index 2d1d929922c..37020b0b0d6 100644 --- a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/DeserializationSpec.scala +++ b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/DeserializationSpec.scala @@ -9,7 +9,7 @@ import cromwell.backend.google.pipelines.v2beta.api.Deserialization._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success} class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with StrictLogging { @@ -49,7 +49,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val event1 = deserializedEvents.head event1.getDescription shouldBe "event 1 description" event1.getTimestamp shouldBe "2018-04-20T14:38:25+00:00" - // Event1 details are of type WorkerAssignedEvent, so it should not be defined for something else + // Event1 details are of type WorkerAssignedEvent, so it should not be defined for something else event1.getContainerStarted shouldBe null val event1Details = event1.getWorkerAssigned @@ -137,7 +137,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc it should "be able to say if the operation has started" in { val operation = new Operation() - + def makeMetadata(details: Map[String, Object]) = Map[String, AnyRef]( "events" -> new util.ArrayList( List[java.util.Map[String, Object]]( @@ -158,7 +158,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val metadataMapNotStarted2 = makeMetadata(Map[String, Object]( "containerStarted" -> Map().asJava )) - + operation.setMetadata(metadataMapStarted) operation.hasStarted shouldBe true operation.setMetadata(metadataMapNotStarted) @@ -166,7 +166,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc operation.setMetadata(metadataMapNotStarted2) operation.hasStarted shouldBe false } - + it should "deserialize big decimals correctly" in { val valueMap = Map[String, Object]( "integerValue" -> BigDecimal(5), @@ -174,7 +174,7 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc "floatValue" -> BigDecimal.decimal(7F), "longValue" -> BigDecimal.decimal(8L) ).asJava - + val deserialized = Deserialization.deserializeTo[DeserializationTestClass](valueMap) deserialized match { case Success(deserializedSuccess) => @@ -186,5 +186,5 @@ class DeserializationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc fail("Bad deserialization", f) } } - + } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActor.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActor.scala index 71656cc10dd..0cf90bf9f1d 100644 --- 
a/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActor.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/impl/sfs/config/ConfigAsyncJobExecutionActor.scala @@ -147,12 +147,12 @@ sealed trait ConfigAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecut Map.empty ) - dockerPaths ++ Map( + (dockerPaths ++ Map( StdoutInput -> WomString(standardPaths.output.pathAsString), StderrInput -> WomString(standardPaths.error.pathAsString), ScriptInput -> WomString(jobPaths.script.pathAsString), JobShellInput -> WomString(jobShell), - ) + )).toMap } /** diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/BackgroundAsyncJobExecutionActor.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/BackgroundAsyncJobExecutionActor.scala index b03f9a615ad..199c08787a0 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/BackgroundAsyncJobExecutionActor.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/BackgroundAsyncJobExecutionActor.scala @@ -29,7 +29,7 @@ trait BackgroundAsyncJobExecutionActor extends SharedFileSystemAsyncJobExecution override def makeProcessRunner(): ProcessRunner = { val stdout = standardPaths.output.plusExt("background") val stderr = standardPaths.error.plusExt("background") - val argv = Seq("/bin/bash", backgroundScript) + val argv: Seq[Any] = Seq("/bin/bash", backgroundScript) new ProcessRunner(argv, stdout, stderr) } diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystem.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystem.scala index 57b548b9804..eaf087f50c7 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystem.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystem.scala @@ -18,7 +18,7 @@ import net.ceedubs.ficus.Ficus._ import wom.WomFileMapper import wom.values._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable import scala.concurrent.{Await, ExecutionContext} import scala.concurrent.duration.Duration @@ -88,8 +88,8 @@ object SharedFileSystem extends StrictLogging { action } - private def duplicate(description: String, source: Path, dest: Path, strategies: Stream[DuplicationStrategy], docker: Boolean): Try[Unit] = { - val attempts: Stream[Try[Unit]] = strategies.map(_.apply(source.followSymbolicLinks, dest, docker)) + private def duplicate(description: String, source: Path, dest: Path, strategies: LazyList[DuplicationStrategy], docker: Boolean): Try[Unit] = { + val attempts: LazyList[Try[Unit]] = strategies.map(_.apply(source.followSymbolicLinks, dest, docker)) attempts.find(_.isSuccess) getOrElse { TryUtil.sequence(attempts, s"Could not $description $source -> $dest").void } @@ -205,7 +205,7 @@ trait SharedFileSystem extends PathFactory { private def getConfigStrategies(configPath: String): Seq[String] = { if (sharedFileSystemConfig.hasPath(configPath)) { - sharedFileSystemConfig.getStringList(configPath).asScala + sharedFileSystemConfig.getStringList(configPath).asScala.toList } else { DefaultStrategies } @@ -259,7 +259,7 @@ trait SharedFileSystem extends PathFactory { } def cacheCopy(sourceFilePath: Path, destinationFilePath: Path): Try[Unit] = { - duplicate("cache", sourceFilePath, destinationFilePath, Cachers.toStream, docker = false) + duplicate("cache", sourceFilePath, destinationFilePath, Cachers.to(LazyList), docker = false) } /** @@ -317,7 +317,7 @@ 
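The Stream to LazyList change in `duplicate` above preserves its short-circuiting: duplication strategies are attempted lazily, evaluation stops at the first success, and only if every strategy fails does TryUtil.sequence aggregate the errors. A toy sketch of that shape (the real DuplicationStrategy takes a source path, a destination path, and a docker flag):

    import scala.util.{Failure, Success, Try}

    type Strategy = String => Try[Unit]
    val hardLink: Strategy = _ => Failure(new UnsupportedOperationException("cross-device link"))
    val copyFile: Strategy = path => Success(println(s"copied $path"))

    val attempts: LazyList[Try[Unit]] = LazyList(hardLink, copyFile).map(_.apply("/shared/input.bam"))
    val result: Try[Unit] = attempts.find(_.isSuccess).getOrElse(Failure(new Exception("could not duplicate file")))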
trait SharedFileSystem extends PathFactory { } // Optional function to adjust the path to "docker path" if the call runs in docker - localizeWomFile(toCallPath, strategies.toStream, docker)(staged) + localizeWomFile(toCallPath, strategies.to(LazyList), docker)(staged) } /** @@ -328,7 +328,7 @@ trait SharedFileSystem extends PathFactory { * @param womFile WomFile to localize * @return localized WomFile */ - private def localizeWomFile(toDestPath: WomFile => String => Try[PairOfFiles], strategies: Stream[DuplicationStrategy], docker: Boolean) + private def localizeWomFile(toDestPath: WomFile => String => Try[PairOfFiles], strategies: LazyList[DuplicationStrategy], docker: Boolean) (womFile: WomFile): WomFile = { val localized = womFile mapWomFile { file => val result = toDestPath(file)(file.value) flatMap { diff --git a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemExpressionFunctions.scala b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemExpressionFunctions.scala index 78fd4cad7c8..303e8d621e0 100644 --- a/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemExpressionFunctions.scala +++ b/supportedBackends/sfs/src/main/scala/cromwell/backend/sfs/SharedFileSystemExpressionFunctions.scala @@ -28,7 +28,7 @@ class SharedFileSystemExpressionFunctions(standardParams: StandardExpressionFunc this(DefaultStandardExpressionFunctionsParams(pathBuilders, callContext, ioActorProxy, ec)) } - override def makeInputSpecificFunctions: IoFunctionSet = new SharedFileSystemExpressionFunctionsForInput(standardParams) + override def makeInputSpecificFunctions(): IoFunctionSet = new SharedFileSystemExpressionFunctionsForInput(standardParams) override def postMapping(path: Path) = { path match { @@ -42,7 +42,7 @@ class SharedFileSystemExpressionFunctionsForInput(standardParams: StandardExpres extends SharedFileSystemExpressionFunctions(standardParams) { // override needed to prevent class self-reference - override def makeInputSpecificFunctions: IoFunctionSet = this + override def makeInputSpecificFunctions(): IoFunctionSet = this lazy val cromwellCwd: Path = DefaultPathBuilder.build(sys.props("user.dir")).get diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemJobExecutionActorSpec.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemJobExecutionActorSpec.scala index a0e24519168..848b5e56c6b 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemJobExecutionActorSpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemJobExecutionActorSpec.scala @@ -115,7 +115,7 @@ class SharedFileSystemJobExecutionActorSpec extends TestKitSuite val expectedOutputs: CallOutputs = WomMocks.mockOutputExpectations( Map( "localize.out" -> WomArray(WomArrayType(WomStringType), - Array( + List( WomString("content from json inputs"), WomString("content from call inputs"))) ) diff --git a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala index 0895d7e1696..4212ff4be67 100644 --- a/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala +++ b/supportedBackends/tes/src/main/scala/cromwell/backend/impl/tes/TesRuntimeAttributes.scala @@ -60,13 +60,13 @@ object TesRuntimeAttributes { if (config.useBackendParameters) runtimeAttributes - .filterKeys(k => 
!keysToExclude.contains(k)) + .view.filterKeys(k => !keysToExclude.contains(k)) .flatMap( _ match { case (key, WomString(s)) => Option((key, Option(s))) case (key, WomOptionalValue(WomStringType, Some(WomString(optS)))) => Option((key, Option(optS))) case (key, WomOptionalValue(WomStringType, None)) => Option((key, None)) case _ => None - }) + }).toMap else Map.empty } diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala index 9b3ee586ad9..1b3b3b7aea8 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesRuntimeAttributesSpec.scala @@ -106,13 +106,13 @@ class TesRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec } "validate a valid continueOnReturnCode array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), Array(WomInteger(1), WomInteger(2)))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomIntegerType), List(WomInteger(1), WomInteger(2)))) val expectedRuntimeAttributes = expectedDefaultsPlusUbuntuDocker.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertSuccess(runtimeAttributes, expectedRuntimeAttributes) } "coerce then validate a valid continueOnReturnCode array entry" in { - val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), Array(WomString("1"), WomString("2")))) + val runtimeAttributes = Map("docker" -> WomString("ubuntu:latest"), "continueOnReturnCode" -> WomArray(WomArrayType(WomStringType), List(WomString("1"), WomString("2")))) val expectedRuntimeAttributes = expectedDefaultsPlusUbuntuDocker.copy(continueOnReturnCode = ContinueOnReturnCodeSet(Set(1, 2))) assertSuccess(runtimeAttributes, expectedRuntimeAttributes) } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/AstTools.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/AstTools.scala index e94487e458f..c1f3c18242b 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/AstTools.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/AstTools.scala @@ -11,7 +11,7 @@ import wom.core._ import wom.types._ import wom.values._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.postfixOps object AstTools { @@ -50,7 +50,7 @@ object AstTools { /* * Find all interpolations in the string terminal. * e.g: String a = "hello ${you}" - * We'll create an expression from "you" and remember the position in the string + * We'll create an expression from "you" and remember the position in the string * "hello ${you}" at which we found "${you}". 
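A minimal sketch of the interpolation scan this comment describes, with a plain regex standing in for InterpolationTagPattern:

    val interpolationTag = """\$\{[^}]*\}""".r
    val terminal = "hello ${you}"
    // Each match yields the tag text and its offset in the string terminal: List((${you},6))
    val tagsWithPositions = interpolationTag.findAllMatchIn(terminal).map(m => (m.matched, m.start)).toList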
*/ val interpolatedExpressionAstNodesAndTheirMatchPosition = InterpolationTagPattern @@ -365,16 +365,16 @@ object AstTools { /* terminal is the "lefter" lhs * trail is how we arrived to identifier from the original ast * e.g #1 (in "pseudo ast code"): - * + * * If MemberAccess is "a.b" * terminal will be Terminal("a") * trail will be Seq( * MemberAccess( * lhs: Terminal("a"), * rhs: Terminal("b") - * ) + * ) * ) - * + * * e.g #2: * If MemberAccess is "a.b.c" * terminal will be Terminal("a") @@ -388,7 +388,7 @@ object AstTools { * rhs: Terminal("b") * ) * ) - * + * * There also might be other types of nodes in trail than MemberAccess depending the expression. */ expr.findTerminalsWithTrail("identifier").collect({ diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlCall.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlCall.scala index 1e88db9dd8a..8d1973bd109 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlCall.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlCall.scala @@ -3,7 +3,6 @@ package wdl.draft2.model import wdl.draft2.model.AstTools.EnhancedAstNode import wdl.draft2.model.exception.{ValidationException, VariableLookupException, VariableNotFoundException} import wdl.draft2.model.expression.WdlFunctions -import wdl.draft2.model.exception.{ValidationException, VariableLookupException} import wdl.draft2.parser.WdlParser.{Ast, SyntaxError, Terminal} import wom.callable.Callable._ import wom.types.WomOptionalType diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlExpression.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlExpression.scala index 8836a5fd4a7..14e32d76ce0 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlExpression.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlExpression.scala @@ -23,7 +23,7 @@ import wom.graph.expression.AnonymousExpressionNode.AnonymousExpressionConstruct import wom.types.{WomAnyType, WomType} import wom.values.{WomFile, WomFloat, WomSingleFile, WomValue} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.concurrent.Await import scala.concurrent.duration.Duration import scala.language.postfixOps @@ -101,7 +101,7 @@ object WdlExpression { ValueEvaluator(lookup, functions).evaluate(ast) def evaluateFiles(ast: AstNode, lookup: ScopedLookupFunction, functions: WdlFunctions[WomValue], coerceTo: WomType = WomAnyType) = - FileEvaluator(expression.ValueEvaluator(lookup, functions), coerceTo).evaluate(ast) + FileEvaluator(ValueEvaluator(lookup, functions), coerceTo).evaluate(ast) def evaluateType(ast: AstNode, lookup: (String) => WomType, functions: WdlFunctions[WomType], from: Option[Scope] = None) = TypeEvaluator(lookup, functions, from).evaluate(ast) @@ -202,7 +202,7 @@ case class WdlExpression(ast: AstNode) extends WomValue { override def toWomString: String = toString(NullSyntaxHighlighter) - def prerequisiteCallNames: Set[FullyQualifiedName] = { + def prerequisiteCallNames: Set[String] = { this.topLevelMemberAccesses map { _.lhsString } } def topLevelMemberAccesses: Set[MemberAccess] = AstTools.findTopLevelMemberAccesses(ast) map { MemberAccess(_) } toSet diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlNamespace.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlNamespace.scala index de5fff3c4da..d1f75461a46 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlNamespace.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlNamespace.scala @@ -19,7 +19,7 
@@ import wom.core._ import wom.types._ import wom.values.WomValue -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.mutable import scala.util.{Failure, Success, Try} @@ -411,7 +411,7 @@ object WdlNamespace { case class ScopeAccumulator(accumulated: Seq[Scope] = Seq.empty, errors: Seq[String] = Seq.empty) - def lookForDuplicates(scopes: Traversable[Scope]) = { + def lookForDuplicates(scopes: Iterable[Scope]) = { scopes.foldLeft(ScopeAccumulator()) { (acc, cur) => val possibleError = acc.accumulated.find(_.unqualifiedName == cur.unqualifiedName) map { duplicate => val (dupName, dupTerminal) = scopeNameAndTerminal(duplicate) diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlRuntimeAttributes.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlRuntimeAttributes.scala index 8ff07707ac6..84508d001c6 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlRuntimeAttributes.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlRuntimeAttributes.scala @@ -5,7 +5,7 @@ import wdl.draft2.model.AstTools.{AstNodeName, EnhancedAstNode} import wdl.draft2.parser.WdlParser.{Ast, AstList} import wom.RuntimeAttributes -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ case class WdlRuntimeAttributes(attrs: Map[String, WdlExpression]) { def toWomRuntimeAttributes(task: WdlTask) = RuntimeAttributes(attrs.safeMapValues(WdlWomExpression(_, task))) diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlSyntaxErrorFormatter.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlSyntaxErrorFormatter.scala index d06343da7e8..158037a6105 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlSyntaxErrorFormatter.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlSyntaxErrorFormatter.scala @@ -5,7 +5,7 @@ import wdl.draft2.parser.WdlParser._ import wom.core._ import wom.types.WomType -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ case class WdlSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) extends SyntaxErrorFormatter { diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlTask.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlTask.scala index 3c3265efcf1..130c4b0f706 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlTask.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/WdlTask.scala @@ -14,7 +14,7 @@ import wdl.draft2.parser.WdlParser._ import wom.InstantiatedCommand import wom.values.WomValue -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.language.postfixOps object WdlTask { @@ -120,7 +120,7 @@ case class WdlTask(name: String, import WdlTask.instantiatedCommandMonoid val fullInstantiatedCommand: ErrorOr[InstantiatedCommand] = commandTemplate.toList .flatTraverse(_.instantiate(declarations, mappedInputs, functions, valueMapper)).map(_.combineAll) - + // `normalize` the instantiation (i.e. 
don't break Python code indentation) fullInstantiatedCommand map { c => List(c.copy(commandString = StringUtil.normalize(c.commandString)))} } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/WdlCommandPart.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/WdlCommandPart.scala index 15f1d221c09..39aea939c39 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/WdlCommandPart.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/command/WdlCommandPart.scala @@ -3,7 +3,6 @@ package wdl.draft2.model.command import common.validation.ErrorOr.ErrorOr import wdl.draft2.model.Declaration import wdl.draft2.model.expression.{WdlFunctions, WdlStandardLibraryFunctions} -import wdl.draft2.model.expression.WdlStandardLibraryFunctions import wdl.shared.FileSizeLimitationConfig import wom.callable.RuntimeEnvironment import wom.expression.IoFunctionSet diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlStandardLibraryFunctions.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlStandardLibraryFunctions.scala index 72e5774d463..5cd98ad7a61 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlStandardLibraryFunctions.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/expression/WdlStandardLibraryFunctions.scala @@ -22,7 +22,7 @@ import scala.concurrent.duration.Duration import scala.util.{Failure, Success, Try} trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { - def readFile(path: String, sizeLimit: Int): String + def readFile(path: String, sizeLimit: Int): String def writeFile(path: String, content: String): Try[WomFile] @@ -44,7 +44,7 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { } yield file } - def read_objects(params: Seq[Try[WomValue]]): Try[WomArray] = extractObjects("read_objects", params) map { WomArray(WomArrayType(WomObjectType), _) } + def read_objects(params: Seq[Try[WomValue]]): Try[WomArray] = extractObjects("read_objects", params) map { a => WomArray(WomArrayType(WomObjectType), a.toIndexedSeq) } def read_string(params: Seq[Try[WomValue]]): Try[WomString] = readContentsFromSingleFileParameter("read_string", params, fileSizeLimitationConfig.readStringLimit).map(s => WomString(s.trim)) def read_json(params: Seq[Try[WomValue]]): Try[WomValue] = readContentsFromSingleFileParameter("read_json", params, fileSizeLimitationConfig.readJsonLimit).map(_.parseJson).flatMap(WomObjectType.coerceRawValue) def read_int(params: Seq[Try[WomValue]]): Try[WomInteger] = readContentsFromSingleFileParameter("read_int", params, fileSizeLimitationConfig.readIntLimit).map(s => WomString(s.trim)) map { s => WomInteger(s.value.trim.toInt) } @@ -65,7 +65,7 @@ trait WdlStandardLibraryFunctions extends WdlFunctions[WomValue] { for { contents <- readContentsFromSingleFileParameter("read_lines", params, fileSizeLimitationConfig.readLinesLimit) lines = contents.split("\n") - } yield WomArray(WomArrayType(WomStringType), lines map WomString) + } yield WomArray(WomArrayType(WomStringType), lines.toIndexedSeq map WomString) } def read_map(params: Seq[Try[WomValue]]): Try[WomMap] = { @@ -327,7 +327,7 @@ object WdlStandardLibraryFunctions { def optionalSafeFileSize(value: WomValue): Try[Long] = value match { case f if f.isInstanceOf[WomSingleFile] || WomSingleFileType.isCoerceableFrom(f.womType) => Try(Await.result(ioFunctionSet.size(f.valueString), Duration.Inf)) case WomOptionalValue(_, Some(o)) => optionalSafeFileSize(o) - case WomOptionalValue(f, None) 
if isOptionalOfFileType(f) => Success(0l) + case WomOptionalValue(f, None) if isOptionalOfFileType(f) => Success(0L) case _ => Failure(new Exception(s"The 'size' method expects a 'File' or 'File?' argument but instead got ${value.womType.stableName}.")) } diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/formatter/SyntaxFormatter.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/formatter/SyntaxFormatter.scala index 2ba176118e7..d96b8fee102 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/formatter/SyntaxFormatter.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/formatter/SyntaxFormatter.scala @@ -7,7 +7,7 @@ import wdl.draft2.model.command.StringCommandPart import wdl.draft2.parser.WdlParser.{Ast, AstList, AstNode} import wom.types.WomType -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ trait SyntaxHighlighter { def keyword(s: String): String = s diff --git a/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlFlavoredWomType.scala b/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlFlavoredWomType.scala index 4f6d911ab63..4a74ef94842 100644 --- a/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlFlavoredWomType.scala +++ b/wdl/model/draft2/src/main/scala/wdl/draft2/model/types/WdlFlavoredWomType.scala @@ -7,7 +7,7 @@ import wom.core.WorkflowSource import wom.types.{WomBooleanType, WomFloatType, WomIntegerType, WomType} import wom.values.{WomBoolean, WomFloat, WomInteger, WomValue} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object WdlFlavoredWomType { private val parser = new WdlParser() diff --git a/wdl/model/draft2/src/test/scala/wdl/SyntaxHighlightSpec.scala b/wdl/model/draft2/src/test/scala/wdl/SyntaxHighlightSpec.scala index b3f5f97b5a3..95cf4333c2a 100644 --- a/wdl/model/draft2/src/test/scala/wdl/SyntaxHighlightSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/SyntaxHighlightSpec.scala @@ -8,6 +8,8 @@ import wdl.draft2.model.WdlNamespace import wdl.draft2.model.formatter.{AnsiSyntaxHighlighter, HtmlSyntaxHighlighter, SyntaxFormatter} import wom.ResolvedImportRecord +import scala.annotation.nowarn + class SyntaxHighlightSpec extends AnyWordSpec with CromwellTimeoutSpec with Matchers { "SyntaxFormatter for typical workflow" should { val namespace = WdlNamespace.loadUsingSource( @@ -101,6 +103,7 @@ class SyntaxHighlightSpec extends AnyWordSpec with CromwellTimeoutSpec with Matc |} """.stripMargin, None, None).get + @nowarn("msg=Unicode escapes in triple quoted strings are deprecated, use the literal character instead") val console = """\u001b[38;5;214mtask\u001b[0m \u001b[38;5;253mPairedFastQsToUnmappedBAM\u001b[0m { | \u001b[38;5;33mFile\u001b[0m \u001b[38;5;112mfastq_1\u001b[0m @@ -154,7 +157,7 @@ class SyntaxHighlightSpec extends AnyWordSpec with CromwellTimeoutSpec with Matc | \u001b[38;5;33mMap[String, Array[String]]\u001b[0m \u001b[38;5;112mmetadata\u001b[0m | \u001b[38;5;214mscatter\u001b[0m (readgroup in readgroup_list) { | \u001b[38;5;214mcall\u001b[0m \u001b[38;5;253mPairedFastQsToUnmappedBAM\u001b[0m { - | input: library_name=metadata[readgroup][1], run_date=metadata[readgroup][3], readgroup_name=readgroup, platform_name=metadata[readgroup][4], platform_unit=metadata[readgroup][2], fastq_1=fastq_pairs[readgroup][0], fastq_2=fastq_pairs[readgroup][1], sample_name=metadata[readgroup][0], sequencing_center=metadata[readgroup][5] + | input: library_name=metadata[readgroup][1], platform_unit=metadata[readgroup][2], 
fastq_1=fastq_pairs[readgroup][0], fastq_2=fastq_pairs[readgroup][1], sample_name=metadata[readgroup][0], sequencing_center=metadata[readgroup][5], run_date=metadata[readgroup][3], readgroup_name=readgroup, platform_name=metadata[readgroup][4] | } | } | \u001b[38;5;33mArray[File]\u001b[0m \u001b[38;5;112moutput_bams\u001b[0m = PairedFastQsToUnmappedBAM.output_bam @@ -222,7 +225,7 @@ class SyntaxHighlightSpec extends AnyWordSpec with CromwellTimeoutSpec with Matc | Map[String, Array[String]] metadata | scatter (readgroup in readgroup_list) { | call PairedFastQsToUnmappedBAM { - | input: library_name=metadata[readgroup][1], run_date=metadata[readgroup][3], readgroup_name=readgroup, platform_name=metadata[readgroup][4], platform_unit=metadata[readgroup][2], fastq_1=fastq_pairs[readgroup][0], fastq_2=fastq_pairs[readgroup][1], sample_name=metadata[readgroup][0], sequencing_center=metadata[readgroup][5] + | input: library_name=metadata[readgroup][1], platform_unit=metadata[readgroup][2], fastq_1=fastq_pairs[readgroup][0], fastq_2=fastq_pairs[readgroup][1], sample_name=metadata[readgroup][0], sequencing_center=metadata[readgroup][5], run_date=metadata[readgroup][3], readgroup_name=readgroup, platform_name=metadata[readgroup][4] | } | } | Array[File] output_bams = PairedFastQsToUnmappedBAM.output_bam diff --git a/wdl/model/draft2/src/test/scala/wdl/expression/Draft2SizeFunctionSpec.scala b/wdl/model/draft2/src/test/scala/wdl/expression/Draft2SizeFunctionSpec.scala index 9ea590a4a82..075d8910003 100644 --- a/wdl/model/draft2/src/test/scala/wdl/expression/Draft2SizeFunctionSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/expression/Draft2SizeFunctionSpec.scala @@ -19,32 +19,32 @@ class Draft2SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M behavior of "ReadLikeFunctions.size" it should "correctly report a 2048 byte file, in bytes by default" in { - val readLike = testFunctions(Success(2048l)) + val readLike = testFunctions(Success(2048L)) validate(readLike.size(Seq(Success(WomSingleFile("blah"))))) { res => assert(res == WomFloat(2048d)) } } it should "correctly report a 2048 byte file, in bytes" in { - val readLike = testFunctions(Success(2048l)) + val readLike = testFunctions(Success(2048L)) validate(readLike.size(Seq(Success(WomSingleFile("blah")), Success(WomString("B"))))) { res => assert(res == WomFloat(2048d)) } } it should "correctly report a 2048 byte file, in KB" in { - val readLike = testFunctions(Success(2048l)) + val readLike = testFunctions(Success(2048L)) validate(readLike.size(Seq(Success(WomSingleFile("blah")), Success(WomString("KB"))))) { res => assert(res == WomFloat(2d)) } } it should "correctly report a 2048 byte file, in KiB" in { - val readLike = testFunctions(Success(2048l)) + val readLike = testFunctions(Success(2048L)) validate(readLike.size(Seq(Success(WomSingleFile("blah")), Success(WomString("Ki"))))) { res => assert(res == WomFloat(2d)) } } it should "correctly report the size of a supplied, optional, 2048 byte file" in { - val readLike = testFunctions(Success(2048l)) + val readLike = testFunctions(Success(2048L)) validate(readLike.size(Seq(Success(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah"))))))) { res => assert(res == WomFloat(2048d)) } } it should "correctly report the size of a supplied, optional optional, 2048 byte file" in { - val readLike = testFunctions(Success(2048l)) + val readLike = testFunctions(Success(2048L)) validate(readLike.size(Seq(Success(WomOptionalValue( WomOptionalType(WomSingleFileType), 
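// A minimal sketch (object and value names invented for illustration, not part of the
// patch) of the literal change applied throughout these size-function specs: Scala 2.13
// deprecates the lowercase long-literal suffix (`2048l`) because it is easily confused
// with the digit 1, so every occurrence becomes `2048L`.
object LongLiteralSketch extends App {
  val fileSizeBytes: Long = 2048L   // was `2048l`; identical value, unambiguous suffix
  println(fileSizeBytes / 1024L)    // prints 2, i.e. the 2 KiB the tests expect
}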
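// A minimal sketch (names invented for illustration) of two lazy-collection changes made
// earlier in this commit: SharedFileSystem now builds its localization strategies with
// `.to(LazyList)` because Stream is deprecated in Scala 2.13, and TesRuntimeAttributes
// filters its Map through `.view.filterKeys(...)` plus a closing `.toMap`, since 2.13's
// filterKeys returns a lazy MapView rather than a Map.
object LazyCollectionsSketch extends App {
  val strategies: LazyList[String] = Seq("hard-link", "soft-link", "copy").to(LazyList)
  println(strategies.headOption)                     // forces only the first element
  val attrs = Map("docker" -> "ubuntu", "backend" -> "tes")
  val filtered: Map[String, String] = attrs.view.filterKeys(_ != "backend").toMap
  println(filtered)                                  // Map(docker -> ubuntu)
}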
Option(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah")))) @@ -52,24 +52,24 @@ class Draft2SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M } it should "correctly report the size of a supplied, optional, 2048 byte file, in MB" in { - val readLike = testFunctions(Success(2048l)) + val readLike = testFunctions(Success(2048L)) validate(readLike.size(Seq(Success(WomOptionalValue( WomSingleFileType, Option(WomSingleFile("blah")))), Success(WomString("MB") )))) { res => assert(res == WomFloat(0.001953125d)) } } it should "correctly report that an unsupplied optional file is empty" in { - val readLike = testFunctions(Success(2048l)) + val readLike = testFunctions(Success(2048L)) validate(readLike.size(Seq(Success(WomOptionalValue(WomSingleFileType, None))))) { res => assert(res == WomFloat(0d)) } } it should "correctly report that an unsupplied File?? is empty" in { - val readLike = testFunctions(Success(2048l)) + val readLike = testFunctions(Success(2048L)) validate(readLike.size(Seq(Success(WomOptionalValue(WomOptionalType(WomSingleFileType), None))))) { res => assert(res == WomFloat(0d)) } } it should "correctly report that an unsupplied optional file is empty, even in MB" in { - val readLike = testFunctions(Success(2048l)) + val readLike = testFunctions(Success(2048L)) validate(readLike.size(Seq(Success(WomOptionalValue(WomSingleFileType, None)), Success(WomString("MB"))))) { res => assert(res == WomFloat(0d)) } } diff --git a/wdl/model/draft2/src/test/scala/wdl/expression/ValueEvaluatorSpec.scala b/wdl/model/draft2/src/test/scala/wdl/expression/ValueEvaluatorSpec.scala index dd3c626f49a..42004e1e8a5 100644 --- a/wdl/model/draft2/src/test/scala/wdl/expression/ValueEvaluatorSpec.scala +++ b/wdl/model/draft2/src/test/scala/wdl/expression/ValueEvaluatorSpec.scala @@ -402,7 +402,7 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match (""" "a\nb" """, WomString("a\nb")), (""" "a\nb\t" """, WomString("a\nb\t")), (""" "a\n\"b\t\"" """, WomString("a\n\"b\t\"")), - (""" "be \u266f or be \u266e, just don't be \u266d" """, WomString("be \u266f or be \u266e, just don't be \u266d")), + (""" "be ♯ or be ♮, just don't be ♭" """, WomString("be ♯ or be ♮, just don't be ♭")), // Optional types // String @@ -413,7 +413,7 @@ class ValueEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match ("s == someStr", WomBoolean(false)), ("s < someStr", WomBoolean(true)), ("s > someStr", WomBoolean(false)), - + ("someStr + s", WomString("someStrs")), ("someInt + s", WomString("1s")), ("someFloat + s", WomString("0.5s")), diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/BiscayneGenericAstNode.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/BiscayneGenericAstNode.scala index 78bf50ea756..cdfbc8569b3 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/BiscayneGenericAstNode.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/BiscayneGenericAstNode.scala @@ -2,7 +2,7 @@ package wdl.transforms.biscayne.ast2wdlom import wdl.biscayne.parser.WdlParser.{Ast, AstList, AstNode, Terminal} import wdl.transforms.base.ast2wdlom.{GenericAst, GenericAstList, GenericAstNode, GenericTerminal} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ case class BiscayneGenericAst(ast: Ast) extends GenericAst { override def getAttribute(attr: String): GenericAstNode = 
Option(ast.getAttribute(attr)).map(BiscayneGenericAstNode.apply).orNull diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/ast2wdlom.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/ast2wdlom.scala index 22ab6fa7fd5..18b560bd99f 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/ast2wdlom.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/ast2wdlom/ast2wdlom.scala @@ -17,7 +17,7 @@ package object ast2wdlom { val wrapAst: CheckedAtoB[Ast, GenericAst] = CheckedAtoB.fromCheck { a => BiscayneGenericAst(a).validNelCheck } val wrapAstNode: CheckedAtoB[AstNode, GenericAstNode] = CheckedAtoB.fromCheck { a => BiscayneGenericAstNode(a).validNelCheck } - implicit val astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] = AstNodeToStaticString.astNodeToStaticStringElement + implicit val astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] = AstNodeToStaticString.astNodeToStaticStringElement() // meta sections implicit val astNodeToMetaKvPair: CheckedAtoB[GenericAstNode, MetaKvPair] = AstNodeToMetaKvPair.astNodeToMetaKvPair diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/BiscayneParser.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/BiscayneParser.scala index 506e38cbc18..d0d7ad82f89 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/BiscayneParser.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/BiscayneParser.scala @@ -7,7 +7,7 @@ import wdl.biscayne.parser.WdlParser import wdl.biscayne.parser.WdlParser.Ast import wom.core.WorkflowSource -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Try object StringParser { diff --git a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/WdlBiscayneSyntaxErrorFormatter.scala b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/WdlBiscayneSyntaxErrorFormatter.scala index fc89a148e8f..fd8c7120c5e 100644 --- a/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/WdlBiscayneSyntaxErrorFormatter.scala +++ b/wdl/transforms/biscayne/src/main/scala/wdl/transforms/biscayne/parsing/WdlBiscayneSyntaxErrorFormatter.scala @@ -3,7 +3,7 @@ package wdl.transforms.biscayne.parsing import wdl.biscayne.parser.WdlParser._ import wom.core.WorkflowSource -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ case class WdlBiscayneSyntaxErrorFormatter(terminalMap: Map[Terminal, WorkflowSource]) extends SyntaxErrorFormatter { diff --git a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/Ast2WdlomSpec.scala b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/Ast2WdlomSpec.scala index ba25f2df0ad..183c0141c02 100644 --- a/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/Ast2WdlomSpec.scala +++ b/wdl/transforms/biscayne/src/test/scala/wdl/transforms/biscayne/Ast2WdlomSpec.scala @@ -21,7 +21,7 @@ import wom.callable.MetaValueElement.MetaValueElementInteger import wom.types.WomIntegerType import wom.values.WomInteger -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object Ast2WdlomSpec { val parser = new WdlParser() diff --git a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallNodeMaker.scala 
b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallNodeMaker.scala index 387c95387d9..442c938c834 100644 --- a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallNodeMaker.scala +++ b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallNodeMaker.scala @@ -124,6 +124,8 @@ object WdlDraft2WomCallNodeMaker extends WomCallNodeMaker[WdlCall] { case optional@OptionalInputDefinition(n, womType, _, _) => val identifier = wdlCall.womIdentifier.combine(n) withGraphInputNode(optional, OptionalGraphInputNode(identifier, womType, identifier.fullyQualifiedName.value)) + + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } } diff --git a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallableMaker.scala b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallableMaker.scala index bdfd7bc50b5..c05f83897f3 100644 --- a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallableMaker.scala +++ b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomCallableMaker.scala @@ -2,11 +2,10 @@ package wdl.transforms.draft2.wdlom2wom import common.validation.ErrorOr.ErrorOr import wdl.draft2.model.{WdlCallable, WdlTask, WdlWorkflow} -import wdl.draft2.model.{WdlTask, WdlWorkflow} -import wom.transforms.WomCallableMaker import wom.callable.Callable -import wom.transforms.WomWorkflowDefinitionMaker.ops._ +import wom.transforms.WomCallableMaker import wom.transforms.WomCommandTaskDefinitionMaker.ops._ +import wom.transforms.WomWorkflowDefinitionMaker.ops._ object WdlDraft2WomCallableMaker extends WomCallableMaker[WdlCallable] { override def toWomCallable(callable: WdlCallable): ErrorOr[Callable] = callable match { diff --git a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomConditionalNodeMaker.scala b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomConditionalNodeMaker.scala index 1f220bd367e..f4303e8ebe5 100644 --- a/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomConditionalNodeMaker.scala +++ b/wdl/transforms/draft2/src/main/scala/wdl/transforms/draft2/wdlom2wom/WdlDraft2WomConditionalNodeMaker.scala @@ -5,7 +5,6 @@ import cats.syntax.apply._ import cats.syntax.validated._ import common.validation.ErrorOr._ import wdl.draft2.model.{If, Scope, WdlWomExpression} -import wdl.draft2.model.{Scope, WdlWomExpression} import wom.graph.ConditionalNode.ConditionalNodeWithNewNodes import wom.graph.GraphNodePort.OutputPort import wom.graph._ diff --git a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlScatterWomSpec.scala b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlScatterWomSpec.scala index 485f3666bbc..1a2c586e7fd 100644 --- a/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlScatterWomSpec.scala +++ b/wdl/transforms/draft2/src/test/scala/wdl/transforms/wdlwom/WdlScatterWomSpec.scala @@ -55,9 +55,9 @@ class WdlScatterWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche val scatterExpressionNode = workflowGraph.nodes.collectFirst { case expr: ExpressionNode if expr.localName == "x" => expr }.getOrElse(fail("Resulting graph did not contain the 'x' ExpressionNode")) - + scatterNode.inputPorts.map(_.upstream) shouldBe Set(scatterExpressionNode.singleOutputPort) - + val foo_out_output = workflowGraph.nodes.collectFirst { 
case gon: GraphOutputNode if gon.localName == "foo.out" => gon }.getOrElse(fail("Resulting graph did not contain the 'foo.out' GraphOutputNode")) @@ -77,7 +77,7 @@ class WdlScatterWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche val foo_callNode = validatedOuterGraph.scatterNode.innerGraph.nodes.collectFirst { case c: CommandCallNode if c.localName == "foo" => c }.getOrElse(fail("Scatter inner graph did not contain a call to 'foo'")) - + foo_callNode.identifier.fullyQualifiedName.value shouldBe "scatter_test.foo" val foo_out_innerOutput = validatedOuterGraph.scatterNode.innerGraph.nodes.collectFirst { @@ -188,7 +188,7 @@ class WdlScatterWomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche def validateGraph(workflowGraph: Graph) = { // Find the inputs: - val inputNodes: Set[ExternalGraphInputNode] = workflowGraph.nodes.filterByType[RequiredGraphInputNode] + val inputNodes = workflowGraph.nodes.filterByType[RequiredGraphInputNode] inputNodes.map {_.localName} should be(Set("foo.j")) inputNodes.map {_.identifier.fullyQualifiedName.value} should be(Set("scatter_test.foo.j")) diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/Draft3GenericAstNode.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/Draft3GenericAstNode.scala index 647d8c9ae00..7b0405b473a 100644 --- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/Draft3GenericAstNode.scala +++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/Draft3GenericAstNode.scala @@ -2,7 +2,7 @@ package wdl.draft3.transforms.ast2wdlom import wdl.draft3.parser.WdlParser.{Ast, AstList, AstNode, Terminal} import wdl.transforms.base.ast2wdlom.{GenericAst, GenericAstList, GenericAstNode, GenericTerminal} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ case class Draft3GenericAst(ast: Ast) extends GenericAst { override def getAttribute(attr: String): GenericAstNode = Option(ast.getAttribute(attr)).map(Draft3GenericAstNode.apply).orNull diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/ast2wdlom.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/ast2wdlom.scala index 47be01c2db4..7765086a848 100644 --- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/ast2wdlom.scala +++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/ast2wdlom/ast2wdlom.scala @@ -16,7 +16,7 @@ package object ast2wdlom { val wrapAst: CheckedAtoB[Ast, GenericAst] = CheckedAtoB.fromCheck { a => Draft3GenericAst(a).validNelCheck } val wrapAstNode: CheckedAtoB[AstNode, GenericAstNode] = CheckedAtoB.fromCheck { a => Draft3GenericAstNode(a).validNelCheck } - implicit val astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] = AstNodeToStaticString.astNodeToStaticStringElement + implicit val astNodeToStaticString: CheckedAtoB[GenericAstNode, StaticString] = AstNodeToStaticString.astNodeToStaticStringElement() // meta sections implicit val astNodeToMetaKvPair: CheckedAtoB[GenericAstNode, MetaKvPair] = AstNodeToMetaKvPair.astNodeToMetaKvPair diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/Draft3Parser.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/Draft3Parser.scala index b3ff3bd43b1..6f732eda085 100644 --- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/Draft3Parser.scala +++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/Draft3Parser.scala 
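// A minimal sketch (object and values invented for illustration) of the import swap made
// across these parser and AST files: scala.collection.JavaConverters is deprecated in
// Scala 2.13 in favour of scala.jdk.CollectionConverters, which keeps the same
// .asScala/.asJava extension syntax.
object CollectionConvertersSketch extends App {
  import scala.jdk.CollectionConverters._           // was: scala.collection.JavaConverters._
  val javaList: java.util.List[String] = java.util.Arrays.asList("a", "b", "c")
  val scalaList: List[String] = javaList.asScala.toList  // .toList takes a strict Scala copy
  println(scalaList.mkString(","))                       // prints a,b,c
}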
@@ -3,7 +3,7 @@ package wdl.draft3.transforms.parsing import better.files.File import common.Checked -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import common.validation.Validation.TryValidation import wdl.draft3.parser.WdlParser import wdl.draft3.parser.WdlParser.Ast diff --git a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/WdlDraft3SyntaxErrorFormatter.scala b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/WdlDraft3SyntaxErrorFormatter.scala index 1cd344a50ff..acba6e0fb6f 100644 --- a/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/WdlDraft3SyntaxErrorFormatter.scala +++ b/wdl/transforms/draft3/src/main/scala/wdl/draft3/transforms/parsing/WdlDraft3SyntaxErrorFormatter.scala @@ -1,6 +1,6 @@ package wdl.draft3.transforms.parsing -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import wdl.draft3.parser.WdlParser._ import wom.core.WorkflowSource diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/Ast2WdlomSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/Ast2WdlomSpec.scala index 4cfa6002aa1..035d4e6efa6 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/Ast2WdlomSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/ast2wdlom/Ast2WdlomSpec.scala @@ -16,7 +16,7 @@ import wdl.model.draft3.elements.ExpressionElement.IdentifierLookup import wdl.model.draft3.elements._ import wdl.transforms.base.ast2wdlom.GenericAstNode -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class Ast2WdlomSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { diff --git a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3SizeFunctionSpec.scala b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3SizeFunctionSpec.scala index 030debf9a74..41c767d3a80 100644 --- a/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3SizeFunctionSpec.scala +++ b/wdl/transforms/draft3/src/test/scala/wdl/draft3/transforms/expression/values/Draft3SizeFunctionSpec.scala @@ -24,25 +24,25 @@ class Draft3SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M behavior of "ReadLikeFunctions.size" it should "correctly report a 2048 byte file, in bytes by default" in { - validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), None).evaluateValue(Map.empty, testFunctions(Success(2048l)), None)) { + validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), None).evaluateValue(Map.empty, testFunctions(Success(2048L)), None)) { res => assert(res.value == WomFloat(2048d)) } } it should "correctly report a 2048 byte file, in bytes" in { - validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), Some(PrimitiveLiteralExpressionElement(WomString("B")))).evaluateValue(Map.empty, testFunctions(Success(2048l)), None)) { + validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), Some(PrimitiveLiteralExpressionElement(WomString("B")))).evaluateValue(Map.empty, testFunctions(Success(2048L)), None)) { res => assert(res.value == WomFloat(2048d)) } } it should "correctly report a 2048 byte file, in KB" in { - validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), Some(PrimitiveLiteralExpressionElement(WomString("KB")))).evaluateValue(Map.empty, testFunctions(Success(2048l)), None)) { + 
validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), Some(PrimitiveLiteralExpressionElement(WomString("KB")))).evaluateValue(Map.empty, testFunctions(Success(2048L)), None)) { res => assert(res.value == WomFloat(2.0d)) } } it should "correctly report a 2048 byte file, in KiB" in { - validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), Some(PrimitiveLiteralExpressionElement(WomString("KiB")))).evaluateValue(Map.empty, testFunctions(Success(2048l)), None)) { + validate(Size(PrimitiveLiteralExpressionElement(WomSingleFile("blah")), Some(PrimitiveLiteralExpressionElement(WomString("KiB")))).evaluateValue(Map.empty, testFunctions(Success(2048L)), None)) { res => assert(res.value == WomFloat(2d)) } } @@ -50,7 +50,7 @@ class Draft3SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M it should "correctly report the size of a supplied, optional, 2048 byte file" in { val value = WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah"))) - validate(Size(IdentifierLookup("x"), None).evaluateValue(Map("x" -> value), testFunctions(Success(2048l)), None)) { + validate(Size(IdentifierLookup("x"), None).evaluateValue(Map("x" -> value), testFunctions(Success(2048L)), None)) { res => assert(res.value == WomFloat(2048d)) } } @@ -58,7 +58,7 @@ class Draft3SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M it should "correctly report the size of a supplied, optional optional, 2048 byte file" in { val value = WomOptionalValue(WomOptionalType(WomSingleFileType), Option(WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah"))))) - validate(Size(IdentifierLookup("x"), None).evaluateValue(Map("x" -> value), testFunctions(Success(2048l)), None)) { + validate(Size(IdentifierLookup("x"), None).evaluateValue(Map("x" -> value), testFunctions(Success(2048L)), None)) { res => assert(res.value == WomFloat(2048d)) } } @@ -66,7 +66,7 @@ class Draft3SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M it should "correctly report the size of a supplied, optional, 2048 byte file, in MB" in { val value = WomOptionalValue(WomSingleFileType, Option(WomSingleFile("blah"))) - validate(Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))).evaluateValue(Map("x" -> value), testFunctions(Success(2048l)), None)) { + validate(Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("MB")))).evaluateValue(Map("x" -> value), testFunctions(Success(2048L)), None)) { res => assert(res.value == WomFloat(0.001953125d)) } } @@ -98,7 +98,7 @@ class Draft3SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M it should "correctly report the size of an array of files, in GiB" in { val value = WomArray(Seq(WomSingleFile("blah"), WomSingleFile("blah"))) - validate(Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("GiB")))).evaluateValue(Map("x" -> value), testFunctions(Success(2048l)), None)) { + validate(Size(IdentifierLookup("x"), Some(PrimitiveLiteralExpressionElement(WomString("GiB")))).evaluateValue(Map("x" -> value), testFunctions(Success(2048L)), None)) { res => assert(res.value == WomFloat(2048d * 2 / 1024 / 1024 / 1024)) } } @@ -114,7 +114,7 @@ class Draft3SizeFunctionSpec extends AnyFlatSpec with CromwellTimeoutSpec with M it should "correctly report the size of a mixed Array[File?] 
- some supplied and some not" in { val value = WomArray(Seq(WomOptionalValue(WomSingleFileType, Some(WomSingleFile("blah"))), WomOptionalValue(WomSingleFileType, None), WomOptionalValue(WomSingleFileType, Some(WomSingleFile("blah"))), WomOptionalValue(WomSingleFileType, None))) - validate(Size(IdentifierLookup("x"), None).evaluateValue(Map("x" -> value), testFunctions(Success(2048l)), None)) { + validate(Size(IdentifierLookup("x"), None).evaluateValue(Map("x" -> value), testFunctions(Success(2048L)), None)) { res => assert(res.value == WomFloat(2048d * 2)) } } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/EngineFunctionEvaluators.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/EngineFunctionEvaluators.scala index 551dde8086f..f40c8a9431a 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/EngineFunctionEvaluators.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/linking/expression/values/EngineFunctionEvaluators.scala @@ -140,7 +140,7 @@ object EngineFunctionEvaluators { val tryResult = for { read <- readFile(fileToRead, ioFunctionSet, fileSizeLimitationConfig.readObjectLimit) objects <- WomObject.fromTsv(read) - } yield WomArray(objects) + } yield WomArray(objects.toIndexedSeq) tryResult.map(EvaluatedValue(_, Seq.empty)).toErrorOr.contextualizeErrors(s"""read_objects("${fileToRead.value}")""") } @@ -568,7 +568,7 @@ object EngineFunctionEvaluators { case f if f.isInstanceOf[WomSingleFile] || WomSingleFileType.isCoerceableFrom(f.womType) => f.coerceToType[WomSingleFile] flatMap { file => Try(Await.result(ioFunctionSet.size(file.valueString), Duration.Inf)).toErrorOr } case WomOptionalValue(f, Some(o)) if isOptionalOfFileType(f) => optionalSafeFileSize(o) - case WomOptionalValue(f, None) if isOptionalOfFileType(f) => 0l.validNel + case WomOptionalValue(f, None) if isOptionalOfFileType(f) => 0L.validNel case WomArray(WomArrayType(womType), values) if isOptionalOfFileType(womType) => values.toList.traverse(optionalSafeFileSize).map(_.sum) case _ => s"The 'size' method expects a 'File', 'File?', 'Array[File]' or Array[File?] 
argument but instead got ${value.womType.stableName}.".invalidNel } diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriterImpl.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriterImpl.scala index 861eb234d0d..0597b9c830e 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriterImpl.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wdl/WdlWriterImpl.scala @@ -7,7 +7,7 @@ import wom.callable.MetaValueElement import wom.callable.MetaValueElement._ import wom.types._ import WdlWriter._ -import common.collections.EnhancedCollections.EnhancedTraversableLike +import common.collections.EnhancedCollections._ import org.apache.commons.text.StringEscapeUtils object WdlWriterImpl { diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/CallElementToGraphNode.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/CallElementToGraphNode.scala index 0f821cf4794..322fc5ceba6 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/CallElementToGraphNode.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/wdlom2wom/graph/CallElementToGraphNode.scala @@ -184,6 +184,7 @@ object CallElementToGraphNode { // Leave it unsupplied: InputDefinitionFold() } + case oh => throw new Exception(s"Programmer Error! Unexpected case match: $oh") } } diff --git a/wes2cromwell/src/main/scala/wes2cromwell/WesRunRoutes.scala b/wes2cromwell/src/main/scala/wes2cromwell/WesRunRoutes.scala index ac60d227141..d5736857334 100644 --- a/wes2cromwell/src/main/scala/wes2cromwell/WesRunRoutes.scala +++ b/wes2cromwell/src/main/scala/wes2cromwell/WesRunRoutes.scala @@ -100,7 +100,7 @@ object WesRunRoutes { case _ => None } - def completeCromwellResponse(future: ⇒ Future[WesResponse]): Route = { + def completeCromwellResponse(future: => Future[WesResponse]): Route = { onComplete(future) { case Success(a) => complete(a) case Failure(e) => complete(WesErrorResponse(e.getMessage, StatusCodes.InternalServerError.intValue)) diff --git a/wom/src/main/scala/wom/types/WomArrayType.scala b/wom/src/main/scala/wom/types/WomArrayType.scala index 89ecdfa2eb3..86343179ddd 100644 --- a/wom/src/main/scala/wom/types/WomArrayType.scala +++ b/wom/src/main/scala/wom/types/WomArrayType.scala @@ -5,7 +5,7 @@ import spray.json.JsArray import wom.values.WomArray.WomArrayLike import wom.values.{WomArray, WomSingleFile, WomString, WomValue} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success, Try} sealed trait WomArrayType extends WomType { @@ -24,7 +24,7 @@ sealed trait WomArrayType extends WomType { override protected def coercion: PartialFunction[Any, WomValue] = { case s: Seq[Any] if allowEmpty || s.nonEmpty => coerceIterable(s) case js: JsArray if allowEmpty || js.elements.nonEmpty => coerceIterable(js.elements) - case javaList: java.util.List[_] if allowEmpty || !javaList.isEmpty => coerceIterable(javaList.asScala) + case javaList: java.util.List[_] if allowEmpty || !javaList.isEmpty => coerceIterable(javaList.asScala.toList) case WomArray(WomMaybeEmptyArrayType.EmptyArrayType, _) => WomArray(this, Seq.empty) case womArray: WomArray if (allowEmpty || womArray.nonEmpty) diff --git a/wom/src/main/scala/wom/types/WomFloatType.scala b/wom/src/main/scala/wom/types/WomFloatType.scala index 0e9d2bd5cc8..b7d2d6991b8 100644 --- 
a/wom/src/main/scala/wom/types/WomFloatType.scala +++ b/wom/src/main/scala/wom/types/WomFloatType.scala @@ -12,7 +12,7 @@ case object WomFloatType extends WomPrimitiveType { case f: Float => WomFloat(f.toDouble) case d: Double => WomFloat(d) case i: Integer => WomFloat(i.toDouble) - case n: JsNumber => WomFloat(n.value.doubleValue()) + case n: JsNumber => WomFloat(n.value.doubleValue) case f: WomFloat => f case i: WomInteger => WomFloat(i.value.toDouble) case l: WomLong => WomFloat(l.value.toDouble) diff --git a/wom/src/main/scala/wom/types/WomIntegerType.scala b/wom/src/main/scala/wom/types/WomIntegerType.scala index f8ccc4b5b5d..a177dd5d924 100644 --- a/wom/src/main/scala/wom/types/WomIntegerType.scala +++ b/wom/src/main/scala/wom/types/WomIntegerType.scala @@ -11,7 +11,7 @@ case object WomIntegerType extends WomPrimitiveType { override protected def coercion = { case i: Integer => WomInteger(i) - case n: JsNumber if n.value.isValidInt => WomInteger(n.value.intValue()) + case n: JsNumber if n.value.isValidInt => WomInteger(n.value.intValue) case i: WomInteger => i case WomLong(i) if i.inIntRange => WomInteger(i.toInt) case WomLong(i) => throw new RuntimeException( @@ -22,7 +22,7 @@ case object WomIntegerType extends WomPrimitiveType { case s: String => val bigTry = Try(BigDecimal(s)) if (bigTry.isSuccess) - WomInteger(bigTry.get.intValue()) + WomInteger(bigTry.get.intValue) else WomInteger(s.toInt) case s: JsString => WomInteger(s.value.toInt) diff --git a/wom/src/main/scala/wom/types/WomLongType.scala b/wom/src/main/scala/wom/types/WomLongType.scala index 79afbe905aa..48891be6ce4 100644 --- a/wom/src/main/scala/wom/types/WomLongType.scala +++ b/wom/src/main/scala/wom/types/WomLongType.scala @@ -9,7 +9,7 @@ case object WomLongType extends WomPrimitiveType { override protected def coercion = { case i: Long => WomLong(i) case i: Integer => WomLong(i.toLong) - case n: JsNumber if n.value.isValidLong => WomLong(n.value.longValue()) + case n: JsNumber if n.value.isValidLong => WomLong(n.value.longValue) case WomInteger(i) => WomLong(i.toLong) case i: WomLong => i case s: WomString => WomLong(s.value.toLong) diff --git a/wom/src/main/scala/wom/util/YamlUtils.scala b/wom/src/main/scala/wom/util/YamlUtils.scala index 31d0bd77af4..b4e0f063c9b 100644 --- a/wom/src/main/scala/wom/util/YamlUtils.scala +++ b/wom/src/main/scala/wom/util/YamlUtils.scala @@ -19,7 +19,7 @@ import org.yaml.snakeyaml.parser.ParserImpl import org.yaml.snakeyaml.reader.StreamReader import org.yaml.snakeyaml.resolver.Resolver -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ object YamlUtils { diff --git a/wom/src/main/scala/wom/values/WomArray.scala b/wom/src/main/scala/wom/values/WomArray.scala index a11666ed029..0fdcde242be 100644 --- a/wom/src/main/scala/wom/values/WomArray.scala +++ b/wom/src/main/scala/wom/values/WomArray.scala @@ -11,7 +11,7 @@ import wom.expression.IoFunctionSet import wom.types._ import wom.values.WomArray.WomArrayLike -import scala.language.{higherKinds, postfixOps} +import scala.language.postfixOps import scala.util.{Failure, Success, Try} object WomArray { diff --git a/wom/src/main/scala/wom/values/WomFloat.scala b/wom/src/main/scala/wom/values/WomFloat.scala index c922405afa4..08ce5ed912d 100644 --- a/wom/src/main/scala/wom/values/WomFloat.scala +++ b/wom/src/main/scala/wom/values/WomFloat.scala @@ -11,7 +11,7 @@ case class WomFloat(value: Double) extends WomPrimitive { rhs match { case r:WomFloat => Success(WomFloat(value + r.value)) case r:WomInteger => 
Success(WomFloat(value + r.value)) - case r:WomString => Success(WomString(value + r.value)) + case r:WomString => Success(WomString(s"$value${r.value}")) case r: WomOptionalValue => evaluateIfDefined("+", r, add) case _ => invalid(s"$this + $rhs") } diff --git a/wom/src/main/scala/wom/values/WomInteger.scala b/wom/src/main/scala/wom/values/WomInteger.scala index 7e0918f7455..1bcc2e117ac 100644 --- a/wom/src/main/scala/wom/values/WomInteger.scala +++ b/wom/src/main/scala/wom/values/WomInteger.scala @@ -10,7 +10,7 @@ case class WomInteger(value: Int) extends WomPrimitive { override def add(rhs: WomValue): Try[WomValue] = rhs match { case r:WomInteger => Success(WomInteger(value + r.value)) - case r:WomString => Success(WomString(value + r.value)) + case r:WomString => Success(WomString(s"$value${r.value}")) case r:WomFloat => Success(WomFloat(value + r.value)) case r: WomOptionalValue => evaluateIfDefined("+", r, add) case _ => invalid(s"$value + $rhs") diff --git a/wom/src/main/scala/wom/values/WomMap.scala b/wom/src/main/scala/wom/values/WomMap.scala index 4c188f7ef3b..6c0ada18849 100644 --- a/wom/src/main/scala/wom/values/WomMap.scala +++ b/wom/src/main/scala/wom/values/WomMap.scala @@ -12,7 +12,6 @@ import cats.syntax.traverse._ import common.validation.IOChecked.IOChecked import wom.expression.IoFunctionSet -import scala.language.higherKinds import scala.util.{Failure, Success, Try} object WomMap { diff --git a/wom/src/main/scala/wom/values/WomObject.scala b/wom/src/main/scala/wom/values/WomObject.scala index 994238d5442..5c4cc1b53e2 100644 --- a/wom/src/main/scala/wom/values/WomObject.scala +++ b/wom/src/main/scala/wom/values/WomObject.scala @@ -12,7 +12,6 @@ import wom.expression.IoFunctionSet import wom.types._ import wom.util.FileUtil -import scala.language.higherKinds import scala.util.{Failure, Success, Try} trait WomObjectLike extends WomValue { @@ -93,9 +92,9 @@ object WomObject { case _ => Failure(new UnsupportedOperationException("Could not serialize array: Objects in the array have different attributes.")) } } - + def apply(values: Map[String, WomValue]) = new WomObject(values, WomObjectType) - + def withTypeUnsafe(values: Map[String, Any], objectTypeLike: WomObjectTypeLike): WomObject = { import common.validation.Validation._ withTypeErrorOr(values, objectTypeLike).toTry.get @@ -115,7 +114,7 @@ case class WomObject private[WomObject] (values: Map[String, WomValue], womType: lazy val orderedAttributes = values.keySet.toSeq lazy val orderedValues = orderedAttributes map { values(_) } lazy val womObjectTypeLike = womType - + def tsvSerialize: Try[String] = Try { val keysLine = orderedAttributes.mkString(start = "", sep = "\t", end = "\n") val values = orderedValues map { diff --git a/wom/src/main/scala/wom/values/WomOptionalValue.scala b/wom/src/main/scala/wom/values/WomOptionalValue.scala index ec5f5cf7ffa..d130dda13fa 100644 --- a/wom/src/main/scala/wom/values/WomOptionalValue.scala +++ b/wom/src/main/scala/wom/values/WomOptionalValue.scala @@ -7,7 +7,6 @@ import wom.expression.IoFunctionSet import wom.types.{WomOptionalType, WomType} import scala.annotation.tailrec -import scala.language.higherKinds import scala.util.{Success, Try} final case class WomOptionalValue(innerType: WomType, value: Option[WomValue]) extends WomValue { @@ -23,7 +22,7 @@ final case class WomOptionalValue(innerType: WomType, value: Option[WomValue]) e case Some(lhs) => lhs.subtract(rhs) case None => emptyValueFailure("-") } - + override def multiply(rhs: WomValue): Try[WomValue] = value match { case 
Some(lhs) => lhs.multiply(rhs) case None => emptyValueFailure("*") @@ -33,7 +32,7 @@ final case class WomOptionalValue(innerType: WomType, value: Option[WomValue]) e case Some(lhs) => lhs.divide(rhs) case None => emptyValueFailure("/") } - + override def mod(rhs: WomValue): Try[WomValue] = value match { case Some(lhs) => lhs.mod(rhs) case None => emptyValueFailure("%") @@ -135,7 +134,7 @@ final case class WomOptionalValue(innerType: WomType, value: Option[WomValue]) e case Some(v) => v.valueString case None => "" } - + def traverse[A <: WomValue, G[_]](f: WomValue => G[A])(implicit applicative: Applicative[G]) = value map { v => applicative.map(f(v)) { WomOptionalValue(_) diff --git a/wom/src/main/scala/wom/values/WomValue.scala b/wom/src/main/scala/wom/values/WomValue.scala index 5c9aa7182eb..45aa88abe12 100644 --- a/wom/src/main/scala/wom/values/WomValue.scala +++ b/wom/src/main/scala/wom/values/WomValue.scala @@ -60,7 +60,7 @@ trait WomValue { private def symbolHash[K](hashedMap: Map[K, SymbolHash])(implicit ord: Ordering[K]): SymbolHash = { // productIterator returns an Iterator over the elements of a Tuple2 Map entry. - val concatenatedMap = TreeMap(hashedMap.toArray: _*) flatMap { _.productIterator } mkString "" + val concatenatedMap = TreeMap(hashedMap.toIndexedSeq: _*) flatMap { _.productIterator } mkString "" symbolHash(concatenatedMap) } @@ -79,8 +79,8 @@ trait WomValue { /** * Perform any potentially async initialization on this wom value before it can be used to evaluate an expression * or instantiate a command for instance. - * - * TODO: It would be better if the return type was the concrete one instead of a generic WomValue, but this + * + * TODO: It would be better if the return type was the concrete one instead of a generic WomValue, but this * seems hard to do without WomValue being parameterized. */ def initialize(ioFunctionSet: IoFunctionSet): IOChecked[WomValue] = IOChecked.pure(this) diff --git a/wom/src/test/scala/wom/types/WomArrayTypeSpec.scala b/wom/src/test/scala/wom/types/WomArrayTypeSpec.scala index 2367bde84ca..c11e2997e2d 100644 --- a/wom/src/test/scala/wom/types/WomArrayTypeSpec.scala +++ b/wom/src/test/scala/wom/types/WomArrayTypeSpec.scala @@ -6,7 +6,7 @@ import org.scalatest.matchers.should.Matchers import spray.json.{JsArray, JsNumber} import wom.values._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.{Failure, Success} diff --git a/wom/src/test/scala/wom/values/WomObjectSpec.scala b/wom/src/test/scala/wom/values/WomObjectSpec.scala index 74890997576..bed14c9ae46 100644 --- a/wom/src/test/scala/wom/values/WomObjectSpec.scala +++ b/wom/src/test/scala/wom/values/WomObjectSpec.scala @@ -25,7 +25,7 @@ class WomObjectSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w // Test both a version of the TSV with and without a trailing newline. 
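// A minimal sketch (spec and subject invented for illustration) of the matcher change in
// WomObjectSpec just below: single-quoted symbol literals such as 'success are deprecated
// in Scala 2.13, so ScalaTest property matchers move from `be a 'success` to
// `be a Symbol("success")`. Both spellings check the same Boolean property (here,
// Try#isSuccess) via reflection.
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import scala.util.Try

class SymbolMatcherSketch extends AnyFlatSpec with Matchers {
  "A successful Try" should "satisfy the 2.13-friendly Symbol matcher" in {
    val parsed = Try("42".toInt)
    parsed should be a Symbol("success")  // was: parsed should be a 'success
  }
}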
correctTSV.withTrimmed foreach { tsv => val parsed = WomObject.fromTsv(tsv) - parsed should be a 'success + parsed should be a Symbol("success") val array: Array[WomObject] = parsed.success.value array should have size 1 @@ -47,13 +47,13 @@ class WomObjectSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w for { tsv <- List(emptyTSV, oneRowTSV) t <- tsv.withTrimmed - _ = WomObject.fromTsv(t) should be a 'failure + _ = WomObject.fromTsv(t) should be a Symbol("failure") } yield () } it should "NOT read from a non homogeneous TSV file" in { nonHomogeneousTSV.withTrimmed foreach { - WomObject.fromTsv(_) should be a 'failure + WomObject.fromTsv(_) should be a Symbol("failure") } } @@ -61,7 +61,7 @@ class WomObjectSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w correctTSV.withTrimmed foreach { tsv => val obj = WomObject.fromTsv(tsv).get.head val serialized = obj.tsvSerialize - serialized should be a 'success + serialized should be a Symbol("success") serialized.success.value shouldEqual correctTSV } } @@ -69,7 +69,7 @@ class WomObjectSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w it should "read a WomArray[WomObject] from a correct TSV file" in { List(arrayTSV, arrayTSV.trim) foreach { tsv => val parsed = WomObject.fromTsv(tsv) - parsed should be a 'success + parsed should be a Symbol("success") val array: Array[WomObject] = parsed.success.value array should have size 2 @@ -96,9 +96,9 @@ class WomObjectSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers w it should "serialize a WomArray[WomObject] to TSV" in { List(arrayTSV, arrayTSV.trim) foreach { tsv => - val array = WomArray(WomArrayType(WomObjectType), WomObject.fromTsv(tsv).get) + val array = WomArray(WomArrayType(WomObjectType), WomObject.fromTsv(tsv).get.toIndexedSeq) val serialized = array.tsvSerialize - serialized should be a 'success + serialized should be a Symbol("success") serialized.success.value shouldEqual arrayTSV } } diff --git a/womtool/src/main/scala/womtool/WomtoolMain.scala b/womtool/src/main/scala/womtool/WomtoolMain.scala index 3a484fbd24b..e1fba99d978 100644 --- a/womtool/src/main/scala/womtool/WomtoolMain.scala +++ b/womtool/src/main/scala/womtool/WomtoolMain.scala @@ -163,7 +163,7 @@ object WomtoolMain extends App with StrictLogging { } } - val termination = runWomtool(args) + val termination = runWomtool(args.toIndexedSeq) termination.stdout foreach Console.out.println termination.stderr foreach Console.err.println diff --git a/womtool/src/main/scala/womtool/cmdline/WomtoolCommandLineParser.scala b/womtool/src/main/scala/womtool/cmdline/WomtoolCommandLineParser.scala index b066e44e093..9bdc9ef11a1 100644 --- a/womtool/src/main/scala/womtool/cmdline/WomtoolCommandLineParser.scala +++ b/womtool/src/main/scala/womtool/cmdline/WomtoolCommandLineParser.scala @@ -29,17 +29,17 @@ class WomtoolCommandLineParser extends scopt.OptionParser[PartialWomtoolCommandL arg[String]("workflow-source") .text("Path to workflow file.") - .required + .required() .action((s, c) => c.copy(workflowSource = Option(DefaultPathBuilder.get(s)))) opt[String]('i', "inputs") .text("Workflow inputs file.") - .optional + .optional() .action((s, c) => c.copy(workflowInputs = Option(DefaultPathBuilder.get(s)))) opt[String]('h', "highlight-mode") .text("Highlighting mode, one of 'html', 'console' (used only with 'highlight' command)") - .optional + .optional() .action((s, c) => s match { case "html" => c.copy(highlightMode = Option(HtmlHighlighting)) case "console" => c.copy(highlightMode = 
Option(ConsoleHighlighting)) @@ -48,12 +48,12 @@ class WomtoolCommandLineParser extends scopt.OptionParser[PartialWomtoolCommandL opt[Boolean]('o', name="optional-inputs") .text("If set, optional inputs are also included in the inputs set. Default is 'true' (used only with the inputs command)") - .optional + .optional() .action((b, c) => c.copy(displayOptionalInputs = Some(b))) opt[Unit]('l', name = "list-dependencies") .text("An optional flag to list files referenced in import statements (used only with 'validate' command)") - .optional + .optional() .action((_, c) => c.copy(listDependencies = Option(true))) head("womtool", womtoolVersion) diff --git a/womtool/src/main/scala/womtool/graph/WomGraph.scala b/womtool/src/main/scala/womtool/graph/WomGraph.scala index 459aab8f930..d81f36e5538 100644 --- a/womtool/src/main/scala/womtool/graph/WomGraph.scala +++ b/womtool/src/main/scala/womtool/graph/WomGraph.scala @@ -24,10 +24,10 @@ import wom.executable.WomBundle import wom.expression.NoIoFunctionSet import wom.graph._ import wom.transforms.WomBundleMaker.ops._ -import wom.types.{WomMaybePopulatedFileType, _} +import wom.types._ import womtool.graph.WomGraph._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ class WomGraph(graphName: String, graph: Graph) { @@ -209,7 +209,7 @@ object WomGraph { (for { clt <- CwlDecoder.decodeCwlFile(File(filePath)). value. - unsafeRunSync + unsafeRunSync() inputs = clt.requiredInputs fakedInputs = JsObject(inputs map { i => i._1 -> fakeInput(i._2) }) wom <- clt.womExecutable(AcceptAllRequirements, Option(fakedInputs.prettyPrint), NoIoFunctionSet, strictValidation = false) diff --git a/womtool/src/main/scala/womtool/graph/package.scala b/womtool/src/main/scala/womtool/graph/package.scala index 77e87a20343..e2ae058369c 100644 --- a/womtool/src/main/scala/womtool/graph/package.scala +++ b/womtool/src/main/scala/womtool/graph/package.scala @@ -6,7 +6,7 @@ import wom.graph.expression.{AnonymousExpressionNode, ExpressionNode} package object graph { - private[graph] def dotSafe(s: String) = s""""${s.replaceAllLiterally("\"", "\\\"")}"""" + private[graph] def dotSafe(s: String) = s""""${s.replace("\"", "\\\"")}"""" private[graph] implicit class GraphNodeGraphics(val graphNode: GraphNode) extends AnyVal { def graphFillColor = graphNode match { diff --git a/womtool/src/main/scala/womtool/input/WomGraphMaker.scala b/womtool/src/main/scala/womtool/input/WomGraphMaker.scala index 94b32292864..50749640bcc 100644 --- a/womtool/src/main/scala/womtool/input/WomGraphMaker.scala +++ b/womtool/src/main/scala/womtool/input/WomGraphMaker.scala @@ -17,7 +17,7 @@ import wom.executable.WomBundle import wom.expression.NoIoFunctionSet import wom.graph._ -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.util.Try object WomGraphMaker { diff --git a/womtool/src/main/scala/womtool/wom2wdlom/WomToWdlom.scala b/womtool/src/main/scala/womtool/wom2wdlom/WomToWdlom.scala index 410594eb6c2..6d7e499e2cd 100644 --- a/womtool/src/main/scala/womtool/wom2wdlom/WomToWdlom.scala +++ b/womtool/src/main/scala/womtool/wom2wdlom/WomToWdlom.scala @@ -4,7 +4,7 @@ import cats.syntax.traverse._ import cats.instances.list._ import cats.instances.either._ import common.Checked -import common.collections.EnhancedCollections.EnhancedTraversableLike +import common.collections.EnhancedCollections._ import common.transforms.CheckedAtoB import common.validation.Checked._ import shapeless.{Inl, Inr} @@ -221,10 +221,10 @@ object WomToWdlom { 
// WOM has some explicit representations that are implicit in WDL; they are necessary for execution, // but do not make sense (or are illegal) in WDL source. private def selectWdlomRepresentableNodes(allNodes: Set[GraphNode]): Set[GraphNode] = { - val expressions: Set[GraphNode] = allNodes.filterByType[ExposedExpressionNode] - val scatters: Set[GraphNode] = allNodes.filterByType[ScatterNode] - val calls: Set[GraphNode] = allNodes.filterByType[CallNode] - val conditionals: Set[GraphNode] = allNodes.filterByType[ConditionalNode] + val expressions = allNodes.filterByType[ExposedExpressionNode] + val scatters = allNodes.filterByType[ScatterNode] + val calls = allNodes.filterByType[CallNode] + val conditionals = allNodes.filterByType[ConditionalNode] expressions ++ scatters ++ calls ++ conditionals } diff --git a/womtool/src/test/scala/womtool/WomtoolValidateSpec.scala b/womtool/src/test/scala/womtool/WomtoolValidateSpec.scala index 2bc2f45e101..d0a68a57640 100644 --- a/womtool/src/test/scala/womtool/WomtoolValidateSpec.scala +++ b/womtool/src/test/scala/womtool/WomtoolValidateSpec.scala @@ -9,7 +9,7 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import womtool.WomtoolMain.{SuccessfulTermination, UnsuccessfulTermination} -import scala.collection.JavaConverters._ +import scala.jdk.CollectionConverters._ import scala.collection.immutable From eba73e94bad3b1a135aa33d484111b885e61419e Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Thu, 14 Apr 2022 13:04:47 -0400 Subject: [PATCH 14/58] Remove references to JIRA from issue template [BW-1208] (#6729) --- .github/issue_template.md | 24 +++++------------------- 1 file changed, 5 insertions(+), 19 deletions(-) diff --git a/.github/issue_template.md b/.github/issue_template.md index d7a758d8bf6..03c1a0839aa 100644 --- a/.github/issue_template.md +++ b/.github/issue_template.md @@ -1,27 +1,13 @@ -### -### IMPORTANT: Please file new issues over in our Jira issue tracker! -### -### https://broadworkbench.atlassian.net/projects/BA/issues -### -### You may need to create an account before you can view/create issues. 
-### - - - - + From 9b396aac87f1f6872a1821a01754a913a18b245b Mon Sep 17 00:00:00 2001 From: kshakir Date: Thu, 14 Apr 2022 14:29:26 -0400 Subject: [PATCH 15/58] Fix call cache checks in case of cache invalidation w/ test [CROM-6603] (#6725) * Fix call cache checks in case of cache invalidation [CROM-6603] * Add tests for call caching skipping good hits * Update changelog Co-authored-by: Nikita Myazin Co-authored-by: Janet Gainer-Dewar --- CHANGELOG.md | 4 ++ .../invalidate_bad_caches_use_good.wdl | 72 +++++++++++++++++++ .../invalidate_bad_caches_use_good_jes.test | 18 +++++ .../invalidate_bad_caches_use_good_local.test | 19 +++++ .../slick/CallCachingSlickDatabase.scala | 6 +- ...CallCachingAggregationEntryComponent.scala | 12 ++-- .../database/sql/CallCachingSqlDatabase.scala | 2 +- .../execution/callcaching/CallCache.scala | 4 +- .../callcaching/CallCacheReadActor.scala | 6 +- .../CallCacheReadingJobActor.scala | 27 +++---- .../CallCacheReadingJobActorSpec.scala | 62 ++++++++-------- .../CallCachingSlickDatabaseSpec.scala | 2 +- 12 files changed, 175 insertions(+), 59 deletions(-) create mode 100644 centaur/src/main/resources/standardTestCases/invalidate_bad_caches/invalidate_bad_caches_use_good.wdl create mode 100644 centaur/src/main/resources/standardTestCases/invalidate_bad_caches_use_good_jes.test create mode 100644 centaur/src/main/resources/standardTestCases/invalidate_bad_caches_use_good_local.test diff --git a/CHANGELOG.md b/CHANGELOG.md index 0aa3f5fba61..62a3ad92218 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,10 @@ Cromwell is now built with Scala version 2.13. This change should not be noticeable to users but may be of interest to developers of Cromwell backend implementations. +### Bug Fixes + + * Fixed a call caching bug in which an invalid cache entry could cause a valid cache entry to be ignored. 
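The mechanics of this fix are easiest to see in miniature. The sketch below is illustrative only — `CacheHitRetrySketch`, `findUsableHit`, `lookup`, and `isStillValid` are hypothetical stand-ins, not Cromwell's actual API. Instead of asking the database for the Nth hit (a position that shifts whenever an earlier entry is invalidated), the reader accumulates the IDs of entries it has already tried and asks for any hit not in that set:

```scala
// Hypothetical sketch of exclusion-by-ID cache lookup; not Cromwell's API.
object CacheHitRetrySketch {
  // `lookup` returns one reusable cache entry ID matching `hashes`, skipping `seen`.
  def findUsableHit(hashes: String,
                    lookup: (String, Set[Int]) => Option[Int],
                    isStillValid: Int => Boolean): Option[Int] = {
    @scala.annotation.tailrec
    def loop(seen: Set[Int]): Option[Int] =
      lookup(hashes, seen) match {
        case Some(id) if isStillValid(id) => Some(id)        // usable hit: reuse its results
        case Some(id)                     => loop(seen + id) // invalid entry: exclude it, retry
        case None                         => None            // genuine cache miss
      }
    loop(Set.empty)
  }
}
```

Because exclusion is by identity rather than by position, invalidating one entry can no longer cause a still-valid entry to be skipped.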
+ ## 75 Release Notes ### New `AwaitingCloudQuota` backend status diff --git a/centaur/src/main/resources/standardTestCases/invalidate_bad_caches/invalidate_bad_caches_use_good.wdl b/centaur/src/main/resources/standardTestCases/invalidate_bad_caches/invalidate_bad_caches_use_good.wdl new file mode 100644 index 00000000000..dafcd54e077 --- /dev/null +++ b/centaur/src/main/resources/standardTestCases/invalidate_bad_caches/invalidate_bad_caches_use_good.wdl @@ -0,0 +1,72 @@ +task make_file { + Boolean ready + command { + # This comment adds noise to this command to stop it from call caching to other test cases + # invalidate_bad_caches_use_good + echo woohoo > out.txt + } + runtime { + docker: "ubuntu@sha256:71cd81252a3563a03ad8daee81047b62ab5d892ebbfbf71cf53415f29c130950" + } + output { + Boolean done = true + File out = "out.txt" + } +} + +task delete_file_in_gcs { + Boolean ready + String file_path + command { + gsutil rm ${file_path} + } + runtime { + # google/cloud-sdk:354.0.0-slim + docker: "google/cloud-sdk@sha256:b5bd0d4b9e56a8b82cea893e7c45f9dfb01fa7cb4e1ce0d426a4468d64654710" + } + output { + Boolean done = true + } +} + +task delete_file_local { + Boolean ready + String file_path_raw + String file_path = sub(file_path_raw, "file://", "") + + command { + rm ${file_path} + } + output { + Boolean done = true + } + runtime { + backend: "Local" + } +} + +workflow invalidate_bad_caches { + Boolean running_on_jes + + call make_file as make_first_file { input: ready = true } + call make_file as make_second_file { input: ready = make_first_file.done } + + if (running_on_jes) { + call delete_file_in_gcs { + input: + ready = make_second_file.done, + file_path = make_first_file.out + } + } + if (!running_on_jes) { + call delete_file_local { + input: + ready = make_second_file.done, + file_path_raw = make_first_file.out + } + } + + call make_file as cache_third_file { + input: ready = select_first([delete_file_in_gcs.done, delete_file_local.done]) + } +} diff --git a/centaur/src/main/resources/standardTestCases/invalidate_bad_caches_use_good_jes.test b/centaur/src/main/resources/standardTestCases/invalidate_bad_caches_use_good_jes.test new file mode 100644 index 00000000000..6d72c14c648 --- /dev/null +++ b/centaur/src/main/resources/standardTestCases/invalidate_bad_caches_use_good_jes.test @@ -0,0 +1,18 @@ +name: invalidate_bad_caches_use_good_jes +testFormat: workflowsuccess +backends: [Papi] + +files { + workflow: invalidate_bad_caches/invalidate_bad_caches_use_good.wdl + inputs: invalidate_bad_caches/jes.inputs +} + +metadata { + "calls.invalidate_bad_caches.make_first_file.callCaching.result": "Cache Miss" + "calls.invalidate_bad_caches.make_first_file.callCaching.allowResultReuse": false + "calls.invalidate_bad_caches.make_second_file.callCaching.result": "Cache Hit: <>:invalidate_bad_caches.make_first_file:-1" + "calls.invalidate_bad_caches.make_second_file.callCaching.allowResultReuse": true + "calls.invalidate_bad_caches.delete_file_in_gcs.callCaching.result": "Cache Miss" + "calls.invalidate_bad_caches.cache_third_file.callCaching.result": "Cache Hit: <>:invalidate_bad_caches.make_second_file:-1" + "calls.invalidate_bad_caches.cache_third_file.callCaching.allowResultReuse": true +} diff --git a/centaur/src/main/resources/standardTestCases/invalidate_bad_caches_use_good_local.test b/centaur/src/main/resources/standardTestCases/invalidate_bad_caches_use_good_local.test new file mode 100644 index 00000000000..2d3bc8a4e31 --- /dev/null +++ 
b/centaur/src/main/resources/standardTestCases/invalidate_bad_caches_use_good_local.test @@ -0,0 +1,19 @@ +name: invalidate_bad_caches_use_good_local +testFormat: workflowsuccess +backends: [Local] +tags: [localdockertest] + +files { + workflow: invalidate_bad_caches/invalidate_bad_caches_use_good.wdl + inputs: invalidate_bad_caches/local.inputs +} + +metadata { + "calls.invalidate_bad_caches.make_first_file.callCaching.result": "Cache Miss" + "calls.invalidate_bad_caches.make_first_file.callCaching.allowResultReuse": false + "calls.invalidate_bad_caches.make_second_file.callCaching.result": "Cache Hit: <>:invalidate_bad_caches.make_first_file:-1" + "calls.invalidate_bad_caches.make_second_file.callCaching.allowResultReuse": true + "calls.invalidate_bad_caches.delete_file_local.callCaching.result": "Cache Miss" + "calls.invalidate_bad_caches.cache_third_file.callCaching.result": "Cache Hit: <>:invalidate_bad_caches.make_second_file:-1" + "calls.invalidate_bad_caches.cache_third_file.callCaching.allowResultReuse": true +} diff --git a/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala index 17a4e2b61d6..2a78cc5300b 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala @@ -85,12 +85,12 @@ trait CallCachingSlickDatabase extends CallCachingSqlDatabase { runTransaction(action) } - override def findCacheHitForAggregation(baseAggregationHash: String, inputFilesAggregationHash: Option[String], callCachePathPrefixes: Option[List[String]], hitNumber: Int) + override def findCacheHitForAggregation(baseAggregationHash: String, inputFilesAggregationHash: Option[String], callCachePathPrefixes: Option[List[String]], excludedIds: Set[Int]) (implicit ec: ExecutionContext): Future[Option[Int]] = { val action = callCachePathPrefixes match { case None => - dataAccess.callCachingEntriesForAggregatedHashes(baseAggregationHash, inputFilesAggregationHash, hitNumber).result.headOption + dataAccess.callCachingEntriesForAggregatedHashes(baseAggregationHash, inputFilesAggregationHash, excludedIds).result.headOption case Some(ps) => val one :: two :: three :: _ = prefixesAndLengths(ps) dataAccess.callCachingEntriesForAggregatedHashesWithPrefixes( @@ -98,7 +98,7 @@ trait CallCachingSlickDatabase extends CallCachingSqlDatabase { one.prefix, one.length, two.prefix, two.length, three.prefix, three.length, - hitNumber).result.headOption + excludedIds).result.headOption } runTransaction(action) diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala index aa537946cc3..6fea80dae79 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala @@ -69,26 +69,26 @@ trait CallCachingAggregationEntryComponent { (detritusPath.substring(0, prefix3Length) === prefix3)} yield ()).exists ) - def callCachingEntriesForAggregatedHashes(baseAggregation: Rep[String], inputFilesAggregation: Rep[Option[String]], number: Int) = { + def callCachingEntriesForAggregatedHashes(baseAggregation: Rep[String], inputFilesAggregation: Rep[Option[String]], excludedIds: Set[Int]) = { (for { 
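// The generators below now exclude previously tried entry IDs directly in SQL via
// `inSet excludedIds`, and the trailing `.take(1)` replaces the old
// `.drop(number - 1).take(1)` paging, whose offsets shifted whenever an entry was
// invalidated mid-search.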
callCachingEntry <- callCachingEntries - if callCachingEntry.allowResultReuse + if callCachingEntry.allowResultReuse && !(callCachingEntry.callCachingEntryId inSet excludedIds) callCachingAggregationEntry <- callCachingAggregationEntries if callCachingEntry.callCachingEntryId === callCachingAggregationEntry.callCachingEntryId if callCachingAggregationEntry.baseAggregation === baseAggregation if (callCachingAggregationEntry.inputFilesAggregation.isEmpty && inputFilesAggregation.isEmpty) || (callCachingAggregationEntry.inputFilesAggregation === inputFilesAggregation) - } yield callCachingAggregationEntry.callCachingEntryId).drop(number - 1).take(1) + } yield callCachingAggregationEntry.callCachingEntryId).take(1) } def callCachingEntriesForAggregatedHashesWithPrefixes(baseAggregation: Rep[String], inputFilesAggregation: Rep[Option[String]], prefix1: Rep[String], prefix1Length: Rep[Int], prefix2: Rep[String], prefix2Length: Rep[Int], prefix3: Rep[String], prefix3Length: Rep[Int], - number: Int) = { + excludedIds: Set[Int]) = { (for { callCachingEntry <- callCachingEntries - if callCachingEntry.allowResultReuse + if callCachingEntry.allowResultReuse && !(callCachingEntry.callCachingEntryId inSet excludedIds) callCachingAggregationEntry <- callCachingAggregationEntries if callCachingEntry.callCachingEntryId === callCachingAggregationEntry.callCachingEntryId if callCachingAggregationEntry.baseAggregation === baseAggregation @@ -103,6 +103,6 @@ trait CallCachingAggregationEntryComponent { if (detritusPath.substring(0, prefix1Length) === prefix1) || (detritusPath.substring(0, prefix2Length) === prefix2) || (detritusPath.substring(0, prefix3Length) === prefix3) - } yield callCachingAggregationEntry.callCachingEntryId).drop(number - 1).take(1) + } yield callCachingAggregationEntry.callCachingEntryId).take(1) } } diff --git a/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala index 04e31693608..23a744b4cf1 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala @@ -11,7 +11,7 @@ trait CallCachingSqlDatabase { def hasMatchingCallCachingEntriesForBaseAggregation(baseAggregationHash: String, callCachePathPrefixes: Option[List[String]]) (implicit ec: ExecutionContext): Future[Boolean] - def findCacheHitForAggregation(baseAggregationHash: String, inputFilesAggregationHash: Option[String], callCachePathPrefixes: Option[List[String]], hitNumber: Int) + def findCacheHitForAggregation(baseAggregationHash: String, inputFilesAggregationHash: Option[String], callCachePathPrefixes: Option[List[String]], excludedIds: Set[Int]) (implicit ec: ExecutionContext): Future[Option[Int]] def queryResultsForCacheId(callCachingEntryId: Int) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala index 78ba0561cea..5e23a69d0c7 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCache.scala @@ -77,13 +77,13 @@ class CallCache(database: CallCachingSqlDatabase) { database.hasMatchingCallCachingEntriesForBaseAggregation(baseAggregatedHash, ccpp) } - def callCachingHitForAggregatedHashes(aggregatedCallHashes: 
AggregatedCallHashes, prefixesHint: Option[CallCachePathPrefixes], hitNumber: Int) + def callCachingHitForAggregatedHashes(aggregatedCallHashes: AggregatedCallHashes, prefixesHint: Option[CallCachePathPrefixes], excludedIds: Set[CallCachingEntryId]) (implicit ec: ExecutionContext): Future[Option[CallCachingEntryId]] = { database.findCacheHitForAggregation( baseAggregationHash = aggregatedCallHashes.baseAggregatedHash, inputFilesAggregationHash = aggregatedCallHashes.inputFilesAggregatedHash, callCachePathPrefixes = prefixesHint.map(_.prefixes), - hitNumber).map(_ map CallCachingEntryId.apply) + excludedIds.map(_.id)).map(_ map CallCachingEntryId.apply) } def fetchCachedResult(callCachingEntryId: CallCachingEntryId)(implicit ec: ExecutionContext): Future[Option[CallCachingJoin]] = { diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadActor.scala index 696c90099e7..16c4ce39ed7 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadActor.scala @@ -33,8 +33,8 @@ class CallCacheReadActor(cache: CallCache, case true => HasMatchingEntries case false => NoMatchingEntries } - case CacheLookupRequest(aggregatedCallHashes, cacheHitNumber, prefixesHint) => - cache.callCachingHitForAggregatedHashes(aggregatedCallHashes, prefixesHint, cacheHitNumber) map { + case CacheLookupRequest(aggregatedCallHashes, excludedIds, prefixesHint) => + cache.callCachingHitForAggregatedHashes(aggregatedCallHashes, prefixesHint, excludedIds) map { case Some(nextHit) => CacheLookupNextHit(nextHit) case None => CacheLookupNoHit } @@ -78,7 +78,7 @@ object CallCacheReadActor { case class AggregatedCallHashes(baseAggregatedHash: String, inputFilesAggregatedHash: Option[String]) sealed trait CallCacheReadActorRequest - final case class CacheLookupRequest(aggregatedCallHashes: AggregatedCallHashes, cacheHitNumber: Int, prefixesHint: Option[CallCachePathPrefixes]) extends CallCacheReadActorRequest + final case class CacheLookupRequest(aggregatedCallHashes: AggregatedCallHashes, excludedIds: Set[CallCachingEntryId], prefixesHint: Option[CallCachePathPrefixes]) extends CallCacheReadActorRequest final case class HasMatchingInitialHashLookup(aggregatedTaskHash: String, cacheHitHints: List[CacheHitHint] = List.empty) extends CallCacheReadActorRequest final case class CallCacheEntryForCall(workflowId: WorkflowId, jobKey: BackendJobDescriptorKey) extends CallCacheReadActorRequest diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActor.scala index 97b5a47837d..953da95042f 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActor.scala @@ -8,6 +8,7 @@ import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheHashing import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadActor._ import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheReadingJobActor._ import 
cromwell.engine.workflow.lifecycle.execution.callcaching.EngineJobHashingActor.{CacheHit, CacheMiss, HashError} +import cromwell.services.CallCaching.CallCachingEntryId /** * Receives hashes from the CallCacheHashingJobActor and makes requests to the database to determine whether or not there might be a hit @@ -30,7 +31,7 @@ class CallCacheReadingJobActor(callCacheReadActor: ActorRef, prefixesHint: Optio when(WaitingForInitialHash) { case Event(InitialHashingResult(_, aggregatedBaseHash, hints), CCRJANoData) => callCacheReadActor ! HasMatchingInitialHashLookup(aggregatedBaseHash, hints) - goto(WaitingForHashCheck) using CCRJAWithData(sender(), aggregatedBaseHash, None, 1) + goto(WaitingForHashCheck) using CCRJAWithData(sender(), aggregatedBaseHash, fileHash = None, seenCaches = Set.empty) } when(WaitingForHashCheck) { @@ -43,24 +44,24 @@ class CallCacheReadingJobActor(callCacheReadActor: ActorRef, prefixesHint: Optio when(WaitingForFileHashes) { case Event(CompleteFileHashingResult(_, aggregatedFileHash), data: CCRJAWithData) => - callCacheReadActor ! CacheLookupRequest(AggregatedCallHashes(data.initialHash, aggregatedFileHash), data.currentHitNumber, prefixesHint) + callCacheReadActor ! CacheLookupRequest(AggregatedCallHashes(data.initialHash, aggregatedFileHash), data.seenCaches, prefixesHint) goto(WaitingForCacheHitOrMiss) using data.withFileHash(aggregatedFileHash) case Event(NoFileHashesResult, data: CCRJAWithData) => - callCacheReadActor ! CacheLookupRequest(AggregatedCallHashes(data.initialHash, None), data.currentHitNumber, prefixesHint) + callCacheReadActor ! CacheLookupRequest(AggregatedCallHashes(data.initialHash, None), data.seenCaches, prefixesHint) goto(WaitingForCacheHitOrMiss) } when(WaitingForCacheHitOrMiss) { case Event(CacheLookupNextHit(hit), data: CCRJAWithData) => context.parent ! CacheHit(hit) - stay() using data.increment + stay() using data.withSeenCache(hit) case Event(CacheLookupNoHit, _) => cacheMiss - case Event(NextHit, CCRJAWithData(_, aggregatedInitialHash, aggregatedFileHash, currentHitNumber)) => - callCacheReadActor ! CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, aggregatedFileHash), currentHitNumber, prefixesHint) + case Event(NextHit, CCRJAWithData(_, aggregatedInitialHash, aggregatedFileHash, seenCaches)) => + callCacheReadActor ! CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, aggregatedFileHash), seenCaches, prefixesHint) stay() } - + whenUnhandled { case Event(_: HashingFailedMessage, _) => // No need to send to the parent since it also receives file hash updates @@ -69,7 +70,7 @@ class CallCacheReadingJobActor(callCacheReadActor: ActorRef, prefixesHint: Optio context.parent ! HashError(failure) cacheMiss } - + def cacheMiss = { context.parent ! 
CacheMiss context stop self @@ -78,11 +79,11 @@ class CallCacheReadingJobActor(callCacheReadActor: ActorRef, prefixesHint: Optio } object CallCacheReadingJobActor { - + def props(callCacheReadActor: ActorRef, prefixesHint: Option[CallCachePathPrefixes]) = { Props(new CallCacheReadingJobActor(callCacheReadActor, prefixesHint)).withDispatcher(EngineDispatcher) } - + sealed trait CallCacheReadingJobActorState case object WaitingForInitialHash extends CallCacheReadingJobActorState case object WaitingForHashCheck extends CallCacheReadingJobActorState @@ -91,9 +92,9 @@ object CallCacheReadingJobActor { sealed trait CCRJAData case object CCRJANoData extends CCRJAData - case class CCRJAWithData(hashingActor: ActorRef, initialHash: String, fileHash: Option[String], currentHitNumber: Int) extends CCRJAData { - def increment = this.copy(currentHitNumber = currentHitNumber + 1) - def withFileHash(aggregatedFileHash: String) = this.copy(fileHash = Option(aggregatedFileHash)) + case class CCRJAWithData(hashingActor: ActorRef, initialHash: String, fileHash: Option[String], seenCaches: Set[CallCachingEntryId]) extends CCRJAData { + def withSeenCache(id: CallCachingEntryId): CCRJAWithData = this.copy(seenCaches = seenCaches + id) + def withFileHash(aggregatedFileHash: String): CCRJAWithData = this.copy(fileHash = Option(aggregatedFileHash)) } case object NextHit diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActorSpec.scala index b4349baa4a7..a39597fa347 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheReadingJobActorSpec.scala @@ -14,7 +14,7 @@ import org.scalatest.matchers.should.Matchers class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Eventually { behavior of "CallCacheReadingJobActor" - + it should "try to match initial hashes against DB" in { val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() @@ -27,7 +27,7 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit callCacheReadProbe.expectMsg(HasMatchingInitialHashLookup(aggregatedInitialhash)) eventually { actorUnderTest.stateName shouldBe WaitingForHashCheck - actorUnderTest.stateData shouldBe CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialhash, None, 1) + actorUnderTest.stateData shouldBe CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialhash, None, Set.empty) } } @@ -35,7 +35,7 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None)) - actorUnderTest.setState(WaitingForHashCheck, CCRJAWithData(callCacheHashingActor.ref, "AggregatedInitialHash", None, 1)) + actorUnderTest.setState(WaitingForHashCheck, CCRJAWithData(callCacheHashingActor.ref, "AggregatedInitialHash", None, Set.empty)) callCacheReadProbe.send(actorUnderTest, HasMatchingEntries) callCacheHashingActor.expectMsg(NextBatchOfFileHashesRequest) @@ -48,11 +48,11 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() 
val parent = TestProbe() - + val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) parent.watch(actorUnderTest) - - actorUnderTest.setState(WaitingForHashCheck, CCRJAWithData(callCacheHashingActor.ref, "AggregatedInitialHash", None, 1)) + + actorUnderTest.setState(WaitingForHashCheck, CCRJAWithData(callCacheHashingActor.ref, "AggregatedInitialHash", None, Set.empty)) callCacheReadProbe.send(actorUnderTest, NoMatchingEntries) parent.expectMsg(CacheMiss) @@ -62,38 +62,38 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit it should "ask for matching cache entries for both aggregated hashes when got both" in { val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() - + val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) val aggregatedInitialHash: String = "AggregatedInitialHash" val aggregatedFileHash: String = "AggregatedFileHash" - actorUnderTest.setState(WaitingForFileHashes, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, 1)) + actorUnderTest.setState(WaitingForFileHashes, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) val fileHashes = Set(HashResult(HashKey("f1"), HashValue("h1")), HashResult(HashKey("f2"), HashValue("h2"))) callCacheHashingActor.send(actorUnderTest, CompleteFileHashingResult(fileHashes, aggregatedFileHash)) - callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, aggregatedFileHash), 1, prefixesHint = None)) + callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, aggregatedFileHash), Set.empty, prefixesHint = None)) eventually { actorUnderTest.stateName shouldBe WaitingForCacheHitOrMiss - actorUnderTest.stateData shouldBe CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, Some(aggregatedFileHash), 1) + actorUnderTest.stateData shouldBe CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, Some(aggregatedFileHash), Set.empty) } } it should "ask for matching cache entries for initial hashes when there is no file input" in { val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() - + val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) val aggregatedInitialHash: String = "AggregatedInitialHash" - actorUnderTest.setState(WaitingForFileHashes, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, 1)) + actorUnderTest.setState(WaitingForFileHashes, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) callCacheHashingActor.send(actorUnderTest, NoFileHashesResult) - callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, None), 1, prefixesHint = None)) + callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, None), Set.empty, prefixesHint = None)) eventually { actorUnderTest.stateName shouldBe WaitingForCacheHitOrMiss - actorUnderTest.stateData shouldBe CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, 1) + actorUnderTest.stateData shouldBe CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty) } } @@ -101,11 +101,11 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() val parent = 
TestProbe() - + val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) val aggregatedInitialHash: String = "AggregatedInitialHash" - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, 1)) + actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) val id: CallCachingEntryId = CallCachingEntryId(8) callCacheReadProbe.send(actorUnderTest, CacheLookupNextHit(id)) @@ -113,7 +113,7 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit eventually { actorUnderTest.stateName shouldBe WaitingForCacheHitOrMiss - actorUnderTest.stateData shouldBe CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, 2) + actorUnderTest.stateData shouldBe CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set(id)) } } @@ -121,12 +121,12 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() val parent = TestProbe() - + val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) parent.watch(actorUnderTest) - + val aggregatedInitialHash: String = "AggregatedInitialHash" - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, 1)) + actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) callCacheReadProbe.send(actorUnderTest, CacheLookupNoHit) parent.expectMsg(CacheMiss) @@ -137,14 +137,15 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit it should "ask callCacheReadActor for next hit when requested (initial hash only)" in { val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() - + val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) val aggregatedInitialHash: String = "AggregatedInitialHash" - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, 2)) + val seenCaches: Set[CallCachingEntryId] = Set(CallCachingEntryId(0)) + actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, seenCaches)) actorUnderTest ! 
NextHit - callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, None), 2, prefixesHint = None)) + callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, None), seenCaches, prefixesHint = None)) actorUnderTest.stateName shouldBe WaitingForCacheHitOrMiss } @@ -152,15 +153,16 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit it should "ask callCacheReadActor for next hit when requested (with file hash)" in { val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() - + val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), TestProbe().ref) val aggregatedInitialHash: String = "AggregatedInitialHash" val aggregatedFileHash: String = "AggregatedFileHash" - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, Option(aggregatedFileHash), 2)) + val seenCaches: Set[CallCachingEntryId] = Set(CallCachingEntryId(0)) + actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, Option(aggregatedFileHash), seenCaches)) actorUnderTest ! NextHit - callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, Option(aggregatedFileHash)), 2, prefixesHint = None)) + callCacheReadProbe.expectMsg(CacheLookupRequest(AggregatedCallHashes(aggregatedInitialHash, Option(aggregatedFileHash)), seenCaches, prefixesHint = None)) actorUnderTest.stateName shouldBe WaitingForCacheHitOrMiss } @@ -169,12 +171,12 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() val parent = TestProbe() - + val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) parent.watch(actorUnderTest) val aggregatedInitialHash: String = "AggregatedInitialHash" - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, 1)) + actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) callCacheHashingActor.send(actorUnderTest, HashingFailedMessage("file", new Exception("Hashing failed"))) parent.expectMsg(CacheMiss) @@ -186,12 +188,12 @@ class CallCacheReadingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val callCacheReadProbe = TestProbe() val callCacheHashingActor = TestProbe() val parent = TestProbe() - + val actorUnderTest = TestFSMRef(new CallCacheReadingJobActor(callCacheReadProbe.ref, prefixesHint = None), parent.ref) parent.watch(actorUnderTest) val aggregatedInitialHash: String = "AggregatedInitialHash" - actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, 1)) + actorUnderTest.setState(WaitingForCacheHitOrMiss, CCRJAWithData(callCacheHashingActor.ref, aggregatedInitialHash, None, Set.empty)) val reason: Exception = new Exception("Lookup failed") callCacheHashingActor.send(actorUnderTest, CacheResultLookupFailure(reason)) diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala index 2181726b708..408b1087588 100644 --- 
a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala @@ -103,7 +103,7 @@ class CallCachingSlickDatabaseSpec "BASE_AGGREGATION", Option("FILE_AGGREGATION"), callCachePathPrefixes = prefixOption, - 1 + Set.empty ) _ = hit shouldBe empty } yield ()).futureValue From 06eb9821a31a2fcd86e18928a2c6f56493d7a2c1 Mon Sep 17 00:00:00 2001 From: Brian Reilly Date: Thu, 14 Apr 2022 14:51:30 -0400 Subject: [PATCH 16/58] Update comment in ssh_access test (#6732) --- centaur/src/main/resources/standardTestCases/ssh_access.test | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/centaur/src/main/resources/standardTestCases/ssh_access.test b/centaur/src/main/resources/standardTestCases/ssh_access.test index 1ea20bc3abd..f8b3d799f6f 100644 --- a/centaur/src/main/resources/standardTestCases/ssh_access.test +++ b/centaur/src/main/resources/standardTestCases/ssh_access.test @@ -1,7 +1,9 @@ name: ssh_access testFormat: workflowsuccess backends: [Papiv2] -# CROM-6872: ignoring for now until we figure out the problem +# CROM-6872: disabling this test because it is consistently failing in our test infrastructure. This +# is a community-contributed feature that is not officially supported by the Cromwell development +# team at the Broad Institute. ignore: true files { From 810557b4907a9b186ff51665786fb4e9724feb13 Mon Sep 17 00:00:00 2001 From: Chris Llanwarne Date: Fri, 15 Apr 2022 10:00:24 -0400 Subject: [PATCH 17/58] Update source files directory and assert their accessibility (#6727) --- .../delete_intermediates/no_input_delete_setup.wdl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/centaur/src/main/resources/standardTestCases/delete_intermediates/no_input_delete_setup.wdl b/centaur/src/main/resources/standardTestCases/delete_intermediates/no_input_delete_setup.wdl index cc1b5e106a1..2281a36749e 100644 --- a/centaur/src/main/resources/standardTestCases/delete_intermediates/no_input_delete_setup.wdl +++ b/centaur/src/main/resources/standardTestCases/delete_intermediates/no_input_delete_setup.wdl @@ -25,7 +25,9 @@ task makeFileAndIndex { input { } command { - gsutil cp gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/no_input_delete/source_files/*.* gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/no_input_delete/test_execution + set -euo pipefail # Makes sure we fail quickly if the gsutil cp fails + + gsutil cp 'gs://cloud-cromwell-dev/cromwell_execution/no_input_delete/source_files/*.*' gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/no_input_delete/test_execution/ echo $(gsutil stat gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/no_input_delete/test_execution/no_input_delete.txt) > ~{outputFile} } runtime { From fb9c769f99f12e1730515f6d3cb2c13f326c5803 Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Mon, 25 Apr 2022 13:12:01 -0400 Subject: [PATCH 18/58] Automatically build Cromwell, update Cromwhelm on merge to develop [BW-1211] (#6739) --- .github/workflows/chart_update_on_merge.yml | 71 +++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 .github/workflows/chart_update_on_merge.yml diff --git a/.github/workflows/chart_update_on_merge.yml b/.github/workflows/chart_update_on_merge.yml new file mode 100644 index 00000000000..9e847bc17eb --- /dev/null +++ b/.github/workflows/chart_update_on_merge.yml @@ -0,0 +1,71 @@ +name: 
chart-update-on-merge + +on: + pull_request: + types: + - closed + +jobs: + chart-update: + name: Cromwhelm Chart Auto Updater + if: github.event.pull_request.merged == true + runs-on: self-hosted # Faster machines; see https://github.com/broadinstitute/cromwell/settings/actions/runners + steps: + - name: Clone Cromwell + uses: actions/checkout@v2 + with: + repository: broadinstitute/cromwell + token: ${{ secrets.BROADBOT_GITHUB_TOKEN }} # Has to be set at checkout AND later when pushing to work + path: cromwell + - uses: olafurpg/setup-scala@v10 + with: + java-version: adopt@1.11 + - name: Clone Cromwhelm + uses: actions/checkout@v2 + with: + repository: broadinstitute/cromwhelm + token: ${{ secrets.BROADBOT_GITHUB_TOKEN }} # Has to be set at checkout AND later when pushing to work + path: cromwhelm + - name: Find Cromwell short SHA + run: | + set -e + cd cromwell + echo "CROMWELL_SHORT_SHA=`git rev-parse --short $GITHUB_SHA`" >> $GITHUB_ENV + - name: Find Cromwell release number + run: | + set -e + previous_version=$(curl -X GET https://api.github.com/repos/broadinstitute/cromwell/releases/latest | jq .tag_name | xargs) + if ! [[ "${previous_version}" =~ ^[0-9][0-9]+$ ]]; then + exit 1 + fi + echo "CROMWELL_NUMBER=$((previous_version + 1))" >> $GITHUB_ENV + - name: Save complete image ID + run: | + echo "CROMWELL_SNAP_VERSION=`echo "$CROMWELL_NUMBER-$CROMWELL_SHORT_SHA-SNAP"`" >> $GITHUB_ENV + # `DSDEJENKINS_PASSWORD` auto syncs from vault with https://github.com/broadinstitute/terraform-ap-deployments/pull/614 + - name: Login to Docker Hub + uses: docker/login-action@v1 + with: + username: dsdejenkins + password: ${{ secrets.DSDEJENKINS_PASSWORD }} + - name: Build Cromwell Docker + run: | + set -e + cd cromwell + sbt server/docker + docker push broadinstitute/cromwell:$CROMWELL_SNAP_VERSION + - name: Edit & push chart + env: + BROADBOT_GITHUB_TOKEN: ${{ secrets.BROADBOT_GITHUB_TOKEN }} + run: | + set -e + cd cromwhelm + git checkout main + ls -la + sed -i "s/appVersion.*/appVersion: \"$CROMWELL_SNAP_VERSION\"/" cromwell-helm/Chart.yaml + sed -i "s/image: broadinstitute\/cromwell.*/image: broadinstitute\/cromwell:$CROMWELL_SNAP_VERSION/" cromwell-helm/templates/cromwell.yaml + git diff + git config --global user.name "broadbot" + git config --global user.email "broadbot@broadinstitute.org" + git commit -am "Auto update to Cromwell $CROMWELL_SNAP_VERSION" + git push https://broadbot:$BROADBOT_GITHUB_TOKEN@github.com/broadinstitute/cromwhelm.git main From 69b85e0fd18195ada189f18181e3ac0f7511a364 Mon Sep 17 00:00:00 2001 From: kshakir Date: Mon, 25 Apr 2022 14:45:29 -0400 Subject: [PATCH 19/58] BW-1222 Reduced test framework dependencies (#6735) --- .../scala/cromiam/sam/SamClientSpec.scala | 21 +- .../scala/cromwell/backend/BackendSpec.scala | 28 +- .../backend/OutputEvaluatorSpec.scala | 27 +- .../RuntimeAttributeValidationSpec.scala | 63 +++-- .../backend/io/WorkflowPathsSpec.scala | 23 +- ...alidatedRuntimeAttributesBuilderSpec.scala | 9 +- .../StandardFileHashingActorSpec.scala | 8 +- build.sbt | 2 +- .../centaur/reporting/Slf4jReporter.scala | 2 +- .../testfilecheck/FileCheckerSpec.scala | 39 ++- .../drs/DrsCloudNioFileProviderSpec.scala | 20 +- .../MarthaHttpRequestRetryStrategySpec.scala | 24 +- .../impl/drs/MockEngineDrsPathResolver.scala | 5 +- .../nio/impl/ftp/FtpClientPoolSpec.scala | 5 +- .../FtpCloudNioFileSystemProviderSpec.scala | 26 +- .../nio/impl/ftp/FtpCredentialsSpec.scala | 13 +- .../nio/impl/ftp/FtpFileSystemsSpec.scala | 30 +- .../cloud/nio/impl/ftp/FtpUtilSpec.scala 
| 9 +- .../nio/impl/ftp/LeaseInputStreamSpec.scala | 10 +- .../nio/impl/ftp/LeaseOutputStreamSpec.scala | 13 +- .../ftp/operations/FtpOperationSpec.scala | 4 +- .../scala/common/mock/MockImplicits.scala | 43 +++ .../test/scala/common/mock/MockSugar.scala | 71 +++++ .../common/validation/ValidationSpec.scala | 5 +- core/src/test/scala/cromwell/core/Tags.scala | 1 - .../scala/cromwell/core/io/AsyncIoSpec.scala | 11 +- .../cromwell/core/io/IoClientHelperSpec.scala | 12 +- .../core/logging/LoggerWrapperSpec.scala | 8 +- .../core/simpleton/WomValueBuilderSpec.scala | 22 +- .../test/scala/cromwell/util/SampleWdl.scala | 131 +++++++-- cwl/src/test/scala/cwl/ScatterLogicSpec.scala | 28 +- .../scala/cwl/WomTypeConversionSpec.scala | 81 +++--- .../cwl/WorkflowStepInputExpressionSpec.scala | 21 +- .../scala/cwl/WorkflowStepInputSpec.scala | 123 +++++---- .../preprocessor/CwlPreProcessorSpec.scala | 50 ++-- .../AlibabaCloudCRRegistrySpec.scala | 14 +- .../engine/io/gcs/GcsBatchFlowSpec.scala | 12 +- .../cromwell/engine/io/nio/NioFlowSpec.scala | 8 +- .../WorkflowDockerLookupActorSpec.scala | 4 +- .../SubWorkflowExecutionActorSpec.scala | 76 +++--- .../CallCacheHashingJobActorSpec.scala | 35 +-- .../CallCachingSlickDatabaseSpec.scala | 10 +- .../EngineJobHashingActorSpec.scala | 18 +- .../job/preparation/CallPreparationSpec.scala | 4 +- .../preparation/JobPreparationActorSpec.scala | 8 +- .../JobPreparationTestHelper.scala | 31 ++- .../workflow/mocks/DeclarationMock.scala | 6 +- .../engine/workflow/mocks/TaskMock.scala | 8 +- .../workflow/mocks/WdlWomExpressionMock.scala | 16 +- .../workflowstore/SqlWorkflowStoreSpec.scala | 28 +- .../webservice/MetadataBuilderActorSpec.scala | 29 +- .../filesystems/drs/DrsReaderSpec.scala | 36 ++- .../gcs/GcsEnhancedRequestSpec.scala | 54 ++-- .../filesystems/oss/nio/OssNioUtilSpec.scala | 18 +- .../OssStorageFileAttributesViewSpec.scala | 4 +- .../nio/OssStorageObjectAttributesSpec.scala | 30 +- .../nio/TTLOssStorageConfigurationSpec.scala | 15 +- project/Dependencies.scala | 83 ++++-- project/Testing.scala | 8 +- .../engine/WorkflowStoreActorSpec.scala | 4 +- ...terializeWorkflowDescriptorActorSpec.scala | 48 ++-- .../ejea/EngineJobExecutionActorSpec.scala | 3 +- .../execution/ejea/PerTestHelper.scala | 90 +++--- .../jobstore/JobStoreServiceSpec.scala | 4 +- .../SubWorkflowStoreSpec.scala | 24 +- .../impl/MetadataDatabaseAccessSpec.scala | 17 +- ...tchAsyncBackendJobExecutionActorSpec.scala | 7 +- .../impl/aws/AwsBatchCallPathsSpec.scala | 7 +- .../aws/AwsBatchInitializationActorSpec.scala | 70 +++-- .../aws/AwsBatchJobExecutionActorSpec.scala | 4 +- .../backend/impl/aws/AwsBatchJobSpec.scala | 3 +- .../aws/AwsBatchRuntimeAttributesSpec.scala | 3 +- .../impl/aws/AwsBatchWorkflowPathsSpec.scala | 4 +- .../backend/impl/bcs/BcsJobPathsSpec.scala | 7 +- .../backend/impl/bcs/BcsJobSpec.scala | 3 +- .../backend/impl/bcs/BcsTestUtilSpec.scala | 82 +++--- .../BcsBackendCacheHitCopyingActorSpec.scala | 17 +- ...ApiAsyncBackendJobExecutionActorSpec.scala | 258 ++++++++++++++---- .../common/PipelinesApiCallPathsSpec.scala | 3 +- ...sApiDockerCacheMappingOperationsSpec.scala | 6 +- .../PipelinesApiInitializationActorSpec.scala | 11 +- .../PipelinesApiJobExecutionActorSpec.scala | 4 +- .../PipelinesApiRuntimeAttributesSpec.scala | 6 +- .../PipelinesApiWorkflowPathsSpec.scala | 3 +- .../api/PipelinesApiRequestWorkerSpec.scala | 13 +- ...esApiBackendCacheHitCopyingActorSpec.scala | 13 +- ...ApiAsyncBackendJobExecutionActorSpec.scala | 6 +- .../v2alpha1/api/ActionCommandsSpec.scala 
| 13 +- ...ApiAsyncBackendJobExecutionActorSpec.scala | 6 +- .../v2beta/api/ActionCommandsSpec.scala | 13 +- .../config/ConfigHashingStrategySpec.scala | 27 +- .../backend/sfs/SharedFileSystemSpec.scala | 62 +++-- .../impl/tes/TesInitializationActorSpec.scala | 4 +- .../impl/tes/TesWorkflowPathsSpec.scala | 13 +- .../scala/wom/types/WomLongTypeSpec.scala | 38 +-- 95 files changed, 1475 insertions(+), 964 deletions(-) create mode 100644 common/src/test/scala/common/mock/MockImplicits.scala create mode 100644 common/src/test/scala/common/mock/MockSugar.scala diff --git a/CromIAM/src/test/scala/cromiam/sam/SamClientSpec.scala b/CromIAM/src/test/scala/cromiam/sam/SamClientSpec.scala index 085fe06f04c..40f0cda2e86 100644 --- a/CromIAM/src/test/scala/cromiam/sam/SamClientSpec.scala +++ b/CromIAM/src/test/scala/cromiam/sam/SamClientSpec.scala @@ -15,11 +15,10 @@ import org.broadinstitute.dsde.workbench.model.WorkbenchUserId import org.scalatest.BeforeAndAfterAll import org.scalatest.flatspec.AsyncFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito import scala.concurrent.ExecutionContextExecutor -class SamClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll with Mockito { +class SamClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll { implicit val actorSystem: ActorSystem = ActorSystem("SamClientSpec") implicit val ece: ExecutionContextExecutor = actorSystem.dispatcher @@ -28,20 +27,20 @@ class SamClientSpec extends AsyncFlatSpec with Matchers with BeforeAndAfterAll w private val expectedErrorResponse = HttpResponse(StatusCodes.InternalServerError, entity = HttpEntity("expected error")) - val authorization = Authorization(OAuth2BearerToken("my-token")) - val authorizedUserWithCollection = User(WorkbenchUserId(MockSamClient.AuthorizedUserCollectionStr), authorization) - val unauthorizedUserWithNoCollection = + private val authorization = Authorization(OAuth2BearerToken("my-token")) + private val authorizedUserWithCollection = User(WorkbenchUserId(MockSamClient.AuthorizedUserCollectionStr), authorization) + private val unauthorizedUserWithNoCollection = User(WorkbenchUserId(MockSamClient.UnauthorizedUserCollectionStr), authorization) - val notWhitelistedUser = User(WorkbenchUserId(MockSamClient.NotWhitelistedUser), authorization) + private val notWhitelistedUser = User(WorkbenchUserId(MockSamClient.NotWhitelistedUser), authorization) - val authorizedCollection = Collection(MockSamClient.AuthorizedUserCollectionStr) - val unauthorizedCollection = Collection(MockSamClient.UnauthorizedUserCollectionStr) - val authorizedCollectionRequest = + private val authorizedCollection = Collection(MockSamClient.AuthorizedUserCollectionStr) + private val unauthorizedCollection = Collection(MockSamClient.UnauthorizedUserCollectionStr) + private val authorizedCollectionRequest = CollectionAuthorizationRequest(authorizedUserWithCollection, authorizedCollection, "add") - val unauthorizedCollectionRequest = + private val unauthorizedCollectionRequest = CollectionAuthorizationRequest(unauthorizedUserWithNoCollection, unauthorizedCollection, "add") - val emptyHttpRequest: HttpRequest = HttpRequest() + private val emptyHttpRequest: HttpRequest = HttpRequest() override protected def afterAll(): Unit = { actorSystem.terminate() diff --git a/backend/src/test/scala/cromwell/backend/BackendSpec.scala b/backend/src/test/scala/cromwell/backend/BackendSpec.scala index e882dc054f0..8ddce2be6ea 100644 --- 
a/backend/src/test/scala/cromwell/backend/BackendSpec.scala +++ b/backend/src/test/scala/cromwell/backend/BackendSpec.scala @@ -11,7 +11,6 @@ import cromwell.core.{HogGroup, WorkflowId, WorkflowOptions} import org.scalatest.concurrent.{ScalaFutures, ScaledTimeSpans} import org.scalatest.matchers.should.Matchers import org.scalatest.time.{Millis, Seconds, Span} -import org.specs2.mock.Mockito import spray.json.{JsObject, JsValue} import wom.callable.Callable.{InputDefinition, RequiredInputDefinition} import wom.core.WorkflowSource @@ -21,25 +20,27 @@ import wom.graph.{CommandCallNode, OptionalGraphInputNodeWithDefault} import wom.transforms.WomExecutableMaker.ops._ import wom.values.WomValue -trait BackendSpec extends ScalaFutures with Matchers with Mockito with ScaledTimeSpans { +trait BackendSpec extends ScalaFutures with Matchers with ScaledTimeSpans { - implicit val defaultPatience = PatienceConfig(timeout = scaled(Span(10, Seconds)), interval = Span(500, Millis)) + implicit val defaultPatience: PatienceConfig = + PatienceConfig(timeout = scaled(Span(10, Seconds)), interval = Span(500, Millis)) - def testWorkflow(workflow: TestWorkflow, backend: BackendJobExecutionActor, inputs: Map[String, WomValue] = Map.empty) = { + def testWorkflow(workflow: TestWorkflow, + backend: BackendJobExecutionActor): Unit = { executeJobAndAssertOutputs(backend, workflow.expectedResponse) } def buildWorkflowDescriptor(workflowSource: WorkflowSource, inputFileAsJson: Option[String], options: WorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])), - runtime: String = "") = { + runtime: String = ""): BackendWorkflowDescriptor = { val wdlNamespace = WdlNamespaceWithWorkflow.load(workflowSource.replaceAll("RUNTIME", runtime), Seq.empty[Draft2ImportResolver]).get val executable = wdlNamespace.toWomExecutable(inputFileAsJson, NoIoFunctionSet, strictValidation = true) match { case Left(errors) => fail(s"Fail to build wom executable: ${errors.toList.mkString(", ")}") case Right(e) => e } - + BackendWorkflowDescriptor( WorkflowId.randomId(), executable.entryPoint, @@ -55,8 +56,8 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito with ScaledTim def buildWdlWorkflowDescriptor(workflowSource: WorkflowSource, inputFileAsJson: Option[String] = None, options: WorkflowOptions = WorkflowOptions(JsObject(Map.empty[String, JsValue])), - runtime: String = "") = { - + runtime: String = ""): BackendWorkflowDescriptor = { + buildWorkflowDescriptor(workflowSource, inputFileAsJson, options, runtime) } @@ -70,7 +71,7 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito with ScaledTim def fqnMapToDeclarationMap(m: Map[OutputPort, WomValue]): Map[InputDefinition, WomValue] = { m map { - case (outputPort, womValue) => RequiredInputDefinition(outputPort.name, womValue.womType) -> womValue + case (outputPort, womValue) => RequiredInputDefinition(outputPort.name, womValue.womType) -> womValue } } @@ -80,7 +81,7 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito with ScaledTim runtimeAttributeDefinitions: Set[RuntimeAttributeDefinition]): BackendJobDescriptor = { val call = workflowDescriptor.callable.graph.nodes.collectFirst({ case t: CommandCallNode => t}).get val jobKey = BackendJobDescriptorKey(call, None, 1) - + val inputDeclarations: Map[InputDefinition, WomValue] = call.inputDefinitionMappings.map { case (inputDef, resolved) => inputDef -> resolved.select[WomValue].orElse( @@ -131,7 +132,8 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito with 
ScaledTim BackendJobDescriptor(workflowDescriptor, jobKey, runtimeAttributes, inputDeclarations, NoDocker, None, Map.empty) } - def assertResponse(executionResponse: BackendJobExecutionResponse, expectedResponse: BackendJobExecutionResponse) = { + def assertResponse(executionResponse: BackendJobExecutionResponse, + expectedResponse: BackendJobExecutionResponse): Unit = { (executionResponse, expectedResponse) match { case (JobSucceededResponse(_, _, responseOutputs, _, _, _, _), JobSucceededResponse(_, _, expectedOutputs, _, _, _, _)) => responseOutputs.outputs.size shouldBe expectedOutputs.outputs.size @@ -148,6 +150,7 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito with ScaledTim case (response, expectation) => fail(s"Execution response $response wasn't conform to expectation $expectation") } + () } private def concatenateCauseMessages(t: Throwable): String = t match { @@ -156,7 +159,8 @@ trait BackendSpec extends ScalaFutures with Matchers with Mockito with ScaledTim case other: Throwable => other.getMessage + concatenateCauseMessages(t.getCause) } - def executeJobAndAssertOutputs(backend: BackendJobExecutionActor, expectedResponse: BackendJobExecutionResponse) = { + def executeJobAndAssertOutputs(backend: BackendJobExecutionActor, + expectedResponse: BackendJobExecutionResponse): Unit = { whenReady(backend.execute) { executionResponse => assertResponse(executionResponse, expectedResponse) } diff --git a/backend/src/test/scala/cromwell/backend/OutputEvaluatorSpec.scala b/backend/src/test/scala/cromwell/backend/OutputEvaluatorSpec.scala index 2f82e3452c4..0959f92577b 100644 --- a/backend/src/test/scala/cromwell/backend/OutputEvaluatorSpec.scala +++ b/backend/src/test/scala/cromwell/backend/OutputEvaluatorSpec.scala @@ -11,7 +11,6 @@ import cromwell.core.CallOutputs import cromwell.util.WomMocks import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito import wom.callable.Callable.{InputDefinition, OutputDefinition, RequiredInputDefinition} import wom.expression.{FileEvaluation, IoFunctionSet, NoIoFunctionSet, WomExpression} import wom.graph.WomIdentifier @@ -21,16 +20,16 @@ import wom.values.{WomInteger, WomValue} import scala.concurrent.duration._ import scala.concurrent.{Await, ExecutionContext, ExecutionContextExecutor} -class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito { +class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "OutputEvaluator" - val FutureTimeout = 20.seconds + private val FutureTimeout = 20.seconds final implicit val blockingEc: ExecutionContextExecutor = ExecutionContext.fromExecutor( Executors.newCachedThreadPool() ) - + // Depends on an input - def o1Expression = new WomExpression { + private def o1Expression = new WomExpression { override def sourceString: String = "o1" override def inputs: Set[String] = Set("input") override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = { @@ -41,7 +40,7 @@ class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc } // Depends on a previous output - def o2Expression = new WomExpression { + private def o2Expression = new WomExpression { override def sourceString: String = "o2" override def inputs: Set[String] = Set("o1") override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = { @@ -51,7 +50,7 @@ class OutputEvaluatorSpec 
extends AnyFlatSpec with CromwellTimeoutSpec with Matc override def evaluateFiles(inputTypes: Map[String, WomValue], ioFunctionSet: IoFunctionSet, coerceTo: WomType): ErrorOr[Set[FileEvaluation]] = throw new UnsupportedOperationException } - def invalidWomExpression1 = new WomExpression { + private def invalidWomExpression1 = new WomExpression { override def sourceString: String = "invalid1" override def inputs: Set[String] = Set.empty override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = { @@ -65,7 +64,7 @@ class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc } } - def invalidWomExpression2 = new WomExpression { + private def invalidWomExpression2 = new WomExpression { override def sourceString: String = "invalid2" override def inputs: Set[String] = Set.empty override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = { @@ -78,10 +77,10 @@ class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc "Invalid expression 2".invalidNel } } - + val exception = new Exception("Expression evaluation exception") - - def throwingWomExpression = new WomExpression { + + private def throwingWomExpression = new WomExpression { override def sourceString: String = "throwing" override def inputs: Set[String] = Set.empty override def evaluateValue(inputValues: Map[String, WomValue], ioFunctionSet: IoFunctionSet): ErrorOr[WomValue] = { @@ -98,17 +97,17 @@ class OutputEvaluatorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val mockInputs: Map[InputDefinition, WomValue] = Map( RequiredInputDefinition("input", WomIntegerType) -> WomInteger(5) ) - + it should "evaluate valid jobs outputs" in { val mockOutputs = List ( OutputDefinition("o1", WomIntegerType, o1Expression), OutputDefinition("o2", WomIntegerType, o2Expression) ) - + val call = WomMocks.mockTaskCall(WomIdentifier("call"), WomMocks.EmptyTaskDefinition.copy(outputs = mockOutputs)) val key = BackendJobDescriptorKey(call, None, 1) val jobDescriptor = BackendJobDescriptor(null, key, null, mockInputs, null, None, null) - + Await.result(OutputEvaluator.evaluateOutputs(jobDescriptor, NoIoFunctionSet), FutureTimeout) match { case ValidJobOutputs(outputs) => outputs shouldBe CallOutputs(Map( jobDescriptor.taskCall.outputPorts.find(_.name == "o1").get -> WomInteger(5), diff --git a/backend/src/test/scala/cromwell/backend/RuntimeAttributeValidationSpec.scala b/backend/src/test/scala/cromwell/backend/RuntimeAttributeValidationSpec.scala index 50ddcf07224..99bec0baebd 100644 --- a/backend/src/test/scala/cromwell/backend/RuntimeAttributeValidationSpec.scala +++ b/backend/src/test/scala/cromwell/backend/RuntimeAttributeValidationSpec.scala @@ -1,50 +1,81 @@ package cromwell.backend -import org.scalacheck.{Arbitrary, Gen, Properties} -import org.scalacheck.Prop._ +import org.scalacheck.{Arbitrary, Gen} +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks import wom.expression.WomExpression import wom.values.{WomString, WomValue} -class RuntimeAttributeValidationSpec extends Properties("Runtime Attribute Validation") { +class RuntimeAttributeValidationSpec extends AnyFlatSpec with Matchers with ScalaCheckDrivenPropertyChecks { + behavior of "RuntimeAttributeValidation" import WomGenerators._ - property("use default and validate it when runtime is not specified") = forAll { + it should "use 
default and validate it when runtime is not specified" in forAll { (taskName: String, attributeName: String, womValue: WomValue) => val defaultRuntimeAttributes = Map(attributeName -> womValue) val defaultValue = womValue.asWomExpression val validator: Option[WomExpression] => Boolean = _.contains(defaultValue) - BackendWorkflowInitializationActor.validateRuntimeAttributes(taskName, defaultRuntimeAttributes, Map.empty, Map((attributeName,validator)) ).isValid + assert( + BackendWorkflowInitializationActor.validateRuntimeAttributes( + taskName = taskName, + defaultRuntimeAttributes = defaultRuntimeAttributes, + runtimeAttributes = Map.empty, + runtimeAttributeValidators = Map((attributeName, validator)), + ).isValid + ) } - property("return invalid if validator fails the test ") = forAll { + it should "return invalid if validator fails the test" in forAll { (taskName: String, attributeName: String, womValue: WomValue) => val defaultRuntimeAttributes = Map(attributeName -> womValue) - BackendWorkflowInitializationActor.validateRuntimeAttributes(taskName, defaultRuntimeAttributes, Map.empty, Map((attributeName,(_: Option[WomExpression]) => false))).isInvalid + assert( + BackendWorkflowInitializationActor.validateRuntimeAttributes( + taskName = taskName, + defaultRuntimeAttributes = defaultRuntimeAttributes, + runtimeAttributes = Map.empty, + runtimeAttributeValidators = Map((attributeName, (_: Option[WomExpression]) => false)), + ).isInvalid + ) } - property("use runtime setting (not default) when both are set") = forAll { + it should "use runtime setting (not default) when both are set" in forAll { (taskName: String, attributeName: String, defaultWomValue: WomValue, runtimeWomExpression: WomExpression) => val defaultRuntimeAttributes = Map(attributeName -> defaultWomValue) val runtimeAttributes = Map(attributeName -> runtimeWomExpression) val validator: Option[WomExpression] => Boolean = _.contains(runtimeWomExpression) - BackendWorkflowInitializationActor.validateRuntimeAttributes(taskName, defaultRuntimeAttributes, runtimeAttributes, Map((attributeName,validator))).isValid + assert( + BackendWorkflowInitializationActor.validateRuntimeAttributes( + taskName = taskName, + defaultRuntimeAttributes = defaultRuntimeAttributes, + runtimeAttributes = runtimeAttributes, + runtimeAttributeValidators = Map((attributeName, validator)), + ).isValid + ) } - property("fail validation if no setting is present but it should be") = forAll { + it should "fail validation if no setting is present but it should be" in forAll { (taskName: String, attributeName: String) => val validator: Option[WomExpression] => Boolean = { case None => false case Some(x) => throw new RuntimeException(s"expecting the runtime validator to receive a None but got $x") } - BackendWorkflowInitializationActor.validateRuntimeAttributes(taskName, Map.empty, Map.empty, Map((attributeName,validator))).isInvalid + assert( + BackendWorkflowInitializationActor.validateRuntimeAttributes( + taskName = taskName, + defaultRuntimeAttributes = Map.empty, + runtimeAttributes = Map.empty, + runtimeAttributeValidators = Map((attributeName, validator)), + ).isInvalid + ) } - property("use the taskName and attribute name in correct places for failures") = forAll { + it should "use the taskName and attribute name in correct places for failures" in forAll { (taskName: String, attributeName: String) => val validator: Option[WomExpression] => Boolean = { @@ -54,12 +85,10 @@ class RuntimeAttributeValidationSpec extends Properties("Runtime Attribute Valid 
BackendWorkflowInitializationActor.validateRuntimeAttributes(taskName, Map.empty, Map.empty, Map((attributeName,validator))).fold( { errors => val error = errors.toList.head - all( - "attribute name should be set correctly" |: error.runtimeAttributeName == attributeName, - "task name should be set correctly" |: error.jobTag == taskName - ) + withClue("attribute name should be set correctly")(error.runtimeAttributeName shouldBe attributeName) + withClue("task name should be set correctly")(error.jobTag shouldBe taskName) }, - _ => "expecting validation to fail!" |: false + _ => fail("expecting validation to fail!") ) } } diff --git a/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala b/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala index a40a47b8f57..cb53276f499 100644 --- a/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala +++ b/backend/src/test/scala/cromwell/backend/io/WorkflowPathsSpec.scala @@ -9,9 +9,10 @@ import cromwell.util.WomMocks import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import wom.graph.WomIdentifier +import wom.graph.{GraphNode, WomIdentifier} +import common.mock.MockSugar -class WorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with BackendSpec { +class WorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with BackendSpec with MockSugar { def createConfig(values: Map[String, String]): Config = { val config = mock[Config] @@ -23,7 +24,7 @@ class WorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche config } - def rootConfig(root: Option[String], dockerRoot: Option[String]) = { + def rootConfig(root: Option[String], dockerRoot: Option[String]): Config = { val values: Map[String,String] = root.map("root" -> _).toMap ++ dockerRoot.map("dockerRoot" -> _).toMap createConfig(values) } @@ -31,12 +32,12 @@ class WorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche case class TestConfig(name: String, root: Option[String], dockerRoot: Option[String]) val testConfigs: List[TestConfig] = List( TestConfig("defaults", None, None), // Defaults testing - TestConfig("custom root defined", Some("local-cromwell-executions"), None), - TestConfig("custom dockerRoot defined", None, Some("/dockerRootExecutions")), - TestConfig("both root and dockerRoot defined", Some("local-cromwell-executions"), Some("/dockerRootExecutions")) + TestConfig("custom root defined", Option("local-cromwell-executions"), None), + TestConfig("custom dockerRoot defined", None, Option("/dockerRootExecutions")), + TestConfig("both root and dockerRoot defined", Option("local-cromwell-executions"), Option("/dockerRootExecutions")) ) - def testWorkflowPaths(root: Option[String], dockerRoot: Option[String]) = { + def testWorkflowPaths(root: Option[String], dockerRoot: Option[String]): Unit = { val backendConfig = rootConfig(root, dockerRoot) val wd = buildWdlWorkflowDescriptor(TestWorkflows.HelloWorld) val workflowPaths = new WorkflowPathsWithDocker(wd, backendConfig) @@ -47,9 +48,10 @@ class WorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche DefaultPathBuilder.get(s"$expectedRoot/wf_hello/$id").toAbsolutePath.pathAsString workflowPaths.dockerWorkflowRoot.pathAsString shouldBe s"$expectedDockerRoot/wf_hello/$id" + () } - def testSubWorkflowPaths(root: Option[String], dockerRoot: Option[String]) = { + def testSubWorkflowPaths(root: Option[String], dockerRoot: Option[String]): Unit = { val 
backendConfig = rootConfig(root, dockerRoot) val rootWd = mock[BackendWorkflowDescriptor] val rootWorkflow = WomMocks.mockWorkflowDefinition("rootWorkflow") @@ -65,8 +67,8 @@ class WorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche val call1 = WomMocks.mockTaskCall(WomIdentifier("call1")) - val jobKey = new JobKey { - override def node = call1 + val jobKey: JobKey = new JobKey { + override def node: GraphNode = call1 override def tag: String = "tag1" override def index: Option[Int] = Option(1) override def attempt: Int = 2 @@ -84,6 +86,7 @@ class WorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matche s"$expectedRoot/rootWorkflow/$rootWorkflowId/call-call1/shard-1/attempt-2/subWorkflow/$subWorkflowId" ).toAbsolutePath.pathAsString workflowPaths.dockerWorkflowRoot.pathAsString shouldBe s"$expectedDockerRoot/rootWorkflow/$rootWorkflowId/call-call1/shard-1/attempt-2/subWorkflow/$subWorkflowId" + () } testConfigs.foreach { config => diff --git a/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala b/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala index 300c3dde750..0497c8a1ba1 100644 --- a/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala +++ b/backend/src/test/scala/cromwell/backend/standard/StandardValidatedRuntimeAttributesBuilderSpec.scala @@ -1,18 +1,21 @@ package cromwell.backend.standard +import com.typesafe.config.Config import common.assertion.CromwellTimeoutSpec import cromwell.backend.validation._ import cromwell.backend.{RuntimeAttributeDefinition, TestConfig} import cromwell.core.WorkflowOptions +import org.mockito.ArgumentMatchers._ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike import org.slf4j.{Logger, LoggerFactory} -import org.specs2.mock.Mockito +import common.mock.MockSugar import spray.json.{JsArray, JsBoolean, JsNumber, JsObject, JsValue} import wom.RuntimeAttributesKeys._ import wom.values._ -class StandardValidatedRuntimeAttributesBuilderSpec extends AnyWordSpecLike with CromwellTimeoutSpec with Matchers with Mockito { +class StandardValidatedRuntimeAttributesBuilderSpec extends AnyWordSpecLike with CromwellTimeoutSpec with Matchers + with MockSugar { val HelloWorld: String = s""" @@ -139,7 +142,7 @@ class StandardValidatedRuntimeAttributesBuilderSpec extends AnyWordSpecLike with val defaultLogger: Logger = LoggerFactory.getLogger(classOf[StandardValidatedRuntimeAttributesBuilderSpec]) val emptyWorkflowOptions: WorkflowOptions = WorkflowOptions.fromMap(Map.empty).get - val mockBackendRuntimeConfig = Option(TestConfig.optionalRuntimeConfig) + val mockBackendRuntimeConfig: Option[Config] = Option(TestConfig.optionalRuntimeConfig) private def assertRuntimeAttributesSuccessfulCreation(runtimeAttributes: Map[String, WomValue], expectedRuntimeAttributes: Map[String, Any], diff --git a/backend/src/test/scala/cromwell/backend/standard/callcaching/StandardFileHashingActorSpec.scala b/backend/src/test/scala/cromwell/backend/standard/callcaching/StandardFileHashingActorSpec.scala index 51767801cea..c7463cc50a0 100644 --- a/backend/src/test/scala/cromwell/backend/standard/callcaching/StandardFileHashingActorSpec.scala +++ b/backend/src/test/scala/cromwell/backend/standard/callcaching/StandardFileHashingActorSpec.scala @@ -10,7 +10,7 @@ import cromwell.core.io.{IoCommand, IoCommandBuilder, IoHashCommand, IoSuccess, import 
cromwell.core.path.{DefaultPathBuilder, Path} import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar import wom.values.WomSingleFile import scala.concurrent.TimeoutException @@ -19,7 +19,7 @@ import scala.util.control.NoStackTrace import scala.util.{Failure, Try} class StandardFileHashingActorSpec extends TestKitSuite with ImplicitSender - with AnyFlatSpecLike with Matchers with Mockito { + with AnyFlatSpecLike with Matchers with MockSugar { behavior of "StandardFileHashingActor" @@ -104,9 +104,9 @@ class StandardFileHashingActorSpec extends TestKitSuite with ImplicitSender }) val standardFileHashingActorRef = parentProbe.childActorOf(props, "testStandardFileHashingActorHashString") - val fileHashContext = mock[FileHashContext].smart + val fileHashContext = mock[FileHashContext] fileHashContext.file returns "/expected/failure/path" - val command = mock[IoCommand[Int]].smart + val command = mock[IoCommand[Int]] val message: (FileHashContext, IoSuccess[Int]) = (fileHashContext, IoSuccess(command, 1357)) standardFileHashingActorRef ! message diff --git a/build.sbt b/build.sbt index ff364407b2a..90aaf566658 100644 --- a/build.sbt +++ b/build.sbt @@ -92,7 +92,7 @@ lazy val cloudSupport = project .dependsOn(common % "test->test") lazy val awsS3FileSystem = (project in file("filesystems/s3")) - .withLibrarySettings("cromwell-aws-s3filesystem") + .withLibrarySettings("cromwell-aws-s3filesystem", s3FileSystemDependencies) .dependsOn(core) .dependsOn(cloudSupport) .dependsOn(core % "test->test") diff --git a/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala b/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala index 4f2eca11b1c..4332e01db24 100644 --- a/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala +++ b/centaur/src/it/scala/centaur/reporting/Slf4jReporter.scala @@ -3,7 +3,7 @@ package centaur.reporting import cats.effect.IO import centaur.test.CentaurTestException import com.typesafe.scalalogging.StrictLogging -import org.testcontainers.shaded.org.apache.commons.lang.exception.ExceptionUtils +import org.apache.commons.lang3.exception.ExceptionUtils import scala.concurrent.ExecutionContext diff --git a/centaur/src/test/scala/centaur/testfilecheck/FileCheckerSpec.scala b/centaur/src/test/scala/centaur/testfilecheck/FileCheckerSpec.scala index e9b6bd46bf0..f389192aa7f 100644 --- a/centaur/src/test/scala/centaur/testfilecheck/FileCheckerSpec.scala +++ b/centaur/src/test/scala/centaur/testfilecheck/FileCheckerSpec.scala @@ -1,41 +1,38 @@ package centaur.testfilecheck import java.util - import com.google.api.gax.paging.Page import com.google.cloud.storage.Storage.BlobListOption import com.google.cloud.storage.{Blob, Storage} import common.assertion.CromwellTimeoutSpec import org.mockito.Mockito._ -import org.specs2.matcher.ShouldMatchers -import org.specs2.mock.Mockito +import common.mock.MockSugar import software.amazon.awssdk.services.s3.S3Client import software.amazon.awssdk.services.s3.model.{ListObjectsRequest, ListObjectsResponse, S3Object} import org.scalatest.flatspec.AnyFlatSpec -class FileCheckerSpec extends AnyFlatSpec with CromwellTimeoutSpec with ShouldMatchers with Mockito { +class FileCheckerSpec extends AnyFlatSpec with CromwellTimeoutSpec with MockSugar { import centaur.test.ObjectCounterInstances._ - val s3PrefixRegex = "^s3:\\/\\/.*" - val gsPrefixRegex = "^gs:\\/\\/.*" - - val amazonS3mock = mock[S3Client] - val testPath = "s3://my-cool-bucket/path/to/file" - 
val bucketName = "my-cool-bucket" - val dirName = "path/to/file" - val wrongBucketPrefix = "s3Bucket://my-not-so-cool-bucket/somelogs/empty" - val EmptyTestPath = "" - val gsPathType = "gs://" - val testGsPath = "gs://my-cool-bucket/path/to/file" - val objResponse = ListObjectsResponse.builder() + private val s3PrefixRegex = "^s3:\\/\\/.*" + private val gsPrefixRegex = "^gs:\\/\\/.*" + + private val amazonS3mock = mock[S3Client] + private val testPath = "s3://my-cool-bucket/path/to/file" + private val bucketName = "my-cool-bucket" + private val dirName = "path/to/file" + private val wrongBucketPrefix = "s3Bucket://my-not-so-cool-bucket/somelogs/empty" + private val EmptyTestPath = "" + private val testGsPath = "gs://my-cool-bucket/path/to/file" + private val objResponse = ListObjectsResponse.builder() .contents(util.Arrays.asList(S3Object.builder() .build())) .build() - val objRequest = ListObjectsRequest.builder().bucket(bucketName).prefix(dirName).build() - val awsS3Path = awsS3ObjectCounter.parsePath(s3PrefixRegex)(testPath) - val gsPath = gcsObjectCounter.parsePath(gsPrefixRegex)(testGsPath) + private val objRequest = ListObjectsRequest.builder().bucket(bucketName).prefix(dirName).build() + private val awsS3Path = awsS3ObjectCounter.parsePath(s3PrefixRegex)(testPath) + private val gsPath = gcsObjectCounter.parsePath(gsPrefixRegex)(testGsPath) "parsePath" should "return a bucket and directories" in { @@ -77,7 +74,7 @@ class FileCheckerSpec extends AnyFlatSpec with CromwellTimeoutSpec with ShouldMa val blob = null val blobbies = new util.ArrayList[Blob]() blobbies.add(blob) - when(pageMock.iterateAll).thenReturns(blobbies) + when(pageMock.iterateAll).thenReturn(blobbies) when(gcsMock.list(bucketName, BlobListOption.prefix(dirName))).thenReturn(pageMock) val actualObjectCounts = gcsObjectCounter.countObjectsAtPath(gcsMock)(gsPath) val expectedObjectCounts = 1 @@ -89,7 +86,7 @@ class FileCheckerSpec extends AnyFlatSpec with CromwellTimeoutSpec with ShouldMa val gcsMock = mock[Storage] val pageMock = mock[Page[Blob]] val blobbies = new util.ArrayList[Blob]() - when(pageMock.iterateAll).thenReturns(blobbies) + when(pageMock.iterateAll).thenReturn(blobbies) when(gcsMock.list(bucketName, BlobListOption.prefix(dirName))).thenReturn(pageMock) val actualObjectCounts = gcsObjectCounter.countObjectsAtPath(gcsMock)(gsPath) val expectedObjectCounts = 0 diff --git a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileProviderSpec.scala b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileProviderSpec.scala index aeb2ba42b0f..fd7b71ef4ee 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileProviderSpec.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/DrsCloudNioFileProviderSpec.scala @@ -6,21 +6,21 @@ import cloud.nio.impl.drs.DrsCloudNioFileProvider.DrsReadInterpreter import cloud.nio.spi.{FileHash, HashType} import com.typesafe.config.ConfigFactory import common.assertion.CromwellTimeoutSpec -import org.apache.commons.compress.utils.SeekableInMemoryByteChannel import org.apache.http.HttpVersion import org.apache.http.client.methods.{CloseableHttpResponse, HttpPost} import org.apache.http.impl.client.{CloseableHttpClient, HttpClientBuilder} import org.apache.http.message.BasicStatusLine -import org.mockito.Mockito.verify +import org.mockito.ArgumentMatchers._ +import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import 
org.specs2.mock.Mockito +import common.mock.MockSugar import java.nio.channels.ReadableByteChannel import java.time.{Instant, OffsetDateTime, ZoneOffset} import scala.concurrent.duration._ -class DrsCloudNioFileProviderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers with Mockito { +class DrsCloudNioFileProviderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "DrsCloudNioFileProvider" it should "parse a config and create a working file system provider" in { @@ -60,13 +60,13 @@ class DrsCloudNioFileProviderSpec extends AnyFlatSpecLike with CromwellTimeoutSp } it should "check existing drs objects" in { - val httpResponse = mock[CloseableHttpResponse].smart + val httpResponse = mock[CloseableHttpResponse] httpResponse.getStatusLine returns new BasicStatusLine(HttpVersion.HTTP_1_1, 200, "OK") - val httpClient = mock[CloseableHttpClient].smart - doReturn(httpResponse).when(httpClient).execute(anyObject[HttpPost]) + val httpClient = mock[CloseableHttpClient] + httpClient.execute(any[HttpPost]) returns httpResponse - val httpClientBuilder = mock[HttpClientBuilder].smart + val httpClientBuilder = mock[HttpClientBuilder] httpClientBuilder.build() returns httpClient val fileSystemProvider = new MockDrsCloudNioFileSystemProvider(httpClientBuilder = Option(httpClientBuilder)) @@ -86,7 +86,7 @@ class DrsCloudNioFileProviderSpec extends AnyFlatSpecLike with CromwellTimeoutSp } } - val readChannel: ReadableByteChannel = new SeekableInMemoryByteChannel(Array.emptyByteArray) + val readChannel = mock[ReadableByteChannel] val drsReadInterpreter: DrsReadInterpreter = (_, marthaResponse) => { IO( (marthaResponse.gsUri, marthaResponse.googleServiceAccount) match { @@ -112,7 +112,7 @@ class DrsCloudNioFileProviderSpec extends AnyFlatSpecLike with CromwellTimeoutSp } } - val readChannel: ReadableByteChannel = new SeekableInMemoryByteChannel(Array.emptyByteArray) + val readChannel = mock[ReadableByteChannel] val drsReadInterpreter: DrsReadInterpreter = (_, marthaResponse) => { IO( marthaResponse.accessUrl match { diff --git a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MarthaHttpRequestRetryStrategySpec.scala b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MarthaHttpRequestRetryStrategySpec.scala index dbe85425264..8fd1e948607 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MarthaHttpRequestRetryStrategySpec.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MarthaHttpRequestRetryStrategySpec.scala @@ -8,20 +8,20 @@ import org.apache.http.message.BasicStatusLine import org.apache.http.protocol.HttpContext import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar import scala.concurrent.duration._ -class MarthaHttpRequestRetryStrategySpec extends AnyFlatSpec with Matchers with Mockito { +class MarthaHttpRequestRetryStrategySpec extends AnyFlatSpec with Matchers with MockSugar { behavior of "MarthaHttpRequestRetryStrategy" it should "retry 500 errors a configured number of times" in { val drsConfig = MockDrsPaths.mockDrsConfig.copy(numRetries = 3) val retryStrategy = new MarthaHttpRequestRetryStrategy(drsConfig) - val http500Response = mock[CloseableHttpResponse].smart + val http500Response = mock[CloseableHttpResponse] http500Response.getStatusLine returns new BasicStatusLine(HttpVersion.HTTP_1_1, 500, "Testing 500") - val httpContext = mock[HttpContext].smart + val 
httpContext = mock[HttpContext] // initial failure retryStrategy.retryRequest(http500Response, 1, httpContext) should be(true) @@ -36,13 +36,13 @@ class MarthaHttpRequestRetryStrategySpec extends AnyFlatSpec with Matchers with it should "retry 500 errors even after a number of 408/429 errors" in { val drsConfig = MockDrsPaths.mockDrsConfig.copy(numRetries = 3) val retryStrategy = new MarthaHttpRequestRetryStrategy(drsConfig) - val http500Response = mock[CloseableHttpResponse].smart + val http500Response = mock[CloseableHttpResponse] http500Response.getStatusLine returns new BasicStatusLine(HttpVersion.HTTP_1_1, 500, "Testing 500") - val http408Response = mock[CloseableHttpResponse].smart + val http408Response = mock[CloseableHttpResponse] http408Response.getStatusLine returns new BasicStatusLine(HttpVersion.HTTP_1_1, 408, "Testing 408") - val http429Response = mock[CloseableHttpResponse].smart + val http429Response = mock[CloseableHttpResponse] http429Response.getStatusLine returns new BasicStatusLine(HttpVersion.HTTP_1_1, 429, "Testing 429") - val httpContext = mock[HttpContext].smart + val httpContext = mock[HttpContext] // initial failure retryStrategy.retryRequest(http500Response, 1, httpContext) should be(true) @@ -64,9 +64,9 @@ class MarthaHttpRequestRetryStrategySpec extends AnyFlatSpec with Matchers with it should "not retry an HTTP 401" in { val drsConfig = MockDrsPaths.mockDrsConfig.copy(numRetries = 3) val retryStrategy = new MarthaHttpRequestRetryStrategy(drsConfig) - val http400Response = mock[CloseableHttpResponse].smart + val http400Response = mock[CloseableHttpResponse] http400Response.getStatusLine returns new BasicStatusLine(HttpVersion.HTTP_1_1, 401, "Testing 401") - val httpContext = mock[HttpContext].smart + val httpContext = mock[HttpContext] retryStrategy.retryRequest(http400Response, 1, httpContext) should be(false) } @@ -74,8 +74,8 @@ class MarthaHttpRequestRetryStrategySpec extends AnyFlatSpec with Matchers with it should "retry IO exceptions a configured number of times" in { val drsConfig = MockDrsPaths.mockDrsConfig.copy(numRetries = 3) val retryStrategy = new MarthaHttpRequestRetryStrategy(drsConfig) - val exception = mock[IOException].smart - val httpContext = mock[HttpContext].smart + val exception = mock[IOException] + val httpContext = mock[HttpContext] // initial failure retryStrategy.retryRequest(exception, 1, httpContext) should be(true) diff --git a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockEngineDrsPathResolver.scala b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockEngineDrsPathResolver.scala index 7ddcb830b70..116155753f1 100644 --- a/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockEngineDrsPathResolver.scala +++ b/cloud-nio/cloud-nio-impl-drs/src/test/scala/cloud/nio/impl/drs/MockEngineDrsPathResolver.scala @@ -6,8 +6,7 @@ import cats.syntax.validated._ import com.google.cloud.NoCredentials import common.validation.ErrorOr.ErrorOr import org.apache.http.impl.client.HttpClientBuilder -import org.specs2.mock.Mockito -import org.specs2.mock.Mockito._ +import common.mock.MockSugar import scala.concurrent.duration.Duration @@ -18,7 +17,7 @@ class MockEngineDrsPathResolver(drsConfig: DrsConfig = MockDrsPaths.mockDrsConfi extends EngineDrsPathResolver(drsConfig, GoogleDrsCredentials(NoCredentials.getInstance, accessTokenAcceptableTTL)) { override protected lazy val httpClientBuilder: HttpClientBuilder = - httpClientBuilderOverride getOrElse Mockito.mock[HttpClientBuilder].smart + 
httpClientBuilderOverride getOrElse MockSugar.mock[HttpClientBuilder] private lazy val mockMarthaUri = drsConfig.marthaUrl diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpClientPoolSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpClientPoolSpec.scala index 8332460e8e5..91b69bf7e94 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpClientPoolSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpClientPoolSpec.scala @@ -1,15 +1,14 @@ package cloud.nio.impl.ftp import common.assertion.CromwellTimeoutSpec +import common.mock.MockSugar import org.apache.commons.net.ftp.FTPClient import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito import scala.concurrent.duration._ - -class FtpClientPoolSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito { +class FtpClientPoolSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "FtpClientPoolSpec" diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProviderSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProviderSpec.scala index 5c86a2ee079..e23e8ba450e 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProviderSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCloudNioFileSystemProviderSpec.scala @@ -3,17 +3,18 @@ package cloud.nio.impl.ftp import java.net.URI import java.nio.channels.ReadableByteChannel import java.nio.file.FileAlreadyExistsException - import cloud.nio.impl.ftp.FtpUtil.FtpIoException import cloud.nio.spi.{CloudNioRegularFileAttributes, CloudNioRetry} import com.typesafe.config.ConfigFactory import common.assertion.CromwellTimeoutSpec +import common.mock.MockSugar import org.apache.commons.net.ftp.FTPReply -import org.scalamock.scalatest.{MixedMockFactory, MockFactory} +import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -class FtpCloudNioFileSystemProviderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockFactory with MixedMockFactory with MockFtpFileSystem { +class FtpCloudNioFileSystemProviderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar + with MockFtpFileSystem { behavior of "FtpCloudNioFileSystemProviderSpec" @@ -42,30 +43,33 @@ class FtpCloudNioFileSystemProviderSpec extends AnyFlatSpec with CromwellTimeout } it should "pre compute the size before opening a read channel to avoid deadlocks" in { - val mockSizeFunction = mockFunction[Long] - val provider = new FtpCloudNioFileSystemProvider(ConfigFactory.empty(), FtpAnonymousCredentials, ftpFileSystems) { + val mockSizeFunction = mock[() => Long] + val provider: FtpCloudNioFileSystemProvider = new FtpCloudNioFileSystemProvider( + ConfigFactory.empty, FtpAnonymousCredentials, ftpFileSystems + ) { - override def fileProvider = new FtpCloudNioFileProvider(this) { - override def fileAttributes(cloudHost: String, cloudPath: String) = + override def fileProvider: FtpCloudNioFileProvider = new FtpCloudNioFileProvider(this) { + override def fileAttributes(cloudHost: String, cloudPath: String): Option[CloudNioRegularFileAttributes] = Option( new CloudNioRegularFileAttributes { override def fileHash = throw new UnsupportedOperationException() override def 
lastModifiedTime() = throw new UnsupportedOperationException() - override def size() = mockSizeFunction() + override def size(): Long = mockSizeFunction() override def fileKey() = throw new UnsupportedOperationException() } ) - override def read(cloudHost: String, cloudPath: String, offset: Long) = { + override def read(cloudHost: String, cloudPath: String, offset: Long): ReadableByteChannel = { mock[ReadableByteChannel] } } } // This should only be called once, not every time we ask for the channel size - mockSizeFunction.expects().onCall(_ => 60).once() + when(mockSizeFunction.apply()).thenReturn(60) val cloudNioPath = provider.getPath(URI.create("ftp://host.com/my_file.txt")) val channel = provider.cloudNioReadChannel(new CloudNioRetry(ConfigFactory.empty()), cloudNioPath) + verify(mockSizeFunction).apply() channel.size() shouldBe 60L channel.size() shouldBe 60L @@ -78,7 +82,7 @@ class FtpCloudNioFileSystemProviderSpec extends AnyFlatSpec with CromwellTimeout fakeUnixFileSystem.exists(directoryPath) shouldBe false mockProvider.createDirectory(newDirectory) fakeUnixFileSystem.exists(directoryPath) shouldBe true - + // Now we should throw an exception because the directory exists a[FileAlreadyExistsException] shouldBe thrownBy(mockProvider.createDirectory(newDirectory)) } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCredentialsSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCredentialsSpec.scala index 8870325b38a..0ba19060ab2 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCredentialsSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpCredentialsSpec.scala @@ -4,11 +4,12 @@ import java.io.IOException import common.assertion.CromwellTimeoutSpec import org.apache.commons.net.ftp.FTPClient +import org.mockito.ArgumentMatchers._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar -class FtpCredentialsSpec extends AnyFlatSpec with Matchers with Mockito with CromwellTimeoutSpec { +class FtpCredentialsSpec extends AnyFlatSpec with Matchers with MockSugar with CromwellTimeoutSpec { behavior of "FtpCredentialsSpec" @@ -24,14 +25,14 @@ class FtpCredentialsSpec extends AnyFlatSpec with Matchers with Mockito with Cro loggedInWithAccount = true true }) - + FtpAuthenticatedCredentials("user", "password", None).login(client) loggedInWithoutAccount shouldBe true loggedInWithAccount shouldBe false // reset loggedInWithoutAccount= false - + FtpAuthenticatedCredentials("user", "password", Option("account")).login(client) loggedInWithAccount shouldBe true loggedInWithoutAccount shouldBe false @@ -42,10 +43,10 @@ class FtpCredentialsSpec extends AnyFlatSpec with Matchers with Mockito with Cro client.login(anyString, anyString).responds(_ => false) an[IOException] shouldBe thrownBy(FtpAuthenticatedCredentials("user", "password", None).login(client)) - + val noooo = new Exception("I can't login !") client.login(anyString, anyString).responds(_ => throw noooo) - + val loginException = the[IOException] thrownBy FtpAuthenticatedCredentials("user", "password", None).login(client) loginException.getCause shouldBe noooo } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpFileSystemsSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpFileSystemsSpec.scala index 870af8e57fe..86f86914103 100644 --- 
a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpFileSystemsSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpFileSystemsSpec.scala @@ -3,38 +3,40 @@ package cloud.nio.impl.ftp import cloud.nio.impl.ftp.FtpFileSystems.FtpCacheKey import com.typesafe.config.ConfigFactory import common.assertion.CromwellTimeoutSpec -import org.scalamock.function.MockFunction1 -import org.scalamock.scalatest.MockFactory +import common.mock.MockSugar +import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -class FtpFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockFactory { +class FtpFileSystemsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "FtpFileSystemsSpec" - val emptyConfig = ConfigFactory.empty - + private val emptyConfig = ConfigFactory.empty + it should "cache file systems per server per user" in { - val mockCreateFunction = mockFunction[FtpCacheKey, FtpCloudNioFileSystem] + val mockCreateFunction = mock[FtpCacheKey => FtpCloudNioFileSystem] val ftpFileSystems = new MockFtpFileSystems(FtpFileSystems.DefaultConfig, mockCreateFunction) - + val authenticatedCredentials1 = FtpAuthenticatedCredentials("user1", "password", None) - + val provider = new FtpCloudNioFileSystemProvider(emptyConfig, authenticatedCredentials1, ftpFileSystems) // Same as provider1, just other instance val providerClone = new FtpCloudNioFileSystemProvider(emptyConfig, authenticatedCredentials1, ftpFileSystems) - + // Expects the creation function to only be called once, since the 2 providers have the same credentials, even though they're // different instances - mockCreateFunction.expects(FtpCacheKey("host1.com", provider)) - .returns(new FtpCloudNioFileSystem(provider, "host1.com")) - .once() + val ftpCacheKey = FtpCacheKey("host1.com", provider) + when(mockCreateFunction.apply(ftpCacheKey)) thenReturn + new FtpCloudNioFileSystem(provider, "host1.com") provider.newCloudNioFileSystemFromHost("host1.com") providerClone.newCloudNioFileSystemFromHost("host1.com") + verify(mockCreateFunction).apply(ftpCacheKey) } - - class MockFtpFileSystems(conf: FtpFileSystemsConfiguration, mockCreateFunction: MockFunction1[FtpCacheKey, FtpCloudNioFileSystem]) extends FtpFileSystems(conf) { + + class MockFtpFileSystems(conf: FtpFileSystemsConfiguration, + mockCreateFunction: FtpCacheKey => FtpCloudNioFileSystem) extends FtpFileSystems(conf) { override private[ftp] def createFileSystem(key: FtpCacheKey) = mockCreateFunction(key) } } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpUtilSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpUtilSpec.scala index 1d843abafc4..0e86194ecbd 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpUtilSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/FtpUtilSpec.scala @@ -8,18 +8,17 @@ import common.assertion.CromwellTimeoutSpec import org.apache.commons.net.ftp.FTPClient import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito import scala.concurrent.duration._ -class FtpUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito { +class FtpUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { behavior of "autoRelease" it should "release the lease when the client fails the operation without throwing" in { val 
clientPool = new FtpClientPool(1, 10.minutes, () => { new FTPClient }) val lease = clientPool.acquire() - + val action = autoRelease(IO.pure(lease)) { _ => IO.raiseError(FtpIoException("boom", 1, "re-boom")) } @@ -31,7 +30,7 @@ class FtpUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit it should "invalidate the lease when the client fails the operation by throwing" in { val clientPool = new FtpClientPool(1, 10.minutes, () => { new FTPClient }) val lease = clientPool.acquire() - + val action = autoRelease(IO.pure(lease)) { _ => IO.raiseError(FtpIoException("boom", 1, "re-boom", Option(new IOException("baaaam")))) } @@ -43,7 +42,7 @@ class FtpUtilSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers wit it should "release the lease when the operation succeeds" in { val clientPool = new FtpClientPool(1, 10.minutes, () => { new FTPClient }) val lease = clientPool.acquire() - + val action = autoRelease(IO.pure(lease)) { _ => IO.pure("yeahh") } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseInputStreamSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseInputStreamSpec.scala index 4bb0f756a35..6b70679a239 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseInputStreamSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseInputStreamSpec.scala @@ -6,19 +6,19 @@ import common.assertion.CromwellTimeoutSpec import org.apache.commons.net.ftp.FTPClient import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar import scala.concurrent.duration._ -class LeaseInputStreamSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito { +class LeaseInputStreamSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "LeaseInputStreamSpec" it should "complete the command and release the lease when closing the stream" in { - val is = new InputStream { + val is: InputStream = new InputStream { var counter = 1 override def read() = 1 - override def close() = counter = 0 - override def available() = counter + override def close(): Unit = counter = 0 + override def available(): Int = counter } val mockClient = mock[FTPClient] var completed: Boolean = false diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseOutputStreamSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseOutputStreamSpec.scala index ee9bfebd5d6..ee82de6ffea 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseOutputStreamSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/LeaseOutputStreamSpec.scala @@ -6,11 +6,10 @@ import common.assertion.CromwellTimeoutSpec import org.apache.commons.net.ftp.FTPClient import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar import scala.concurrent.duration._ - -class LeaseOutputStreamSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito { +class LeaseOutputStreamSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "LeaseInputStreamSpec" @@ -33,11 +32,11 @@ class LeaseOutputStreamSpec extends AnyFlatSpec with CromwellTimeoutSpec with Ma // When accessing a released lease, get throws an IllegalStateException an[IllegalStateException] shouldBe 
thrownBy(lease.get()) } - + private class TestOutputStream extends OutputStream { var closed = false - override def close() = closed = true - def isClosed = closed - override def write(b: Int) = {} + override def close(): Unit = closed = true + def isClosed: Boolean = closed + override def write(b: Int): Unit = {} } } diff --git a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/operations/FtpOperationSpec.scala b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/operations/FtpOperationSpec.scala index 5c543452d4b..c6856a6ea86 100644 --- a/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/operations/FtpOperationSpec.scala +++ b/cloud-nio/cloud-nio-impl-ftp/src/test/scala/cloud/nio/impl/ftp/operations/FtpOperationSpec.scala @@ -7,9 +7,9 @@ import common.assertion.CromwellTimeoutSpec import org.apache.commons.net.ftp.{FTPClient, FTPReply} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar -class FtpOperationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito { +class FtpOperationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { it should "generate somewhat accurate exceptions" in { val client = mock[FTPClient] val operation = FtpListFiles("ftp.example.com", "location", "do something") diff --git a/common/src/test/scala/common/mock/MockImplicits.scala b/common/src/test/scala/common/mock/MockImplicits.scala new file mode 100644 index 00000000000..3e90e183d8e --- /dev/null +++ b/common/src/test/scala/common/mock/MockImplicits.scala @@ -0,0 +1,43 @@ +package common.mock + +import org.mockito.Mockito +import org.mockito.stubbing.OngoingStubbing + +trait MockImplicits { + + /** + * Ported from specs2's abandoned mock classes, provides DSL methods. + * + * https://github.com/etorreborre/specs2/commit/6d56660e70980b5958e6c4ed8fd4158bf1cecf70#diff-a2627f56c432e4bc37f36bc56e13852225813aa604918471b61ec2080462d722 + */ + implicit class MockEnhanced[A](methodCall: A) { + def returns(result: A): OngoingStubbing[A] = { + Mockito.when(methodCall).thenReturn(result) + } + + def answers(function: Any => A): OngoingStubbing[A] = { + Mockito.when(methodCall) thenAnswer { + invocationOnMock => { + val args = invocationOnMock.getArguments + // The DSL behavior of the below is directly taken with thanks from the link above. + args.size match { + case 0 => + function match { + case function0: Function0[_] => + function0.apply().asInstanceOf[A] + case _ => + function.apply(invocationOnMock.getMock) + } + case 1 => + function(args(0)) + case _ => + function(args) + } + } + } + } + + def responds(f: Any => A): OngoingStubbing[A] = answers(f) + } + +} diff --git a/common/src/test/scala/common/mock/MockSugar.scala b/common/src/test/scala/common/mock/MockSugar.scala new file mode 100644 index 00000000000..27a4ae87b55 --- /dev/null +++ b/common/src/test/scala/common/mock/MockSugar.scala @@ -0,0 +1,71 @@ +package common.mock + +import org.mockito.{ArgumentCaptor, Mockito} + +import scala.reflect.{ClassTag, classTag} + +/** + * Yet another scala wrapper around Mockito. + * + * As of Aug 2021 there are a few mockito wrapper choices. 
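+ *
+ * A minimal, hypothetical stubbing sketch using the specs2-style DSL ported into
+ * [[common.mock.MockImplicits]] (the `Greeter` trait is illustrative, not part of this repo):
+ * {{{
+ * trait Greeter { def greet(name: String): String }
+ *
+ * val greeter = mock[Greeter]                    // Smart Nulls by default
+ * greeter.greet("world") returns "hello, world"  // MockImplicits `returns`
+ * assert(greeter.greet("world") == "hello, world")
+ * }}}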
+ *
+ * `mockito-scala`:
+ * - lots of nice DSL, but who knows if it'll last
+ * - behind on Scala 3 support
+ * - [[https://github.com/mockito/mockito-scala/issues/364]]
+ *
+ * `scalatestplus`:
+ * - stuck on mockito 3.4.x
+ * - [[https://github.com/scalatest/scalatestplus-mockito/issues/24]]
+ * - entire library only provides four one-line wrappers
+ * - [[https://github.com/scalatest/scalatestplus-mockito/blob/release-3.2.9.0-for-mockito-3.4/src/main/scala/org/scalatestplus/mockito/MockitoSugar.scala]]
+ *
+ * `scalamock`:
+ * - might be abandoned?
+ * - [[https://github.com/paulbutcher/ScalaMock/issues/396]]
+ *
+ * `specs2-mock`:
+ * - As of Specs2 5.x the mock wrappers appear to be gone, pointing to scalamock instead:
+ * - [[https://etorreborre.github.io/specs2/guide/5.0.0-RC-01/org.specs2.guide.Installation.html]]
+ * - Btw, specs2-mock 4.x pulls in specs2-core, possibly leading SBT to look for specs using the specs2 test framework:
+ * - [[https://etorreborre.github.io/specs2/guide/SPECS2-4.12.0/org.specs2.guide.Installation.html#other-dependencies]]
+ */
+trait MockSugar extends MockImplicits {
+
+  /**
+    * Returns a new mock with Smart Nulls.
+    *
+    * Note: if you run into issues with `mock` then try [[mockWithDefaults]].
+    */
+  def mock[A: ClassTag]: A = {
+    Mockito.mock(
+      classTag[A].runtimeClass.asInstanceOf[Class[A]],
+      Mockito.withSettings().defaultAnswer(Mockito.RETURNS_SMART_NULLS),
+    )
+  }
+
+  /**
+    * Creates a mock returning default values instead of Smart Nulls.
+    *
+    * Works around a cryptic issue that popped up in PipelinesApiBackendCacheHitCopyingActorSpec:
+    * {{{
+    * Underlying exception : java.lang.IllegalArgumentException: Cannot cast to primitive type: int
+    * org.mockito.exceptions.base.MockitoException:
+    * Mockito cannot mock this class: class cromwell.backend.google.pipelines.common.PipelinesApiConfigurationAttributes.
+    * }}}
+    *
+    * An alternative workaround was to use `Mockito.doReturn(retVal).when(mockObj).someMethod`.
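+    *
+    * A hypothetical usage sketch. The `maxPollingInterval` getter below is purely
+    * illustrative (assumed to return a primitive `Int`), not a confirmed member of that class:
+    * {{{
+    * val attributes = mockWithDefaults[PipelinesApiConfigurationAttributes]
+    * // RETURNS_DEFAULTS hands back 0 for unstubbed primitive getters instead of
+    * // tripping the Smart Null primitive-cast error quoted above.
+    * attributes.maxPollingInterval returns 300
+    * }}}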
+ */ + def mockWithDefaults[A: ClassTag]: A = { + Mockito.mock( + classTag[A].runtimeClass.asInstanceOf[Class[A]], + Mockito.withSettings().defaultAnswer(Mockito.RETURNS_DEFAULTS), + ) + } + + def capture[A: ClassTag]: ArgumentCaptor[A] = { + ArgumentCaptor.forClass(classTag[A].runtimeClass.asInstanceOf[Class[A]]) + } +} + +object MockSugar extends MockSugar diff --git a/common/src/test/scala/common/validation/ValidationSpec.scala b/common/src/test/scala/common/validation/ValidationSpec.scala index 071fbe69fe6..543e33d7573 100644 --- a/common/src/test/scala/common/validation/ValidationSpec.scala +++ b/common/src/test/scala/common/validation/ValidationSpec.scala @@ -6,15 +6,16 @@ import cats.syntax.validated._ import common.assertion.CromwellTimeoutSpec import common.exception.AggregatedMessageException import common.validation.Validation._ +import org.mockito.ArgumentMatchers._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.slf4j.Logger -import org.specs2.mock.Mockito +import common.mock.MockSugar import scala.util.{Failure, Success} -class ValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito { +class ValidationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "Validation" diff --git a/core/src/test/scala/cromwell/core/Tags.scala b/core/src/test/scala/cromwell/core/Tags.scala index 39c3cbf9750..3fefe826423 100644 --- a/core/src/test/scala/cromwell/core/Tags.scala +++ b/core/src/test/scala/cromwell/core/Tags.scala @@ -6,6 +6,5 @@ object Tags { object DockerTest extends Tag("DockerTest") object IntegrationTest extends Tag("CromwellIntegrationTest") object DbmsTest extends Tag("DbmsTest") - object PostWomTest extends Tag("PostWomTest") object AwsTest extends Tag("AwsTest") } diff --git a/core/src/test/scala/cromwell/core/io/AsyncIoSpec.scala b/core/src/test/scala/cromwell/core/io/AsyncIoSpec.scala index 4b7aff15962..83521ea3432 100644 --- a/core/src/test/scala/cromwell/core/io/AsyncIoSpec.scala +++ b/core/src/test/scala/cromwell/core/io/AsyncIoSpec.scala @@ -9,22 +9,21 @@ import cromwell.core.TestKitSuite import cromwell.core.path.{DefaultPathBuilder, Path} import org.scalatest.flatspec.AsyncFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.scalatestplus.mockito.MockitoSugar import scala.util.{Failure, Try} import scala.util.control.NoStackTrace -class AsyncIoSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with MockitoSugar { +class AsyncIoSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers { behavior of "AsyncIoSpec" - + implicit val ioCommandBuilder: DefaultIoCommandBuilder.type = DefaultIoCommandBuilder - + it should "write asynchronously" in { val testActor = TestActorRef(new AsyncIoTestActor(simpleIoActor)) val testPath = DefaultPathBuilder.createTempFile() - + testActor.underlyingActor.asyncIo.writeAsync(testPath, "hello", Seq.empty) map { _ => assert(testPath.contentAsString == "hello") } @@ -74,7 +73,7 @@ class AsyncIoSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with } testPath.write("new text") - + testActor.underlyingActor.asyncIo.copyAsync(testPath, testCopyPath) map { _ => assert(testCopyPath.exists) assert(testCopyPath.contentAsString == "new text") diff --git a/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala b/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala index b025c7ff921..f77df6fa011 100644 --- a/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala 
+++ b/core/src/test/scala/cromwell/core/io/IoClientHelperSpec.scala @@ -2,6 +2,7 @@ package cromwell.core.io import akka.actor.{Actor, ActorLogging, ActorRef} import akka.testkit.{TestActorRef, TestProbe} +import common.mock.MockSugar import common.util.Backoff import cromwell.core.TestKitSuite import cromwell.core.io.DefaultIoCommand.DefaultIoSizeCommand @@ -9,12 +10,11 @@ import cromwell.core.path.Path import cromwell.core.retry.SimpleExponentialBackoff import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.scalatestplus.mockito.MockitoSugar -import scala.concurrent.duration.{FiniteDuration, _} +import scala.concurrent.duration._ import scala.language.postfixOps -class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with MockitoSugar { +class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with MockSugar { behavior of "IoClientHelperSpec" @@ -88,7 +88,7 @@ class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers backoff: Backoff, noResponseTimeout: FiniteDuration) extends Actor with ActorLogging with IoClientHelper { - implicit val ioCommandBuilder = DefaultIoCommandBuilder + implicit val ioCommandBuilder: DefaultIoCommandBuilder.type = DefaultIoCommandBuilder override protected def initialBackoff(): Backoff = backoff @@ -98,11 +98,11 @@ class IoClientHelperSpec extends TestKitSuite with AnyFlatSpecLike with Matchers case message => delegateTo ! message } - def sendMessage(command: IoCommand[_]) = { + def sendMessage(command: IoCommand[_]): Unit = { sendIoCommandWithCustomTimeout(command, noResponseTimeout) } - def sendMessageWithContext(context: Any, command: IoCommand[_]) = { + def sendMessageWithContext(context: Any, command: IoCommand[_]): Unit = { sendIoCommandWithContext(command, context, noResponseTimeout) } diff --git a/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala b/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala index 764d461d52d..d40456e0128 100644 --- a/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala +++ b/core/src/test/scala/cromwell/core/logging/LoggerWrapperSpec.scala @@ -5,18 +5,20 @@ import akka.event.{Logging, LoggingAdapter} import common.assertion.CromwellTimeoutSpec import cromwell.core.logging.LoggerWrapperSpec._ import org.apache.commons.lang3.exception.ExceptionUtils +import org.mockito.ArgumentMatchers._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop._ import org.slf4j.Logger import org.slf4j.event.Level -import org.specs2.mock.Mockito +import common.mock.MockSugar -class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito with TableDrivenPropertyChecks { +class LoggerWrapperSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar + with TableDrivenPropertyChecks { behavior of "LoggerWrapper" - val wrapperTests = Table[String, LoggerWrapper => Unit, List[Slf4jMessage], List[AkkaMessage]]( + private val wrapperTests = Table[String, LoggerWrapper => Unit, List[Slf4jMessage], List[AkkaMessage]]( ( "description", "wrapperFunction", diff --git a/core/src/test/scala/cromwell/core/simpleton/WomValueBuilderSpec.scala b/core/src/test/scala/cromwell/core/simpleton/WomValueBuilderSpec.scala index 4568df81941..8db76caab1d 100644 --- a/core/src/test/scala/cromwell/core/simpleton/WomValueBuilderSpec.scala +++ 
b/core/src/test/scala/cromwell/core/simpleton/WomValueBuilderSpec.scala @@ -6,7 +6,6 @@ import cromwell.core.simpleton.WomValueSimpleton._ import cromwell.util.WomMocks import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito import wom.callable.Callable.OutputDefinition import wom.expression.PlaceholderWomExpression import wom.types.{WomArrayType, WomIntegerType, WomMapType, WomStringType} @@ -16,10 +15,10 @@ import scala.util.Success object WomValueBuilderSpec { // WdlValueBuilder doesn't care about this expression, but something needs to be passed to the TaskOutput constructor. - val IgnoredExpression = PlaceholderWomExpression(Set.empty, WomStringType) + val IgnoredExpression: PlaceholderWomExpression = PlaceholderWomExpression(Set.empty, WomStringType) } -class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito { +class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers { case class SimpletonConversion(name: String, womValue: WomValue, simpletons: Seq[WomValueSimpleton]) val simpletonConversions = List( @@ -149,13 +148,13 @@ class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc * - a glob file * - an unlisted directory * - a glob file - * + * * Note: glob files technically are never simpletonized but as WomFiles they *can* be */ SimpletonConversion( "directory", WomMaybeListedDirectory( - Option("outerValueName"), + Option("outerValueName"), Option(List( WomSingleFile("outerSingleFile"), WomMaybeListedDirectory(Option("innerValueName"), Option(List(WomSingleFile("innerSingleFile")))), @@ -178,16 +177,16 @@ class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc List( WomValueSimpleton("directory:class", WomString("Directory")), WomValueSimpleton("directory:value", WomString("outerValueName")), - + WomValueSimpleton("directory:listing[0]", WomSingleFile("outerSingleFile")), - + WomValueSimpleton("directory:listing[1]:class", WomString("Directory")), WomValueSimpleton("directory:listing[1]:value", WomString("innerValueName")), WomValueSimpleton("directory:listing[1]:listing[0]", WomSingleFile("innerSingleFile")), - + WomValueSimpleton("directory:listing[2]:class", WomString("File")), WomValueSimpleton("directory:listing[2]:value", WomString("populatedInnerValueName")), - + WomValueSimpleton("directory:listing[2]:checksum", WomString("innerChecksum")), WomValueSimpleton("directory:listing[2]:size", WomInteger(10)), WomValueSimpleton("directory:listing[2]:format", WomString("innerFormat")), @@ -198,7 +197,7 @@ class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc WomValueSimpleton("directory:listing[2]:secondaryFiles[1]:listing[0]", WomSingleFile("innerDirectorySingleFile")), WomValueSimpleton("directory:listing[2]:secondaryFiles[2]", WomUnlistedDirectory("innerUnlistedDirectory")), WomValueSimpleton("directory:listing[2]:secondaryFiles[3]", WomGlobFile("innerGlobFile")), - + WomValueSimpleton("directory:listing[3]", WomUnlistedDirectory("outerUnlistedDirectory")), WomValueSimpleton("directory:listing[4]", WomGlobFile("outerGlobFile")) ) @@ -283,7 +282,8 @@ class WomValueBuilderSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc } } - def assertSimpletonsEqual(expectedSimpletons: Iterable[WomValueSimpleton], actualSimpletons: Iterable[WomValueSimpleton]) = { + private def assertSimpletonsEqual(expectedSimpletons: Iterable[WomValueSimpleton], + actualSimpletons: 
Iterable[WomValueSimpleton]): Unit = {
     // Sanity check, make sure we don't lose anything when we "toSet":
     actualSimpletons.toSet should contain theSameElementsAs actualSimpletons
 
diff --git a/core/src/test/scala/cromwell/util/SampleWdl.scala b/core/src/test/scala/cromwell/util/SampleWdl.scala
index 4b2f773bf63..6e790ca67be 100644
--- a/core/src/test/scala/cromwell/util/SampleWdl.scala
+++ b/core/src/test/scala/cromwell/util/SampleWdl.scala
@@ -1,13 +1,13 @@
 package cromwell.util
 
 import java.util.UUID
-
-import cromwell.core.path.{DefaultPathBuilder, Path}
+import cromwell.core.path.{DefaultPath, DefaultPathBuilder, Path}
 import cromwell.core.{WorkflowOptions, WorkflowSourceFilesCollection, WorkflowSourceFilesWithDependenciesZip, WorkflowSourceFilesWithoutImports}
 import spray.json._
 import wom.core.{ExecutableInputMap, WorkflowJson, WorkflowSource}
 import wom.values._
 
+import scala.annotation.tailrec
 import scala.language.postfixOps
 
 case class WorkflowImport(name: String, content: String)
@@ -64,7 +64,7 @@ trait SampleWdl extends TestFileUtil {
 
   val rawInputs: ExecutableInputMap
 
-  def name = getClass.getSimpleName.stripSuffix("$")
+  def name: String = getClass.getSimpleName.stripSuffix("$")
 
   def createFileArray(base: Path): Unit = {
     createFile("f1", base, "line1\nline2\n")
@@ -73,14 +73,15 @@ trait SampleWdl extends TestFileUtil {
     ()
   }
 
-  def cleanupFileArray(base: Path) = {
+  def cleanupFileArray(base: Path): Path = {
     deleteFile(base.resolve("f1"))
     deleteFile(base.resolve("f2"))
     deleteFile(base.resolve("f3"))
   }
 
   implicit object AnyJsonFormat extends JsonFormat[Any] {
-    def write(x: Any) = x match {
+    @tailrec
+    def write(x: Any): JsValue = x match {
       case n: Int => JsNumber(n)
       case s: String => JsString(s)
       case b: Boolean => if(b) JsTrue else JsFalse
@@ -96,19 +97,19 @@ trait SampleWdl extends TestFileUtil {
   }
 
   implicit object RawInputsJsonFormat extends JsonFormat[ExecutableInputMap] {
-    def write(inputs: ExecutableInputMap) = JsObject(inputs map { case (k, v) => k -> v.toJson })
+    def write(inputs: ExecutableInputMap): JsValue = JsObject(inputs map { case (k, v) => k -> v.toJson })
     def read(value: JsValue) = throw new UnsupportedOperationException(s"Reading JSON not implemented: $value")
   }
 
   def workflowJson: WorkflowJson = rawInputs.toJson.prettyPrint
 
-  def deleteFile(path: Path) = path.delete()
+  def deleteFile(path: Path): path.type = path.delete()
 }
 
 object SampleWdl {
   object HelloWorld extends SampleWdl {
-    override def workflowSource(runtime: String = "") =
+    override def workflowSource(runtime: String = ""): WorkflowSource =
       s"""
         |task hello {
         |  String addressee
@@ -133,7 +134,7 @@ object SampleWdl {
   }
 
   object GoodbyeWorld extends SampleWdl {
-    override def workflowSource(runtime: String = "") =
+    override def workflowSource(runtime: String = ""): WorkflowSource =
       """
         |task goodbye {
         |  command {
@@ -154,7 +155,7 @@ object SampleWdl {
   }
 
   object EmptyString extends SampleWdl {
-    override def workflowSource(runtime: String = "") =
+    override def workflowSource(runtime: String = ""): WorkflowSource =
       s"""
         |task hello {
         |  command {
@@ -195,7 +196,7 @@ object SampleWdl {
   }
 
   object CoercionNotDefined extends SampleWdl {
-    override def workflowSource(runtime: String = "") = {
+    override def workflowSource(runtime: String = ""): WorkflowSource = {
       s"""
         |task summary {
         |  String bfile
@@ -226,9 +227,9 @@ object SampleWdl {
   }
 
   trait ThreeStepTemplate extends SampleWdl {
-    override def workflowSource(runtime: String = "") = sourceString().replaceAll("RUNTIME", runtime)
+    override def
workflowSource(runtime: String = ""): WorkflowSource = sourceString().replaceAll("RUNTIME", runtime) private val outputSectionPlaceholder = "OUTPUTSECTIONPLACEHOLDER" - def sourceString(outputsSection: String = "") = { + def sourceString(outputsSection: String = ""): WorkflowJson = { val withPlaceholders = s""" |task ps { @@ -287,7 +288,7 @@ object SampleWdl { object ThreeStep extends ThreeStepTemplate object ThreeStepWithOutputsSection extends ThreeStepTemplate { - override def workflowSource(runtime: String = "") = sourceString(outputsSection = + override def workflowSource(runtime: String = ""): WorkflowJson = sourceString(outputsSection = """ |output { | cgrep.count @@ -372,8 +373,78 @@ object SampleWdl { override val rawInputs: Map[String, Any] = Map.empty } + object CurrentDirectoryMaps extends SampleWdl { + override def workflowSource(runtime: String): String = + """ + |task whereami { + | Map[String, File] stringToFileMap + | Map[File, String] fileToStringMap + | Map[File, File] fileToFileMap + | Map[String, String] stringToString + | command { + | pwd + | } + | output { + | String pwd = read_string(stdout()) + | } + | RUNTIME + |} + | + |workflow wf_whereami { + | call whereami + |} + |""".stripMargin.replace("RUNTIME", runtime) + + override val rawInputs: Map[String, Any] = Map.empty + } + + object CurrentDirectoryArray extends SampleWdl { + override def workflowSource(runtime: String): String = + """ + |task whereami { + | Array[File] fileArray + | command { + | pwd + | } + | output { + | String pwd = read_string(stdout()) + | } + | RUNTIME + |} + | + |workflow wf_whereami { + | call whereami + |} + |""".stripMargin.replace("RUNTIME", runtime) + + override val rawInputs: Map[String, Any] = Map.empty + } + + object CurrentDirectoryFiles extends SampleWdl { + override def workflowSource(runtime: String): String = + """ + |task whereami { + | File file1 + | File file2 + | command { + | pwd + | } + | output { + | String pwd = read_string(stdout()) + | } + | RUNTIME + |} + | + |workflow wf_whereami { + | call whereami + |} + |""".stripMargin.replace("RUNTIME", runtime) + + override val rawInputs: Map[String, Any] = Map.empty + } + object ArrayIO extends SampleWdl { - override def workflowSource(runtime: String = "") = + override def workflowSource(runtime: String = ""): WorkflowSource = s""" |task serialize { | Array[String] strs @@ -397,7 +468,7 @@ object SampleWdl { } class ScatterWdl extends SampleWdl { - val tasks = s"""task A { + val tasks: String = s"""task A { | command { | echo -n -e "jeff\nchris\nmiguel\nthibault\nkhalid\nruchi" | } @@ -451,7 +522,7 @@ object SampleWdl { |} """.stripMargin - override def workflowSource(runtime: String = "") = + override def workflowSource(runtime: String = ""): WorkflowSource = s"""$tasks | |workflow w { @@ -469,7 +540,7 @@ object SampleWdl { } object SimpleScatterWdl extends SampleWdl { - override def workflowSource(runtime: String = "") = + override def workflowSource(runtime: String = ""): WorkflowSource = s"""task echo_int { | Int int | command {echo $${int}} @@ -492,7 +563,7 @@ object SampleWdl { } object SimpleScatterWdlWithOutputs extends SampleWdl { - override def workflowSource(runtime: String = "") = + override def workflowSource(runtime: String = ""): WorkflowSource = s"""task echo_int { | Int int | command {echo $${int}} @@ -517,7 +588,7 @@ object SampleWdl { } case class PrepareScatterGatherWdl(salt: String = UUID.randomUUID().toString) extends SampleWdl { - override def workflowSource(runtime: String = "") = { + override def 
workflowSource(runtime: String = ""): WorkflowSource = { s""" |# |# Goal here is to split up the input file into files of 1 line each (in the prepare) then in parallel call wc -w on each newly created file and count the words into another file then in the gather, sum the results of each parallel call to come up with @@ -572,7 +643,7 @@ object SampleWdl { """.stripMargin.replace("RUNTIME", runtime) } - val contents = + val contents: String = """|the |total number |of words in this @@ -585,7 +656,7 @@ object SampleWdl { } object FileClobber extends SampleWdl { - override def workflowSource(runtime: String = "") = + override def workflowSource(runtime: String = ""): WorkflowSource = s"""task read_line { | File in | command { cat $${in} } @@ -598,10 +669,10 @@ object SampleWdl { |} """.stripMargin - val tempDir1 = DefaultPathBuilder.createTempDirectory("FileClobber1") - val tempDir2 = DefaultPathBuilder.createTempDirectory("FileClobber2") - val firstFile = createFile(name = "file.txt", contents = "first file.txt", dir = tempDir1) - val secondFile = createFile(name = "file.txt", contents = "second file.txt", dir = tempDir2) + val tempDir1: DefaultPath = DefaultPathBuilder.createTempDirectory("FileClobber1") + val tempDir2: DefaultPath = DefaultPathBuilder.createTempDirectory("FileClobber2") + val firstFile: Path = createFile(name = "file.txt", contents = "first file.txt", dir = tempDir1) + val secondFile: Path = createFile(name = "file.txt", contents = "second file.txt", dir = tempDir2) override val rawInputs = Map( "two.x.in" -> firstFile.pathAsString, @@ -715,8 +786,8 @@ object SampleWdl { |} """.stripMargin.replace("RUNTIME", runtime) - val tempDir = DefaultPathBuilder.createTempDirectory("CallCachingHashingWdl") - val cannedFile = createCannedFile(prefix = "canned", contents = "file contents", dir = Option(tempDir)) + val tempDir: DefaultPath = DefaultPathBuilder.createTempDirectory("CallCachingHashingWdl") + val cannedFile: Path = createCannedFile(prefix = "canned", contents = "file contents", dir = Option(tempDir)) override val rawInputs = Map( "w.t.a" -> WomInteger(1), "w.t.b" -> WomFloat(1.1), @@ -726,7 +797,7 @@ object SampleWdl { } object ExpressionsInInputs extends SampleWdl { - override def workflowSource(runtime: String = "") = + override def workflowSource(runtime: String = ""): WorkflowSource = s"""task echo { | String inString | command { @@ -756,7 +827,7 @@ object SampleWdl { } object WorkflowFailSlow extends SampleWdl { - override def workflowSource(runtime: String = "") = + override def workflowSource(runtime: String = ""): WorkflowSource = s""" task shouldCompleteFast { | Int a diff --git a/cwl/src/test/scala/cwl/ScatterLogicSpec.scala b/cwl/src/test/scala/cwl/ScatterLogicSpec.scala index 486347e2d3a..8afd3c9df16 100644 --- a/cwl/src/test/scala/cwl/ScatterLogicSpec.scala +++ b/cwl/src/test/scala/cwl/ScatterLogicSpec.scala @@ -5,7 +5,6 @@ import org.scalatest.BeforeAndAfterEach import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks -import org.specs2.mock.Mockito import spray.json.DefaultJsonProtocol import wom.graph.ScatterNode.ScatterVariableAndValue import wom.graph.expression.PlainAnonymousExpressionNode @@ -13,28 +12,29 @@ import wom.graph.{ScatterNode, ScatterVariableNode, WomIdentifier} import wom.types.{WomArrayType, WomStringType} import wom.values.{WomArray, WomString, WomValue} -class ScatterLogicSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with 
TableDrivenPropertyChecks with Mockito with DefaultJsonProtocol with BeforeAndAfterEach { +class ScatterLogicSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks + with DefaultJsonProtocol with BeforeAndAfterEach { private val expressionNode = PlainAnonymousExpressionNode(WomIdentifier("name"), null, WomStringType, Map.empty) private val arrayStringType = WomArrayType(WomStringType) - val emptyArray = ScatterVariableAndValue( + private val emptyArray = ScatterVariableAndValue( ScatterVariableNode(null, expressionNode, WomStringType), WomArray(arrayStringType, List.empty) ) - val simpleArray2 = ScatterVariableAndValue( + private val simpleArray2 = ScatterVariableAndValue( ScatterVariableNode(null, expressionNode, WomStringType), WomArray(arrayStringType, List(WomString("a"), WomString("b"))) ) - val simpleArray3 = ScatterVariableAndValue( + private val simpleArray3 = ScatterVariableAndValue( ScatterVariableNode(null, expressionNode, WomStringType), WomArray(arrayStringType, List(WomString("a"), WomString("b"), WomString("c"))) ) - val simpleArray4 = ScatterVariableAndValue( + private val simpleArray4 = ScatterVariableAndValue( ScatterVariableNode(null, expressionNode, WomStringType), WomArray(arrayStringType, List(WomString("a"), WomString("b"), WomString("c"), WomString("d"))) ) - - override def beforeEach() = { + + override def beforeEach(): Unit = { // The index length of the SVN is a mutable value, to avoid tests stepping on each others reset it to the default value // before each test emptyArray.scatterVariableNode.withRelativeIndexLength(1) @@ -43,12 +43,12 @@ class ScatterLogicSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matcher simpleArray4.scatterVariableNode.withRelativeIndexLength(1) } - val valueA = WomString("a") - val valueB = WomString("b") - val valueC = WomString("c") - val valueD = WomString("d") - - def validateScatterCombinations(list: List[ScatterNode.ScatterVariableAndValue], + private val valueA = WomString("a") + private val valueB = WomString("b") + private val valueC = WomString("c") + private val valueD = WomString("d") + + private def validateScatterCombinations(list: List[ScatterNode.ScatterVariableAndValue], scatterSize: Int, expected: List[List[Int]]) = { // Go through all the variable nodes and feed all the shard numbers to the indexForShard function diff --git a/cwl/src/test/scala/cwl/WomTypeConversionSpec.scala b/cwl/src/test/scala/cwl/WomTypeConversionSpec.scala index c636a3104ef..e7b76b153dc 100644 --- a/cwl/src/test/scala/cwl/WomTypeConversionSpec.scala +++ b/cwl/src/test/scala/cwl/WomTypeConversionSpec.scala @@ -1,63 +1,69 @@ package cwl import cats.data.NonEmptyList -import org.scalacheck.Properties -import org.scalacheck.Prop._ +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import shapeless.Coproduct import wom.types._ import mouse.`try`._ import scala.util.Try -object WomTypeConversionSpec extends Properties("CWL -> WOM Conversion"){ +class WomTypeConversionSpec extends AnyFlatSpec with Matchers { + behavior of "WomTypeConversion" /* ******** Inputs *********** */ - property("ArrayInputSchema") = secure { + it should "convert ArrayInputSchema" in { val y = Coproduct[MyriadInputInnerType](CwlType.String) val x = InputArraySchema(items = Coproduct[MyriadInputType](y)) val z = Coproduct[MyriadInputInnerType](x) - Coproduct[MyriadInputType](z).fold(MyriadInputTypeToWomType).apply(SchemaDefRequirement()) == WomArrayType(WomStringType) + 
Coproduct[MyriadInputType](z).fold(MyriadInputTypeToWomType).apply(SchemaDefRequirement()) shouldBe
+      WomArrayType(WomStringType)
   }
 
-  property("Cwl String") = secure {
+  it should "convert Cwl Input String" in {
     val y = Coproduct[MyriadInputInnerType](CwlType.String)
-    Coproduct[MyriadInputType](y).fold(MyriadInputTypeToWomType).apply(SchemaDefRequirement()) == WomStringType
+    Coproduct[MyriadInputType](y).fold(MyriadInputTypeToWomType).apply(SchemaDefRequirement()) shouldBe WomStringType
   }
 
-  property("Array of a single type is actually one type not in an array") = secure {
+  it should "treat an Array of a single type as one type, not an array" in {
     val y = Coproduct[MyriadInputInnerType](CwlType.String)
-    Coproduct[MyriadInputType](Array(y)).fold(MyriadInputTypeToWomType).apply(SchemaDefRequirement()) == WomStringType
+    Coproduct[MyriadInputType](Array(y)).fold(MyriadInputTypeToWomType).apply(SchemaDefRequirement()) shouldBe
+      WomStringType
   }
 
-  property("Array of more than one type becomes a coproduct") = secure {
+  it should "convert an Array of more than one type into a coproduct" in {
     val y = Coproduct[MyriadInputInnerType](CwlType.String)
     val z = Coproduct[MyriadInputInnerType](CwlType.Boolean)
-    Coproduct[MyriadInputType](Array(y, z)).fold(MyriadInputTypeToWomType).apply(SchemaDefRequirement()) == WomCoproductType(NonEmptyList.of(WomStringType, WomBooleanType))
+    Coproduct[MyriadInputType](Array(y, z)).fold(MyriadInputTypeToWomType).apply(SchemaDefRequirement()) shouldBe
+      WomCoproductType(NonEmptyList.of(WomStringType, WomBooleanType))
   }
 
-  property("Array of more than one type and a null becomes an optional coproduct") = secure {
+  it should "convert an Array of more than one type and a null into an optional coproduct" in {
     val x = Coproduct[MyriadInputInnerType](CwlType.Null)
     val y = Coproduct[MyriadInputInnerType](CwlType.String)
     val z = Coproduct[MyriadInputInnerType](CwlType.Boolean)
-    Coproduct[MyriadInputType](Array(x, y, z)).fold(MyriadInputTypeToWomType).apply(SchemaDefRequirement()) == WomOptionalType(WomCoproductType(NonEmptyList.of(WomStringType, WomBooleanType)))
+    Coproduct[MyriadInputType](Array(x, y, z)).fold(MyriadInputTypeToWomType).apply(SchemaDefRequirement()) shouldBe
+      WomOptionalType(WomCoproductType(NonEmptyList.of(WomStringType, WomBooleanType)))
   }
 
-  property("a 2-element Array of a single type accompanied by a null is an optional type") = secure {
+  it should "treat a 2-element Input Array of a single type accompanied by a null as an optional type" in {
     val y = Coproduct[MyriadInputInnerType](CwlType.String)
     val z = Coproduct[MyriadInputInnerType](CwlType.Null)
     testInputArray(Array(y,z), WomOptionalType(WomStringType))
   }
 
-  def testInputArray(array: Array[MyriadInputInnerType], assertedType: WomType ) = {
+  private def testInputArray(array: Array[MyriadInputInnerType], assertedType: WomType) = {
    def f(array: Array[MyriadInputInnerType]) = Try(Coproduct[MyriadInputType](array).fold(MyriadInputTypeToWomType).apply(SchemaDefRequirement())).cata(
-      success => (success == assertedType) :| "input should evaluate to an optional array of files type",
-      failure => false :| s"expected an optional array of files type but received a failure"
+      success => withClue("input should evaluate to an optional array of files type")(success shouldBe assertedType),
+      failure => fail(s"expected an optional array of files type but received a failure: $failure")
     )
-    f(array) && f(array.reverse)
+    f(array)
+    f(array.reverse)
   }
 
-  property("Optional Array of a type is interpreted correctly as input type") = secure {
+  it should "interpret an Optional Array of a type correctly as an input type" in {
     val miit = Coproduct[MyriadInputInnerType](CwlType.File)
     val mit = Coproduct[MyriadInputType](miit)
     val ias = InputArraySchema(items = mit)
@@ -67,52 +73,57 @@ object WomTypeConversionSpec extends Properties("CWL -> WOM Conversion"){
   }
 
   /* ******** Outputs *********** */
-  property("ArrayOutputSchema") = secure {
+  it should "convert ArrayOutputSchema" in {
     val y = Coproduct[MyriadOutputInnerType](CwlType.String)
     val x = OutputArraySchema(items = Coproduct[MyriadOutputType](y))
     val z = Coproduct[MyriadOutputInnerType](x)
-    Coproduct[MyriadOutputType](z).fold(MyriadOutputTypeToWomType).apply(SchemaDefRequirement()) == WomArrayType(WomStringType)
+    Coproduct[MyriadOutputType](z).fold(MyriadOutputTypeToWomType).apply(SchemaDefRequirement()) shouldBe
+      WomArrayType(WomStringType)
   }
 
-  property("Cwl String") = secure {
+  it should "convert Cwl Output String" in {
     val y = Coproduct[MyriadOutputInnerType](CwlType.String)
-    Coproduct[MyriadOutputType](y).fold(MyriadOutputTypeToWomType).apply(SchemaDefRequirement()) == WomStringType
+    Coproduct[MyriadOutputType](y).fold(MyriadOutputTypeToWomType).apply(SchemaDefRequirement()) shouldBe WomStringType
   }
 
-  property("Array of a single type is the same as one type not in an array") = secure {
+  it should "treat an Output Array of a single type as one type, not an array" in {
     val y = Coproduct[MyriadOutputInnerType](CwlType.String)
-    Coproduct[MyriadOutputType](Array(y)).fold(MyriadOutputTypeToWomType).apply(SchemaDefRequirement()) == WomStringType
+    Coproduct[MyriadOutputType](Array(y)).fold(MyriadOutputTypeToWomType).apply(SchemaDefRequirement()) shouldBe
+      WomStringType
   }
 
-  property("Output Array of more than one type becomes a coproduct") = secure {
+  it should "convert an Output Array of more than one type into a coproduct" in {
     val y = Coproduct[MyriadOutputInnerType](CwlType.String)
     val z = Coproduct[MyriadOutputInnerType](CwlType.Boolean)
-    Coproduct[MyriadOutputType](Array(y, z)).fold(MyriadOutputTypeToWomType).apply(SchemaDefRequirement()) == WomCoproductType(NonEmptyList.of(WomStringType, WomBooleanType))
+    Coproduct[MyriadOutputType](Array(y, z)).fold(MyriadOutputTypeToWomType).apply(SchemaDefRequirement()) shouldBe
+      WomCoproductType(NonEmptyList.of(WomStringType, WomBooleanType))
   }
 
-  property("Output Array of more than one types including a null is an optional coproduct") = secure {
+  it should "convert an Output Array of more than one type including a null into an optional coproduct" in {
     val x = Coproduct[MyriadOutputInnerType](CwlType.Null)
     val y = Coproduct[MyriadOutputInnerType](CwlType.String)
     val z = Coproduct[MyriadOutputInnerType](CwlType.Boolean)
-    Coproduct[MyriadOutputType](Array(x, y, z)).fold(MyriadOutputTypeToWomType).apply(SchemaDefRequirement()) == WomOptionalType(WomCoproductType(NonEmptyList.of(WomStringType, WomBooleanType)))
+    Coproduct[MyriadOutputType](Array(x, y, z)).fold(MyriadOutputTypeToWomType).apply(SchemaDefRequirement()) shouldBe
+      WomOptionalType(WomCoproductType(NonEmptyList.of(WomStringType, WomBooleanType)))
   }
 
-  def testOutputArray(array: Array[MyriadOutputInnerType], assertedType: WomType) = {
+  private def testOutputArray(array: Array[MyriadOutputInnerType], assertedType: WomType) = {
    def f(array: Array[MyriadOutputInnerType]) = Try(Coproduct[MyriadOutputType](array).fold(MyriadOutputTypeToWomType).apply(SchemaDefRequirement())).cata(
-      success => (success == assertedType) :| "input should evaluate to an optional string type",
-      failure => false :| s"expected an optional string type but received a failure"
+      success => withClue("input should evaluate to an optional string type")(success shouldBe assertedType),
+      failure => fail(s"expected an optional string type but received a failure: $failure")
     )
-    f(array) && f(array.reverse)
+    f(array)
+    f(array.reverse)
  }
 
-  property("a 2-element Array of a single type accompanied by a null is an optional type") = secure {
+  it should "treat a 2-element Output Array of a single type accompanied by a null as an optional type" in {
    val y = Coproduct[MyriadOutputInnerType](CwlType.String)
    val z = Coproduct[MyriadOutputInnerType](CwlType.Null)
    testOutputArray(Array(y,z), WomOptionalType(WomStringType))
  }
 
-  property("Optional Array of a type is interpreted correctly as output type") = secure {
+  it should "interpret an Optional Array of a type correctly as an output type" in {
    val moit = Coproduct[MyriadOutputInnerType](CwlType.File)
    val mot = Coproduct[MyriadOutputType](moit)
    val oas = OutputArraySchema(items = mot)
diff --git a/cwl/src/test/scala/cwl/WorkflowStepInputExpressionSpec.scala b/cwl/src/test/scala/cwl/WorkflowStepInputExpressionSpec.scala
index 054ef80c268..122156b4f4e 100644
--- a/cwl/src/test/scala/cwl/WorkflowStepInputExpressionSpec.scala
+++ b/cwl/src/test/scala/cwl/WorkflowStepInputExpressionSpec.scala
@@ -3,17 +3,18 @@ package cwl
 import cats.data.NonEmptyList
 import cats.data.Validated._
 import cwl.WorkflowStepInput.InputSource
-import org.scalacheck.Prop._
-import org.scalacheck.Properties
+import org.scalatest.flatspec.AnyFlatSpec
+import org.scalatest.matchers.should.Matchers
 import shapeless.Coproduct
 import wom.graph.GraphNodePort.GraphNodeOutputPort
 import wom.graph.WomIdentifier
 import wom.types._
 import wom.values.{WomArray, WomInteger, WomString}
 
-object WorkflowStepInputExpressionSpec extends Properties("Workflow Step Input Expression") {
+class WorkflowStepInputExpressionSpec extends AnyFlatSpec with Matchers {
+  behavior of "WorkflowStepInputExpression"
 
-  property("assert source arrays are concatenated when merge_flattened LinkMergeMethod is used") = secure {
+  it should "concatenate source arrays when the merge_flattened LinkMergeMethod is used" in {
     val inputSource = Coproduct[InputSource](Array("i1", "i2"))
     val i1OutputPort = GraphNodeOutputPort(WomIdentifier("i1"), WomStringType, null)
     val i2OutputPort = GraphNodeOutputPort(WomIdentifier("i2"), WomStringType, null)
@@ -22,36 +23,36 @@ object WorkflowStepInputExpressionSpec extends Properties("Workflow Step Input E
     val mergeTpeEither = wsi.determineMergeType(
       sources = Map("i1" -> WomArrayType(WomStringType), "i2" -> WomArrayType(WomStringType)),
       expectedTypeAsWom = Option(WomArrayType(WomStringType)))
-    
+
     val tpe: WomType = fromEither(mergeTpeEither).getOrElse(throw new RuntimeException("expected a womType but evaluation of determineType failed"))
 
     val expression = WorkflowStepInputMergeExpression(wsi, tpe, NonEmptyList.of("i1" -> i1OutputPort, "i2" -> i2OutputPort), Vector.empty)
     val r = expression.evaluateValue(Map("i1" -> WomString("1"), "i2" -> WomString("2")), null)
-    r == Valid(WomArray(Seq(WomString("1"), WomString("2"))))
+    r shouldBe Valid(WomArray(Seq(WomString("1"), WomString("2"))))
   }
 
-  property("array of one entry for each input link when merge_nested is used") = secure {
+  it should "produce an array with one entry for each input link when merge_nested is used" in {
     val inputSource = Coproduct[InputSource](Array("i1", "i2"))
     val i1OutputPort = GraphNodeOutputPort(WomIdentifier("i1"), WomStringType, null)
     val i2OutputPort = GraphNodeOutputPort(WomIdentifier("i2"), WomStringType, null)
     val wsi = WorkflowStepInput("s#h", source = Option(inputSource))
 
     val expression = WorkflowStepInputMergeExpression(wsi, null, NonEmptyList.of("i1" -> i1OutputPort, "i2" -> i2OutputPort), Vector.empty)
-    expression.evaluateValue(Map("i1" -> WomInteger(1), "i2" -> WomInteger(2)), null) ==
+    expression.evaluateValue(Map("i1" -> WomInteger(1), "i2" -> WomInteger(2)), null) shouldBe
       Valid(WomArray(Seq(
         WomInteger(1),
         WomInteger(2)
       )))
   }
 
-  property("list of one entry for when there is only one input link and when merge_nested is used") = secure {
+  it should "produce a list of one entry when there is only one input link and merge_nested is used" in {
     val inputSource = Coproduct[InputSource]("i1")
     val wsi = WorkflowStepInput("s#h", source = Option(inputSource))
     val i1OutputPort = GraphNodeOutputPort(WomIdentifier("i1"), WomStringType, null)
 
     val expression = WorkflowStepInputMergeExpression(wsi, null, NonEmptyList.of("i1" -> i1OutputPort), Vector.empty)
-    expression.evaluateValue(Map("i1" -> WomInteger(1), "i2" -> WomInteger(2)), null) ==
+    expression.evaluateValue(Map("i1" -> WomInteger(1), "i2" -> WomInteger(2)), null) shouldBe
       Valid(WomInteger(1))
   }
 
diff --git a/cwl/src/test/scala/cwl/WorkflowStepInputSpec.scala b/cwl/src/test/scala/cwl/WorkflowStepInputSpec.scala
index dd29fd5d4fd..5592e2ffddd 100644
--- a/cwl/src/test/scala/cwl/WorkflowStepInputSpec.scala
+++ b/cwl/src/test/scala/cwl/WorkflowStepInputSpec.scala
@@ -1,78 +1,109 @@
 package cwl
 
 import cats.data.NonEmptyList
-import cwl.command.ParentName
-import org.scalacheck.Prop._
-import org.scalacheck.Properties
-import shapeless.Coproduct
+import org.scalatest.flatspec.AnyFlatSpec
+import org.scalatest.matchers.should.Matchers
 import wom.types._
 
-object WorkflowStepInputSpec extends Properties("WorkflowStepInput") {
-  implicit val pn = ParentName("_")
+class WorkflowStepInputSpec extends AnyFlatSpec with Matchers {
+  behavior of "WorkflowStepInput"
 
-  val it = Coproduct[MyriadInputInnerType](CwlType.String)
-  val stringType = Coproduct[MyriadInputType](it)
-
-  val arraySchema = Coproduct[MyriadInputInnerType](InputArraySchema(items = stringType))
-
-  val arrayStringType = WomArrayType(WomStringType)
-
-  property("use single source type if no default value and no merge method is specified") = secure {
-    WorkflowStepInput.determineMergeType(Map("s#in" -> WomStringType), None, None, hasDefault = false) ==
+  it should "use single source type if no default value and no merge method is specified" in {
+    WorkflowStepInput.determineMergeType(
+      sources = Map("s#in" -> WomStringType),
+      linkMerge = None,
+      expectedTypeAsWom = None,
+      hasDefault = false
+    ) shouldBe
       Right(WomStringType)
   }
 
-  property("use optional single source type if no default value and no merge method is specified") = secure {
-    WorkflowStepInput.determineMergeType(Map("s#in" -> WomOptionalType(WomStringType)), None, None, hasDefault = false) ==
+  it should "use optional single source type if no default value and no merge method is specified" in {
+    WorkflowStepInput.determineMergeType(
+      sources = Map("s#in" -> WomOptionalType(WomStringType)),
+      linkMerge = None,
+      expectedTypeAsWom = None,
+      hasDefault = false,
+    ) shouldBe
      Right(WomOptionalType(WomStringType))
   }
 
-  property("use single source type if there's a default value and no merge method is specified") = secure {
-
WorkflowStepInput.determineMergeType(Map("s#in" -> WomStringType), None, None, hasDefault = true) == + it should "use single source type if there's a default value and no merge method is specified" in { + WorkflowStepInput.determineMergeType( + sources = Map("s#in" -> WomStringType), + linkMerge = None, + expectedTypeAsWom = None, + hasDefault = true, + ) shouldBe Right(WomStringType) } - property("use unpacked single source type if there's a default value and no merge method is specified") = secure { - WorkflowStepInput.determineMergeType(Map("s#in" -> WomOptionalType(WomStringType)), None, None, hasDefault = true) == + it should "use unpacked single source type if there's a default value and no merge method is specified" in { + WorkflowStepInput.determineMergeType( + sources = Map("s#in" -> WomOptionalType(WomStringType)), + linkMerge = None, + expectedTypeAsWom = None, + hasDefault = true, + ) shouldBe Right(WomStringType) } - property("wrap single source type in an array if merge method is nested") = secure { - WorkflowStepInput.determineMergeType(Map("s#in" -> WomStringType), Option(LinkMergeMethod.MergeNested), None, hasDefault = false) == + it should "wrap single source type in an array if merge method is nested" in { + WorkflowStepInput.determineMergeType( + sources = Map("s#in" -> WomStringType), + linkMerge = Option(LinkMergeMethod.MergeNested), + expectedTypeAsWom = None, + hasDefault = false, + ) shouldBe Right(WomArrayType(WomStringType)) } - property("find the closest common type to all sources if merge method is nested") = secure { - WorkflowStepInput.determineMergeType(Map("s#in" -> WomStringType, "s#in2" -> WomIntegerType), Option(LinkMergeMethod.MergeNested), None, hasDefault = false) == + it should "find the closest common type to all sources if merge method is nested" in { + WorkflowStepInput.determineMergeType( + sources = Map("s#in" -> WomStringType, "s#in2" -> WomIntegerType), + linkMerge = Option(LinkMergeMethod.MergeNested), + expectedTypeAsWom = None, + hasDefault = false, + ) shouldBe Right(WomMaybeEmptyArrayType(WomStringType)) } - property("validate array inner type against target type if merge method is flattened") = secure { - WorkflowStepInput.determineMergeType(Map("s#in" -> WomStringType, "s#in2" -> WomStringType), - Option(LinkMergeMethod.MergeFlattened), - Option(arrayStringType), hasDefault = false - ) == Right(WomArrayType(WomStringType)) + it should "validate array inner type against target type if merge method is flattened" in { + WorkflowStepInput.determineMergeType( + sources = Map("s#in" -> WomStringType, "s#in2" -> WomStringType), + linkMerge = Option(LinkMergeMethod.MergeFlattened), + expectedTypeAsWom = Option(WomArrayType(WomStringType)), + hasDefault = false, + ) shouldBe Right(WomArrayType(WomStringType)) } - property("validate type against target type if merge method is flattened") = secure { - WorkflowStepInput.determineMergeType(Map("s#in" -> WomStringType, "s#in2" -> WomStringType), - Option(LinkMergeMethod.MergeFlattened), - Option(WomStringType), hasDefault = false - ) == Right(WomArrayType(WomStringType)) + it should "validate type against target type if merge method is flattened" in { + WorkflowStepInput.determineMergeType( + sources = Map("s#in" -> WomStringType, "s#in2" -> WomStringType), + linkMerge = Option(LinkMergeMethod.MergeFlattened), + expectedTypeAsWom = Option(WomStringType), + hasDefault = false, + ) shouldBe Right(WomArrayType(WomStringType)) } - property("fail if target type does not conform to source types if merge 
method is flattened") = secure { - WorkflowStepInput.determineMergeType(Map("s#in" -> WomObjectType, "s#in2" -> WomObjectType), - Option(LinkMergeMethod.MergeFlattened), - Option(WomStringType), hasDefault = false - ) == Left(NonEmptyList.one("could not verify that types Map(s#in -> WomObjectType, s#in2 -> WomObjectType) and the items type of the run's InputArraySchema WomStringType were compatible")) + it should "fail if target type does not conform to source types if merge method is flattened" in { + WorkflowStepInput.determineMergeType( + sources = Map("s#in" -> WomObjectType, "s#in2" -> WomObjectType), + linkMerge = Option(LinkMergeMethod.MergeFlattened), + expectedTypeAsWom = Option(WomStringType), + hasDefault = false, + ) shouldBe Left(NonEmptyList.one( + "could not verify that types Map(s#in -> WomObjectType, s#in2 -> WomObjectType)" + + " and the items type of the run's InputArraySchema WomStringType were compatible" + )) } - property("fall back to the closest common type") = secure { - WorkflowStepInput.determineMergeType(Map("s#in" -> WomBooleanType, "s#in2" -> WomIntegerType), - Option(LinkMergeMethod.MergeFlattened), - None, hasDefault = false - ) == Right(WomStringType) + it should "fall back to the closest common type" in { + WorkflowStepInput.determineMergeType( + sources = Map("s#in" -> WomBooleanType, "s#in2" -> WomIntegerType), + linkMerge = Option(LinkMergeMethod.MergeFlattened), + expectedTypeAsWom = None, + hasDefault = false, + ) shouldBe Right(WomStringType) } } - diff --git a/cwl/src/test/scala/cwl/preprocessor/CwlPreProcessorSpec.scala b/cwl/src/test/scala/cwl/preprocessor/CwlPreProcessorSpec.scala index a03cd54246e..c0aba9c975d 100644 --- a/cwl/src/test/scala/cwl/preprocessor/CwlPreProcessorSpec.scala +++ b/cwl/src/test/scala/cwl/preprocessor/CwlPreProcessorSpec.scala @@ -3,22 +3,23 @@ package cwl.preprocessor import better.files.File import cats.data.NonEmptyList import common.assertion.CromwellTimeoutSpec -import common.validation.IOChecked._ +import common.mock.MockSugar +import cwl.preprocessor.CwlPreProcessor.SaladFunction import io.circe.Printer -import org.scalamock.function.MockFunction1 -import org.scalamock.scalatest.MockFactory +import org.mockito.ArgumentMatchers._ +import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -class CwlPreProcessorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockFactory { +class CwlPreProcessorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "CwlPreProcessor" - val resourcesRoot = File(getClass.getResource(".").getPath) - val echoFileTool = CwlFileReference(resourcesRoot / "echo_tool.cwl", None) + private val resourcesRoot = File(getClass.getResource(".").getPath) + private val echoFileTool = CwlFileReference(resourcesRoot / "echo_tool.cwl", None) it should "flatten a simple file" in { validate(makeTestRoot("simple_workflow"), None) { mockSaladingFunction => - mockSaladingFunction.expects(echoFileTool).onCall(CwlPreProcessor.saladCwlFile) + verify(mockSaladingFunction).apply(echoFileTool) } } @@ -38,11 +39,11 @@ class CwlPreProcessorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc */ it should "flatten file with sub workflow, self reference and valid cyclic dependency" in { val testRoot = makeTestRoot("complex_workflow") - val subWorkflow = CwlFileReference(testRoot / "sub" / "sub_workflow.cwl", Some("sub-echo-workflow-1")) + val subWorkflow = CwlFileReference(testRoot / "sub" / 
"sub_workflow.cwl", Option("sub-echo-workflow-1")) validate(testRoot, Option("echo-workflow-2")) { mockSaladingFunction => - mockSaladingFunction.expects(echoFileTool).onCall(CwlPreProcessor.saladCwlFile) - mockSaladingFunction.expects(subWorkflow).onCall(CwlPreProcessor.saladCwlFile) + verify(mockSaladingFunction).apply(echoFileTool) + verify(mockSaladingFunction).apply(subWorkflow) } } @@ -67,8 +68,8 @@ class CwlPreProcessorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc NonEmptyList.one(s"Found a circular dependency on file://$testRoot/root_workflow.cwl") ) ) { mockSaladingFunction => - mockSaladingFunction.expects(subWorkflow1).onCall(CwlPreProcessor.saladCwlFile) - mockSaladingFunction.expects(subWorkflow2).onCall(CwlPreProcessor.saladCwlFile) + verify(mockSaladingFunction).apply(subWorkflow1) + verify(mockSaladingFunction).apply(subWorkflow2) } } @@ -79,32 +80,32 @@ class CwlPreProcessorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc val subWorkflow2 = CwlFileReference(testRoot / "parseInt-tool.cwl", None) validate(testRoot, None, uuidExtractor = Option("step0/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/")) { mockSaladingFunction => - mockSaladingFunction.expects(subWorkflow1).onCall(CwlPreProcessor.saladCwlFile) - mockSaladingFunction.expects(subWorkflow2).onCall(CwlPreProcessor.saladCwlFile) + verify(mockSaladingFunction).apply(subWorkflow1) + verify(mockSaladingFunction).apply(subWorkflow2) } } - def makeTestRoot(testDirectoryName: String) = resourcesRoot / testDirectoryName + private def makeTestRoot(testDirectoryName: String) = resourcesRoot / testDirectoryName def validate[T](testRoot: File, root: Option[String], expectedFailure: Option[NonEmptyList[String]] = None, uuidExtractor: Option[String] = None - )(additionalValidation: MockFunction1[CwlReference, IOChecked[String]] => T) = { + )(additionalValidation: SaladFunction => T): Unit = { val rootWorkflowReference = CwlFileReference(testRoot / "root_workflow.cwl", root) // Mocking the salad function allows us to validate how many times it is called exactly and with which parameters - val mockSaladingFunction = mockFunction[CwlReference, IOChecked[String]] + val mockSaladingFunction = mock[SaladFunction] + when(mockSaladingFunction.apply(any[CwlReference])) thenAnswer { + invocationOnMock => + CwlPreProcessor.saladCwlFile(invocationOnMock.getArgument[CwlReference](0)) + } val preProcessor = new CwlPreProcessor(mockSaladingFunction) - val saladExpectations = additionalValidation - // Always validate that the root is saladed - .andThen(_ => mockSaladingFunction.expects(rootWorkflowReference).onCall(CwlPreProcessor.saladCwlFile)) - - // Asserts that dependencies are only saladed once and exactly once - inAnyOrder(saladExpectations(mockSaladingFunction)) - val process = preProcessor.preProcessCwl(rootWorkflowReference).value.unsafeRunSync() + // Asserts that dependencies are only saladed once and exactly once + verify(mockSaladingFunction).apply(rootWorkflowReference) + additionalValidation(mockSaladingFunction) (process, expectedFailure) match { case (Left(errors), Some(failures)) => errors shouldBe failures @@ -119,5 +120,6 @@ class CwlPreProcessorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matc result shouldBe io.circe.parser.parse(expectationContent).getOrElse(fail("Failed to parse expectation. 
Your test is broken !")) case (Right(_), Some(failures)) => fail("Unexpected success to pre-process workflow, was expecting failures: " + failures.toList.mkString(", ")) } + () } } diff --git a/dockerHashing/src/test/scala/cromwell/docker/registryv2/AlibabaCloudCRRegistrySpec.scala b/dockerHashing/src/test/scala/cromwell/docker/registryv2/AlibabaCloudCRRegistrySpec.scala index 8ad76ceef7d..0687f75019f 100644 --- a/dockerHashing/src/test/scala/cromwell/docker/registryv2/AlibabaCloudCRRegistrySpec.scala +++ b/dockerHashing/src/test/scala/cromwell/docker/registryv2/AlibabaCloudCRRegistrySpec.scala @@ -9,12 +9,11 @@ import net.ceedubs.ficus.Ficus._ import org.scalatest.BeforeAndAfter import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.scalatestplus.mockito.MockitoSugar import spray.json._ object AlibabaCloudCRRegistrySpec { - val AlibabaCloudCRRegistryConfigString = + val AlibabaCloudCRRegistryConfigString: String = s""" |enable = true |# How should docker hashes be looked up. Possible values are "local" and "remote" @@ -33,16 +32,16 @@ object AlibabaCloudCRRegistrySpec { | """.stripMargin - val AlibabaCloudCRRegistryConfig = ConfigFactory.parseString(AlibabaCloudCRRegistryConfigString) + val AlibabaCloudCRRegistryConfig: Config = ConfigFactory.parseString(AlibabaCloudCRRegistryConfigString) } -class AlibabaCloudCRRegistrySpec extends TestKitSuite with AnyFlatSpecLike with Matchers with MockitoSugar with BeforeAndAfter { +class AlibabaCloudCRRegistrySpec extends TestKitSuite with AnyFlatSpecLike with Matchers with BeforeAndAfter { behavior of "AlibabaCloudCRRegistry" val hashValue = "fcf39ed78ef0fa27bcc74713b85259alop1b12e6a201e3083af50fd8eda1cbe1" val tag = "0.2" val notExistTag = "0.3" - val CRResponse = + val CRResponse: String = s""" |{ | "data": { @@ -81,7 +80,7 @@ class AlibabaCloudCRRegistrySpec extends TestKitSuite with AnyFlatSpecLike with val testCRDockerImage = s"registry.cn-shanghai.aliyuncs.com/batchcompute/ubuntu:$tag" val testInvalidCRDockerImage = "registry.cn-not-exist.aliyuncs.com/batchcompute/ubuntu:0.2" registry.accepts(DockerImageIdentifier.fromString(testCRDockerImage).get) shouldEqual true - registry.isValidAlibabaCloudCRHost(Some(testInvalidCRDockerImage)) shouldEqual false + registry.isValidAlibabaCloudCRHost(Option(testInvalidCRDockerImage)) shouldEqual false registry.isValidAlibabaCloudCRHost(None) shouldEqual false } @@ -111,7 +110,8 @@ class AlibabaCloudCRRegistrySpec extends TestKitSuite with AnyFlatSpecLike with val cRResponseJsObj = CRResponse.parseJson.asJsObject() registry.extractDigestFromBody(cRResponseJsObj, context) match { - case DockerInfoFailedResponse(t, _) => t.getMessage should be(s"Manifest response did not contain a expected tag: $notExistTag, ${cRResponseJsObj}") + case DockerInfoFailedResponse(t, _) => + t.getMessage should be(s"Manifest response did not contain a expected tag: $notExistTag, $cRResponseJsObj") case _ => fail("Failed to get a DockerInfoFailedResponse result.") } } diff --git a/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala b/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala index d51c9f45375..c133e7618c0 100644 --- a/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala +++ b/engine/src/test/scala/cromwell/engine/io/gcs/GcsBatchFlowSpec.scala @@ -11,14 +11,14 @@ import cromwell.filesystems.gcs.batch.GcsBatchCrc32Command import org.scalatest.PrivateMethodTester import org.scalatest.flatspec.AnyFlatSpecLike import 
org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar import scala.concurrent.duration._ import scala.concurrent.{ExecutionContextExecutor, Future} import scala.language.postfixOps - -class GcsBatchFlowSpec extends TestKitSuite with AnyFlatSpecLike with CromwellTimeoutSpec with Matchers with PrivateMethodTester with Mockito { +class GcsBatchFlowSpec extends TestKitSuite with AnyFlatSpecLike with CromwellTimeoutSpec with Matchers + with PrivateMethodTester with MockSugar { private val NoopOnRetry: IoCommandContext[_] => Throwable => Unit = _ => _ => () private val NoopOnBackpressure: Option[Double] => Unit = _ => () @@ -67,9 +67,9 @@ class GcsBatchFlowSpec extends TestKitSuite with AnyFlatSpecLike with CromwellTi val mockGcsPath = GcsPath( nioPath = CloudStorageFileSystem.forBucket("bucket").getPath("test"), - apiStorage = anyObject[com.google.api.services.storage.Storage], - cloudStorage = anyObject[com.google.cloud.storage.Storage], - projectId = anyString, + apiStorage = mock[com.google.api.services.storage.Storage], + cloudStorage = mock[com.google.cloud.storage.Storage], + projectId = "GcsBatchFlowSpec-project", ) val gcsBatchCommandContext = GcsBatchCommandContext(GcsBatchCrc32Command.forPath(mockGcsPath).get, TestProbe().ref, 5) val recoverCommandPrivateMethod = PrivateMethod[PartialFunction[Throwable, Future[GcsBatchResponse[_]]]](Symbol("recoverCommand")) diff --git a/engine/src/test/scala/cromwell/engine/io/nio/NioFlowSpec.scala b/engine/src/test/scala/cromwell/engine/io/nio/NioFlowSpec.scala index f4a583fefc8..4b3461ae7d3 100644 --- a/engine/src/test/scala/cromwell/engine/io/nio/NioFlowSpec.scala +++ b/engine/src/test/scala/cromwell/engine/io/nio/NioFlowSpec.scala @@ -15,11 +15,11 @@ import cromwell.engine.io.IoAttempts.EnhancedCromwellIoException import cromwell.engine.io.IoCommandContext import cromwell.filesystems.drs.DrsPath import cromwell.filesystems.gcs.GcsPath +import org.mockito.ArgumentMatchers._ import org.mockito.Mockito.{times, verify, when} import org.scalatest.flatspec.AsyncFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.scalatestplus.mockito.MockitoSugar -import org.specs2.mock.Mockito._ +import common.mock.MockSugar import java.nio.file.NoSuchFileException import java.util.UUID @@ -29,7 +29,7 @@ import scala.language.postfixOps import scala.util.Failure import scala.util.control.NoStackTrace -class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with MockitoSugar { +class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with MockSugar { behavior of "NioFlowSpec" @@ -112,7 +112,7 @@ class NioFlowSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with it should "get hash from a GcsPath" in { val exception = new Exception("everything's fine, I am an expected blob failure") with NoStackTrace - val testPath = mock[GcsPath].smart + val testPath = mock[GcsPath] testPath.objectBlobId returns Failure(exception) val context = DefaultCommandContext(hashCommand(testPath).get, replyTo) diff --git a/engine/src/test/scala/cromwell/engine/workflow/WorkflowDockerLookupActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/WorkflowDockerLookupActorSpec.scala index e238336b37d..1d2652b83f1 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/WorkflowDockerLookupActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/WorkflowDockerLookupActorSpec.scala @@ -19,7 +19,6 @@ import cromwell.services.ServicesStore._ import 
org.scalatest.BeforeAndAfter import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} @@ -32,8 +31,7 @@ class WorkflowDockerLookupActorSpec with AnyFlatSpecLike with Matchers with ImplicitSender - with BeforeAndAfter - with Mockito { + with BeforeAndAfter { var workflowId: WorkflowId = _ var dockerSendingActor: TestProbe = _ diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActorSpec.scala index 5f98508b38c..fa90e796d95 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/SubWorkflowExecutionActorSpec.scala @@ -3,7 +3,7 @@ package cromwell.engine.workflow.lifecycle.execution import java.util.UUID import java.util.concurrent.atomic.AtomicInteger -import akka.actor.Props +import akka.actor.{ActorRef, Props} import akka.testkit.{TestFSMRef, TestProbe} import com.typesafe.config.ConfigFactory import common.assertion.ManyTimes @@ -30,34 +30,36 @@ import org.scalatest.BeforeAndAfterAll import org.scalatest.concurrent.Eventually import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar import wom.graph.WomIdentifier import scala.concurrent.duration._ import scala.language.postfixOps import scala.util.control.NoStackTrace -class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Mockito with Eventually with BeforeAndAfterAll { +class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with MockSugar + with Eventually with BeforeAndAfterAll { behavior of "SubWorkflowExecutionActor" - var serviceRegistryProbe: TestProbe = _ - var jobStoreProbe: TestProbe = _ - var subWorkflowStoreProbe: TestProbe = _ - var callCacheReadActorProbe: TestProbe = _ - var callCacheWriteActorProbe: TestProbe = _ - var dockerHashActorProbe: TestProbe = _ - var ioActorProbe: TestProbe = _ - var jobRestartCheckTokenDispenserProbe: TestProbe = _ - var jobExecutionTokenDispenserProbe: TestProbe = _ - var preparationActor: TestProbe = _ - var subWorkflowActor: TestProbe = _ - var deathWatch: TestProbe = _ - var parentProbe: TestProbe = _ - val parentBackendDescriptor = mock[BackendWorkflowDescriptor] - val parentWorkflowId: WorkflowId = WorkflowId.randomId() + private var serviceRegistryProbe: TestProbe = _ + private var jobStoreProbe: TestProbe = _ + private var jobRestartCheckTokenDispenserProbe: TestProbe = _ + private var jobExecutionTokenDispenserProbe: TestProbe = _ + private var subWorkflowStoreProbe: TestProbe = _ + private var callCacheReadActorProbe: TestProbe = _ + private var callCacheWriteActorProbe: TestProbe = _ + private var dockerHashActorProbe: TestProbe = _ + private var ioActorProbe: TestProbe = _ + private var preparationActor: TestProbe = _ + private var subWorkflowActor: TestProbe = _ + private var deathWatch: TestProbe = _ + private var parentProbe: TestProbe = _ + private val parentBackendDescriptor = mock[BackendWorkflowDescriptor] + private val parentWorkflowId: WorkflowId = WorkflowId.randomId() + parentBackendDescriptor.id returns parentWorkflowId - val parentWorkflowDescriptor = 
EngineWorkflowDescriptor( + private val parentWorkflowDescriptor = EngineWorkflowDescriptor( WomMocks.mockWorkflowDefinition("workflow"), parentBackendDescriptor, Map.empty, @@ -65,12 +67,12 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi List.empty, CallCachingOff ) - val subWorkflow = WomMocks.mockWorkflowDefinition("sub_wf") - val subWorkflowCall = WomMocks.mockWorkflowCall(WomIdentifier("workflow"), definition = subWorkflow) - val subKey: SubWorkflowKey = SubWorkflowKey(subWorkflowCall, None, 1) - val rootConfig = ConfigFactory.load + private val subWorkflow = WomMocks.mockWorkflowDefinition("sub_wf") + private val subWorkflowCall = WomMocks.mockWorkflowCall(WomIdentifier("workflow"), definition = subWorkflow) + private val subKey: SubWorkflowKey = SubWorkflowKey(subWorkflowCall, None, 1) + private val rootConfig = ConfigFactory.load - val awaitTimeout: FiniteDuration = 10 seconds + private val awaitTimeout: FiniteDuration = 10 seconds override def beforeAll(): Unit = { serviceRegistryProbe = TestProbe() @@ -88,7 +90,7 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi parentProbe = TestProbe() } - def buildSWEA(startState: StartableState = Submitted) = { + private def buildSWEA(startState: StartableState = Submitted) = { new TestFSMRef[SubWorkflowExecutionActorState, SubWorkflowExecutionActorData, SubWorkflowExecutionActor](system, Props( new SubWorkflowExecutionActor( subKey, @@ -112,8 +114,9 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi fileHashCacheActor = None, blacklistCache = None ) { - override def createSubWorkflowPreparationActor(subWorkflowId: WorkflowId) = preparationActor.ref - override def createSubWorkflowActor(createSubWorkflowActor: EngineWorkflowDescriptor) = subWorkflowActor.ref + override def createSubWorkflowPreparationActor(subWorkflowId: WorkflowId): ActorRef = preparationActor.ref + override def createSubWorkflowActor(createSubWorkflowActor: EngineWorkflowDescriptor): ActorRef = + subWorkflowActor.ref }), parentProbe.ref, s"SubWorkflowExecutionActorSpec-${UUID.randomUUID()}") } @@ -140,7 +143,7 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi eventually { swea.stateName shouldBe WaitingForValueStore - swea.stateData.subWorkflowId shouldBe Some(subWorkflowUuid) + swea.stateData.subWorkflowId shouldBe Option(subWorkflowUuid) } } @@ -175,7 +178,7 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi it should "Run a sub workflow" in { val swea = buildSWEA() - swea.setState(SubWorkflowPreparingState, SubWorkflowExecutionActorLiveData(Some(WorkflowId.randomId()), None)) + swea.setState(SubWorkflowPreparingState, SubWorkflowExecutionActorLiveData(Option(WorkflowId.randomId()), None)) val subWorkflowId = WorkflowId.randomId() val subBackendDescriptor = mock[BackendWorkflowDescriptor] @@ -199,7 +202,7 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi it should "Fail a sub workflow if preparation fails" in { val swea = buildSWEA() - swea.setState(SubWorkflowPreparingState, SubWorkflowExecutionActorLiveData(Some(WorkflowId.randomId()), None)) + swea.setState(SubWorkflowPreparingState, SubWorkflowExecutionActorLiveData(Option(WorkflowId.randomId()), None)) deathWatch watch swea val subWorkflowKey = mock[SubWorkflowKey] @@ -217,7 +220,7 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi it should "Relay Workflow Successful message" in { 
val swea = buildSWEA() val subworkflowId = WorkflowId.randomId() - swea.setState(SubWorkflowRunningState, SubWorkflowExecutionActorLiveData(Some(subworkflowId), None)) + swea.setState(SubWorkflowRunningState, SubWorkflowExecutionActorLiveData(Option(subworkflowId), None)) deathWatch watch swea @@ -234,7 +237,7 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi it should "Relay Workflow Failed message" in { val swea = buildSWEA() - swea.setState(SubWorkflowRunningState, SubWorkflowExecutionActorLiveData(Some(WorkflowId.randomId()), None)) + swea.setState(SubWorkflowRunningState, SubWorkflowExecutionActorLiveData(Option(WorkflowId.randomId()), None)) deathWatch watch swea @@ -252,7 +255,7 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi it should "Switch Succeeded to Failed and try again if the final metadata entry doesn't write" in { val swea = buildSWEA() - swea.setState(SubWorkflowRunningState, SubWorkflowExecutionActorLiveData(Some(WorkflowId.randomId()), None)) + swea.setState(SubWorkflowRunningState, SubWorkflowExecutionActorLiveData(Option(WorkflowId.randomId()), None)) deathWatch watch swea @@ -295,7 +298,7 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi it should "Relay Workflow Aborted message" in { val swea = buildSWEA() - swea.setState(SubWorkflowRunningState, SubWorkflowExecutionActorLiveData(Some(WorkflowId.randomId()), None)) + swea.setState(SubWorkflowRunningState, SubWorkflowExecutionActorLiveData(Option(WorkflowId.randomId()), None)) deathWatch watch swea @@ -311,7 +314,10 @@ class SubWorkflowExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike wi it should "Relay Workflow Abort command message" in { val swea = buildSWEA() - swea.setState(SubWorkflowRunningState, SubWorkflowExecutionActorLiveData(Some(WorkflowId.randomId()), Option(subWorkflowActor.ref))) + swea.setState( + SubWorkflowRunningState, + SubWorkflowExecutionActorLiveData(Option(WorkflowId.randomId()), Option(subWorkflowActor.ref)), + ) deathWatch watch swea diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorSpec.scala index a8a288993ee..602d04c145d 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCacheHashingJobActorSpec.scala @@ -5,6 +5,7 @@ import akka.actor.{ActorRef, Props} import akka.testkit.{TestFSMRef, TestProbe} import cats.data.NonEmptyList import cats.syntax.validated._ +import common.mock.MockSugar import cromwell.backend._ import cromwell.backend.standard.callcaching.StandardFileHashingActor.{FileHashResponse, SingleFileHashRequest} import cromwell.core.TestKitSuite @@ -23,7 +24,8 @@ import wom.values.{WomInteger, WomSingleFile, WomString, WomValue} import scala.util.control.NoStackTrace -class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike with BackendSpec with Matchers with Eventually with TableDrivenPropertyChecks { +class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike with BackendSpec with Matchers + with Eventually with TableDrivenPropertyChecks with MockSugar { behavior of "CallCacheReadingJobActor" def templateJobDescriptor(inputs: Map[LocallyQualifiedName, WomValue] = Map.empty): 
BackendJobDescriptor = { @@ -90,7 +92,7 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit callCachePathPrefixes = None, fileHashBatchSize = 100 ), parent.ref) - + val expectedInitialHashes = Set( // md5 of Do the stuff... now HashResult(HashKey("command template"), HashValue("2259B15D9120F50C1BD4B2A3E2CE5A0E")), @@ -121,7 +123,7 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit batchSize = 100 ) } - + def makeCCHJA(callCacheReader: Option[ActorRef], testFileHashingActor: ActorRef, parent: ActorRef, @@ -145,11 +147,11 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit } }, parent) } - + it should "send hash file requests when receiving a NextBatchOfFileHashesRequest" in { val callCacheReadProbe = TestProbe() val fileHashingActor = TestProbe() - + val cchja = makeCCHJA(Option(callCacheReadProbe.ref), fileHashingActor.ref, TestProbe().ref) val fileHashRequest1 = SingleFileHashRequest(null, null, null, null) @@ -169,10 +171,10 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit it should "send NoFileHashesResult and stop if there are no input files" in { val parent = TestProbe() val callCacheReadProbe = TestProbe() - + val cchja = makeCCHJA(Option(callCacheReadProbe.ref), TestProbe().ref, parent.ref) parent.watch(cchja) - + cchja.setState( WaitingForHashFileRequest, CallCacheHashingJobActorData(List.empty, List.empty, Option(callCacheReadProbe.ref), 50) @@ -182,12 +184,12 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit callCacheReadProbe.expectMsgClass(classOf[InitialHashingResult]) parent.expectMsgClass(classOf[InitialHashingResult]) - + callCacheReadProbe.expectMsg(NoFileHashesResult) parent.expectMsg(NoFileHashesResult) parent.expectTerminated(cchja) } - + def selfSendNextBatchRequest(ccReader: Option[ActorRef]): Assertion = { val fileHashingActor = TestProbe() val result: PartialFileHashingResult = PartialFileHashingResult(NonEmptyList.of(mock[HashResult])) @@ -218,7 +220,8 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit it should "send FinalFileHashingResult to parent and CCReader and die" in { val parent = TestProbe() val callCacheReadProbe = TestProbe() - List(CompleteFileHashingResult(Set(mock[HashResult]), "AggregatedFileHash"), NoFileHashesResult) foreach { result => + List(CompleteFileHashingResult(Set(mock[HashResult]), "AggregatedFileHash"), NoFileHashesResult) foreach + { result => val newData = CallCacheHashingJobActorData(List.empty, List.empty, Option(callCacheReadProbe.ref), 50) val cchja = makeCCHJA(Option(callCacheReadProbe.ref), TestProbe().ref, parent.ref, writeToCache = true, Option(newData -> Option(result))) @@ -274,9 +277,9 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit val hashKey = HashKey("file") val fileHashRequest: SingleFileHashRequest = SingleFileHashRequest(null, hashKey, null, null) val data = CallCacheHashingJobActorData(List(List(fileHashRequest)), List.empty, Option(callCacheReadProbe.ref), 50) - + cchja.setState(WaitingForHashFileRequest, data) - + system stop callCacheReadProbe.ref fileHasher.expectMsg(fileHashRequest) val result: HashResult = HashResult(hashKey, HashValue("fileHash")) @@ -284,7 +287,7 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit parent.expectMsg(CompleteFileHashingResult(Set(result), "45F27DD26834DBACBB05BBB1D651F5D1")) } - + it should "propagate 
HashingFailedMessage errors and die" in { val callCacheReadProbe = TestProbe() val parent = TestProbe() @@ -293,7 +296,7 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit cchja.setState(WaitingForHashFileRequest) parent.expectMsgClass(classOf[InitialHashingResult]) callCacheReadProbe.expectMsgClass(classOf[InitialHashingResult]) - + val hashFailed = HashingFailedMessage( "fileName", new Exception("Hashing failed ! - part of test flow") with NoStackTrace @@ -303,12 +306,12 @@ class CallCacheHashingJobActorSpec extends TestKitSuite with AnyFlatSpecLike wit callCacheReadProbe.expectMsg(hashFailed) parent.expectTerminated(cchja) } - + it should "run properly when writeToCache is ON and there is no CCRead actor" in { val parent = TestProbe() val cchja = makeCCHJA(None, TestProbe().ref, parent.ref) parent.watch(cchja) - + parent.expectMsgClass(classOf[InitialHashingResult]) parent.expectMsg(NoFileHashesResult) parent.expectTerminated(cchja) diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala index 408b1087588..279ee80681c 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala @@ -14,19 +14,19 @@ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks import org.scalatest.time.{Millis, Seconds, Span} -import org.specs2.mock.Mockito import scala.concurrent.ExecutionContext class CallCachingSlickDatabaseSpec - extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures with BeforeAndAfterAll with Mockito with TableDrivenPropertyChecks { + extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures with BeforeAndAfterAll + with TableDrivenPropertyChecks { - implicit val ec = ExecutionContext.global - implicit val defaultPatience = PatienceConfig(scaled(Span(5, Seconds)), scaled(Span(100, Millis))) + implicit val ec: ExecutionContext = ExecutionContext.global + implicit val defaultPatience: PatienceConfig = PatienceConfig(scaled(Span(5, Seconds)), scaled(Span(100, Millis))) // Test with and without prefixes. With prefixing tests accessing the detritus value CLOB, especially with MariaDB. 
// https://jira.mariadb.org/browse/CONJ-717 - val allowResultReuseTests = Table( + private val allowResultReuseTests = Table( ("description", "prefixOption"), ("without prefixes", None), ("with some prefixes", Option(List("prefix1", "prefix2", "prefix3", "prefix4"))), diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActorSpec.scala index b3875f1a749..1d78da0e37b 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/EngineJobHashingActorSpec.scala @@ -19,8 +19,10 @@ import wdl.draft2.model.command.StringCommandPart import wom.core.LocallyQualifiedName import wom.graph.WomIdentifier import wom.values.WomValue +import common.mock.MockSugar -class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with BackendSpec with TableDrivenPropertyChecks with Eventually { +class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with BackendSpec + with MockSugar with TableDrivenPropertyChecks with Eventually { behavior of "EngineJobHashingActor" def templateJobDescriptor(inputs: Map[LocallyQualifiedName, WomValue] = Map.empty): BackendJobDescriptor = { @@ -33,7 +35,7 @@ class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with M val jobDescriptor = BackendJobDescriptor(workflowDescriptor, BackendJobDescriptorKey(call, None, 1), Map.empty, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty) jobDescriptor } - + val serviceRegistryActorProbe: TestProbe = TestProbe() def makeEJHA(receiver: ActorRef, activity: CallCachingActivity, ccReaderProps: Props = Props.empty): TestActorRef[EngineJobHashingActor] = { @@ -62,7 +64,7 @@ class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with M val initialResult: InitialHashingResult = mock[InitialHashingResult] actorUnderTest ! initialResult eventually { - actorUnderTest.underlyingActor.initialHash shouldBe Some(initialResult) + actorUnderTest.underlyingActor.initialHash shouldBe Option(initialResult) } } @@ -94,14 +96,14 @@ class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with M it should "send hashes to receiver when receiving a CompleteFileHashingResult" in { val receiver = TestProbe() val actorUnderTest = makeEJHA(receiver.ref, CallCachingActivity(ReadAndWriteCache)) - + val initialHashes = Set(HashResult(HashKey("key"), HashValue("value"))) val initialAggregatedHash = "aggregatedHash" val initialResult = InitialHashingResult(initialHashes, initialAggregatedHash) val fileHashes = Set(HashResult(HashKey("file key"), HashValue("value"))) val fileAggregatedHash = "aggregatedFileHash" val fileResult = CompleteFileHashingResult(fileHashes, fileAggregatedHash) - + actorUnderTest ! initialResult actorUnderTest ! fileResult receiver.expectMsg(CallCacheHashes(initialHashes, initialAggregatedHash, Option(FileHashes(fileHashes, fileAggregatedHash)))) @@ -135,7 +137,7 @@ class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with M case NextHit => monitorProbe.ref forward NextHit } }) - + val actorUnderTest = makeEJHA(receiver.ref, activity, ccReadActorProps) actorUnderTest ! 
NextHit @@ -174,7 +176,7 @@ class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with M serviceRegistryActorProbe.expectMsgClass(classOf[PutMetadataAction]) receiver.expectTerminated(actorUnderTest) } - + object EngineJobHashingActorTest { def props(receiver: ActorRef, serviceRegistryActor: ActorRef, @@ -199,7 +201,7 @@ class EngineJobHashingActorSpec extends TestKitSuite with AnyFlatSpecLike with M callCachingEligible = callCachingEligible, fileHashBatchSize = fileHashBatchSize)) } - + class EngineJobHashingActorTest(receiver: ActorRef, serviceRegistryActor: ActorRef, jobDescriptor: BackendJobDescriptor, diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparationSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparationSpec.scala index 664bcb648f9..7d0d40fd6f7 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparationSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/CallPreparationSpec.scala @@ -6,7 +6,7 @@ import cromwell.core.CallKey import cromwell.engine.workflow.lifecycle.execution.stores.ValueStore import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar import shapeless._ import wom.callable.Callable.RequiredInputDefinition import wom.expression.{NoIoFunctionSet, WomExpression} @@ -15,7 +15,7 @@ import wom.graph.CommandCallNode import wom.types.WomSingleFileType import wom.values.WomString -class CallPreparationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito { +class CallPreparationSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { it should "disallow empty Strings being input as Files" in { val callKey = mock[CallKey] diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala index 00b611aff52..3744ea9e3cd 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationActorSpec.scala @@ -13,7 +13,7 @@ import cromwell.services.keyvalue.KeyValueServiceActor.{KvGet, KvKeyLookupFailed import org.scalatest.BeforeAndAfter import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar import wom.RuntimeAttributesKeys import wom.callable.Callable.InputDefinition import wom.core.LocallyQualifiedName @@ -24,7 +24,7 @@ import scala.language.postfixOps import scala.util.control.NoStackTrace class JobPreparationActorSpec - extends TestKitSuite with AnyFlatSpecLike with Matchers with ImplicitSender with BeforeAndAfter with Mockito { + extends TestKitSuite with AnyFlatSpecLike with Matchers with ImplicitSender with BeforeAndAfter with MockSugar { behavior of "JobPreparationActor" @@ -130,7 +130,9 @@ class JobPreparationActorSpec val actor = TestActorRef(helper.buildTestJobPreparationActor(1 minute, 1 minutes, List.empty, inputsAndAttributes, List.empty), self) actor ! 
Start(ValueStore.empty) helper.workflowDockerLookupActor.expectMsgClass(classOf[DockerInfoRequest]) - helper.workflowDockerLookupActor.reply(DockerInfoSuccessResponse(DockerInformation(hashResult, None), mock[DockerInfoRequest])) + helper.workflowDockerLookupActor.reply( + DockerInfoSuccessResponse(DockerInformation(hashResult, None), mock[DockerInfoRequest]) + ) expectMsgPF(5 seconds) { case success: BackendJobPreparationSucceeded => success.jobDescriptor.runtimeAttributes("docker").valueString shouldBe dockerValue diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationTestHelper.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationTestHelper.scala index 3db6ef6430a..d4e62c262f6 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationTestHelper.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/job/preparation/JobPreparationTestHelper.scala @@ -10,7 +10,7 @@ import cromwell.engine.workflow.lifecycle.execution.WorkflowExecutionActorData import cromwell.engine.workflow.lifecycle.execution.job.preparation.JobPreparationTestHelper._ import cromwell.engine.workflow.lifecycle.execution.stores.ValueStore import cromwell.services.keyvalue.KeyValueServiceActor.{KvJobKey, ScopedKey} -import org.specs2.mock.Mockito +import common.mock.MockSugar import wdl.draft2.model.LocallyQualifiedName import wom.expression.NoIoFunctionSet import wom.graph.{CommandCallNode, WomIdentifier} @@ -18,22 +18,23 @@ import wom.values.{WomEvaluatedCallInputs, WomValue} import scala.concurrent.duration.FiniteDuration -class JobPreparationTestHelper(implicit val system: ActorSystem) extends Mockito { - val executionData = mock[WorkflowExecutionActorData] - val workflowDescriptor = mock[EngineWorkflowDescriptor] - val backendDescriptor = mock[BackendWorkflowDescriptor] - val workflowId = WorkflowId.randomId() +class JobPreparationTestHelper(implicit val system: ActorSystem) extends MockSugar { + val executionData: WorkflowExecutionActorData = mock[WorkflowExecutionActorData] + val workflowDescriptor: EngineWorkflowDescriptor = mock[EngineWorkflowDescriptor] + val backendDescriptor: BackendWorkflowDescriptor = mock[BackendWorkflowDescriptor] + val workflowId: WorkflowId = WorkflowId.randomId() workflowDescriptor.backendDescriptor returns backendDescriptor workflowDescriptor.id returns workflowId workflowDescriptor.possiblyNotRootWorkflowId returns workflowId.toPossiblyNotRoot workflowDescriptor.rootWorkflowId returns workflowId.toRoot workflowDescriptor.rootWorkflow returns workflowDescriptor executionData.workflowDescriptor returns workflowDescriptor - val call = CommandCallNode(WomIdentifier("JobPreparationSpec_call"), null, null, null, Set.empty, null, None) - val mockJobKey = BackendJobDescriptorKey(call, None, 1) - val serviceRegistryProbe = TestProbe() - val ioActor = TestProbe() - val workflowDockerLookupActor = TestProbe() + val call: CommandCallNode = + CommandCallNode(WomIdentifier("JobPreparationSpec_call"), null, null, null, Set.empty, null, None) + val mockJobKey: BackendJobDescriptorKey = BackendJobDescriptorKey(call, None, 1) + val serviceRegistryProbe: TestProbe = TestProbe() + val ioActor: TestProbe = TestProbe() + val workflowDockerLookupActor: TestProbe = TestProbe() val scopedKeyMaker: ScopedKeyMaker = key => ScopedKey(workflowId, KvJobKey("correct.horse.battery.staple", None, 1), key) @@ -42,7 +43,7 @@ class 
JobPreparationTestHelper(implicit val system: ActorSystem) extends Mockito dockerHashCredentials: List[Any], inputsAndAttributes: ErrorOr[(WomEvaluatedCallInputs, Map[LocallyQualifiedName, WomValue])], kvStoreKeysForPrefetch: List[String], - jobKey: BackendJobDescriptorKey = mockJobKey) = { + jobKey: BackendJobDescriptorKey = mockJobKey): Props = { Props(new TestJobPreparationActor( kvStoreKeysForPrefetch = kvStoreKeysForPrefetch, @@ -78,15 +79,15 @@ private[preparation] class TestJobPreparationActor(kvStoreKeysForPrefetch: List[ ioActor = ioActor, backendSingletonActor = None) { - override lazy val kvStoreKeysToPrefetch = kvStoreKeysForPrefetch + override private[preparation] lazy val kvStoreKeysToPrefetch = kvStoreKeysForPrefetch override private[preparation] lazy val expressionLanguageFunctions = NoIoFunctionSet override private[preparation] lazy val dockerHashCredentials = dockerHashCredentialsInput override private[preparation] lazy val noResponseTimeout = dockerNoResponseTimeoutInput override private[preparation] lazy val hasDockerDefinition = true - override def scopedKey(key: String) = scopedKeyMaker.apply(key) - override def evaluateInputsAndAttributes(valueStore: ValueStore) = inputsAndAttributes + override private[preparation] def scopedKey(key: String): ScopedKey = scopedKeyMaker.apply(key) + override private[preparation] def evaluateInputsAndAttributes(valueStore: ValueStore) = inputsAndAttributes override private[preparation] def jobExecutionProps(jobDescriptor: BackendJobDescriptor, initializationData: Option[BackendInitializationData], diff --git a/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala b/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala index da6c22c7d47..be7b0d741a3 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/mocks/DeclarationMock.scala @@ -1,6 +1,6 @@ package cromwell.engine.workflow.mocks -import org.specs2.mock.Mockito +import common.mock.MockSugar import wdl.draft2.model.{Declaration, WdlExpression} import wom.types.WomType @@ -8,10 +8,10 @@ object DeclarationMock { type DeclarationMockType = (String, WomType, WdlExpression) } -trait DeclarationMock extends Mockito { +trait DeclarationMock extends MockSugar { def mockDeclaration(name: String, womType: WomType, - expression: WdlExpression) = { + expression: WdlExpression): Declaration = { val declaration = mock[Declaration] declaration.unqualifiedName returns name declaration.expression returns Option(expression) diff --git a/engine/src/test/scala/cromwell/engine/workflow/mocks/TaskMock.scala b/engine/src/test/scala/cromwell/engine/workflow/mocks/TaskMock.scala index bf07f0a950a..7ce565a4b6c 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/mocks/TaskMock.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/mocks/TaskMock.scala @@ -1,18 +1,18 @@ package cromwell.engine.workflow.mocks import cromwell.engine.workflow.mocks.DeclarationMock.DeclarationMockType -import org.specs2.mock.Mockito +import common.mock.MockSugar import wdl.draft2.parser.WdlParser.Ast import wdl.draft2.model.{Declaration, TaskOutput, WdlRuntimeAttributes, WdlTask} -trait TaskMock extends Mockito { - +trait TaskMock extends MockSugar { + def mockTask(name: String, declarations: Seq[Declaration] = Seq.empty, runtimeAttributes: WdlRuntimeAttributes = new WdlRuntimeAttributes(Map.empty), commandTemplateString: String = "!!shazam!!", outputs: Seq[DeclarationMockType] = Seq.empty - ) 
= { + ): WdlTask = { val task = mock[WdlTask] task.declarations returns declarations task.runtimeAttributes returns runtimeAttributes diff --git a/engine/src/test/scala/cromwell/engine/workflow/mocks/WdlWomExpressionMock.scala b/engine/src/test/scala/cromwell/engine/workflow/mocks/WdlWomExpressionMock.scala index 17e8ea74566..b6b33aebfe6 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/mocks/WdlWomExpressionMock.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/mocks/WdlWomExpressionMock.scala @@ -1,6 +1,7 @@ package cromwell.engine.workflow.mocks -import org.specs2.mock.Mockito +import common.mock.MockSugar +import org.mockito.ArgumentMatchers._ import wdl.draft2.model.{Scope, WdlExpression, WdlWomExpression} import wdl.draft2.model.WdlExpression._ import wdl.draft2.model.expression.WdlFunctions @@ -8,18 +9,11 @@ import wom.values.{WomInteger, WomString, WomValue} import scala.util.Success -trait WdlWomExpressionMock extends Mockito { - val helloStringExpression = { - val expression = mock[WdlExpression] - expression.valueString returns "hello" - expression.evaluate(any[ScopedLookupFunction], any[ WdlFunctions[WomValue]]) returns Success(WomString("hello")) - expression - } - +trait WdlWomExpressionMock extends MockSugar { def mockStringExpression(value: String): WdlWomExpression = { val expression = mock[WdlExpression] expression.valueString returns value - expression.evaluate(any[ScopedLookupFunction], any[ WdlFunctions[WomValue]]) returns Success(WomString(value)) + expression.evaluate(any[ScopedLookupFunction](), any[WdlFunctions[WomValue]]) returns Success(WomString(value)) val mockScope = mock[Scope] WdlWomExpression(expression, mockScope) @@ -28,7 +22,7 @@ trait WdlWomExpressionMock extends Mockito { def mockIntExpression(value: Int): WdlWomExpression = { val expression = mock[WdlExpression] expression.valueString returns value.toString - expression.evaluate(any[ScopedLookupFunction], any[ WdlFunctions[WomValue]]) returns Success(WomInteger(value)) + expression.evaluate(any[ScopedLookupFunction](), any[WdlFunctions[WomValue]]) returns Success(WomInteger(value)) val mockScope = mock[Scope] WdlWomExpression(expression, mockScope) diff --git a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala index b6670788a65..fe51c9b065b 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala @@ -1,7 +1,6 @@ package cromwell.engine.workflow.workflowstore import java.time.OffsetDateTime - import cats.data.NonEmptyList import com.dimafeng.testcontainers.Container import common.assertion.CromwellTimeoutSpec @@ -14,17 +13,20 @@ import org.scalatest.concurrent.ScalaFutures import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.time.{Millis, Seconds, Span} -import org.specs2.mock.Mockito + +import scala.concurrent.ExecutionContextExecutor import spray.json.{JsObject, JsString} import scala.concurrent.duration._ import scala.concurrent.{Await, ExecutionContext, Future} -class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures with BeforeAndAfterAll with Mockito { - implicit val ec = ExecutionContext.global - implicit val defaultPatience = PatienceConfig(scaled(Span(20, Seconds)), scaled(Span(100, Millis))) - val 
onHoldSourceFilesCollection = NonEmptyList.of( +class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures + with BeforeAndAfterAll { + implicit val ec: ExecutionContextExecutor = ExecutionContext.global + implicit val defaultPatience: PatienceConfig = PatienceConfig(scaled(Span(20, Seconds)), scaled(Span(100, Millis))) + + private val onHoldSourceFilesCollection = NonEmptyList.of( WorkflowSourceFilesCollection( Option("sample"), None, @@ -41,7 +43,7 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat ) ) - val excludedGroupSourceFilesCollection = NonEmptyList.of( + private val excludedGroupSourceFilesCollection = NonEmptyList.of( WorkflowSourceFilesCollection( Option("sample"), None, @@ -58,7 +60,7 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat ) ) - val includedGroupSourceFilesCollection1 = NonEmptyList.of( + private val includedGroupSourceFilesCollection1 = NonEmptyList.of( WorkflowSourceFilesCollection( Option("sample"), None, @@ -75,7 +77,7 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat ) ) - val includedGroupSourceFilesCollection2 = NonEmptyList.of( + private val includedGroupSourceFilesCollection2 = NonEmptyList.of( WorkflowSourceFilesCollection( Option("sample"), None, @@ -92,7 +94,7 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat ) ) - val includedGroupSourceFilesCollection3 = NonEmptyList.of( + private val includedGroupSourceFilesCollection3 = NonEmptyList.of( WorkflowSourceFilesCollection( Option("sample"), None, @@ -410,7 +412,7 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat ((for { _ <- workflowStore.add(sourcesToSubmit1) _ <- workflowStore.add(sourcesToSubmit2) - } yield ("incorrectly accepted")) recoverWith { + } yield "incorrectly accepted") recoverWith { case error => for { message <- Future { error.getMessage should be(s"Requested workflow IDs are already in use: $requestedId") @@ -443,7 +445,7 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat ((for { _ <- workflowStore.add(sourcesToSubmit1) _ <- workflowStore.add(sourcesToSubmit2) - } yield ("incorrectly accepted")) recoverWith { + } yield "incorrectly accepted") recoverWith { case error => for { message <- Future { error.getMessage should be(s"Requested workflow IDs are already in use: $requestedId1") @@ -473,7 +475,7 @@ class SqlWorkflowStoreSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat ((for { _ <- workflowStore.add(sourcesToSubmit) - } yield ("incorrectly accepted")) recoverWith { + } yield "incorrectly accepted") recoverWith { case error => for { message <- Future { error.getMessage should be(s"Requested workflow IDs are duplicated: $requestedId1") diff --git a/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala b/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala index b53fd37dc2b..ac7966f1da7 100644 --- a/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala @@ -16,14 +16,13 @@ import org.scalatest.flatspec.AsyncFlatSpecLike import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks import org.scalatest.{Assertion, Succeeded} -import org.specs2.mock.Mockito import spray.json._ import scala.concurrent.Future import scala.concurrent.duration._ import scala.util.Random 
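A recurring mechanical change throughout these test hunks is replacing `Some(...)` with `Option(...)`. The snippet below is an illustrative aside, not part of the patch: it shows the null-safety rationale usually behind this convention, namely that `Option.apply` collapses `null` to `None`, while `Some` wraps whatever it is given.

object OptionVsSome extends App {
  // Option.apply normalizes null to None; Some(null) compiles but defeats Option's purpose.
  val fromJavaLand: String = null
  assert(Option(fromJavaLand).isEmpty)   // None
  assert(Some(fromJavaLand).isDefined)   // Some(null)
}
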
-class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with Mockito +class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with Matchers with TableDrivenPropertyChecks with ImplicitSender { behavior of "MetadataBuilderActor" @@ -83,12 +82,12 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val workflowA = WorkflowId.randomId() val workflowACalls = List( - Option(MetadataJobKey("callB", Some(1), 3)), + Option(MetadataJobKey("callB", Option(1), 3)), Option(MetadataJobKey("callB", None, 1)), - Option(MetadataJobKey("callB", Some(1), 2)), + Option(MetadataJobKey("callB", Option(1), 2)), Option(MetadataJobKey("callA", None, 1)), - Option(MetadataJobKey("callB", Some(1), 1)), - Option(MetadataJobKey("callB", Some(0), 1)), + Option(MetadataJobKey("callB", Option(1), 1)), + Option(MetadataJobKey("callB", Option(0), 1)), None ) val workflowAEvents = workflowACalls map { makeEvent(workflowA, _) } @@ -555,11 +554,11 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with metadataBuilderActorName = "mba-non-empty-values", ) } - + it should "expand sub workflow metadata when asked for" in { val mainWorkflowId = WorkflowId.randomId() val subWorkflowId = WorkflowId.randomId() - + val mainEvents = List( MetadataEvent(MetadataKey(mainWorkflowId, Option(MetadataJobKey("callA", None, 1)), "subWorkflowId"), MetadataValue(subWorkflowId)) ) @@ -567,13 +566,13 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val subEvents = List( MetadataEvent(MetadataKey(mainWorkflowId, None, "some"), MetadataValue("sub workflow info")) ) - + val mainQuery = MetadataQuery(mainWorkflowId, None, None, None, None, expandSubWorkflows = true) val mainQueryAction = GetMetadataAction(mainQuery) - + val subQuery = MetadataQuery(subWorkflowId, None, None, None, None, expandSubWorkflows = true) val subQueryAction = GetMetadataAction(subQuery, checkTotalMetadataRowNumberBeforeQuerying = false) - + val parentProbe = TestProbe("parentProbe") val mockReadMetadataWorkerActor = TestProbe("mockReadMetadataWorkerActor") @@ -590,7 +589,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with mockReadMetadataWorkerActor.reply(MetadataLookupResponse(mainQuery, mainEvents)) mockReadMetadataWorkerActor.expectMsg(defaultTimeout, subQueryAction) mockReadMetadataWorkerActor.reply(MetadataLookupResponse(subQuery, subEvents)) - + val expandedRes = s""" |{ @@ -615,7 +614,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val bmr = response.mapTo[SuccessfulMetadataJsonResponse] bmr map { b => b.responseJson shouldBe expandedRes.parseJson} } - + it should "NOT expand sub workflow metadata when NOT asked for" in { val mainWorkflowId = WorkflowId.randomId() val subWorkflowId = WorkflowId.randomId() @@ -626,7 +625,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with val queryNoExpand = MetadataQuery(mainWorkflowId, None, None, None, None, expandSubWorkflows = false) val queryNoExpandAction = GetMetadataAction(queryNoExpand) - + val parentProbe = TestProbe("parentProbe") val mockReadMetadataWorkerActor = TestProbe("mockReadMetadataWorkerActor") @@ -651,7 +650,7 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with | "subWorkflowId": "$subWorkflowId", | "attempt": 1, | "shardIndex": -1 - | } + | } | ] | }, | "id": "$mainWorkflowId" diff --git 
a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsReaderSpec.scala b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsReaderSpec.scala index c126c5ddb01..fbcd900f371 100644 --- a/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsReaderSpec.scala +++ b/filesystems/drs/src/test/scala/cromwell/filesystems/drs/DrsReaderSpec.scala @@ -7,15 +7,18 @@ import cromwell.core.WorkflowOptions import org.apache.http.client.methods.{CloseableHttpResponse, HttpGet} import org.apache.http.entity.ByteArrayEntity import org.apache.http.impl.client.{CloseableHttpClient, HttpClientBuilder} -import org.mockito.Mockito.verify +import org.mockito.ArgumentMatchers._ +import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito -import org.testcontainers.shaded.org.apache.commons.io.IOUtils +import common.mock.MockSugar +import java.io.EOFException import java.nio.ByteBuffer +import java.nio.channels.ReadableByteChannel +import scala.annotation.tailrec -class DrsReaderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers with Mockito { +class DrsReaderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "DrsReader" @@ -63,13 +66,13 @@ class DrsReaderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matche it should "return a closeable channel for an access url" in { val exampleBytes = Array[Byte](1, 2, 3) - val httpResponse = mock[CloseableHttpResponse].smart + val httpResponse = mock[CloseableHttpResponse] httpResponse.getEntity returns new ByteArrayEntity(exampleBytes) - val httpClient = mock[CloseableHttpClient].smart - doReturn(httpResponse).when(httpClient).execute(anyObject[HttpGet]) + val httpClient = mock[CloseableHttpClient] + httpClient.execute(any[HttpGet]) returns httpResponse - val httpClientBuilder = mock[HttpClientBuilder].smart + val httpClientBuilder = mock[HttpClientBuilder] httpClientBuilder.build() returns httpClient val drsPathResolver = new MockEngineDrsPathResolver(httpClientBuilderOverride = Option(httpClientBuilder)) @@ -82,7 +85,7 @@ class DrsReaderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matche val buffer = ByteBuffer.allocate(exampleBytes.length) channel.isOpen should be (true) - IOUtils.readFully(channel, buffer) + DrsReaderSpec.readToBuffer(channel, buffer) channel.close() val httpGetCapture = capture[HttpGet] @@ -92,9 +95,22 @@ class DrsReaderSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matche verify(httpClient).close() verify(httpResponse).close() - val actualHeaders = httpGetCapture.value.getAllHeaders + val actualHeaders = httpGetCapture.getValue.getAllHeaders actualHeaders.length should be(1) actualHeaders(0).getName should be("hello") actualHeaders(0).getValue should be("world") } } + +object DrsReaderSpec { + @tailrec + def readToBuffer(input: ReadableByteChannel, buffer: ByteBuffer): Unit = { + if (buffer.remaining() > 0) { + if (input.read(buffer) >= 0) { + readToBuffer(input, buffer) + } else { + throw new EOFException(s"input exhausted with ${buffer.remaining()} expected bytes") + } + } + } +} diff --git a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsEnhancedRequestSpec.scala b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsEnhancedRequestSpec.scala index 1a2f9b32e3c..01a75e21ee5 100644 --- a/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsEnhancedRequestSpec.scala +++ 
b/filesystems/gcs/src/test/scala/cromwell/filesystems/gcs/GcsEnhancedRequestSpec.scala @@ -8,68 +8,86 @@ import com.google.cloud.storage.StorageException import com.google.cloud.storage.contrib.nio.CloudStorageFileSystem import common.assertion.CromwellTimeoutSpec import cromwell.filesystems.gcs.RequesterPaysErrors._ -import org.scalamock.scalatest.MockFactory +import org.mockito.ArgumentMatchers._ +import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar -class GcsEnhancedRequestSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito with MockFactory { +class GcsEnhancedRequestSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "GcsEnhancedRequest" - val path = GcsPath(CloudStorageFileSystem.forBucket("bucket").getPath("test"), any[com.google.api.services.storage.Storage], any[com.google.cloud.storage.Storage], anyString) + private val path = + GcsPath( + CloudStorageFileSystem.forBucket("bucket").getPath("test"), + mock[com.google.api.services.storage.Storage], + mock[com.google.cloud.storage.Storage], + "GcsEnhancedRequest-project", + ) val requesterPaysException = new StorageException(BucketIsRequesterPaysErrorCode, "Bucket is a requester pays bucket but no user project provided.") it should "attempt first without project, and not retry if the requests succeeds" in { - val testFunction = mockFunction[Boolean, String] - testFunction.expects(false).returns("hello").once() + val testFunction = mock[Boolean => String] + when(testFunction.apply(false)).thenReturn("hello") GcsEnhancedRequest.recoverFromProjectNotProvided(path, testFunction).unsafeRunSync() shouldBe "hello" + verify(testFunction).apply(false) + verify(testFunction).apply(anyBoolean) } it should "retry requests with project if the error matches and succeed" in { - val testFunction = mockFunction[Boolean, String] + val testFunction = mock[Boolean => String] // First time, throw a requester pays exception - testFunction.expects(false).throws(requesterPaysException) + when(testFunction.apply(false)).thenThrow(requesterPaysException) // We expect it to be called a second time with withProject = true this time - testFunction.expects(true).returns("hello") + when(testFunction.apply(true)).thenReturn("hello") GcsEnhancedRequest.recoverFromProjectNotProvided(path, testFunction).unsafeRunSync() shouldBe "hello" + verify(testFunction).apply(false) + verify(testFunction).apply(true) + verify(testFunction, times(2)).apply(anyBoolean) } it should "retry requests with project if the error matches and fail" in { - val testFunction = mockFunction[Boolean, String] + val testFunction = mock[Boolean => String] // First time, throw a requester pays exception - testFunction.expects(false).throws(requesterPaysException) + when(testFunction.apply(false)).thenThrow(requesterPaysException) // We expect it to be called a second time with withProject = true this time, and fail for another reason - testFunction.expects(true).throws(new RuntimeException("it really doesn't work")) + when(testFunction.apply(true)).thenThrow(new RuntimeException("it really doesn't work")) a[RuntimeException] should be thrownBy GcsEnhancedRequest.recoverFromProjectNotProvided(path, testFunction).unsafeRunSync() } it should "not retry requests if the error does not match" in { - val testFunction = mockFunction[Boolean, String] + val testFunction = mock[Boolean => String] // Throw an unrelated 
exception, should only be called once - testFunction.expects(false).throws(new RuntimeException("it really doesn't work")).once() + when(testFunction.apply(false)).thenThrow(new RuntimeException("it really doesn't work")) a[RuntimeException] should be thrownBy GcsEnhancedRequest.recoverFromProjectNotProvided(path, testFunction).unsafeRunSync() + verify(testFunction).apply(false) + verify(testFunction).apply(anyBoolean) } it should "re throw StorageException 404 to NoSuchFileException" in { - val testFunction = mockFunction[Boolean, String] + val testFunction = mock[Boolean => String] // Throw an unrelated exception, should only be called once - testFunction.expects(false).throws(new StorageException(404, "gs://does/not/exist")).once() + when(testFunction.apply(false)).thenThrow(new StorageException(404, "gs://does/not/exist")) a[FileNotFoundException] should be thrownBy GcsEnhancedRequest.recoverFromProjectNotProvided(path, testFunction).unsafeRunSync() + verify(testFunction).apply(false) + verify(testFunction).apply(anyBoolean) } it should "re throw GoogleJsonResponseException 404 to NoSuchFileException" in { - val testFunction = mockFunction[Boolean, String] + val testFunction = mock[Boolean => String] val builder = new HttpResponseException.Builder(404, "NotFound", new HttpHeaders) val error = new GoogleJsonError() error.setCode(404) // Throw an unrelated exception, should only be called once - testFunction.expects(false).throws(new GoogleJsonResponseException(builder, error)).once() + when(testFunction.apply(false)).thenAnswer(_ => throw new GoogleJsonResponseException(builder, error)) a[FileNotFoundException] should be thrownBy GcsEnhancedRequest.recoverFromProjectNotProvided(path, testFunction).unsafeRunSync() + verify(testFunction).apply(false) + verify(testFunction).apply(anyBoolean) } } diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssNioUtilSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssNioUtilSpec.scala index b01772e8170..b4ebb84b7fa 100644 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssNioUtilSpec.scala +++ b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssNioUtilSpec.scala @@ -7,7 +7,6 @@ import common.assertion.CromwellTimeoutSpec import org.scalatest._ import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.scalatestplus.mockito.MockitoSugar import scala.util.control.Breaks import scala.util.{Failure, Success, Try} @@ -29,7 +28,7 @@ object OssNioUtilSpec { ) } -trait OssNioUtilSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with MockitoSugar with Matchers { +trait OssNioUtilSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers { override def withFixture(test: NoArgTest): Outcome = { if (test.tags.contains(NeedAK.name)) { @@ -57,14 +56,15 @@ trait OssNioUtilSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Mocki OssStorageConfiguration.parseMap(ossInfo) } getOrElse(throw new IllegalArgumentException("you should supply oss info before testing oss related operation")) - lazy val mockOssConf: OssStorageConfiguration = new DefaultOssStorageConfiguration("mock-endpoint", "mock-id", "mock-key", None) + lazy val mockOssConf: OssStorageConfiguration = + DefaultOssStorageConfiguration("mock-endpoint", "mock-id", "mock-key", None) - lazy val ossProvider = OssStorageFileSystemProvider(ossConf) - lazy val mockProvider = OssStorageFileSystemProvider(mockOssConf) - lazy val ossFileSystem = OssStorageFileSystem(bucket, 
ossConf) - lazy val mockFileSystem = OssStorageFileSystem(bucket, mockOssConf) - val fileName = DEFAULT_FILE_NAME - val fileContent = DEFAULT_CONTENT + lazy val ossProvider: OssStorageFileSystemProvider = OssStorageFileSystemProvider(ossConf) + lazy val mockProvider: OssStorageFileSystemProvider = OssStorageFileSystemProvider(mockOssConf) + lazy val ossFileSystem: OssStorageFileSystem = OssStorageFileSystem(bucket, ossConf) + lazy val mockFileSystem: OssStorageFileSystem = OssStorageFileSystem(bucket, mockOssConf) + val fileName: String = DEFAULT_FILE_NAME + val fileContent: String = DEFAULT_CONTENT lazy val ossClient: OSSClient = mockOssConf.newOssClient() diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributesViewSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributesViewSpec.scala index 635b287ae2f..7825a369076 100644 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributesViewSpec.scala +++ b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributesViewSpec.scala @@ -11,11 +11,11 @@ class OssStorageFileAttributesViewSpec extends OssNioUtilSpec { import OssStorageObjectAttributesSpec._ - def getObject = { + private def getObject = { OssStoragePath.getPath(mockFileSystem, fileName) } - def getDir = { + private def getDir = { OssStoragePath.getPath(mockFileSystem, "/bcs-dir/") } diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributesSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributesSpec.scala index 0c2174350af..4b2ab275b4d 100644 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributesSpec.scala +++ b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributesSpec.scala @@ -1,24 +1,22 @@ package cromwell.filesystems.oss.nio import java.nio.file.attribute.FileTime - import com.aliyun.oss.model.ObjectMetadata +import common.mock.MockSugar import cromwell.core.TestKitSuite import org.mockito.Mockito._ -import java.text.SimpleDateFormat -import java.util.Locale - -import org.scalatestplus.mockito.MockitoSugar +import java.text.SimpleDateFormat +import java.util.{Date, Locale} -object OssStorageObjectAttributesSpec extends MockitoSugar{ +object OssStorageObjectAttributesSpec extends MockSugar { val DEFAULT_BUCKET = "bcs-bucket" val DEFAULT_FILE_NAME = "/bcs-dir/bcs-file" val DEFAULT_LENGTH: Long = 2102784 - val DEFAULT_MODIFIED = { + val DEFAULT_MODIFIED: Date = { val target = "Thu Dec 21 15:19:27 CST 2017" val df = new SimpleDateFormat("EEE MMM dd kk:mm:ss z yyyy", Locale.ENGLISH) df.parse(target) @@ -26,15 +24,15 @@ object OssStorageObjectAttributesSpec extends MockitoSugar{ val DEFAULT_ETAG = "F80066F040BDA4F991DB5F8AEC9905FB" - val DEFAULT_CONTENT_DISPOSITION = None.orNull + val DEFAULT_CONTENT_DISPOSITION: String = null - val DEFAULT_CACHE_CONTROL = None.orNull + val DEFAULT_CACHE_CONTROL: String = null - val DEFAULT_CONTENT_ENCODING = None.orNull + val DEFAULT_CONTENT_ENCODING: String = null val DEFAULT_CONTENT_TYPE = "application/x-msdownload" - def getObjectMeta = { + def getObjectMeta: ObjectMetadata = { val meta = mock[ObjectMetadata] when(meta.getContentDisposition).thenReturn(DEFAULT_CONTENT_DISPOSITION) @@ -56,16 +54,16 @@ class OssStorageObjectAttributesSpec extends TestKitSuite with OssNioUtilSpec { import OssStorageObjectAttributesSpec._ - def getObject = { + def getObject: 
OssStoragePathImpl = { OssStoragePath.getPath(mockFileSystem, fileName) } - def getDir = { + def getDir: OssStoragePathImpl = { OssStoragePath.getPath(mockFileSystem, "/bcs-dir/") } "an oss object attr" should "be an right" in { - val attr = new OssStorageObjectAttributes(getObjectMeta, getObject) + val attr = OssStorageObjectAttributes(getObjectMeta, getObject) attr.fileKey shouldEqual getObject.pathAsString @@ -74,12 +72,12 @@ class OssStorageObjectAttributesSpec extends TestKitSuite with OssNioUtilSpec { attr.cacheControl() shouldBe empty attr.contentDisposition shouldBe empty attr.contentEncoding shouldBe empty - attr.etag shouldBe Some(DEFAULT_ETAG) + attr.etag shouldBe Option(DEFAULT_ETAG) attr.size shouldBe DEFAULT_LENGTH } "an oss directory attr" should "be an right" in { - val attr = new OssStorageDirectoryAttributes(getDir) + val attr = OssStorageDirectoryAttributes(getDir) attr.fileKey shouldEqual getDir.pathAsString diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/TTLOssStorageConfigurationSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/TTLOssStorageConfigurationSpec.scala index 416748600af..739760fea42 100644 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/TTLOssStorageConfigurationSpec.scala +++ b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/TTLOssStorageConfigurationSpec.scala @@ -1,18 +1,15 @@ package cromwell.filesystems.oss.nio import java.net.URI - -import com.typesafe.config.ConfigFactory +import com.typesafe.config.{Config, ConfigFactory} import cromwell.core.TestKitSuite import org.scalatest.BeforeAndAfter import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.scalatestplus.mockito.MockitoSugar - object TTLOssStorageConfigurationSpec { - val BcsBackendConfigString = + val BcsBackendConfigString: String = s""" | auth { | endpoint = "oss-cn-shanghai.aliyuncs.com" @@ -25,15 +22,15 @@ object TTLOssStorageConfigurationSpec { | } """.stripMargin - val BcsBackendConfig = ConfigFactory.parseString(BcsBackendConfigString) + val BcsBackendConfig: Config = ConfigFactory.parseString(BcsBackendConfigString) } -class TTLOssStorageConfigurationSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with MockitoSugar with BeforeAndAfter { +class TTLOssStorageConfigurationSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with BeforeAndAfter { val expectedEndpoint = "oss-cn-shanghai.aliyuncs.com" val expectedAccessId = "test-access-id" val expectedAccessKey = "test-access-key" - val expectedToken = Some("test-security-token") - val expectedFullEndpoint = URI.create("http://oss-cn-shanghai.aliyuncs.com") + val expectedToken: Option[String] = Option("test-security-token") + val expectedFullEndpoint: URI = URI.create("http://oss-cn-shanghai.aliyuncs.com") behavior of "TTLOssStorageConfiguration" diff --git a/project/Dependencies.scala b/project/Dependencies.scala index facd760e8d2..9a558a70a91 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -38,9 +38,6 @@ object Dependencies { private val delightRhinoSandboxV = "0.0.15" private val diffsonSprayJsonV = "4.1.1" private val ficusV = "1.5.1" - // The "com.vladsch.flexmark" % "flexmark-profile-pegdown" % flexmarkV dependency is an implicit, version-specific - // runtime dependency of ScalaTest. They must be upgraded together, based on the ScalaTest version. 
- private val flexmarkV = "0.62.2" // scala-steward:off private val fs2V = "2.5.9" // scala-steward:off (CROM-6564) private val googleApiClientV = "1.33.2" private val googleCloudBigQueryV = "2.10.0" @@ -69,6 +66,7 @@ object Dependencies { private val jacksonV = "2.13.2" private val janinoV = "3.1.6" private val jsr305V = "3.0.2" + private val junitV = "4.13.2" private val kindProjectorV = "0.13.2" private val kittensV = "2.3.2" private val liquibaseV = "4.8.0" @@ -82,12 +80,12 @@ object Dependencies { private val metrics4ScalaV = "4.2.8" private val metrics3StatsdV = "4.2.0" private val mockFtpServerV = "3.0.0" + private val mockitoV = "3.11.2" private val mockserverNettyV = "5.11.2" private val mouseV = "1.0.10" private val mysqlV = "8.0.28" private val nettyV = "4.1.72.Final" private val owlApiV = "5.1.19" - private val pegdownV = "1.6.0" private val postgresV = "42.3.3" private val pprintV = "0.7.1" private val rdf4jV = "3.7.1" @@ -97,14 +95,10 @@ object Dependencies { private val scalaGraphV = "1.13.1" private val scalaLoggingV = "3.9.4" private val scalaPoolV = "0.4.3" - private val scalacheckV = "1.15.4" private val scalacticV = "3.2.10" private val scalameterV = "0.19" - private val scalamockV = "5.2.0" - // scalatestV and flexmarkV must be upgraded together. Check the ScalaTest release notes to - // find the version of FlexMark that corresponds to the new version of ScalaTest. private val scalatestV = "3.2.10" - private val scalatestPlusMockitoV = "1.0.0-M2" + private val scalatestScalacheckV = scalatestV + ".0" private val scoptV = "4.0.1" private val sentryLogbackV = "5.2.4" private val shapelessV = "2.3.7" @@ -124,7 +118,6 @@ object Dependencies { */ private val slickV = "3.4.0-M1" private val snakeyamlV = "1.30" - private val specs2MockV = "4.13.3" private val sprayJsonV = "1.3.6" private val sttpV = "1.7.2" private val swaggerParserV = "1.0.56" @@ -202,7 +195,6 @@ object Dependencies { "commons-net" % "commons-net" % commonNetV, "io.github.andrebeat" %% "scala-pool" % scalaPoolV, "com.google.guava" % "guava" % guavaV, - "org.scalamock" %% "scalamock" % scalamockV % Test, "org.mockftpserver" % "MockFtpServer" % mockFtpServerV % Test ) @@ -383,6 +375,46 @@ object Dependencies { "com.google.cloud" % "google-cloud-monitoring" % googleCloudMonitoringV ) + /* + Generators are eventually coming to ScalaTest. Someday... + - https://youtu.be/lKtg-CDVDsI?t=562 + + For now use scalatestplus' scalacheck wrapper. + + Tests that insist on using PropertyGenerators should actually use ScalaTest's wrapper. ScalaCheck tests no longer + run by default. See Testing.scala where only `ScalaTest` is specified in the `testFrameworks`. + + See also (may be out of date): + - https://github.com/scalatest/scalatest/issues/1735 + - https://www.scalatest.org/user_guide/generator_driven_property_checks + - https://www.scalatest.org/user_guide/writing_scalacheck_style_properties + */ + private val scalacheckBaseV = "1.15" + private val scalacheckDependencies = List( + "org.scalatestplus" %% s"scalacheck-${scalacheckBaseV.replace(".", "-")}" % scalatestScalacheckV % Test, + ) + + /* + Note: `junitDependencies` only adds the dependency for JUnit tests to compile. + + To actually _run_ the tests via SBT one would need the SBT to JUnit interface: + - https://github.com/sbt/junit-interface/ + + However, as of Aug 2021 there is only one S3 Java file using JUnit, and that code was copy-pasted from an + external GitHub repo. See `s3fsDependencies` for more information. 
+ + Also as of Aug 2021 Testing.scala only looks for and runs ScalaTest during regular testing. + */ + private val junitDependencies = List( + "junit" % "junit" % junitV % Test + ) + + private val testDatabaseDependencies = + List("scalatest", "mysql", "mariadb", "postgresql") + .map(name => "com.dimafeng" %% s"testcontainers-scala-$name" % testContainersScalaV % Test) + + val s3FileSystemDependencies: List[ModuleID] = junitDependencies + val gcsFileSystemDependencies: List[ModuleID] = akkaHttpDependencies val httpFileSystemDependencies: List[ModuleID] = akkaHttpDependencies @@ -394,10 +426,9 @@ object Dependencies { val womDependencies: List[ModuleID] = List( "com.typesafe.scala-logging" %% "scala-logging" % scalaLoggingV, "io.spray" %% "spray-json" % sprayJsonV, - "org.scalacheck" %% "scalacheck" % scalacheckV % Test, "org.typelevel" %% "simulacrum" % simulacrumV, "commons-codec" % "commons-codec" % commonsCodecV - ) ++ circeDependencies ++ refinedTypeDependenciesList + ) ++ scalacheckDependencies ++ circeDependencies ++ refinedTypeDependenciesList val wdlDependencies: List[ModuleID] = List( "commons-io" % "commons-io" % commonsIoV, @@ -441,11 +472,9 @@ object Dependencies { "com.lihaoyi" %% "ammonite-ops" % ammoniteOpsV, "org.broadinstitute" % "heterodon" % heterodonV classifier "single", "org.scalactic" %% "scalactic" % scalacticV, - "org.scalacheck" %% "scalacheck" % scalacheckV % Test, "io.circe" %% "circe-optics" % circeOpticsV, "org.mozilla" % "rhino" % rhinoV, "org.javadelight" % "delight-rhino-sandbox" % delightRhinoSandboxV, - "org.scalamock" %% "scalamock" % scalamockV % Test, "commons-io" % "commons-io" % commonsIoV % Test ) ++ betterFilesDependencies ++ owlApiDependencies @@ -461,7 +490,6 @@ object Dependencies { "com.chuusai" %% "shapeless" % shapelessV, "com.storm-enroute" %% "scalameter" % scalameterV % Test, "com.github.scopt" %% "scopt" % scoptV, - "org.scalamock" %% "scalamock" % scalamockV % Test ) ++ akkaStreamDependencies ++ configDependencies ++ catsDependencies ++ circeDependencies ++ googleApiClientDependencies ++ statsDDependencies ++ betterFilesDependencies ++ // TODO: We're not using the "F" in slf4j. Core only supports logback, specifically the WorkflowLogger. 
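The dependency comments above recommend writing property-based tests through ScalaTest's ScalaCheck wrapper rather than as raw ScalaCheck properties, since only the ScalaTest framework is run (see the Testing.scala hunk later in this patch). Below is a minimal sketch of that style under the `scalacheck-1-15` scalatestplus artifact wired in here; `ReversePropertySpec` is a hypothetical example for illustration, not Cromwell code.

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks

class ReversePropertySpec extends AnyFlatSpec with Matchers with ScalaCheckPropertyChecks {
  "List.reverse" should "preserve length for arbitrary inputs" in {
    // forAll draws generated inputs from ScalaCheck's implicit Arbitrary instances.
    forAll { (xs: List[Int]) =>
      xs.reverse.length shouldBe xs.length
    }
  }
}

Because the spec itself is a ScalaTest suite, it is still picked up by the ScalaTest-only `testFrameworks` setting even though the generators come from ScalaCheck.
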
@@ -492,12 +520,13 @@ object Dependencies { exclude("org.scala-tools.testing", "test-interface"), "com.fasterxml.jackson.core" % "jackson-databind" % jacksonV, "io.github.andrebeat" %% "scala-pool" % scalaPoolV - ) ++ swaggerUiDependencies ++ akkaHttpDependencies ++ akkaHttpCirceIntegrationDependency ++ circeDependencies + ) ++ swaggerUiDependencies ++ akkaHttpDependencies ++ akkaHttpCirceIntegrationDependency ++ circeDependencies ++ + testDatabaseDependencies - val servicesDependencies = List( + val servicesDependencies: List[ModuleID] = List( "com.google.api" % "gax-grpc" % googleGaxGrpcV, "org.apache.commons" % "commons-csv" % commonsCsvV, - ) + ) ++ testDatabaseDependencies val serverDependencies: List[ModuleID] = slf4jBindingDependencies @@ -511,10 +540,9 @@ object Dependencies { val wes2cromwellDependencies: List[ModuleID] = coreDependencies ++ akkaHttpDependencies - val backendDependencies = List( - "org.scalacheck" %% "scalacheck" % scalacheckV % Test, + val backendDependencies: List[ModuleID] = List( "co.fs2" %% "fs2-io" % fs2V - ) + ) ++ scalacheckDependencies val bcsBackendDependencies: List[ModuleID] = commonDependencies ++ refinedTypeDependenciesList ++ aliyunBatchComputeDependencies @@ -526,14 +554,9 @@ object Dependencies { val testDependencies: List[ModuleID] = List( "org.scalatest" %% "scalatest" % scalatestV, - "org.scalatestplus" %% "scalatestplus-mockito" % scalatestPlusMockitoV, - "com.vladsch.flexmark" % "flexmark-profile-pegdown" % flexmarkV, - "org.pegdown" % "pegdown" % pegdownV, - "org.specs2" %% "specs2-mock" % specs2MockV, - "com.dimafeng" %% "testcontainers-scala-scalatest" % testContainersScalaV, - "com.dimafeng" %% "testcontainers-scala-mysql" % testContainersScalaV, - "com.dimafeng" %% "testcontainers-scala-mariadb" % testContainersScalaV, - "com.dimafeng" %% "testcontainers-scala-postgresql" % testContainersScalaV + // Use mockito Java DSL directly instead of the numerous and often hard to keep updated Scala DSLs. + // See also scaladoc in common.mock.MockSugar and that trait's various usages. + "org.mockito" % "mockito-core" % mockitoV ) ++ slf4jBindingDependencies // During testing, add an slf4j binding for _all_ libraries. val kindProjectorPlugin = "org.typelevel" % "kind-projector" % kindProjectorV cross CrossVersion.full diff --git a/project/Testing.scala b/project/Testing.scala index 89a6ffc6832..54ddc458353 100644 --- a/project/Testing.scala +++ b/project/Testing.scala @@ -56,8 +56,6 @@ object Testing { Tests.Argument( TestFrameworks.ScalaTest, "-oDSI", - "-h", - "target/test-reports", "-u", "target/test-reports", "-F", @@ -91,14 +89,18 @@ object Testing { // Only run one minnie-kenny.sh at a time! 
   private lazy val minnieKennySingleRunner = new MinnieKennySingleRunner
+  private val ScalaMeterFramework = new TestFramework("org.scalameter.ScalaMeterFramework")
+
   val testSettings = List(
     libraryDependencies ++= testDependencies.map(_ % Test),
     // `test` (or `assembly`) - Run most tests
     Test / testOptions ++= Seq(TestReportArgs) ++ filterTestArgs,
     // `alltests:test` - Run all tests
     AllTests / testOptions := (Test / testOptions).value.diff(filterTestArgs),
+    // Reduce the load on SBT by only searching for ScalaTest specs excluding others like JUnit and ScalaCheck
+    testFrameworks := List(TestFrameworks.ScalaTest),
     // Add scalameter as a test framework in the CromwellBenchmarkTest scope
-    CromwellBenchmarkTest / testFrameworks += new TestFramework("org.scalameter.ScalaMeterFramework"),
+    CromwellBenchmarkTest / testFrameworks := List(TestFrameworks.ScalaTest, ScalaMeterFramework),
     // Don't execute benchmarks in parallel
     CromwellBenchmarkTest / parallelExecution := false,
     // Until we move away from Travis do not execute ANY tests in parallel (see also Settings.sharedSettings)
diff --git a/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala b/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala
index be662e5044e..d0f39fe8889 100644
--- a/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala
+++ b/server/src/test/scala/cromwell/engine/WorkflowStoreActorSpec.scala
@@ -26,13 +26,13 @@ import mouse.all._
 import org.scalatest.BeforeAndAfter
 import org.scalatest.concurrent.Eventually
 import org.scalatest.matchers.should.Matchers
-import org.specs2.mock.Mockito
 
 import scala.concurrent.Await
 import scala.concurrent.duration._
 import scala.language.postfixOps
 
-class WorkflowStoreActorSpec extends CromwellTestKitWordSpec with CoordinatedWorkflowStoreActorBuilder with SqlWorkflowStoreBuilder with Matchers with BeforeAndAfter with Mockito with Eventually with CromwellTimeoutSpec {
+class WorkflowStoreActorSpec extends CromwellTestKitWordSpec with CoordinatedWorkflowStoreActorBuilder
+  with SqlWorkflowStoreBuilder with Matchers with BeforeAndAfter with Eventually with CromwellTimeoutSpec {
   private val helloWorldSourceFiles = HelloWorld.asWorkflowSources().asInstanceOf[WorkflowSourceFilesWithoutImports]
   private val helloWorldSourceFilesOnHold = HelloWorld.asWorkflowSources(workflowOnHold = true)
   private val helloCwlWorldSourceFiles =
diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/MaterializeWorkflowDescriptorActorSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/MaterializeWorkflowDescriptorActorSpec.scala
index 17b607c3bc6..fb44db3003c 100644
--- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/MaterializeWorkflowDescriptorActorSpec.scala
+++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/MaterializeWorkflowDescriptorActorSpec.scala
@@ -13,25 +13,24 @@ import cromwell.engine.workflow.lifecycle.materialization.MaterializeWorkflowDes
 import cromwell.util.SampleWdl.HelloWorld
 import cromwell.{CromwellTestKitSpec, CromwellTestKitWordSpec}
 import org.scalatest.BeforeAndAfter
-import org.scalatestplus.mockito.MockitoSugar
 import spray.json.DefaultJsonProtocol._
 import spray.json._
 import wom.values.{WomInteger, WomString}
 
 import scala.concurrent.duration._
 
-class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec with BeforeAndAfter with MockitoSugar {
+class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec with BeforeAndAfter {
 
-  val ioActor = system.actorOf(SimpleIoActor.props)
-  val workflowId = WorkflowId.randomId()
-  val minimumConf = ConfigFactory.parseString(
+  private val ioActor = system.actorOf(SimpleIoActor.props)
+  private val workflowId = WorkflowId.randomId()
+  private val minimumConf = ConfigFactory.parseString(
     """
       |backend {
      |  default = "Local"
      |}
      |""".stripMargin
   ).withFallback(CromwellTestKitSpec.DefaultConfig)
-  val differentDefaultBackendConf = ConfigFactory.parseString(
+  private val differentDefaultBackendConf = ConfigFactory.parseString(
     """
      |backend {
      |  default = "DefaultBackend"
@@ -44,26 +43,33 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit
      |""".stripMargin
   ).withFallback(CromwellTestKitSpec.DefaultConfig)
   val unstructuredFile = "fubar badness!"
-  val validOptions = WorkflowOptions.fromJsonString(""" { "write_to_cache": true } """).get
+  private val validOptions = WorkflowOptions.fromJsonString(""" { "write_to_cache": true } """).get
   val validCustomLabelsFile="""{ "label1": "value1", "label2": "value2", "Label1": "valu£1" }"""
   val badCustomLabelsFile="""{ "key with characters more than 255-at vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpas": "value with characters more than 255-at vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa" }"""
 
-  val validInputsJson = HelloWorld.rawInputs.toJson.toString()
-  val workflowSourceWithDocker = HelloWorld.workflowSource(""" runtime { docker: "ubuntu:latest" } """)
-  val workflowSourceNoDocker = HelloWorld.workflowSource(""" runtime { } """)
-  val Timeout = 10.second.dilated
-  val NoBehaviorActor = system.actorOf(Props.empty)
+  private val validInputsJson = HelloWorld.rawInputs.toJson.toString()
+  private val workflowSourceNoDocker = HelloWorld.workflowSource(""" runtime { } """)
+  private val Timeout = 10.second.dilated
+  private val NoBehaviorActor = system.actorOf(Props.empty)
   val callCachingEnabled = true
   val invalidateBadCacheResults = true
-  val validMemoryRetryOptions1 = WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": 1.0 } """).get
-  val validMemoryRetryOptions2 = WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": 99.0 } """).get
-  val validMemoryRetryOptions3 = WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": 12.34 } """).get
-  val invalidMemoryRetryOptions1 = WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": 0.9 } """).get
-  val invalidMemoryRetryOptions2 = WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": 99.1 } """).get
-  val invalidMemoryRetryOptions3 = WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": -1.1 } """).get
-  val invalidMemoryRetryOptions4 = WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": "invalid value" } """).get
-  val invalidMemoryRetryOptions5 = WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": true } """).get
+  private val validMemoryRetryOptions1 =
+    WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": 1.0 } """).get
+  private val validMemoryRetryOptions2 =
+    WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": 99.0 } """).get
+  private val validMemoryRetryOptions3 =
+    WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": 12.34 } """).get
+  private val invalidMemoryRetryOptions1 =
+    WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": 0.9 } """).get
+  private val invalidMemoryRetryOptions2 =
+    WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": 99.1 } """).get
+  private val invalidMemoryRetryOptions3 =
+    WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": -1.1 } """).get
+  private val invalidMemoryRetryOptions4 =
+    WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": "invalid value" } """).get
+  private val invalidMemoryRetryOptions5 =
+    WorkflowOptions.fromJsonString(""" { "memory_retry_multiplier": true } """).get
 
   before {
   }
@@ -73,7 +79,7 @@ class MaterializeWorkflowDescriptorActorSpec extends CromwellTestKitWordSpec wit
     system.stop(ioActor)
   }
 
-  val fooHogGroup = HogGroup("foo")
+  private val fooHogGroup = HogGroup("foo")
 
   "MaterializeWorkflowDescriptorActor" should {
     "accept valid WDL, inputs and options files" in {
diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpec.scala
index 5cef7f9e155..e1e613a6189 100644
--- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpec.scala
+++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EngineJobExecutionActorSpec.scala
@@ -12,13 +12,12 @@ import org.scalatest._
 import org.scalatest.concurrent.{Eventually, ScalaFutures}
 import org.scalatest.matchers.should.Matchers
 import org.scalatest.wordspec.AnyWordSpecLike
-import org.specs2.mock.Mockito
 
 import scala.concurrent.duration._
 import scala.language.postfixOps
 
 trait EngineJobExecutionActorSpec extends AbstractEngineJobExecutionActorSpec
-  with Matchers with Mockito with BeforeAndAfterAll with BeforeAndAfter {
+  with Matchers with BeforeAndAfterAll with BeforeAndAfter {
 
   // If we WANT something to happen, make sure it happens within this window:
   val awaitTimeout: FiniteDuration = 10 seconds
diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/PerTestHelper.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/PerTestHelper.scala
index c7379cf1314..e1176318fc6 100644
--- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/PerTestHelper.scala
+++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/PerTestHelper.scala
@@ -15,34 +15,35 @@ import cromwell.engine.workflow.mocks.{DeclarationMock, TaskMock, WdlWomExpressi
 import cromwell.services.CallCaching.CallCachingEntryId
 import cromwell.util.AkkaTestUtil._
 import cromwell.util.WomMocks
-import org.specs2.mock.Mockito
 import wom.callable.Callable.{OutputDefinition, OverridableInputDefinitionWithDefault}
+import wom.callable.CallableTaskDefinition
 import wom.expression.{IoFunctionSet, NoIoFunctionSet}
 import wom.graph.{CommandCallNode, WomIdentifier}
 import wom.types.{WomIntegerType, WomStringType}
 
 import scala.concurrent.ExecutionContext
 import scala.concurrent.duration.FiniteDuration
-import scala.util.Success
+import scala.util.{Success, Try}
 
-private[ejea] class PerTestHelper(implicit val system: ActorSystem) extends Mockito with TaskMock with WdlWomExpressionMock with DeclarationMock {
+private[ejea] class PerTestHelper(implicit val system: ActorSystem)
+  extends TaskMock with WdlWomExpressionMock with DeclarationMock {
 
-  val workflowId = WorkflowId.randomId()
+  val workflowId: WorkflowId = WorkflowId.randomId()
   val workflowName = "wf"
   val taskName = "foobar"
   val jobFqn = s"$workflowName.$taskName"
-  val jobIndex = Some(1)
+  val jobIndex: Option[Int] = Option(1)
   val jobAttempt = 1
 
-  val task = WomMocks.mockTaskDefinition(taskName).copy(
+  val task: CallableTaskDefinition = WomMocks.mockTaskDefinition(taskName).copy(
     inputs = List(OverridableInputDefinitionWithDefault("inInt", WomIntegerType, mockIntExpression(543))),
     outputs = List(OutputDefinition("outString", WomStringType, mockStringExpression("hello")))
   )
 
   val call: CommandCallNode = WomMocks.mockTaskCall(WomIdentifier(taskName, jobFqn), task)
-  val jobDescriptorKey = BackendJobDescriptorKey(call, jobIndex, jobAttempt)
+  val jobDescriptorKey: BackendJobDescriptorKey = BackendJobDescriptorKey(call, jobIndex, jobAttempt)
 
-  val backendWorkflowDescriptor = BackendWorkflowDescriptor(id = workflowId,
+  val backendWorkflowDescriptor: BackendWorkflowDescriptor = BackendWorkflowDescriptor(id = workflowId,
     callable = null,
     knownValues = null,
     workflowOptions = WorkflowOptions.empty,
@@ -50,28 +51,37 @@ private[ejea] class PerTestHelper(implicit val system: ActorSystem) extends Mock
     hogGroup = HogGroup("foo"),
     List.empty,
     None)
-  val backendJobDescriptor = BackendJobDescriptor(backendWorkflowDescriptor, jobDescriptorKey, runtimeAttributes = Map.empty, evaluatedTaskInputs = Map.empty, FloatingDockerTagWithoutHash("ubuntu:latest"), None, Map.empty)
+  val backendJobDescriptor: BackendJobDescriptor =
+    BackendJobDescriptor(
+      workflowDescriptor = backendWorkflowDescriptor,
+      key = jobDescriptorKey,
+      runtimeAttributes = Map.empty,
+      evaluatedTaskInputs = Map.empty,
+      maybeCallCachingEligible = FloatingDockerTagWithoutHash("ubuntu:latest"),
+      dockerSize = None,
+      prefetchedKvStoreEntries = Map.empty,
+    )
 
   var fetchCachedResultsActorCreations: ExpectOne[(CallCachingEntryId, Seq[OutputDefinition])] = NothingYet
   var jobHashingInitializations: ExpectOne[(BackendJobDescriptor, CallCachingActivity)] = NothingYet
   var invalidateCacheActorCreations: ExpectOne[CallCachingEntryId] = NothingYet
 
-  val deathwatch = TestProbe()
-  val bjeaProbe = TestProbe()
-  val bjeaProps = bjeaProbe.props
-  val replyToProbe = TestProbe()
-  val parentProbe = TestProbe()
-  val serviceRegistryProbe = TestProbe()
-  val ioActorProbe = TestProbe()
-  val jobStoreProbe = TestProbe()
-  val callCacheReadActorProbe = TestProbe()
-  val callCacheWriteActorProbe = TestProbe()
-  val dockerHashActorProbe = TestProbe()
-  val callCacheHitCopyingProbe = TestProbe()
-  val jobPreparationProbe = TestProbe()
-  val jobRestartCheckTokenDispenserProbe = TestProbe()
-  val jobExecutionTokenDispenserProbe = TestProbe()
-  val ejhaProbe = TestProbe()
+  val deathwatch: TestProbe = TestProbe()
+  val bjeaProbe: TestProbe = TestProbe()
+  val bjeaProps: Props = bjeaProbe.props
+  val replyToProbe: TestProbe = TestProbe()
+  val parentProbe: TestProbe = TestProbe()
+  val serviceRegistryProbe: TestProbe = TestProbe()
+  val ioActorProbe: TestProbe = TestProbe()
+  val jobStoreProbe: TestProbe = TestProbe()
+  val callCacheReadActorProbe: TestProbe = TestProbe()
+  val callCacheWriteActorProbe: TestProbe = TestProbe()
+  val dockerHashActorProbe: TestProbe = TestProbe()
+  val callCacheHitCopyingProbe: TestProbe = TestProbe()
+  val jobPreparationProbe: TestProbe = TestProbe()
+  val jobRestartCheckTokenDispenserProbe: TestProbe = TestProbe()
+  val jobExecutionTokenDispenserProbe: TestProbe = TestProbe()
+  val ejhaProbe: TestProbe = TestProbe()
 
   def buildFactory(backendConfigurationDescriptor: BackendConfigurationDescriptor): BackendLifecycleActorFactory = new BackendLifecycleActorFactory {
@@ -97,16 +107,7 @@ private[ejea] class PerTestHelper(implicit val system: ActorSystem) extends Mock
 
     override def fileHashingActorProps: Option[(BackendJobDescriptor, Option[BackendInitializationData], ActorRef, ActorRef, Option[ActorRef]) => Props] = {
-      Option(fileHashingActorInner(classOf[DefaultStandardFileHashingActor]))
-    }
-
-    def fileHashingActorInner(standardFileHashingActor: Class[_ <: StandardFileHashingActor])
-                             (jobDescriptor: BackendJobDescriptor,
-                              initializationDataOption: Option[BackendInitializationData],
-                              serviceRegistryActor: ActorRef,
-                              ioActor: ActorRef,
-                              fileHashCacheActor: Option[ActorRef]): Props = {
-      Props.empty
+      Option((_, _, _, _, _) => Props.empty)
     }
 
     // These two factory methods should never be called from EJEA or any of its descendants:
@@ -196,14 +197,21 @@ private[ejea] class MockEjea(helper: PerTestHelper,
   command = if (restarting) RecoverJobCommand else ExecuteJobCommand,
   callCachingParameters = callCachingParameters) {
 
-  implicit val system = context.system
-  override def makeFetchCachedResultsActor(cacheId: CallCachingEntryId) = helper.fetchCachedResultsActorCreations = helper.fetchCachedResultsActorCreations.foundOne((cacheId, null))
-  override def initializeJobHashing(jobDescriptor: BackendJobDescriptor, activity: CallCachingActivity, callCachingEligible: CallCachingEligible) = {
+  implicit val system: ActorSystem = context.system
+  override def makeFetchCachedResultsActor(cacheId: CallCachingEntryId): Unit = {
+    helper.fetchCachedResultsActorCreations =
+      helper.fetchCachedResultsActorCreations.foundOne((cacheId, null))
+  }
+  override def initializeJobHashing(jobDescriptor: BackendJobDescriptor,
+                                    activity: CallCachingActivity,
+                                    callCachingEligible: CallCachingEligible): Try[ActorRef] = {
     helper.jobHashingInitializations = helper.jobHashingInitializations.foundOne((jobDescriptor, activity))
     Success(helper.ejhaProbe.ref)
   }
-  override def createBackendJobExecutionActor(data: ResponsePendingData) = helper.bjeaProbe.ref
+  override def createBackendJobExecutionActor(data: ResponsePendingData): ActorRef = helper.bjeaProbe.ref
   override def invalidateCacheHit(cacheId: CallCachingEntryId): Unit = { helper.invalidateCacheActorCreations = helper.invalidateCacheActorCreations.foundOne(cacheId) }
-  override def createJobPreparationActor(jobPrepProps: Props, name: String) = jobPreparationProbe.ref
-  override def onTimedTransition(from: EngineJobExecutionActorState, to: EngineJobExecutionActorState, duration: FiniteDuration) = {}
+  override def createJobPreparationActor(jobPrepProps: Props, name: String): ActorRef = jobPreparationProbe.ref
+  override def onTimedTransition(from: EngineJobExecutionActorState,
+                                 to: EngineJobExecutionActorState,
+                                 duration: FiniteDuration): Unit = {}
 }
diff --git a/server/src/test/scala/cromwell/jobstore/JobStoreServiceSpec.scala b/server/src/test/scala/cromwell/jobstore/JobStoreServiceSpec.scala
index c58d40c526e..e2734b68fef 100644
--- a/server/src/test/scala/cromwell/jobstore/JobStoreServiceSpec.scala
+++ b/server/src/test/scala/cromwell/jobstore/JobStoreServiceSpec.scala
@@ -11,7 +11,6 @@ import cromwell.jobstore.JobStoreServiceSpec._
 import cromwell.services.EngineServicesStore
 import cromwell.util.WomMocks
 import org.scalatest.matchers.should.Matchers
-import org.specs2.mock.Mockito
 import wom.callable.Callable.OutputDefinition
 import wom.expression.PlaceholderWomExpression
 import wom.graph.WomIdentifier
@@ -25,7 +24,8 @@ object JobStoreServiceSpec {
   private val EmptyExpression = PlaceholderWomExpression(Set.empty, WomStringType)
 }
 
-class JobStoreServiceSpec extends CromwellTestKitWordSpec with Matchers with Mockito with CoordinatedWorkflowStoreActorBuilder with SqlWorkflowStoreBuilder with CromwellTimeoutSpec {
+class JobStoreServiceSpec extends CromwellTestKitWordSpec with Matchers with CoordinatedWorkflowStoreActorBuilder
+  with SqlWorkflowStoreBuilder with CromwellTimeoutSpec {
 
   "JobStoreService" should {
     "register Job and Workflow completions and read back (query) the result" in {
diff --git a/server/src/test/scala/cromwell/subworkflowstore/SubWorkflowStoreSpec.scala b/server/src/test/scala/cromwell/subworkflowstore/SubWorkflowStoreSpec.scala
index 37f17ac6934..c52f356c87c 100644
--- a/server/src/test/scala/cromwell/subworkflowstore/SubWorkflowStoreSpec.scala
+++ b/server/src/test/scala/cromwell/subworkflowstore/SubWorkflowStoreSpec.scala
@@ -17,7 +17,6 @@ import cromwell.util.WomMocks
 import cromwell.{CromwellTestKitSpec, CromwellTestKitWordSpec}
 import mouse.all._
 import org.scalatest.matchers.should.Matchers
-import org.specs2.mock.Mockito
 import wdl.draft2.model.WdlExpression
 import wom.graph.{GraphNode, WomIdentifier}
 
@@ -30,7 +29,8 @@ object SubWorkflowStoreSpec {
   val EmptyExpression = WdlExpression.fromString(""" "" """)
 }
 
-class SubWorkflowStoreSpec extends CromwellTestKitWordSpec with CoordinatedWorkflowStoreActorBuilder with CromwellTimeoutSpec with Matchers with Mockito {
+class SubWorkflowStoreSpec extends CromwellTestKitWordSpec with CoordinatedWorkflowStoreActorBuilder
+  with CromwellTimeoutSpec with Matchers {
   "SubWorkflowStore" should {
     "work" in {
       lazy val subWorkflowStore = new SqlSubWorkflowStore(EngineServicesStore.engineDatabaseInterface)
@@ -88,7 +88,15 @@ class SubWorkflowStoreSpec extends CromwellTestKitWordSpec with CoordinatedWorkf
 
       // Query for sub workflow
      subWorkflowStoreService ! QuerySubWorkflow(parentWorkflowId, jobKey)
-      val subWorkflowEntry = SubWorkflowStoreEntry(Option(0), parentWorkflowId.toString, jobKey.node.fullyQualifiedName, jobKey.index.fromIndex, jobKey.attempt, subWorkflowId.toString, Some(0))
+      val subWorkflowEntry = SubWorkflowStoreEntry(
+        rootWorkflowId = Option(0),
+        parentWorkflowExecutionUuid = parentWorkflowId.toString,
+        callFullyQualifiedName = jobKey.node.fullyQualifiedName,
+        callIndex = jobKey.index.fromIndex,
+        callAttempt = jobKey.attempt,
+        subWorkflowExecutionUuid = subWorkflowId.toString,
+        subWorkflowStoreEntryId = Option(0),
+      )
       expectMsg[SubWorkflowFound](SubWorkflowFound(subWorkflowEntry))
 
       // Register sub sub workflow
@@ -97,7 +105,15 @@ class SubWorkflowStoreSpec extends CromwellTestKitWordSpec with CoordinatedWorkf
 
       // Query for sub sub workflow
      subWorkflowStoreService ! QuerySubWorkflow(subWorkflowId, jobKey)
-      val subSubWorkflowEntry = SubWorkflowStoreEntry(Option(0), subWorkflowId.toString, jobKey.node.fullyQualifiedName, jobKey.index.fromIndex, jobKey.attempt, subSubWorkflowId.toString, Some(1))
+      val subSubWorkflowEntry = SubWorkflowStoreEntry(
+        rootWorkflowId = Option(0),
+        parentWorkflowExecutionUuid = subWorkflowId.toString,
+        callFullyQualifiedName = jobKey.node.fullyQualifiedName,
+        callIndex = jobKey.index.fromIndex,
+        callAttempt = jobKey.attempt,
+        subWorkflowExecutionUuid = subSubWorkflowId.toString,
+        subWorkflowStoreEntryId = Option(1),
+      )
       expectMsg[SubWorkflowFound](SubWorkflowFound(subSubWorkflowEntry))
 
       // Delete root workflow
diff --git a/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala b/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala
index b5653ec4c5c..8e6029140e9 100644
--- a/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala
+++ b/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala
@@ -18,7 +18,6 @@ import org.scalatest.concurrent.{Eventually, ScalaFutures}
 import org.scalatest.time.{Millis, Seconds, Span}
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.enablers.Emptiness._
-import org.specs2.mock.Mockito
 
 import scala.concurrent.duration._
 import scala.concurrent.{Await, ExecutionContext, Future}
@@ -39,23 +38,25 @@ object MetadataDatabaseAccessSpec {
   val Subworkflow2Name = "test_subworkflow_2"
 }
 
-class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures with BeforeAndAfterAll with Eventually with Mockito {
+class MetadataDatabaseAccessSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with ScalaFutures
+  with BeforeAndAfterAll with Eventually {
   import MetadataDatabaseAccessSpec._
 
-  implicit val ec = ExecutionContext.global
+  implicit val ec: ExecutionContext = ExecutionContext.global
 
-  implicit val defaultPatience = PatienceConfig(scaled(Span(30, Seconds)), scaled(Span(100, Millis)))
+  implicit val defaultPatience: PatienceConfig = PatienceConfig(scaled(Span(30, Seconds)), scaled(Span(100, Millis)))
 
   DatabaseSystem.All foreach { databaseSystem =>
 
     behavior of s"MetadataDatabaseAccess on ${databaseSystem.name}"
 
     val containerOpt: Option[Container] = DatabaseTestKit.getDatabaseTestContainer(databaseSystem)
 
-    lazy val dataAccess = new MetadataDatabaseAccess with MetadataServicesStore {
-      override val metadataDatabaseInterface: MetadataSlickDatabase = {
-        DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, MetadataDatabaseType, databaseSystem)
+    lazy val dataAccess: MetadataDatabaseAccess with MetadataServicesStore =
+      new MetadataDatabaseAccess with MetadataServicesStore {
+        override val metadataDatabaseInterface: MetadataSlickDatabase = {
+          DatabaseTestKit.initializeDatabaseByContainerOptTypeAndSystem(containerOpt, MetadataDatabaseType, databaseSystem)
+        }
       }
-    }
 
     def publishMetadataEvents(baseKey: MetadataKey, keyValues: Array[(String, String)]): Future[Unit] = {
       val events = keyValues map { case (k, v) =>
diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActorSpec.scala
index 01f80ccd056..ba2e5580b3f 100644
--- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActorSpec.scala
+++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchAsyncBackendJobExecutionActorSpec.scala
@@ -61,7 +61,6 @@ import org.scalatest.concurrent.PatienceConfiguration
 import org.scalatest.flatspec.AnyFlatSpecLike
 import org.scalatest.matchers.should.Matchers
 import org.slf4j.Logger
-import org.specs2.mock.Mockito
 import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider
 import software.amazon.awssdk.regions.Region
 import spray.json._
@@ -82,14 +81,14 @@ import scala.language.postfixOps
 import scala.util.Success
 
 class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite
-  with AnyFlatSpecLike with Matchers with ImplicitSender with Mockito with BackendSpec with BeforeAndAfter with DefaultJsonProtocol {
+  with AnyFlatSpecLike with Matchers with ImplicitSender with BackendSpec with BeforeAndAfter with DefaultJsonProtocol {
   lazy val mockPathBuilderS3: S3PathBuilder = S3PathBuilder.fromProvider(
     AnonymousCredentialsProvider.create,
     S3Storage.DefaultConfiguration,
     WorkflowOptions.empty,
     Option(Region.US_EAST_1)
   )
-  lazy val mockPathBuilderLocal = DefaultPathBuilder
+  lazy val mockPathBuilderLocal: PathBuilder = DefaultPathBuilder
 
   var kvService: ActorRef = system.actorOf(Props(new InMemoryKvServiceActor), "kvService")
 
@@ -872,7 +871,7 @@ class AwsBatchAsyncBackendJobExecutionActorSpec extends TestKitSuite
     pendingExecutionResponse.jobDescriptor should be(jobDescriptor)
     pendingExecutionResponse.pendingJob should be(StandardAsyncJob(jobId))
     pendingExecutionResponse.previousState should be(None)
-    pendingExecutionResponse.runInfo should be(Some(backend.batchJob))
+    pendingExecutionResponse.runInfo should be(Option(backend.batchJob))
   }
 }
diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchCallPathsSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchCallPathsSpec.scala
index b93e41d2d11..b250ab12ed4 100644
--- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchCallPathsSpec.scala
+++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchCallPathsSpec.scala
@@ -36,15 +36,13 @@ import cromwell.backend.BackendSpec
 import cromwell.backend.io.JobPathsSpecHelper._
 import org.scalatest.flatspec.AnyFlatSpecLike
 import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider
-// import cromwell.cloudsupport.gcp.auth.AwsBatchAuthModeSpec
 import cromwell.core.Tags.AwsTest
 import cromwell.core.TestKitSuite
 import cromwell.util.SampleWdl
 import org.scalatest.matchers.should.Matchers
-import org.specs2.mock.Mockito
 import spray.json.{JsObject, JsString}
 
-class AwsBatchCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Mockito {
+class AwsBatchCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matchers {
 
   import AwsBatchTestConfig._
   import BackendSpec._
@@ -52,7 +50,6 @@ class AwsBatchCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with Match
   behavior of "AwsBatchCallPaths"
 
   it should "map the correct filenames" taggedAs AwsTest in {
-    // AwsBatchAuthModeSpec.assumeHasApplicationDefaultCredentials()
 
     val workflowDescriptor = buildWdlWorkflowDescriptor(
       SampleWdl.HelloWorld.workflowSource(),
@@ -114,7 +111,7 @@ class AwsBatchCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with Match
     )
 
     val callPaths = AwsBatchJobPaths(workflowPaths, jobDescriptorKey)
-    
+
     callPaths.callContext.root.pathAsString should
       be(s"s3://my-cromwell-workflows-bucket/wf_hello/${workflowDescriptor.id}/call-hello")
     callPaths.callContext.stdout should
diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchInitializationActorSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchInitializationActorSpec.scala
index a70369c4ebf..f1e2eb40bf9 100644
--- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchInitializationActorSpec.scala
+++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchInitializationActorSpec.scala
@@ -36,24 +36,22 @@ import java.util.UUID
 import akka.actor.Props
 import akka.testkit._
 import com.typesafe.config.{Config, ConfigFactory}
-import cromwell.backend.BackendWorkflowInitializationActor.{InitializationFailed, Initialize}
+import cromwell.backend.BackendWorkflowInitializationActor.{InitializationFailed, InitializationSuccess, Initialize}
 import cromwell.backend.async.RuntimeAttributeValidationFailures
 import cromwell.backend.{BackendConfigurationDescriptor, BackendSpec, BackendWorkflowDescriptor}
 import org.scalatest.flatspec.AnyFlatSpecLike
-// import cromwell.cloudsupport.aws.auth.AwsAuthModeSpec
 import cromwell.core.Dispatcher.BackendDispatcher
-import cromwell.core.Tags.PostWomTest
+import cromwell.core.Tags.IntegrationTest
 import cromwell.core.TestKitSuite
-// import cromwell.core.logging.LoggingTest._
+import cromwell.core.logging.LoggingTest._
 import org.scalatest.matchers.should.Matchers
-import org.specs2.mock.Mockito
 import spray.json._
 import wom.graph.CommandCallNode
 
 import scala.concurrent.duration._
 
 class AwsBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers
-  with ImplicitSender with Mockito {
+  with ImplicitSender {
   val Timeout: FiniteDuration = 30.second.dilated
 
   import BackendSpec._
@@ -85,14 +83,8 @@ class AwsBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike
      |
      |  auths = [
      |    {
-      |      name = "application-default"
-      |      scheme = "application_default"
-      |    },
-      |    {
-      |      name = "user-via-refresh"
-      |      scheme = "refresh_token"
-      |      access-key = "secret_id"
-      |      secret-key = "secret_secret"
+      |      name = "default"
+      |      scheme = "default"
      |    }
      |  ]
      |}
@@ -103,11 +95,17 @@ class AwsBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike
      |  // Base bucket for workflow executions
      |  root = "s3://my-cromwell-workflows-bucket"
      |
+      |  auth = default
+      |  numSubmitAttempts = 1
+      |  numCreateDefinitionAttempts = 1
+      |
      |  // Polling for completion backs-off gradually for slower-running jobs.
      |  // This is the maximum polling interval (in seconds):
      |  maximum-polling-interval = 600
      |
      |  default-runtime-attributes {
+      |    queueArn: "arn:aws:batch:us-east-1:12345:job-queue/example"
+      |    scriptBucketName: example
      |    cpu: 1
      |    failOnStderr: false
      |    # Allowed to be a boolean, or a list of Ints, or an Int
@@ -121,7 +119,7 @@ class AwsBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike
      |  filesystems {
      |    s3 {
      |      // A reference to a potentially different auth for manipulating files via engine functions.
-      |      auth = "application-default"
+      |      auth = "default"
      |    }
      |  }
      |
@@ -187,28 +185,26 @@ class AwsBatchInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike
 
   behavior of "AwsBatchInitializationActor"
 
-  // it should "log a warning message when there are unsupported runtime attributes" taggedAs IntegrationTest in {
-  //   AwsAuthModeSpec.assumeHasApplicationDefaultCredentials()
-  //
-  //   within(Timeout) {
-  //     val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld,
-  //       runtime = """runtime { docker: "ubuntu/latest" test: true }""")
-  //     val backend = getAwsBatchBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes,
-  //       defaultBackendConfig)
-  //     val eventPattern =
-  //       "Key/s [test] is/are not supported by backend. Unsupported attributes will not be part of job executions."
-  //     EventFilter.warning(pattern = escapePattern(eventPattern), occurrences = 1) intercept {
-  //       backend ! Initialize
-  //     }
-  //     expectMsgPF() {
-  //       case InitializationSuccess(_) => //Docker entry is present.
-  //       case InitializationFailed(failure) => fail(s"InitializationSuccess was expected but got $failure")
-  //     }
-  //   }
-  // }
-
-  // Depends on https://github.com/broadinstitute/cromwell/issues/2606
-  it should "return InitializationFailed when docker runtime attribute key is not present" taggedAs PostWomTest ignore {
+  it should "log a warning message when there are unsupported runtime attributes" taggedAs IntegrationTest in {
+
+    within(Timeout) {
+      val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld,
+        runtime = """runtime { docker: "ubuntu/latest" test: true }""")
+      val backend = getAwsBatchBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes,
+        defaultBackendConfig)
+      val eventPattern =
+        "Key/s [test] is/are not supported by backend. Unsupported attributes will not be part of job executions."
+      EventFilter.warning(pattern = escapePattern(eventPattern), occurrences = 1) intercept {
+        backend ! Initialize
+      }
+      expectMsgPF() {
+        case InitializationSuccess(_) => //Docker entry is present.
+        case InitializationFailed(failure) => fail(s"InitializationSuccess was expected but got $failure")
+      }
+    }
+  }
+
+  it should "return InitializationFailed when docker runtime attribute key is not present" taggedAs IntegrationTest in {
     within(Timeout) {
       val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, runtime = """runtime { }""")
       val backend = getAwsBatchBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes,
diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobExecutionActorSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobExecutionActorSpec.scala
index 351bcef6460..be384d2af77 100644
--- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobExecutionActorSpec.scala
+++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobExecutionActorSpec.scala
@@ -40,14 +40,14 @@ import cromwell.backend.{BackendJobDescriptor, MinimumRuntimeSettings}
 import cromwell.core.TestKitSuite
 import org.scalatest.flatspec.AnyFlatSpecLike
 import org.scalatest.matchers.should.Matchers
-import org.specs2.mock.Mockito
+import common.mock.MockSugar
 
 import scala.concurrent.duration._
 import scala.concurrent.{ExecutionContext, Promise}
 import scala.util.control.NoStackTrace
 import scala.util.{Failure, Success}
 
-class AwsBatchJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Mockito {
+class AwsBatchJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with MockSugar {
 
   behavior of "AwsBatchJobExecutionActor"
 
diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala
index 82e0babae80..e9a5f1f86d9 100644
--- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala
+++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchJobSpec.scala
@@ -43,7 +43,6 @@ import eu.timepit.refined.numeric._
 import org.scalatest.PrivateMethodTester
 import org.scalatest.flatspec.AnyFlatSpecLike
 import org.scalatest.matchers.should.Matchers
-import org.specs2.mock.Mockito
 import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider
 import software.amazon.awssdk.services.batch.model.KeyValuePair
 import spray.json.{JsObject, JsString}
@@ -51,7 +50,7 @@ import wdl4s.parser.MemoryUnit
 import wom.format.MemorySize
 import wom.graph.CommandCallNode
 
-class AwsBatchJobSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Mockito with PrivateMethodTester {
+class AwsBatchJobSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with PrivateMethodTester {
   import AwsBatchTestConfig._
 
   System.setProperty("aws.region", "us-east-1")
diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala
index ace177ea007..fbab5a81a23 100644
--- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala
+++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchRuntimeAttributesSpec.scala
@@ -42,14 +42,13 @@ import eu.timepit.refined.refineMV
 import org.scalatest.matchers.should.Matchers
 import org.scalatest.wordspec.AnyWordSpecLike
 import org.slf4j.helpers.NOPLogger
-import org.specs2.mock.Mockito
 import spray.json._
 import wdl4s.parser.MemoryUnit
 import wom.format.MemorySize
 import wom.types._
 import wom.values._
 
-class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec with Matchers with Mockito {
+class AwsBatchRuntimeAttributesSpec extends AnyWordSpecLike with CromwellTimeoutSpec with Matchers {
 
   def workflowOptionsWithDefaultRA(defaults: Map[String, JsValue]): WorkflowOptions = {
     WorkflowOptions(JsObject(Map(
diff --git a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchWorkflowPathsSpec.scala b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchWorkflowPathsSpec.scala
index e983fe26089..2b2d0ea14d8 100644
--- a/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchWorkflowPathsSpec.scala
+++ b/supportedBackends/aws/src/test/scala/cromwell/backend/impl/aws/AwsBatchWorkflowPathsSpec.scala
@@ -35,15 +35,13 @@ import common.collections.EnhancedCollections._
 import cromwell.backend.BackendSpec
 import org.scalatest.flatspec.AnyFlatSpecLike
 import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider
-// import cromwell.cloudsupport.aws.auth.AwsBatchAuthModeSpec
 import cromwell.core.Tags.AwsTest
 import cromwell.core.TestKitSuite
 import cromwell.util.SampleWdl
 import org.scalatest.matchers.should.Matchers
-import org.specs2.mock.Mockito
 import spray.json.{JsObject, JsString}
 
-class AwsBatchWorkflowPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Mockito {
+class AwsBatchWorkflowPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matchers {
   import AwsBatchTestConfig._
   import BackendSpec._
 
diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobPathsSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobPathsSpec.scala
index 02339302d94..15148f39a35 100644
--- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobPathsSpec.scala
+++ b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobPathsSpec.scala
@@ -1,14 +1,15 @@
 package cromwell.backend.impl.bcs
 
+import common.mock.MockSugar
 import cromwell.filesystems.oss.OssPath
-import org.mockito.Mockito.when
+import org.mockito.Mockito._
 
-class BcsJobPathsSpec extends BcsTestUtilSpec {
+class BcsJobPathsSpec extends BcsTestUtilSpec with MockSugar {
   behavior of s"BcsJobPathsSpec"
 
   var root: OssPath = mockPathBuilder.build("oss://bcs-test/root/").getOrElse(throw new IllegalArgumentException())
 
-  var workflowPath = {
+  var workflowPath: BcsWorkflowPaths = {
     val workflowPaths = mock[BcsWorkflowPaths]
 
     when(workflowPaths.workflowRoot).thenReturn(root)
diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobSpec.scala
index 84ed4c4aa58..ad4ec85b64f 100644
--- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobSpec.scala
+++ b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobSpec.scala
@@ -2,10 +2,11 @@ package cromwell.backend.impl.bcs
 
 import com.aliyuncs.batchcompute.main.v20151111.BatchComputeClient
 import com.aliyuncs.batchcompute.pojo.v20151111.TaskDescription
+import common.mock.MockSugar
 import wom.values._
 
-class BcsJobSpec extends BcsTestUtilSpec {
+class BcsJobSpec extends BcsTestUtilSpec with MockSugar {
 
   behavior of s"BcsJob"
 
diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsTestUtilSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsTestUtilSpec.scala
index e883da5fb10..61da2f6aba0 100644
--- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsTestUtilSpec.scala
+++ b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsTestUtilSpec.scala
@@ -1,10 +1,10 @@
 package cromwell.backend.impl.bcs
 
-import com.typesafe.config.ConfigFactory
+import com.typesafe.config.{Config, ConfigFactory}
 import common.collections.EnhancedCollections._
 import cromwell.backend.BackendSpec.buildWdlWorkflowDescriptor
 import cromwell.backend.validation.ContinueOnReturnCodeSet
-import cromwell.backend.{BackendConfigurationDescriptor, BackendJobDescriptorKey, RuntimeAttributeDefinition}
+import cromwell.backend.{BackendConfigurationDescriptor, BackendJobDescriptorKey, BackendWorkflowDescriptor, RuntimeAttributeDefinition}
 import cromwell.core.{TestKitSuite, WorkflowOptions}
 import cromwell.filesystems.oss.OssPathBuilder
 import cromwell.filesystems.oss.nio.DefaultOssStorageConfiguration
@@ -12,14 +12,13 @@ import cromwell.util.SampleWdl
 import org.scalatest.BeforeAndAfter
 import org.scalatest.flatspec.AnyFlatSpecLike
 import org.scalatest.matchers.should.Matchers
-import org.scalatestplus.mockito.MockitoSugar
 import org.slf4j.helpers.NOPLogger
 import spray.json.{JsObject, JsString}
 import wom.values.WomValue
 
 object BcsTestUtilSpec {
 
-  val DefaultRunAttributesString =
+  val DefaultRunAttributesString: String =
     """
      |default-runtime-attributes {
      |  failOnStderr: false
@@ -42,7 +41,7 @@ object BcsTestUtilSpec {
      |}
     """.stripMargin
 
-  val BcsBackendConfigString =
+  val BcsBackendConfigString: String =
    s"""
      |root = "oss://your-bucket/cromwell-exe"
      |dockerRoot = "/cromwell-executions"
@@ -70,7 +69,7 @@ object BcsTestUtilSpec {
      |
      |""".stripMargin
 
-  val BcsBackendConfigWithoutDefaultString =
+  val BcsBackendConfigWithoutDefaultString: String =
    s"""
      |root = "oss://your-bucket/cromwell-exe"
      |dockerRoot = "/cromwell-executions"
@@ -93,7 +92,7 @@ object BcsTestUtilSpec {
      |
      |""".stripMargin
 
-  val BcsGlobalConfigString =
+  val BcsGlobalConfigString: String =
    s"""
      |backend {
      |  default = "BCS"
@@ -109,52 +108,65 @@ object BcsTestUtilSpec {
      |
      |""".stripMargin
 
-  val BcsBackendConfig = ConfigFactory.parseString(BcsBackendConfigString)
-  val BcsGlobalConfig = ConfigFactory.parseString(BcsGlobalConfigString)
-  val BcsBackendConfigWithoutDefault = ConfigFactory.parseString(BcsBackendConfigWithoutDefaultString)
-  val BcsBackendConfigurationDescriptor = BackendConfigurationDescriptor(BcsBackendConfig, BcsGlobalConfig)
-  val BcsBackendConfigurationWithoutDefaultDescriptor = BackendConfigurationDescriptor(BcsBackendConfigWithoutDefault, BcsGlobalConfig)
-  val EmptyWorkflowOption = WorkflowOptions.fromMap(Map.empty).get
+  val BcsBackendConfig: Config = ConfigFactory.parseString(BcsBackendConfigString)
+  val BcsGlobalConfig: Config = ConfigFactory.parseString(BcsGlobalConfigString)
+  val BcsBackendConfigWithoutDefault: Config = ConfigFactory.parseString(BcsBackendConfigWithoutDefaultString)
+  val BcsBackendConfigurationDescriptor: BackendConfigurationDescriptor =
+    BackendConfigurationDescriptor(BcsBackendConfig, BcsGlobalConfig)
+  val BcsBackendConfigurationWithoutDefaultDescriptor: BackendConfigurationDescriptor =
+    BackendConfigurationDescriptor(BcsBackendConfigWithoutDefault, BcsGlobalConfig)
+  val EmptyWorkflowOption: WorkflowOptions = WorkflowOptions.fromMap(Map.empty).get
 }
 
-trait BcsTestUtilSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with MockitoSugar with BeforeAndAfter {
+trait BcsTestUtilSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with BeforeAndAfter {
   before {
     BcsMount.pathBuilders = List(mockPathBuilder)
   }
 
   val jobId = "test-bcs-job"
-  val mockOssConf = DefaultOssStorageConfiguration("oss.aliyuncs.com", "test-id", "test-key")
-  val mockPathBuilder = OssPathBuilder(mockOssConf)
+  val mockOssConf: DefaultOssStorageConfiguration =
+    DefaultOssStorageConfiguration("oss.aliyuncs.com", "test-id", "test-key")
+  lazy val mockPathBuilder: OssPathBuilder = OssPathBuilder(mockOssConf)
   val mockPathBuilders = List(mockPathBuilder)
-  lazy val workflowDescriptor = buildWdlWorkflowDescriptor(
+  lazy val workflowDescriptor: BackendWorkflowDescriptor = buildWdlWorkflowDescriptor(
     SampleWdl.HelloWorld.workflowSource(),
     inputFileAsJson = Option(JsObject(SampleWdl.HelloWorld.rawInputs.safeMapValues(JsString.apply)).compactPrint)
   )
-  lazy val jobKey = {
+  lazy val jobKey: BackendJobDescriptorKey = {
     val call = workflowDescriptor.callable.taskCallNodes.head
     BackendJobDescriptorKey(call, None, 1)
   }
 
-  val expectedContinueOnReturn = ContinueOnReturnCodeSet(Set(0))
-  val expectedDockerTag = Option(BcsDockerWithPath("ubuntu/latest", "oss://bcs-reg/ubuntu/"))
-  val expectedDocker = Option(BcsDockerWithoutPath("registry.cn-beijing.aliyuncs.com/test/testubuntu:0.1"))
+  val expectedContinueOnReturn: ContinueOnReturnCodeSet = ContinueOnReturnCodeSet(Set(0))
+  val expectedDockerTag: Option[BcsDockerWithPath] =
+    Option(BcsDockerWithPath("ubuntu/latest", "oss://bcs-reg/ubuntu/"))
+  val expectedDocker: Option[BcsDockerWithoutPath] =
+    Option(BcsDockerWithoutPath("registry.cn-beijing.aliyuncs.com/test/testubuntu:0.1"))
   val expectedFailOnStderr = false
-  val expectedUserData = Option(Vector(new BcsUserData("key", "value")))
-  val expectedMounts = Option(Vector(new BcsInputMount(Left(mockPathBuilder.build("oss://bcs-bucket/bcs-dir/").get), Right("/home/inputs/"), false)))
-  val expectedCluster = Option(Left("cls-mycluster"))
-  val expectedImageId = Option("img-ubuntu-vpc")
-  val expectedSystemDisk = Option(BcsSystemDisk("cloud", 50))
-  val expectedDataDisk = Option(BcsDataDisk("cloud", 250, "/home/data/"))
-
-  val expectedReserveOnFail = Option(true)
-  val expectedAutoRelease = Option(true)
-  val expectedTimeout = Option(3000)
-  val expectedVerbose = Option(false)
-  val expectedVpc = Option(BcsVpcConfiguration(Option("192.168.0.0/16"), Option("vpc-xxxx")))
-  val expectedTag = Option("jobTag")
-  val expectedIsv = Option("test-isv")
+  val expectedUserData: Option[Vector[BcsUserData]] = Option(Vector(new BcsUserData("key", "value")))
+  val expectedMounts: Option[Vector[BcsInputMount]] =
+    Option(Vector(
+      BcsInputMount(
+        src = Left(mockPathBuilder.build("oss://bcs-bucket/bcs-dir/").get),
+        dest = Right("/home/inputs/"),
+        writeSupport = false,
+      )
+    ))
+  val expectedCluster: Option[Left[String, Nothing]] = Option(Left("cls-mycluster"))
+  val expectedImageId: Option[String] = Option("img-ubuntu-vpc")
+  val expectedSystemDisk: Option[BcsSystemDisk] = Option(BcsSystemDisk("cloud", 50))
+  val expectedDataDisk: Option[BcsDataDisk] = Option(BcsDataDisk("cloud", 250, "/home/data/"))
+
+  val expectedReserveOnFail: Option[Boolean] = Option(true)
+  val expectedAutoRelease: Option[Boolean] = Option(true)
+  val expectedTimeout: Option[Int] = Option(3000)
+  val expectedVerbose: Option[Boolean] = Option(false)
+  val expectedVpc: Option[BcsVpcConfiguration] =
+    Option(BcsVpcConfiguration(Option("192.168.0.0/16"), Option("vpc-xxxx")))
+  val expectedTag: Option[String] = Option("jobTag")
+  val expectedIsv: Option[String] = Option("test-isv")
 
   val expectedRuntimeAttributes = new BcsRuntimeAttributes(expectedContinueOnReturn, expectedDockerTag, expectedDocker, expectedFailOnStderr,
     expectedMounts, expectedUserData, expectedCluster,
diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/callcaching/BcsBackendCacheHitCopyingActorSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/callcaching/BcsBackendCacheHitCopyingActorSpec.scala
index 3ee71686602..dc06332aa22 100644
--- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/callcaching/BcsBackendCacheHitCopyingActorSpec.scala
+++ b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/callcaching/BcsBackendCacheHitCopyingActorSpec.scala
@@ -4,25 +4,25 @@ package cromwell.backend.impl.bcs.callcaching
 import akka.actor.Props
 import akka.testkit.TestActorRef
 import com.typesafe.config.ConfigValueFactory
+import common.mock.MockSugar
 import cromwell.backend.impl.bcs.{BcsBackendInitializationData, BcsConfiguration, BcsRuntimeAttributes, BcsTestUtilSpec, BcsWorkflowPaths}
 import cromwell.backend.standard.callcaching.StandardCacheHitCopyingActorParams
-import cromwell.core.path.{Path}
+import cromwell.core.path.Path
 import wom.values._
 import cromwell.backend.impl.bcs.BcsTestUtilSpec.BcsBackendConfig
 import cromwell.backend.standard.callcaching.DefaultStandardCacheHitCopyingActorParams
 import cromwell.core.simpleton.WomValueSimpleton
 import cromwell.filesystems.oss.OssPath
-import org.mockito.Mockito.when
+import org.mockito.Mockito._
 
 import scala.util.Try
 
-class BcsBackendCacheHitCopyingActorSpec extends BcsTestUtilSpec {
+class BcsBackendCacheHitCopyingActorSpec extends BcsTestUtilSpec with MockSugar {
   behavior of "BcsBackendCacheHitCopyingActor"
 
   type ValueOrDelete = Either[Boolean, AnyRef]
 
-  val workflowPaths = BcsWorkflowPaths(workflowDescriptor, BcsBackendConfig, mockPathBuilders)
-
+  private val workflowPaths = BcsWorkflowPaths(workflowDescriptor, BcsBackendConfig, mockPathBuilders)
 
   private def buildInitializationData(configuration: BcsConfiguration) = {
@@ -30,9 +30,6 @@ class BcsBackendCacheHitCopyingActorSpec extends BcsTestUtilSpec {
     BcsBackendInitializationData(workflowPaths, runtimeAttributesBuilder, configuration, null)
   }
 
-  val runtimeAttributesBuilder = BcsRuntimeAttributes.runtimeAttributesBuilder(BcsTestUtilSpec.BcsBackendConfigurationDescriptor.backendRuntimeAttributesConfig)
-
-
   private def withConfig(configs: Map[String, ValueOrDelete]) = {
     var descriptor = BcsTestUtilSpec.BcsBackendConfigurationDescriptor.copy()
     for ((key, value) <- configs) {
@@ -45,7 +42,7 @@ class BcsBackendCacheHitCopyingActorSpec extends BcsTestUtilSpec {
 
   }
 
-  var cacheHitCopyingActorParams = {
+  private val cacheHitCopyingActorParams = {
     val mockCacheHitCopyingActorParams = mock[DefaultStandardCacheHitCopyingActorParams]
     val id = "test-access-id"
     val key = "test-access-key"
@@ -61,8 +58,6 @@ class BcsBackendCacheHitCopyingActorSpec extends BcsTestUtilSpec {
     val id = "test-access-id"
     val key = "test-access-key"
     val configs = Map("access-id" -> Right(id), "access-key" -> Right(key))
-    val conf = withConfig(configs)
-
     def this() = {
       this(cacheHitCopyingActorParams)
 
diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActorSpec.scala
index 0f83b4fa10f..828112b557d 100644
--- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActorSpec.scala
+++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiAsyncBackendJobExecutionActorSpec.scala
@@ -22,7 +22,6 @@ import cromwell.backend.google.pipelines.common.api.RunStatus.UnsuccessfulRunSta
 import cromwell.backend.google.pipelines.common.io.{DiskType, PipelinesApiWorkingDisk}
 import cromwell.backend.io.JobPathsSpecHelper._
 import cromwell.backend.standard.{DefaultStandardAsyncExecutionActorParams, StandardAsyncExecutionActorParams, StandardAsyncJob, StandardExpressionFunctionsParams}
-import cromwell.core.Tags.PostWomTest
 import cromwell.core._
 import cromwell.core.callcaching.NoDocker
 import cromwell.core.labels.Labels
@@ -40,7 +39,6 @@ import org.scalatest.flatspec.AnyFlatSpecLike
 import org.scalatest.matchers.should.Matchers
 import org.scalatest.prop.Tables.Table
 import org.slf4j.Logger
-import org.specs2.mock.Mockito
 import spray.json._
 import wdl.transforms.draft2.wdlom2wom.WdlDraft2WomExecutableMakers._
 import wdl.transforms.draft2.wdlom2wom._
@@ -61,7 +59,6 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
   with AnyFlatSpecLike
   with Matchers
   with ImplicitSender
-  with Mockito
   with BackendSpec
   with BeforeAndAfter
   with DefaultJsonProtocol {
@@ -413,7 +410,7 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
       dockerImageCacheTestingParamsOpt =
         Option(
           DockerImageCacheTestingParameters(
-            Some("test_madeup_disk_image_name"),
+            Option("test_madeup_disk_image_name"),
             "test_madeup_docker_image_name",
             isDockerImageCacheUsageRequested = true
           )
@@ -432,7 +429,7 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
       dockerImageCacheTestingParamsOpt =
         Option(
           DockerImageCacheTestingParameters(
-            Some("test_madeup_disk_image_name"),
+            Option("test_madeup_disk_image_name"),
             "test_madeup_docker_image_name",
             isDockerImageCacheUsageRequested = false
           )
@@ -607,12 +604,26 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
     actorRef.stop()
   }
 
-  it should "map GCS paths and *only* GCS paths to local" taggedAs PostWomTest ignore {
-    val stringKey = "abc"
+  it should "map GCS paths and *only* GCS paths to local" in {
+    val wdlString =
+      s"""|workflow wf {
+          |  call t
+          |}
+          |
+          |task t {
+          |  String abc
+          |  File lf
+          |  File gcsf
+          |  command {}
+          |  runtime { docker: "ubuntu" }
+          |}
+          |""".stripMargin
+
+    val stringKey = "wf.t.abc"
     val stringVal = WomString("abc")
-    val localFileKey = "lf"
+    val localFileKey = "wf.t.lf"
     val localFileVal = WomSingleFile("/blah/abc")
-    val gcsFileKey = "gcsf"
+    val gcsFileKey = "wf.t.gcsf"
     val gcsFileVal = WomSingleFile("gs://blah/abc")
 
     val inputs: Map[String, WomValue] = Map(
@@ -621,8 +632,10 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
       gcsFileKey -> gcsFileVal
     )
 
-    val wdlNamespace = WdlNamespaceWithWorkflow.load(YoSup.replace("[PREEMPTIBLE]", ""),
-      Seq.empty[Draft2ImportResolver]).get
+    val wdlNamespace = WdlNamespaceWithWorkflow.load(
+      wdlString,
+      Seq.empty[Draft2ImportResolver],
+    ).get
     val womWorkflow = wdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow"))
     wdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match {
       case Right(womExecutable) =>
@@ -673,10 +686,25 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
     }
   }
 
-  private val dockerAndDiskWdlNamespace = WdlNamespaceWithWorkflow.load(SampleWdl.CurrentDirectory.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get,
-    Seq.empty[Draft2ImportResolver]).get
-
-  it should "generate correct JesFileInputs from a WdlMap" taggedAs PostWomTest ignore {
+  private val dockerAndDiskMapsWdlNamespace =
+    WdlNamespaceWithWorkflow.load(
+      SampleWdl.CurrentDirectoryMaps.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get,
+      Seq.empty[Draft2ImportResolver],
+    ).get
+
+  private val dockerAndDiskArrayWdlNamespace =
+    WdlNamespaceWithWorkflow.load(
+      SampleWdl.CurrentDirectoryArray.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get,
+      Seq.empty[Draft2ImportResolver],
+    ).get
+
+  private val dockerAndDiskFilesWdlNamespace =
+    WdlNamespaceWithWorkflow.load(
+      SampleWdl.CurrentDirectoryFiles.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get,
+      Seq.empty[Draft2ImportResolver],
+    ).get
+
+  it should "generate correct JesFileInputs from a WdlMap" in {
     val inputs: Map[String, WomValue] = Map(
       "stringToFileMap" -> WomMap(WomMapType(WomStringType, WomSingleFileType), Map(
         WomString("stringTofile1") -> WomSingleFile("gs://path/to/stringTofile1"),
         WomString("stringTofile2") -> WomSingleFile("gs://path/to/stringTofile2")
       )),
       "fileToStringMap" -> WomMap(WomMapType(WomSingleFileType, WomStringType), Map(
         WomSingleFile("gs://path/to/fileToString1") -> WomString("fileToString1"),
         WomSingleFile("gs://path/to/fileToString2") -> WomString("fileToString2")
       )),
       "fileToFileMap" -> WomMap(WomMapType(WomSingleFileType, WomSingleFileType), Map(
         WomSingleFile("gs://path/to/fileToFile1Key") -> WomSingleFile("gs://path/to/fileToFile1Value"),
         WomSingleFile("gs://path/to/fileToFile2Key") -> WomSingleFile("gs://path/to/fileToFile2Value")
       ))
@@ -697,11 +725,18 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
     )
 
     val workflowInputs = inputs map {
-      case (k, v) => s"wf_whereami.whereami$k" -> v
+      case (key, value) => (s"wf_whereami.whereami.$key", value)
     }
 
-    val womWorkflow = dockerAndDiskWdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow"))
-    dockerAndDiskWdlNamespace.toWomExecutable(Option(workflowInputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match {
+    val womWorkflow =
+      dockerAndDiskMapsWdlNamespace
+        .workflow
+        .toWomWorkflowDefinition(isASubworkflow = false)
+        .getOrElse(fail("failed to get WomDefinition from WdlWorkflow"))
+    val womExecutableChecked =
+      dockerAndDiskMapsWdlNamespace
+        .toWomExecutable(Option(workflowInputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true)
+    womExecutableChecked match {
       case Right(womExecutable) =>
         val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }})
         val workflowDescriptor = BackendWorkflowDescriptor(
@@ -727,32 +762,83 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
 
       val jesInputs = testActorRef.underlyingActor.generateInputs(jobDescriptor)
       jesInputs should have size 8
       jesInputs should contain(PipelinesApiFileInput(
-        "stringToFileMap-0", gcsPath("gs://path/to/stringTofile1"), DefaultPathBuilder.get("path/to/stringTofile1"), workingDisk))
+        name = "wf_whereami.whereami.stringToFileMap-0",
+        cloudPath = gcsPath("gs://path/to/stringTofile1"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/stringTofile1"),
+        mount = workingDisk,
+      ))
       jesInputs should contain(PipelinesApiFileInput(
-        "stringToFileMap-1", gcsPath("gs://path/to/stringTofile2"), DefaultPathBuilder.get("path/to/stringTofile2"), workingDisk))
+        name = "wf_whereami.whereami.stringToFileMap-1",
+        cloudPath = gcsPath("gs://path/to/stringTofile2"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/stringTofile2"),
+        mount = workingDisk,
+      ))
       jesInputs should contain(PipelinesApiFileInput(
-        "fileToStringMap-0", gcsPath("gs://path/to/fileToString1"), DefaultPathBuilder.get("path/to/fileToString1"), workingDisk))
+        name = "wf_whereami.whereami.fileToStringMap-0",
+        cloudPath = gcsPath("gs://path/to/fileToString1"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/fileToString1"),
+        mount = workingDisk,
+      ))
       jesInputs should contain(PipelinesApiFileInput(
-        "fileToStringMap-1", gcsPath("gs://path/to/fileToString2"), DefaultPathBuilder.get("path/to/fileToString2"), workingDisk))
+        name = "wf_whereami.whereami.fileToStringMap-1",
+        cloudPath = gcsPath("gs://path/to/fileToString2"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/fileToString2"),
+        mount = workingDisk,
+      ))
       jesInputs should contain(PipelinesApiFileInput(
-        "fileToFileMap-0", gcsPath("gs://path/to/fileToFile1Key"), DefaultPathBuilder.get("path/to/fileToFile1Key"), workingDisk))
+        name = "wf_whereami.whereami.fileToFileMap-0",
+        cloudPath = gcsPath("gs://path/to/fileToFile1Key"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile1Key"),
+        mount = workingDisk,
+      ))
       jesInputs should contain(PipelinesApiFileInput(
-        "fileToFileMap-1", gcsPath("gs://path/to/fileToFile1Value"), DefaultPathBuilder.get("path/to/fileToFile1Value"), workingDisk))
+        name = "wf_whereami.whereami.fileToFileMap-1",
+        cloudPath = gcsPath("gs://path/to/fileToFile1Value"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile1Value"),
+        mount = workingDisk,
+      ))
       jesInputs should contain(PipelinesApiFileInput(
-        "fileToFileMap-2", gcsPath("gs://path/to/fileToFile2Key"), DefaultPathBuilder.get("path/to/fileToFile2Key"), workingDisk))
+        name = "wf_whereami.whereami.fileToFileMap-2",
+        cloudPath = gcsPath("gs://path/to/fileToFile2Key"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile2Key"),
+        mount = workingDisk,
+      ))
       jesInputs should contain(PipelinesApiFileInput(
-        "fileToFileMap-3", gcsPath("gs://path/to/fileToFile2Value"), DefaultPathBuilder.get("path/to/fileToFile2Value"), workingDisk))
+        name = "wf_whereami.whereami.fileToFileMap-3",
+        cloudPath = gcsPath("gs://path/to/fileToFile2Value"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/fileToFile2Value"),
+        mount = workingDisk,
+      ))
 
       case Left(badness) => fail(badness.toList.mkString(", "))
     }
   }
 
-  def makeJesActorRef(sampleWdl: SampleWdl, callName: LocallyQualifiedName, inputs: Map[FullyQualifiedName, WomValue],
-                      functions: PipelinesApiExpressionFunctions = TestableJesExpressionFunctions):
+  private def makeJesActorRef(sampleWdl: SampleWdl,
+                              workflowInputs: Map[FullyQualifiedName, WomValue],
+                              callName: LocallyQualifiedName,
+                              callInputs: Map[LocallyQualifiedName, WomValue],
+                              functions: PipelinesApiExpressionFunctions = TestableJesExpressionFunctions,
+                             ):
   TestActorRef[TestablePipelinesApiJobExecutionActor] = {
-    val womWorkflow = WdlNamespaceWithWorkflow.load(sampleWdl.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get,
-      Seq.empty[Draft2ImportResolver]).get.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow"))
-    dockerAndDiskWdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match {
+    val wdlNamespaceWithWorkflow =
+      WdlNamespaceWithWorkflow.load(
+        sampleWdl.asWorkflowSources(DockerAndDiskRuntime).workflowSource.get,
+        Seq.empty[Draft2ImportResolver],
+      ).get
+    val womWorkflow =
+      wdlNamespaceWithWorkflow
+        .workflow
+        .toWomWorkflowDefinition(isASubworkflow = false)
+        .getOrElse(fail("failed to get WomDefinition from WdlWorkflow"))
+    val womExecutableChecked =
+      wdlNamespaceWithWorkflow
+        .toWomExecutable(
+          Option(workflowInputs.toJson.compactPrint),
+          NoIoFunctionSet,
+          strictValidation = true,
+        )
+    womExecutableChecked match {
      case Right(womExecutable) =>
        val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }})
        val workflowDescriptor = BackendWorkflowDescriptor(
@@ -769,7 +855,16 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
        val call: CommandCallNode = workflowDescriptor.callable.taskCallNodes.find(_.localName == callName).get
        val key = BackendJobDescriptorKey(call, None, 1)
        val runtimeAttributes = makeRuntimeAttributes(call)
-        val jobDescriptor = BackendJobDescriptor(workflowDescriptor, key, runtimeAttributes, fqnWdlMapToDeclarationMap(inputs), NoDocker, None, Map.empty)
+        val jobDescriptor =
+          BackendJobDescriptor(
+            workflowDescriptor = workflowDescriptor,
+            key = key,
+            runtimeAttributes = runtimeAttributes,
+            evaluatedTaskInputs = fqnWdlMapToDeclarationMap(callInputs),
+            maybeCallCachingEligible = NoDocker,
+            dockerSize = None,
+            prefetchedKvStoreEntries = Map.empty,
+          )
        val props = Props(new TestablePipelinesApiJobExecutionActor(jobDescriptor, Promise(), papiConfiguration, functions))
        TestActorRef[TestablePipelinesApiJobExecutionActor](props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}")
@@ -777,24 +872,31 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
     }
   }
 
-  it should "generate correct JesOutputs" taggedAs PostWomTest ignore {
-    val inputs = Map(
-      "in" -> WomSingleFile("gs://blah/b/c.txt")
+  it should "generate correct JesOutputs" in {
+    val womFile = WomSingleFile("gs://blah/b/c.txt")
+    val workflowInputs = Map("file_passing.f" -> womFile)
+    val callInputs = Map(
+      "in" -> womFile, // how does one programmatically map the wf inputs to the call inputs?
+      "out_name" -> WomString("out"), // is it expected that this isn't using the default?
     )
-    val jesBackend = makeJesActorRef(SampleWdl.FilePassingWorkflow, "a", inputs).underlyingActor
+    val jesBackend = makeJesActorRef(SampleWdl.FilePassingWorkflow, workflowInputs, "a", callInputs).underlyingActor
     val jobDescriptor = jesBackend.jobDescriptor
     val workflowId = jesBackend.workflowId
     val jesInputs = jesBackend.generateInputs(jobDescriptor)
     jesInputs should have size 1
-    jesInputs should contain(PipelinesApiFileInput("in-0", gcsPath("gs://blah/b/c.txt"), DefaultPathBuilder.get("blah/b/c.txt"), workingDisk))
+    jesInputs should contain(PipelinesApiFileInput(
+      name = "file_passing.a.in-0",
+      cloudPath = gcsPath("gs://blah/b/c.txt"),
+      relativeHostPath = DefaultPathBuilder.get("blah/b/c.txt"),
+      mount = workingDisk,
+    ))
     val jesOutputs = jesBackend.generateOutputs(jobDescriptor)
     jesOutputs should have size 1
     jesOutputs should contain(PipelinesApiFileOutput("out", gcsPath(s"gs://my-cromwell-workflows-bucket/file_passing/$workflowId/call-a/out"), DefaultPathBuilder.get("out"), workingDisk, optional = false, secondary = false))
   }
 
-
-  it should "generate correct JesInputs when a command line contains a write_lines call in it" taggedAs PostWomTest ignore {
+  it should "generate correct JesInputs when a command line contains a write_lines call in it" in {
     val inputs = Map(
       "strs" -> WomArray(WomArrayType(WomStringType), Seq("A", "B", "C").map(WomString))
     )
@@ -806,24 +908,39 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
     }
 
     val functions = new TestPipelinesApiExpressionFunctions
-    val jesBackend = makeJesActorRef(SampleWdl.ArrayIO, "serialize", inputs, functions).underlyingActor
+    val jesBackend = makeJesActorRef(SampleWdl.ArrayIO, Map.empty, "serialize", inputs, functions).underlyingActor
     val jobDescriptor = jesBackend.jobDescriptor
     val jesInputs = jesBackend.generateInputs(jobDescriptor)
     jesInputs should have size 1
     jesInputs should contain(PipelinesApiFileInput(
-      "c6fd5c91-0", gcsPath("gs://some/path/file.txt"), DefaultPathBuilder.get("some/path/file.txt"), workingDisk))
+      name = "c35ad8d3-0",
+      cloudPath = gcsPath("gs://some/path/file.txt"),
+      relativeHostPath = DefaultPathBuilder.get("some/path/file.txt"),
+      mount = workingDisk,
+    ))
     val jesOutputs = jesBackend.generateOutputs(jobDescriptor)
     jesOutputs should have size 0
   }
 
-  it should "generate correct JesFileInputs from a WdlArray" taggedAs PostWomTest ignore {
+  it should "generate correct JesFileInputs from a WdlArray" in {
     val inputs: Map[String, WomValue] = Map(
       "fileArray" -> WomArray(WomArrayType(WomSingleFileType), Seq(WomSingleFile("gs://path/to/file1"), WomSingleFile("gs://path/to/file2")))
     )
 
-    val womWorkflow = dockerAndDiskWdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow"))
-    dockerAndDiskWdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match {
+    val workflowInputs = inputs map {
+      case (key, value) => (s"wf_whereami.whereami.$key", value)
+    }
+
+    val womWorkflow =
+      dockerAndDiskArrayWdlNamespace
+        .workflow
+        .toWomWorkflowDefinition(isASubworkflow = false)
+        .getOrElse(fail("failed to get WomDefinition from WdlWorkflow"))
+    val womExecutableChecked =
+      dockerAndDiskArrayWdlNamespace
+        .toWomExecutable(Option(workflowInputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true)
+    womExecutableChecked match {
       case Right(womExecutable) =>
         val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }})
         val workflowDescriptor = BackendWorkflowDescriptor(
@@ -848,20 +965,41 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
 
      val jesInputs = testActorRef.underlyingActor.generateInputs(jobDescriptor)
      jesInputs should have size 2
-      jesInputs should contain(PipelinesApiFileInput("fileArray-0", gcsPath("gs://path/to/file1"), DefaultPathBuilder.get("path/to/file1"), workingDisk))
-      jesInputs should contain(PipelinesApiFileInput("fileArray-1", gcsPath("gs://path/to/file2"), DefaultPathBuilder.get("path/to/file2"), workingDisk))
+      jesInputs should contain(PipelinesApiFileInput(
+        name = "wf_whereami.whereami.fileArray-0",
+        cloudPath = gcsPath("gs://path/to/file1"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/file1"),
+        mount = workingDisk,
+      ))
+      jesInputs should contain(PipelinesApiFileInput(
+        name = "wf_whereami.whereami.fileArray-1",
+        cloudPath = gcsPath("gs://path/to/file2"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/file2"),
+        mount = workingDisk,
+      ))
 
      case Left(badness) => fail(badness.toList.mkString(", "))
    }
  }
 
-  it should "generate correct JesFileInputs from a WdlFile" taggedAs PostWomTest ignore {
+  it should "generate correct JesFileInputs from a WdlFile" in {
    val inputs: Map[String, WomValue] = Map(
      "file1" -> WomSingleFile("gs://path/to/file1"),
      "file2" -> WomSingleFile("gs://path/to/file2")
    )
 
-    val womWorkflow = dockerAndDiskWdlNamespace.workflow.toWomWorkflowDefinition(isASubworkflow = false).getOrElse(fail("failed to get WomDefinition from WdlWorkflow"))
-    dockerAndDiskWdlNamespace.toWomExecutable(Option(inputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true) match {
+    val workflowInputs = inputs map {
+      case (key, value) => (s"wf_whereami.whereami.$key", value)
+    }
+
+    val womWorkflow =
+      dockerAndDiskFilesWdlNamespace
+        .workflow
+        .toWomWorkflowDefinition(isASubworkflow = false)
+        .getOrElse(fail("failed to get WomDefinition from WdlWorkflow"))
+    val womExecutableChecked =
+      dockerAndDiskFilesWdlNamespace
+        .toWomExecutable(Option(workflowInputs.toJson.compactPrint), NoIoFunctionSet, strictValidation = true)
+    womExecutableChecked match {
      case Right(womExecutable) =>
        val wdlInputs = womExecutable.resolvedExecutableInputs.flatMap({case (port, v) => v.select[WomValue] map { port -> _ }})
        val workflowDescriptor = BackendWorkflowDescriptor(
@@ -886,8 +1024,18 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
 
      val jesInputs = testActorRef.underlyingActor.generateInputs(jobDescriptor)
      jesInputs should have size 2
-      jesInputs should contain(PipelinesApiFileInput("file1-0", gcsPath("gs://path/to/file1"), DefaultPathBuilder.get("path/to/file1"), workingDisk))
-      jesInputs should contain(PipelinesApiFileInput("file2-0", gcsPath("gs://path/to/file2"), DefaultPathBuilder.get("path/to/file2"), workingDisk))
+      jesInputs should contain(PipelinesApiFileInput(
+        name = "wf_whereami.whereami.file1-0",
+        cloudPath = gcsPath("gs://path/to/file1"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/file1"),
+        mount = workingDisk,
+      ))
+      jesInputs should contain(PipelinesApiFileInput(
+        name = "wf_whereami.whereami.file2-0",
+        cloudPath = gcsPath("gs://path/to/file2"),
+        relativeHostPath = DefaultPathBuilder.get("path/to/file2"),
+        mount = workingDisk,
+      ))
 
      case Left(badness) => fail(badness.toList.mkString(", "))
    }
@@ -974,7 +1122,7 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
      props, s"TestableJesJobExecutionActor-${jobDescriptor.workflowDescriptor.id}")
 
    testActorRef.underlyingActor.monitoringScript shouldBe
-      Some(PipelinesApiFileInput("monitoring-in", gcsPath("gs://path/to/script"), DefaultPathBuilder.get("monitoring.sh"), workingDisk))
+      Option(PipelinesApiFileInput("monitoring-in", gcsPath("gs://path/to/script"), DefaultPathBuilder.get("monitoring.sh"), workingDisk))
  }
 
  it should "not create a JesFileInput for the monitoring script, when not specified" in {
@@ -1038,7 +1186,7 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
      "gs://path/to/gcs_root/wf_hello/e6236763-c518-41d0-9688-432549a8bf7c/call-hello/hello.log"
  }
 
-  it should "return JES log paths for scattered call" taggedAs PostWomTest ignore {
+  it should "return JES log paths for scattered call" in {
    val workflowDescriptor = BackendWorkflowDescriptor(
      WorkflowId(UUID.fromString("e6236763-c518-41d0-9688-432549a8bf7d")),
      WdlNamespaceWithWorkflow.load(
@@ -1065,10 +1213,10 @@ class PipelinesApiAsyncBackendJobExecutionActorSpec extends TestKitSuite
 
    jesBackend.pipelinesApiCallPaths.stdout should be(a[GcsPath])
    jesBackend.pipelinesApiCallPaths.stdout.pathAsString shouldBe
-      "gs://path/to/gcs_root/w/e6236763-c518-41d0-9688-432549a8bf7d/call-B/shard-2/B-2-stdout.log"
+      "gs://path/to/gcs_root/w/e6236763-c518-41d0-9688-432549a8bf7d/call-B/shard-2/stdout"
    jesBackend.pipelinesApiCallPaths.stderr should be(a[GcsPath])
    jesBackend.pipelinesApiCallPaths.stderr.pathAsString shouldBe
-      "gs://path/to/gcs_root/w/e6236763-c518-41d0-9688-432549a8bf7d/call-B/shard-2/B-2-stderr.log"
+      "gs://path/to/gcs_root/w/e6236763-c518-41d0-9688-432549a8bf7d/call-B/shard-2/stderr"
    jesBackend.pipelinesApiCallPaths.jesLogPath should be(a[GcsPath])
    jesBackend.pipelinesApiCallPaths.jesLogPath.pathAsString shouldBe
      "gs://path/to/gcs_root/w/e6236763-c518-41d0-9688-432549a8bf7d/call-B/shard-2/B-2.log"
diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala
index 68233bc1a14..64971449373 100644
--- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala
+++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiCallPathsSpec.scala
@@ -8,10 +8,9 @@ import cromwell.core.TestKitSuite
 import cromwell.util.SampleWdl
 import org.scalatest.flatspec.AnyFlatSpecLike
 import org.scalatest.matchers.should.Matchers
-import org.specs2.mock.Mockito
 import spray.json.{JsObject, JsString}
 
-class PipelinesApiCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Mockito {
+class PipelinesApiCallPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matchers {
 
   import BackendSpec._
   import PipelinesApiTestConfig._
diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala
index 22cf68e1057..8fd8d7dd581 100644
--- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala
+++ 
b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiDockerCacheMappingOperationsSpec.scala @@ -9,7 +9,7 @@ import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers import org.mockito.Mockito._ import org.scalatest.PrivateMethodTester -import org.specs2.mock.Mockito +import common.mock.MockSugar import scala.io.Source @@ -17,7 +17,7 @@ class PipelinesApiDockerCacheMappingOperationsSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers - with Mockito + with MockSugar with PrivateMethodTester { private val pipelinesApiDockerCacheMappingOperationsMock = new PipelinesApiDockerCacheMappingOperations {} @@ -98,7 +98,7 @@ class PipelinesApiDockerCacheMappingOperationsSpec jobLogger = mockJobLogger ) - dockerImageCacheDiskOpt shouldBe Some(expectedDiskImageName) + dockerImageCacheDiskOpt shouldBe Option(expectedDiskImageName) } it should "not use docker image cache if requested docker image is not in cache" in { diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActorSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActorSpec.scala index d6440f088e3..918a8a436ef 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActorSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiInitializationActorSpec.scala @@ -11,19 +11,17 @@ import cromwell.backend.google.pipelines.common.PipelinesApiInitializationActorS import cromwell.backend.google.pipelines.common.PipelinesApiTestConfig.{PapiGlobalConfig, genomicsFactory, googleConfiguration, papiAttributes} import cromwell.backend.{BackendConfigurationDescriptor, BackendSpec, BackendWorkflowDescriptor} import cromwell.core.Dispatcher.BackendDispatcher -import cromwell.core.Tags.{IntegrationTest, PostWomTest} import cromwell.core.TestKitSuite import cromwell.core.filesystem.CromwellFileSystems import cromwell.core.logging.LoggingTest._ import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito import wom.graph.CommandCallNode import scala.concurrent.duration._ class PipelinesApiInitializationActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers - with ImplicitSender with Mockito { + with ImplicitSender { val Timeout: FiniteDuration = 30.second.dilated import BackendSpec._ @@ -62,7 +60,7 @@ class PipelinesApiInitializationActorSpec extends TestKitSuite with AnyFlatSpecL behavior of "PipelinesApiInitializationActor" - it should "log a warning message when there are unsupported runtime attributes" taggedAs IntegrationTest in { + it should "log a warning message when there are unsupported runtime attributes" in { within(Timeout) { val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, @@ -81,8 +79,7 @@ class PipelinesApiInitializationActorSpec extends TestKitSuite with AnyFlatSpecL } } - // Depends on https://github.com/broadinstitute/cromwell/issues/2606 - it should "return InitializationFailed when docker runtime attribute key is not present" taggedAs PostWomTest in { + it should "return InitializationFailed when docker runtime attribute key is not present" in { within(Timeout) { val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, runtime = 
"""runtime { }""") val backend = getJesBackend(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, @@ -110,7 +107,7 @@ object PipelinesApiInitializationActorSpec { | auths = [ | { | name = "application-default" - | scheme = "application_default" + | scheme = "mock" | } | ] |} diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobExecutionActorSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobExecutionActorSpec.scala index e3f125c26f1..37b2ba6f02c 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobExecutionActorSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiJobExecutionActorSpec.scala @@ -9,14 +9,14 @@ import cromwell.backend.{BackendJobDescriptor, MinimumRuntimeSettings} import cromwell.core.TestKitSuite import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Promise} import scala.util.control.NoStackTrace import scala.util.{Failure, Success} -class PipelinesApiJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Mockito { +class PipelinesApiJobExecutionActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with MockSugar { behavior of "PipelinesApiJobExecutionActor" diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala index 4492d7f0607..aa9812e4b3c 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiRuntimeAttributesSpec.scala @@ -11,7 +11,6 @@ import org.scalatest.TestSuite import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike import org.slf4j.helpers.NOPLogger -import org.specs2.mock.Mockito import spray.json._ import wdl4s.parser.MemoryUnit import wom.format.MemorySize @@ -23,7 +22,6 @@ import scala.util.{Failure, Success, Try} final class PipelinesApiRuntimeAttributesSpec extends AnyWordSpecLike with Matchers - with Mockito with PipelinesApiRuntimeAttributesSpecsMixin { "PipelinesApiRuntimeAttributes" should { @@ -306,8 +304,8 @@ trait PipelinesApiRuntimeAttributesSpecsMixin { this: TestSuite => PipelinesApiRuntimeAttributes(validatedRuntimeAttributes, papiConfiguration.runtimeConfig) } - val emptyWorkflowOptions = WorkflowOptions.fromMap(Map.empty).get - val defaultZones = NonEmptyList.of("us-central1-b", "us-central1-a") + val emptyWorkflowOptions: WorkflowOptions = WorkflowOptions.fromMap(Map.empty).get + val defaultZones: NonEmptyList[String] = NonEmptyList.of("us-central1-b", "us-central1-a") val noDefaultsPapiConfiguration = new PipelinesApiConfiguration(PipelinesApiTestConfig.NoDefaultsConfigurationDescriptor, genomicsFactory, googleConfiguration, papiAttributes) val staticRuntimeAttributeDefinitions: Set[RuntimeAttributeDefinition] = 
PipelinesApiRuntimeAttributes.runtimeAttributesBuilder(PipelinesApiTestConfig.papiConfiguration).definitions.toSet diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala index 750a7e177a2..db7b189e79e 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/PipelinesApiWorkflowPathsSpec.scala @@ -7,10 +7,9 @@ import cromwell.core.TestKitSuite import cromwell.util.SampleWdl import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito import spray.json.{JsObject, JsString} -class PipelinesApiWorkflowPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with Mockito { +class PipelinesApiWorkflowPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matchers { import BackendSpec._ import PipelinesApiTestConfig._ import cromwell.filesystems.gcs.MockGcsPathBuilder._ diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorkerSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorkerSpec.scala index ac7915dd67e..d189735eab0 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorkerSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/api/PipelinesApiRequestWorkerSpec.scala @@ -18,7 +18,6 @@ import org.scalatest.BeforeAndAfter import org.scalatest.concurrent.Eventually import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito import scala.collection.immutable.Queue import scala.concurrent.duration._ @@ -26,10 +25,10 @@ import scala.concurrent.{Future, Promise} import scala.util.Try abstract class PipelinesApiRequestWorkerSpec[O >: Null] - extends TestKitSuite with AnyFlatSpecLike with Matchers with Eventually with BeforeAndAfter with Mockito { + extends TestKitSuite with AnyFlatSpecLike with Matchers with Eventually with BeforeAndAfter { implicit var batchHandler: TestPipelinesApiBatchHandler[O] - + behavior of "PipelinesApiRequestWorker" implicit val TestExecutionTimeout: FiniteDuration = 10.seconds.dilated @@ -104,7 +103,7 @@ abstract class PipelinesApiRequestWorkerSpec[O >: Null] //noinspection ScalaUnusedSymbol class TestPipelinesApiRequestWorker(manager: ActorRef, qps: Int Refined Positive, registryProbe: ActorRef)(implicit batchHandler: TestPipelinesApiBatchHandler[_]) - extends PipelinesApiRequestWorker(manager, 10.milliseconds, registryProbe) with Mockito { + extends PipelinesApiRequestWorker(manager, 10.milliseconds, registryProbe) { override def createBatch(): BatchRequest = null override def runBatch(batch: BatchRequest): Unit = batchHandler.runBatch() } @@ -134,11 +133,11 @@ abstract class TestPipelinesApiBatchHandler[O >: Null] extends PipelinesApiReque addStatusPollToBatch(null, batch, resultHandler) completionPromise.future } - + def statusPollResultHandler(pollRequest: PAPIStatusPollRequest, completionPromise: 
Promise[Try[Unit]]): JsonBatchCallback[O] def addStatusPollToBatch(httpRequest: HttpRequest, batch: BatchRequest, resultHandler: JsonBatchCallback[O]): Unit = resultHandlers :+= resultHandler - + def mockStatusInterpreter(operation: O): RunStatus = { val (status, newQueue) = operationStatusResponses.dequeue operationStatusResponses = newQueue @@ -155,7 +154,7 @@ object TestPipelinesApiRequestWorker { registryProbe )) } - + sealed trait PipelinesApiBatchCallbackResponse case object CallbackSuccess extends PipelinesApiBatchCallbackResponse case object CallbackFailure extends PipelinesApiBatchCallbackResponse diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala index 98f1ac52f85..b6b9dee163e 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala @@ -24,11 +24,12 @@ import cromwell.services.instrumentation.CromwellCount import cromwell.services.instrumentation.InstrumentationService.InstrumentationServiceMessage import eu.timepit.refined.numeric.Positive import eu.timepit.refined.refineMV +import org.mockito.ArgumentMatchers._ import org.scalatest.concurrent.Eventually import org.scalatest.flatspec.AnyFlatSpecLike import org.scalatest.matchers.should.Matchers import org.slf4j.Logger -import org.specs2.mock.Mockito +import common.mock.MockSugar import wom.callable.CommandTaskDefinition import wom.graph.{CommandCallNode, FullyQualifiedName, LocalName, WomIdentifier} import wom.values.WomValue @@ -39,7 +40,7 @@ import scala.util.{Success, Try} class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite - with AnyFlatSpecLike with Matchers with ImplicitSender with Mockito with Eventually { + with AnyFlatSpecLike with Matchers with ImplicitSender with MockSugar with Eventually { behavior of "PipelinesApiBackendCacheHitCopyingActor" @@ -427,7 +428,7 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite checkpointingInterval = 10.minutes ) - val papiConfiguration = mock[PipelinesApiConfiguration] + val papiConfiguration = mockWithDefaults[PipelinesApiConfiguration] papiConfiguration.papiAttributes returns papiConfigurationAttributes val commandTaskDefinition = mock[CommandTaskDefinition] @@ -467,7 +468,7 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite pipelinesApiJobPaths.forCallCacheCopyAttempts returns copyDestinationPaths pipelinesApiJobPaths.metadataPaths returns Map.empty - workflowPaths.toJobPaths(anyObject[BackendJobDescriptor]).returns(pipelinesApiJobPaths) + workflowPaths.toJobPaths(any[BackendJobDescriptor]).returns(pipelinesApiJobPaths) def identityPathMocker(str: Any): Try[Path] = { val path = mock[Path] @@ -475,12 +476,12 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite Success(path) } - workflowPaths.getPath(anyString).answers(identityPathMocker _) + workflowPaths.getPath(anyString).answers(identityPathMocker) workflowPaths.gcsAuthFilePath returns mock[Path] val runtimeAttributesBuilder = mock[StandardValidatedRuntimeAttributesBuilder] 
runtimeAttributesBuilder - .build(anyObject[Map[String, WomValue]], anyObject[Logger]) + .build(any[Map[String, WomValue]], any[Logger]) .returns(ValidatedRuntimeAttributes(Map.empty)) val backendInitializationData = mock[PipelinesApiBackendInitializationData] diff --git a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActorSpec.scala index 472a7948e5a..510f4a80742 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActorSpec.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/PipelinesApiAsyncBackendJobExecutionActorSpec.scala @@ -1,17 +1,17 @@ package cromwell.backend.google.pipelines.v2alpha1 import java.nio.file.Paths - import cats.data.NonEmptyList import common.assertion.CromwellTimeoutSpec +import common.mock.MockSugar import cromwell.backend.google.pipelines.common.PipelinesApiFileInput import cromwell.core.path.DefaultPathBuilder import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.scalatestplus.mockito.MockitoSugar -class PipelinesApiAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockitoSugar { +class PipelinesApiAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers + with MockSugar { behavior of "PipelinesParameterConversions" it should "group files by bucket" in { diff --git a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionCommandsSpec.scala b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionCommandsSpec.scala index eb832d099d9..31d1300d4db 100644 --- a/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionCommandsSpec.scala +++ b/supportedBackends/google/pipelines/v2alpha1/src/test/scala/cromwell/backend/google/pipelines/v2alpha1/api/ActionCommandsSpec.scala @@ -9,13 +9,18 @@ import cromwell.filesystems.gcs.GcsPath import eu.timepit.refined.refineMV import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar -class ActionCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito { +class ActionCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "ActionCommands" it should "inject project flag when request fails because of requester pays" in { - val path = GcsPath(any[Path], any[com.google.api.services.storage.Storage], any[com.google.cloud.storage.Storage], "my-project") + val path = GcsPath( + mock[Path], + mock[com.google.api.services.storage.Storage], + mock[com.google.cloud.storage.Storage], + "my-project", + ) val recovered = recoverRequesterPaysError(path) { flag => s"flag is $flag" } @@ -41,7 +46,7 @@ class ActionCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match } it should "use GcsTransferConfiguration to set the number of localization retries" in { - implicit val gcsTransferConfiguration = GcsTransferConfiguration( + implicit val 
gcsTransferConfiguration: GcsTransferConfiguration = GcsTransferConfiguration( transferAttempts = refineMV(31380), parallelCompositeUploadThreshold = "0") retry("I'm very flaky") shouldBe """for i in $(seq 31380); do | ( diff --git a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActorSpec.scala index dd917b39eba..9b93a976e22 100644 --- a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActorSpec.scala +++ b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/PipelinesApiAsyncBackendJobExecutionActorSpec.scala @@ -1,17 +1,17 @@ package cromwell.backend.google.pipelines.v2beta import java.nio.file.Paths - import cats.data.NonEmptyList import common.assertion.CromwellTimeoutSpec +import common.mock.MockSugar import cromwell.backend.google.pipelines.common.PipelinesApiFileInput import cromwell.core.path.DefaultPathBuilder import org.mockito.Mockito._ import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.scalatestplus.mockito.MockitoSugar -class PipelinesApiAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockitoSugar { +class PipelinesApiAsyncBackendJobExecutionActorSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers + with MockSugar { behavior of "PipelinesParameterConversions" it should "group files by bucket" in { diff --git a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionCommandsSpec.scala b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionCommandsSpec.scala index b1de86d5ccc..257fb094b0d 100644 --- a/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionCommandsSpec.scala +++ b/supportedBackends/google/pipelines/v2beta/src/test/scala/cromwell/backend/google/pipelines/v2beta/api/ActionCommandsSpec.scala @@ -9,13 +9,18 @@ import cromwell.filesystems.gcs.GcsPath import eu.timepit.refined.refineMV import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import org.specs2.mock.Mockito +import common.mock.MockSugar -class ActionCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito { +class ActionCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with MockSugar { behavior of "ActionCommands" it should "inject project flag when request fails because of requester pays" in { - val path = GcsPath(any[Path], any[com.google.api.services.storage.Storage], any[com.google.cloud.storage.Storage], "my-project") + val path = GcsPath( + mock[Path], + mock[com.google.api.services.storage.Storage], + mock[com.google.cloud.storage.Storage], + "my-project", + ) val recovered = recoverRequesterPaysError(path) { flag => s"flag is $flag" } @@ -41,7 +46,7 @@ class ActionCommandsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Match } it should "use GcsTransferConfiguration to set the number of localization retries" in { - implicit val gcsTransferConfiguration = GcsTransferConfiguration( + implicit val gcsTransferConfiguration: GcsTransferConfiguration = GcsTransferConfiguration( 
transferAttempts = refineMV(31380), parallelCompositeUploadThreshold = "0") retry("I'm very flaky") shouldBe """for i in $(seq 31380); do | ( diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategySpec.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategySpec.scala index 9e1a33377c7..0cc6267374e 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategySpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/impl/sfs/config/ConfigHashingStrategySpec.scala @@ -14,33 +14,34 @@ import org.scalatest.BeforeAndAfterAll import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks -import org.specs2.mock.Mockito +import common.mock.MockSugar import wom.values.WomSingleFile import scala.util.Success -class ConfigHashingStrategySpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TableDrivenPropertyChecks with Mockito with BeforeAndAfterAll { +class ConfigHashingStrategySpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers + with TableDrivenPropertyChecks with MockSugar with BeforeAndAfterAll { behavior of "ConfigHashingStrategy" - val steak = "Steak" - val steakMd5 = DigestUtils.md5Hex(steak) - val steakXxh64 = HashFileXxH64StrategyMethods.xxh64sumString(steak) - val file = DefaultPathBuilder.createTempFile() - val symLinksDir = DefaultPathBuilder.createTempDirectory("sym-dir") - val pathMd5 = DigestUtils.md5Hex(file.pathAsString) - val md5File = file.sibling(s"${file.name}.md5") + private val steak = "Steak" + private val steakMd5 = DigestUtils.md5Hex(steak) + private val steakXxh64 = HashFileXxH64StrategyMethods.xxh64sumString(steak) + private val file = DefaultPathBuilder.createTempFile() + private val symLinksDir = DefaultPathBuilder.createTempDirectory("sym-dir") + private val pathMd5 = DigestUtils.md5Hex(file.pathAsString) + private val md5File = file.sibling(s"${file.name}.md5") // Not the md5 value of "Steak". 
This is intentional so we can verify which hash is used depending on the strategy val md5FileHash = "103508832bace55730c8ee8d89c1a45f" - override def beforeAll() = { + override def beforeAll(): Unit = { file.write(steak) () } private def randomName(): String = UUID.randomUUID().toString - def mockRequest(withSibling: Boolean, symlink: Boolean) = { + private def mockRequest(withSibling: Boolean, symlink: Boolean) = { if (withSibling && md5File.notExists) md5File.write(md5FileHash + System.lineSeparator()) val requestFile = if (symlink) { val symLink: Path = symLinksDir./(s"symlink-${randomName()}") @@ -56,7 +57,7 @@ class ConfigHashingStrategySpec extends AnyFlatSpec with CromwellTimeoutSpec wit SingleFileHashRequest(null, null, WomSingleFile(requestFile.pathAsString), Option(initData)) } - def makeStrategy(strategy: String, checkSibling: Option[Boolean] = None) = { + private def makeStrategy(strategy: String, checkSibling: Option[Boolean] = None) = { val conf = ConfigFactory.parseString(s"""hashing-strategy: "$strategy"""") ConfigHashingStrategy( checkSibling map { check => conf.withValue("check-sibling-md5", ConfigValueFactory.fromAnyRef(check)) } getOrElse conf @@ -284,7 +285,7 @@ class ConfigHashingStrategySpec extends AnyFlatSpec with CromwellTimeoutSpec wit } } - override def afterAll() = { + override def afterAll(): Unit = { file.delete(true) md5File.delete(true) () diff --git a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemSpec.scala b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemSpec.scala index a428bda2faf..34d5ca3cc73 100644 --- a/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemSpec.scala +++ b/supportedBackends/sfs/src/test/scala/cromwell/backend/sfs/SharedFileSystemSpec.scala @@ -5,25 +5,27 @@ import com.typesafe.config.{Config, ConfigFactory} import common.assertion.CromwellTimeoutSpec import cromwell.backend.BackendSpec import cromwell.core.CromwellFatalExceptionMarker +import cromwell.core.path.PathFactory.PathBuilders import cromwell.core.path.{DefaultPathBuilder, Path} import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers import org.scalatest.prop.TableDrivenPropertyChecks -import org.specs2.mock.Mockito import wom.values.WomSingleFile import scala.io.Source -class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with Mockito with TableDrivenPropertyChecks with BackendSpec { +class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers + with TableDrivenPropertyChecks with BackendSpec { behavior of "SharedFileSystem" - val defaultLocalization = ConfigFactory.parseString(""" localization: [copy, hard-link, soft-link] """) - val hardLinkLocalization = ConfigFactory.parseString(""" localization: [hard-link] """) - val softLinkLocalization = ConfigFactory.parseString(""" localization: [soft-link] """) - val cachedCopyLocalization = ConfigFactory.parseString(""" localization: [cached-copy] """) - val cachedCopyLocalizationMaxHardlinks = ConfigFactory.parseString("""{localization: [cached-copy], max-hardlinks: 3 }""") - val localPathBuilder = List(DefaultPathBuilder) + private val defaultLocalization = ConfigFactory.parseString(""" localization: [copy, hard-link, soft-link] """) + private val hardLinkLocalization = ConfigFactory.parseString(""" localization: [hard-link] """) + private val softLinkLocalization = ConfigFactory.parseString(""" localization: [soft-link] """) + private val cachedCopyLocalization = 
ConfigFactory.parseString(""" localization: [cached-copy] """) + private val cachedCopyLocalizationMaxHardlinks = + ConfigFactory.parseString("""{localization: [cached-copy], max-hardlinks: 3 }""") + private val localPathBuilder = List(DefaultPathBuilder) def localizationTest(config: Config, @@ -32,10 +34,10 @@ class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat fileAlreadyExists: Boolean = false, symlink: Boolean = false, cachedCopy: Boolean = false, - linkNb: Int = 1) = { + linkNb: Int = 1): Path = { val callDir = DefaultPathBuilder.createTempDirectory("SharedFileSystem") val orig = if (fileInCallDir) callDir.createChild("inputFile") else DefaultPathBuilder.createTempFile("inputFile") - val dest = if (fileInCallDir) orig else callDir./(orig.parent.pathAsString.hashCode.toString())./(orig.name) + val dest = if (fileInCallDir) orig else callDir./(orig.parent.pathAsString.hashCode.toString)./(orig.name) val testText = """This is a simple text to check if the localization | works correctly for the file contents. @@ -49,11 +51,11 @@ class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat } val inputs = fqnWdlMapToDeclarationMap(Map("input" -> WomSingleFile(orig.pathAsString))) - val sharedFS = new SharedFileSystem { - override val pathBuilders = localPathBuilder - override val sharedFileSystemConfig = config + val sharedFS: SharedFileSystem = new SharedFileSystem { + override val pathBuilders: PathBuilders = localPathBuilder + override val sharedFileSystemConfig: Config = config override implicit def actorContext: ActorContext = null - override lazy val cachedCopyDir = Some(DefaultPathBuilder.createTempDirectory("cached-copy")) + override lazy val cachedCopyDir: Option[Path] = Option(DefaultPathBuilder.createTempDirectory("cached-copy")) } val cachedFile: Option[Path] = sharedFS.cachedCopyDir.map( _./(orig.parent.pathAsString.hashCode.toString)./(orig.lastModifiedTime.toEpochMilli.toString + orig.name)) @@ -63,10 +65,12 @@ class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat result.isSuccess shouldBe true result.get.toList should contain theSameElementsAs localizedinputs + val destBuffer = Source.fromFile(dest.toFile) dest.exists shouldBe true - Source.fromFile(dest.toFile).mkString shouldBe testText + destBuffer.mkString shouldBe testText countLinks(dest) should be(linkNb) isSymLink(dest) should be(symlink) + destBuffer.close() cachedFile.foreach(_.exists should be(cachedCopy)) orig.delete(swallowIOExceptions = true) @@ -107,9 +111,9 @@ class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat val orig = DefaultPathBuilder.get("/made/up/origin") val inputs = fqnWdlMapToDeclarationMap(Map("input" -> WomSingleFile(orig.pathAsString))) - val sharedFS = new SharedFileSystem { - override val pathBuilders = localPathBuilder - override val sharedFileSystemConfig = defaultLocalization + val sharedFS: SharedFileSystem = new SharedFileSystem { + override val pathBuilders: PathBuilders = localPathBuilder + override val sharedFileSystemConfig: Config = defaultLocalization override implicit def actorContext: ActorContext = null } val result = sharedFS.localizeInputs(callDir, docker = false)(inputs) @@ -118,16 +122,16 @@ class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat } it should "cache only one file if copied multiple times via cached copy" in { - val callDirs: List[Path] = List(1,2,3).map(x => DefaultPathBuilder.createTempDirectory("SharedFileSystem")) + val callDirs: 
List[Path] = List.fill(3)(DefaultPathBuilder.createTempDirectory("SharedFileSystem")) val orig = DefaultPathBuilder.createTempFile("inputFile") - val dests = callDirs.map(_./(orig.parent.pathAsString.hashCode.toString())./(orig.name)) + val dests = callDirs.map(_./(orig.parent.pathAsString.hashCode.toString)./(orig.name)) orig.touch() val inputs = fqnWdlMapToDeclarationMap(Map("input" -> WomSingleFile(orig.pathAsString))) - val sharedFS = new SharedFileSystem { - override val pathBuilders = localPathBuilder - override val sharedFileSystemConfig = cachedCopyLocalization + val sharedFS: SharedFileSystem = new SharedFileSystem { + override val pathBuilders: PathBuilders = localPathBuilder + override val sharedFileSystemConfig: Config = cachedCopyLocalization override implicit def actorContext: ActorContext = null - override lazy val cachedCopyDir = Some(DefaultPathBuilder.createTempDirectory("cached-copy")) + override lazy val cachedCopyDir: Option[Path] = Option(DefaultPathBuilder.createTempDirectory("cached-copy")) } val cachedFile: Option[Path] = sharedFS.cachedCopyDir.map( _./(orig.parent.pathAsString.hashCode.toString)./(orig.lastModifiedTime.toEpochMilli.toString + orig.name)) @@ -147,14 +151,14 @@ class SharedFileSystemSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat it should "copy the file again when the copy-cached file has exceeded the maximum number of hardlinks" in { val callDirs: IndexedSeq[Path] = 1 to 3 map { _ => DefaultPathBuilder.createTempDirectory("SharedFileSystem") } val orig = DefaultPathBuilder.createTempFile("inputFile") - val dests = callDirs.map(_./(orig.parent.pathAsString.hashCode.toString())./(orig.name)) + val dests = callDirs.map(_./(orig.parent.pathAsString.hashCode.toString)./(orig.name)) orig.touch() val inputs = fqnWdlMapToDeclarationMap(Map("input" -> WomSingleFile(orig.pathAsString))) - val sharedFS = new SharedFileSystem { - override val pathBuilders = localPathBuilder - override val sharedFileSystemConfig = cachedCopyLocalizationMaxHardlinks + val sharedFS: SharedFileSystem = new SharedFileSystem { + override val pathBuilders: PathBuilders = localPathBuilder + override val sharedFileSystemConfig: Config = cachedCopyLocalizationMaxHardlinks override implicit def actorContext: ActorContext = null - override lazy val cachedCopyDir = Some(DefaultPathBuilder.createTempDirectory("cached-copy")) + override lazy val cachedCopyDir: Option[Path] = Option(DefaultPathBuilder.createTempDirectory("cached-copy")) } val cachedFile: Option[Path] = sharedFS.cachedCopyDir.map( _./(orig.parent.pathAsString.hashCode.toString)./(orig.lastModifiedTime.toEpochMilli.toString + orig.name)) diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala index 27eb296b02c..731dd3c6c70 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesInitializationActorSpec.scala @@ -8,7 +8,6 @@ import cromwell.backend.BackendSpec._ import cromwell.backend.BackendWorkflowInitializationActor.{InitializationFailed, InitializationSuccess, Initialize} import cromwell.backend.async.RuntimeAttributeValidationFailures import cromwell.backend.{BackendConfigurationDescriptor, BackendWorkflowDescriptor} -import cromwell.core.Tags.PostWomTest import cromwell.core.{TestKitSuite, WorkflowOptions} import 
cromwell.core.filesystem.CromwellFileSystems import cromwell.core.logging.LoggingTest._ @@ -158,7 +157,7 @@ class TesInitializationActorSpec extends TestKitSuite } } - "return InitializationFailed when docker runtime attribute key is not present" taggedAs PostWomTest ignore { + "return InitializationFailed when docker runtime attribute key is not present" in { within(Timeout) { val workflowDescriptor = buildWdlWorkflowDescriptor(HelloWorld, runtime = """runtime { }""") val backend = getActorRef(workflowDescriptor, workflowDescriptor.callable.taskCallNodes, conf) @@ -175,4 +174,3 @@ class TesInitializationActorSpec extends TestKitSuite } } } - diff --git a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesWorkflowPathsSpec.scala b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesWorkflowPathsSpec.scala index 14ed7ffaa6a..8d06277dbb5 100644 --- a/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesWorkflowPathsSpec.scala +++ b/supportedBackends/tes/src/test/scala/cromwell/backend/impl/tes/TesWorkflowPathsSpec.scala @@ -7,9 +7,10 @@ import cromwell.core.{JobKey, WorkflowId} import cromwell.util.WomMocks import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers -import wom.graph.WomIdentifier +import wom.graph.{GraphNode, WomIdentifier} +import common.mock.MockSugar -class TesWorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with BackendSpec { +class TesWorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with BackendSpec with MockSugar { "WorkflowPaths" should "provide correct paths for a workflow" in { val wd = buildWdlWorkflowDescriptor(TestWorkflows.HelloWorld) @@ -33,11 +34,11 @@ class TesWorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat val subWorkflowId = WorkflowId.randomId() subWd.callable returns subWorkflow subWd.id returns subWorkflowId - + val call1 = WomMocks.mockTaskCall(WomIdentifier("call1")) - + val jobKey = new JobKey { - override def node = call1 + override def node: GraphNode = call1 override def tag: String = "tag1" override def index: Option[Int] = Option(1) override def attempt: Int = 2 @@ -45,7 +46,7 @@ class TesWorkflowPathsSpec extends AnyFlatSpec with CromwellTimeoutSpec with Mat subWd.breadCrumbs returns List(BackendJobBreadCrumb(rootWorkflow, rootWorkflowId, jobKey)) subWd.id returns subWorkflowId - + val workflowPaths = TesWorkflowPaths(subWd, TesTestConfig.backendConfig) workflowPaths.workflowRoot.toString shouldBe File(s"local-cromwell-executions/rootWorkflow/$rootWorkflowId/call-call1/shard-1/attempt-2/subWorkflow/$subWorkflowId").pathAsString workflowPaths.dockerWorkflowRoot.toString shouldBe s"/cromwell-executions/rootWorkflow/$rootWorkflowId/call-call1/shard-1/attempt-2/subWorkflow/$subWorkflowId" diff --git a/wom/src/test/scala/wom/types/WomLongTypeSpec.scala b/wom/src/test/scala/wom/types/WomLongTypeSpec.scala index 373b8910a78..0aa98543126 100644 --- a/wom/src/test/scala/wom/types/WomLongTypeSpec.scala +++ b/wom/src/test/scala/wom/types/WomLongTypeSpec.scala @@ -1,41 +1,41 @@ package wom.types import wom.values.{WomInteger, WomLong, WomString} -import org.scalacheck.Properties -import org.scalacheck.Prop._ +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks import spray.json.{JsNumber, JsString} import scala.util.Success +class WomLongTypeSpec extends AnyFlatSpec with Matchers with 
ScalaCheckDrivenPropertyChecks {
+ behavior of "WomLongType"
-object WomLongTypeSpec extends Properties("WomLongType") {
-
- property("conversion from Long") = forAll { i: Long =>
- WomLongType.coerceRawValue(i) == Success(WomLong(i))
+ it should "conversion from Long" in forAll { i: Long =>
+ WomLongType.coerceRawValue(i) shouldBe Success(WomLong(i))
 }
- property("conversion from String") = forAll { i: Long =>
- WomLongType.coerceRawValue(i.toString) == Success(WomLong(i))
+ it should "conversion from String" in forAll { i: Long =>
+ WomLongType.coerceRawValue(i.toString) shouldBe Success(WomLong(i))
 }
- property("conversion from Wom String") = forAll { i: Long =>
- WomLongType.coerceRawValue(WomString(i.toString)) == Success(WomLong(i))
+ it should "conversion from Wom String" in forAll { i: Long =>
+ WomLongType.coerceRawValue(WomString(i.toString)) shouldBe Success(WomLong(i))
 }
- property("conversion from Js String") = forAll { i: Long =>
- WomLongType.coerceRawValue(JsString(i.toString)) == Success(WomLong(i))
+ it should "conversion from Js String" in forAll { i: Long =>
+ WomLongType.coerceRawValue(JsString(i.toString)) shouldBe Success(WomLong(i))
 }
- property("conversion from Int") = forAll { i: Int =>
- WomLongType.coerceRawValue(i) == Success(WomLong(i.toLong))
+ it should "conversion from Int" in forAll { i: Int =>
+ WomLongType.coerceRawValue(i) shouldBe Success(WomLong(i.toLong))
 }
- property("conversion from WomInt") = forAll { i: Int =>
- WomLongType.coerceRawValue(WomInteger(i)) == Success(WomLong(i.toLong))
+ it should "conversion from WomInt" in forAll { i: Int =>
+ WomLongType.coerceRawValue(WomInteger(i)) shouldBe Success(WomLong(i.toLong))
 }
- property("conversion from JsNumber") = forAll { i: Long =>
- WomLongType.coerceRawValue(JsNumber(i)) == Success(WomLong(i))
+ it should "conversion from JsNumber" in forAll { i: Long =>
+ WomLongType.coerceRawValue(JsNumber(i)) shouldBe Success(WomLong(i))
 }
 }
-
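
Aside: the patch above migrates WomLongTypeSpec from a standalone ScalaCheck `Properties` object to ScalaTest with `ScalaCheckDrivenPropertyChecks`, so each property check runs inside an ordinary `it should ... in` test and uses `shouldBe` assertions instead of boolean `Prop`s. A minimal sketch of the resulting pattern (illustrative only; the spec name and property here are invented for the example, not Cromwell code):

    import org.scalatest.flatspec.AnyFlatSpec
    import org.scalatest.matchers.should.Matchers
    import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks

    class LongRoundTripSpec extends AnyFlatSpec with Matchers with ScalaCheckDrivenPropertyChecks {
      behavior of "Long round-tripping"

      // forAll draws many generated Longs; a failing shouldBe throws an
      // assertion error, which ScalaTest reports with the shrunk counterexample.
      it should "survive a String round trip" in forAll { l: Long =>
        l.toString.toLong shouldBe l
      }
    }

From c57b6119b0e99fb15ad5e176c14ac8d8d83971d9 Mon Sep 17 00:00:00 2001
From: Adam Nichols
Date: Thu, 28 Apr 2022 15:51:10 -0400
Subject: [PATCH 20/58] BW-1224 Security upgrade for jackson-databind (#6747)

---
 CromwellRefdiskManifestCreator/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CromwellRefdiskManifestCreator/pom.xml b/CromwellRefdiskManifestCreator/pom.xml
index ebd0cdf32e2..987aad421be 100644
--- a/CromwellRefdiskManifestCreator/pom.xml
+++ b/CromwellRefdiskManifestCreator/pom.xml
@@ -48,7 +48,7 @@
 com.fasterxml.jackson.core
 jackson-databind
- 2.11.1
+ 2.13.2.2
 org.apache.logging.log4j

From 841bf435ddc0d34d8bce29380fa947905f756da2 Mon Sep 17 00:00:00 2001
From: Katrina P <68349264+kpierre13@users.noreply.github.com>
Date: Mon, 2 May 2022 09:57:14 -0400
Subject: [PATCH 21/58] Bw 1126 workflow stdout stderr (#6748)

* bad stdout location tests
* just a draft
* checkStdout helper function
* Adding stderr wdl and test files, refactoring functions and adding functions to handle stderr
* Fixing error messages
* Rebasing
* Omitting the metadata section for Travis builds to pass
* Bw 1126 workflow level stdout / stderr expressions fail, hang execution forever (#6745)
* Combining similar-use functions
* All tests created
* Saving changes
* Adding unit tests
* Update wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala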
Co-authored-by: Chris Llanwarne
* Update wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala
Co-authored-by: Chris Llanwarne
* Update wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala
Co-authored-by: Chris Llanwarne
* Update wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala
Co-authored-by: Chris Llanwarne
* Changing to more general function naming
* Return type change for functions
* Reverting Saloni newline addition
* Reverting Saloni newline addition and fixing return type
* Revert "Reverting Saloni newline addition and fixing return type"
This reverts commit 858214da2b8965119d78748b048c7eb1a9a4d68c.
Reverting for Travis CI tests * Reverting Saloni newline addition and fixing return type * remove_tests * remove_corresponding_wdl_files * Updating some unit tests with more correct functionality * Fixing missed input for Invalid function * triggering build Co-authored-by: Chris Llanwarne Co-authored-by: Saloni Shah Co-authored-by: Chris Llanwarne --- .../AstToWorkflowDefinitionElementSpec.scala | 97 +++++++++++++++++++ .../AstToWorkflowDefinitionElement.scala | 50 +++++++++- 2 files changed, 142 insertions(+), 5 deletions(-) create mode 100644 wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala diff --git a/wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala b/wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala new file mode 100644 index 00000000000..99731de6aad --- /dev/null +++ b/wdl/transforms/draft3/src/test/scala/AstToWorkflowDefinitionElementSpec.scala @@ -0,0 +1,97 @@ +import cats.data.Validated._ +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers._ +import wdl.model.draft3.elements.ExpressionElement._ +import wdl.model.draft3.elements.{InputDeclarationElement, InputsSectionElement, IntermediateValueDeclarationElement, OutputDeclarationElement, OutputsSectionElement, PrimitiveTypeElement} +import wom.types.{WomSingleFileType, WomStringType} +import wdl.transforms.base.ast2wdlom.AstToWorkflowDefinitionElement._ + + +class AstToWorkflowDefinitionElementSpec extends AnyFlatSpec{ + behavior of "Check Stdouts and Stderrs" + + val mockInputSectionStdout = Option(InputsSectionElement(Vector(InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "i", Some(StdoutElement))))) + val mockInputSectionStderr = Option(InputsSectionElement(Vector(InputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "i", Some(StderrElement))))) + val mockInputSectionNonStd = Option(InputsSectionElement(Vector(InputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", Some(StringLiteral("more")))))) + + + val mockIntermediatesStdout = Vector(IntermediateValueDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "y", StdoutElement)) + val mockIntermediatesStderr = Vector(IntermediateValueDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "y", StderrElement)) + val mockIntermediatesNonStd = Vector(IntermediateValueDeclarationElement(PrimitiveTypeElement(WomStringType), "here", StringLiteral("here"))) + + val mockOutputSectionStdout = Option(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "s", StdoutElement)))) + val mockOutputSectionStderr = Option(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomSingleFileType), "s", StderrElement)))) + val mockOutputSectionNonStd = Option(OutputsSectionElement(Vector(OutputDeclarationElement(PrimitiveTypeElement(WomStringType), "more", StringLiteral("more"))))) + + + it should "return an error when there is an stdout in input section" in { + val testInputs = checkDisallowedInputElement(mockInputSectionStdout, StdoutElement, "stdout") + testInputs match { + case Valid(_) => fail("Input section contained stdout. 
Should have failed.") + case Invalid(e) => e.head should be("Workflow cannot have stdout expression in input section at workflow-level.") + } + } + + it should "return an error when there is an stderr in input section" in { + val testInputs = checkDisallowedInputElement(mockInputSectionStderr, StderrElement, "stderr") + testInputs match { + case Valid(_) => fail("Input section contained stderr. Should have failed.") + case Invalid(e) => e.head should be("Workflow cannot have stderr expression in input section at workflow-level.") + } + } + + it should "not return an error for non-stdout/stderr in the inputs section" in { + val testInputs = checkDisallowedInputElement(mockInputSectionNonStd, StdoutElement, "non-stdout/stderr") + testInputs match { + case Valid(_) => // No action + case Invalid(_) => fail("Check shouldn't have returned error as input section had non-stdout/stderr inputs") + } + } + + it should "return an error when there is an stdout in output section" in { + val testOutputs = checkDisallowedOutputElement(mockOutputSectionStdout, StdoutElement, "stdout") + testOutputs match { + case Valid(_) => fail("Output section contained stdout. Should have failed.") + case Invalid(e) => e.head should be("Workflow cannot have stdout expression in output section at workflow-level.") + } + } + + it should "return an error when there is an stderr in output section" in { + val testOutputs = checkDisallowedOutputElement(mockOutputSectionStderr, StderrElement, "stderr") + testOutputs match { + case Valid(_) => fail("Output section contained stderr. Should have failed.") + case Invalid(e) => e.head should be("Workflow cannot have stderr expression in output section at workflow-level.") + } + } + it should "not return an error for non-stdout/stderr in the outputs section" in { + val testOutputs = checkDisallowedOutputElement(mockOutputSectionNonStd, StdoutElement, "non-stdout/stderr") + testOutputs match { + case Valid(_) => // No action + case Invalid(_) => fail("Check shouldn't have returned error as output section had non-stdout/stderr outputs") + } + } + + it should "return an error when there is an stdout at intermediate declaration section" in { + val testIntermediates = checkDisallowedIntermediates(mockIntermediatesStdout, StdoutElement, "stdout") + testIntermediates match { + case Valid(_) => fail("Intermediate section contained stdout. Should have failed.") + case Invalid(e) => e.head should be("Workflow cannot have stdout expression at intermediate declaration section at workflow-level.") + } + } + it should "return an error when there is an stderr at intermediate declaration section" in { + val testIntermediates = checkDisallowedIntermediates(mockIntermediatesStderr, StderrElement, "stderr") + testIntermediates match { + case Valid(_) => fail("Intermediate section contained stderr. 
Should have failed.") + case Invalid(e) => e.head should be("Workflow cannot have stderr expression at intermediate declaration section at workflow-level.") + } + } + + it should "not return an error for non-stdout/stderr in the intermediates section" in { + val testIntermediates = checkDisallowedIntermediates(mockIntermediatesNonStd, StdoutElement, "non-stdout/stderr") + testIntermediates match { + case Valid(_) => // No action + case Invalid(_) => fail("Check shouldn't have returned error as intermediate section had non-stdout/stderr intermediates.") + } + } + +} \ No newline at end of file diff --git a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowDefinitionElement.scala b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowDefinitionElement.scala index ad4c28f064a..6b8e7d08dd0 100644 --- a/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowDefinitionElement.scala +++ b/wdl/transforms/new-base/src/main/scala/wdl/transforms/base/ast2wdlom/AstToWorkflowDefinitionElement.scala @@ -8,6 +8,7 @@ import common.transforms.CheckedAtoB import common.validation.ErrorOr._ import wdl.model.draft3.elements._ import wom.SourceFileLocation +import wdl.model.draft3.elements.ExpressionElement._ object AstToWorkflowDefinitionElement { @@ -26,26 +27,65 @@ object AstToWorkflowDefinitionElement { sourceLocation: Option[SourceFileLocation], bodyElements: Vector[WorkflowBodyElement]) = { - val inputsSectionValidation: ErrorOr[Option[InputsSectionElement]] = validateSize(bodyElements.filterByType[InputsSectionElement], "inputs", 1) - val outputsSectionValidation: ErrorOr[Option[OutputsSectionElement]] = validateSize(bodyElements.filterByType[OutputsSectionElement], "outputs", 1) + val inputsSectionValidation: ErrorOr[Option[InputsSectionElement]] = for { + inputValidateElement <- validateSize(bodyElements.filterByType[InputsSectionElement], "inputs", 1): ErrorOr[Option[InputsSectionElement]] + _ <- checkDisallowedInputElement(inputValidateElement, StdoutElement, "stdout") + _ <- checkDisallowedInputElement(inputValidateElement, StderrElement, "stderr") + } yield inputValidateElement + + val intermediateValueDeclarationStdoutCheck = checkDisallowedIntermediates(bodyElements.filterByType[IntermediateValueDeclarationElement], StdoutElement, "stdout") + val intermediateValueDeclarationStderrCheck = checkDisallowedIntermediates(bodyElements.filterByType[IntermediateValueDeclarationElement], StderrElement, "stderr") + + val outputsSectionValidation: ErrorOr[Option[OutputsSectionElement]] = for { + outputValidateElement <- validateSize(bodyElements.filterByType[OutputsSectionElement], "outputs", 1): ErrorOr[Option[OutputsSectionElement]] + _ <- checkDisallowedOutputElement(outputValidateElement, StdoutElement, "stdout") + _ <- checkDisallowedOutputElement(outputValidateElement, StderrElement, "stderr") + } yield outputValidateElement val graphSections: Vector[WorkflowGraphElement] = bodyElements.filterByType[WorkflowGraphElement] val metaSectionValidation: ErrorOr[Option[MetaSectionElement]] = validateSize(bodyElements.filterByType[MetaSectionElement], "meta", 1) val parameterMetaSectionValidation: ErrorOr[Option[ParameterMetaSectionElement]] = validateSize(bodyElements.filterByType[ParameterMetaSectionElement], "parameterMeta", 1) - (inputsSectionValidation, outputsSectionValidation, metaSectionValidation, parameterMetaSectionValidation) mapN { - (validInputs, validOutputs, meta, parameterMeta) => + 
(inputsSectionValidation, outputsSectionValidation, metaSectionValidation, parameterMetaSectionValidation, intermediateValueDeclarationStdoutCheck, intermediateValueDeclarationStderrCheck) mapN {
+      (validInputs, validOutputs, meta, parameterMeta, _, _) =>
         WorkflowDefinitionElement(name, validInputs, graphSections.toSet, validOutputs, meta, parameterMeta, sourceLocation)
     }
   }
 
+  def checkDisallowedInputElement(inputSection: Option[InputsSectionElement], expressionType: FunctionCallElement, expressionName: String): ErrorOr[Unit] = {
+    inputSection match {
+      case Some(section) =>
+        if (section.inputDeclarations.flatMap(_.expression).exists(_.isInstanceOf[expressionType.type])) {
+          s"Workflow cannot have $expressionName expression in input section at workflow-level.".invalidNel
+        } else ().validNel
+      case None => ().validNel
+    }
+  }
+
+  def checkDisallowedOutputElement(outputSection: Option[OutputsSectionElement], expressionType: FunctionCallElement, expressionName: String): ErrorOr[Unit] = {
+    outputSection match {
+      case Some(section) =>
+        if (section.outputs.map(_.expression).exists(_.isInstanceOf[expressionType.type])) {
+          s"Workflow cannot have $expressionName expression in output section at workflow-level.".invalidNel
+        } else ().validNel
+      case None => ().validNel
+    }
+  }
+
+  def checkDisallowedIntermediates(intermediate: Vector[IntermediateValueDeclarationElement], expressionType: FunctionCallElement, expressionName: String): ErrorOr[Unit] = {
+    if (intermediate.map(_.expression).exists(_.isInstanceOf[expressionType.type])) {
+      s"Workflow cannot have $expressionName expression at intermediate declaration section at workflow-level.".invalidNel
+    } else ().validNel
+  }
+
   private def validateSize[A](elements: Vector[A], sectionName: String, numExpected: Int): ErrorOr[Option[A]] = {
     val sectionValidation: ErrorOr[Option[A]] = if (elements.size > numExpected) {
       s"Workflow cannot have more than $numExpected $sectionName sections, found ${elements.size}.".invalidNel
     } else {
       elements.headOption.validNel
     }
+
     sectionValidation
   }
-}
+}
\ No newline at end of file
From 49f3ef1e8c753b034634bb040c801bb66481c395 Mon Sep 17 00:00:00 2001
From: Katrina P <68349264+kpierre13@users.noreply.github.com>
Date: Tue, 3 May 2022 10:36:02 -0400
Subject: [PATCH 22/58] BW-1194 - Document Cromwell's existing (as of 4/22) WES endpoints in Swagger (#6749)

* Saving changes

* Saving changes

* Adding and deleting endpoints

* Saving changes

* Service-info, cancel, and status endpoints added.

* WES endpoints have api prefix

* Update yaml file and apidocs

* Directed response for new endpoints to existing response options; omitted lines that caused the associated spec file to fail in one of its tests

* Adding newline in .yaml and adding the RESTAPI.md file to gitattributes.

* Adding newline in .yaml and adding the RESTAPI.md file to gitattributes.
* Updating CHANGELOG.md * Update CHANGELOG.md Co-authored-by: Adam Nichols Co-authored-by: Adam Nichols --- .gitattributes | 1 + CHANGELOG.md | 10 + docs/api/RESTAPI.md | 301 ++++++++++- .../src/main/resources/swagger/cromwell.yaml | 492 ++++++++++++++++++ .../cromwell/server/CromwellServer.scala | 4 +- 5 files changed, 805 insertions(+), 3 deletions(-) diff --git a/.gitattributes b/.gitattributes index 9fcf38aa0a7..d4b49d9edc9 100644 --- a/.gitattributes +++ b/.gitattributes @@ -3,3 +3,4 @@ *.MD text *.java text *.html text +docs/api/RESTAPI.md linguist-generated=true diff --git a/CHANGELOG.md b/CHANGELOG.md index 62a3ad92218..1a82bc66eea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,16 @@ ## 79 Release Notes +### WES endpoints preview + +As a means to stay on top of endpoints within our repo, 3 new Workflow Execution Service (WES) endpoints are now documented in the Cromwell Swagger (others to follow as part of later work): + +| HTTP verb | Endpoint path | Description | +| --------- | ------------- |---------------| +| GET | /api/ga4gh/wes/v1/service-info | Server info | +| POST | /api/ga4gh/wes/v1/runs/{run_id}/cancel | Abort workflow | +| GET | /api/ga4gh/wes/v1/runs/{run_id}/status | Workflow status | + ### Scala 2.13 Cromwell is now built with Scala version 2.13. This change should not be noticeable to users but may be of interest to developers of Cromwell backend implementations. diff --git a/docs/api/RESTAPI.md b/docs/api/RESTAPI.md index d74f09a00d7..5063b0114fe 100644 --- a/docs/api/RESTAPI.md +++ b/docs/api/RESTAPI.md @@ -1,5 +1,5 @@ - -Need to file an issue? Head over to [our JIRA](https://broadworkbench.atlassian.net/jira/software/c/projects/CROM/issues). You must create a free profile to view or create. +### Issue tracking -[Issues in Github](https://github.com/broadinstitute/cromwell/issues) remain available for discussion among community members but are not actively monitored by the development team. +Need to file an issue? Head over to [Github Issues](https://github.com/broadinstitute/cromwell/issues). -![Cromwell JIRA](docs/img/cromwell_jira.png) +If you previously filed an issue in JIRA, the link is [here](https://broadworkbench.atlassian.net/jira/software/c/projects/CROM/issues). New signups are no longer accepted. ![Jamie, the Cromwell pig](docs/jamie_the_cromwell_pig.png) From d24645aa9c875805817f4fb8769c79c44554815d Mon Sep 17 00:00:00 2001 From: Janet Gainer-Dewar Date: Tue, 17 May 2022 16:27:42 +0000 Subject: [PATCH 24/58] Update cromwell version from 79 to 80 --- project/Version.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Version.scala b/project/Version.scala index 38b33e4d386..b894fde796c 100644 --- a/project/Version.scala +++ b/project/Version.scala @@ -5,7 +5,7 @@ import sbt._ object Version { // Upcoming release, or current if we're on a master / hotfix branch - val cromwellVersion = "79" + val cromwellVersion = "80" /** * Returns true if this project should be considered a snapshot. 
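A quick way to exercise the three WES endpoints documented in PATCH 22 above, once a release is running locally. This is a minimal smoke-test sketch, not part of the patch series: the base URL (a Cromwell server on `localhost:8000`) and the run id are placeholder assumptions, and the client calls mirror the `Http().singleRequest` usage already present in the wes2cromwell sources later in this series.

```scala
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpMethods, HttpRequest}

object WesEndpointSmokeTest extends App {
  implicit val system: ActorSystem = ActorSystem("wes-smoke-test")
  import system.dispatcher

  // Assumptions: a locally running Cromwell and a placeholder run id.
  val base  = "http://localhost:8000/api/ga4gh/wes/v1"
  val runId = "00000000-0000-0000-0000-000000000000"

  // The three endpoints from the changelog table: service info, status, cancel.
  val requests = List(
    HttpRequest(uri = s"$base/service-info"),
    HttpRequest(uri = s"$base/runs/$runId/status"),
    HttpRequest(method = HttpMethods.POST, uri = s"$base/runs/$runId/cancel")
  )

  requests.foreach { req =>
    Http().singleRequest(req).foreach(resp => println(s"${req.method.value} ${req.uri} -> ${resp.status}"))
  }
}
```

A 404 on the run-specific routes just means the placeholder id was never submitted; `service-info` should answer 200 on any healthy server.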
From d0dd831d12104dc6303ae5bcf16e031db7e1a5e8 Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Fri, 3 Jun 2022 18:04:04 -0400 Subject: [PATCH 25/58] BW-1227 Upgrade org.bouncycastle:bcprov-jdk15on:1.67 (#6775) --- project/Dependencies.scala | 7 ++++++- project/plugins.sbt | 1 + 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 9a558a70a91..704e9d2f198 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -703,6 +703,10 @@ object Dependencies { "org.asynchttpclient" % "async-http-client" % "2.10.5", ) + private val bouncyCastleOverrides = List( + "org.bouncycastle" % "bcprov-jdk15on" % "1.70", + ) + /* If we use a version in one of our projects, that's the one we want all the libraries to use ...plus other groups of transitive dependencies shared across multiple projects @@ -714,5 +718,6 @@ object Dependencies { rdf4jDependencyOverrides ++ grpcDependencyOverrides ++ scalaCollectionCompatOverrides ++ - asyncHttpClientOverrides + asyncHttpClientOverrides ++ + bouncyCastleOverrides } diff --git a/project/plugins.sbt b/project/plugins.sbt index db249558756..51289a45e53 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,3 +2,4 @@ addSbtPlugin("se.marcuslonnberg" % "sbt-docker" % "1.9.0") addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.1.0") addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.2") addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3") +addDependencyTreePlugin From b78af4e5a867ac5ffbd2ada04f9e4fb806453bb8 Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Fri, 3 Jun 2022 20:47:03 -0400 Subject: [PATCH 26/58] BW-1227 Upgrade jackson-databind, nimbus-jose-jwt [due 7/22] (#6776) --- project/Dependencies.scala | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 704e9d2f198..2b3bc3fb574 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -63,7 +63,7 @@ object Dependencies { private val heterodonV = "1.0.0-beta3" private val hsqldbV = "2.6.1" private val http4sV = "0.21.31" // this release is EOL. We need to upgrade further for cats3. https://http4s.org/versions/ - private val jacksonV = "2.13.2" + private val jacksonV = "2.13.3" private val janinoV = "3.1.6" private val jsr305V = "3.0.2" private val junitV = "4.13.2" @@ -619,6 +619,14 @@ object Dependencies { Any dependencies that are removed may be also removed from this list. However, be careful about downgrading any of these dependencies. Older versions have known vulnerabilities, ex: CVE-2017-7525 + + === SECURITY UPGRADES === + + When upgrading dependencies to fix security issues, it is preferable to start with upgrading the + library that brings it in. Only fall back to overriding here when the latest library version still + has a vulnerable version of the dependency, or a major version upgrade is required and infeasible. + This algorithm makes it simpler to upgrade libraries in the future, because we don't have to + remember to remove the override. 
*/ val googleHttpClientDependencies = List( @@ -703,6 +711,11 @@ object Dependencies { "org.asynchttpclient" % "async-http-client" % "2.10.5", ) + + private val nimbusdsOverrides = List( + "com.nimbusds" % "nimbus-jose-jwt" % "9.23", + ) + private val bouncyCastleOverrides = List( "org.bouncycastle" % "bcprov-jdk15on" % "1.70", ) @@ -719,5 +732,6 @@ object Dependencies { grpcDependencyOverrides ++ scalaCollectionCompatOverrides ++ asyncHttpClientOverrides ++ + nimbusdsOverrides ++ bouncyCastleOverrides } From ba4518b2b608185b1f86820048e1c92fe3ddede7 Mon Sep 17 00:00:00 2001 From: Katrina P <68349264+kpierre13@users.noreply.github.com> Date: Tue, 7 Jun 2022 13:50:01 -0400 Subject: [PATCH 27/58] BW-1254 Implement GET /runs endpoint (#6770) * Removed Wes2Cromwell project, added 'runs' endpoint a necessary functions. * saving... * saving... * saving... * saving... * Fix missing JSON implicits * Added endpoint to cromwell.yaml and added api prefix * Getting rid of unnecessary implicits * Fixing and changing function name * Updated RESTAPI.md, cromwell.yaml, and RunListResponse.scala * Getting rid of comments * Omitting unexpected arguments * Removing headers, adding further details in cromwell.yaml * Updating description and RESTAPI.md * saving... * saving... * saving... * saving... * saving... * Moving functions to consolidate, updating CHANGELOG.md, deleting Wes2CromwellInterface.scala file * Updated CHANGELOG.md, passed error message to API * saving... * Update CHANGELOG.md Co-authored-by: Adam Nichols * Update engine/src/main/scala/cromwell/webservice/routes/wes/WesRunRoutes.scala Co-authored-by: Adam Nichols * Update engine/src/main/scala/cromwell/webservice/routes/wes/WesRunRoutes.scala Co-authored-by: Adam Nichols * Changes to CHANGELOG.md, updating routes * Unused imports * Cleanup * Cleanup * codecov * . * . 
Co-authored-by: Adam Nichols Co-authored-by: Adam Nichols --- CHANGELOG.md | 26 ++++ docs/api/RESTAPI.md | 37 +++++- .../src/main/resources/swagger/cromwell.yaml | 49 ++++++++ .../cromwell/server/CromwellServer.scala | 4 +- .../routes/wes/RunListResponse.scala | 21 ++++ .../webservice/routes/wes/WesResponse.scala | 4 + .../webservice/routes/wes/WesRunRoutes.scala | 61 ++++++++++ .../webservice/routes/wes/WesState.scala | 6 + .../src/main/resources/application.conf | 25 ---- wes2cromwell/src/main/resources/logback.xml | 97 --------------- .../src/main/resources/sentry.properties | 2 - .../scala/wes2cromwell/CromwellMetadata.scala | 111 ------------------ .../scala/wes2cromwell/RunListResponse.scala | 17 --- .../wes2cromwell/Wes2CromwellInterface.scala | 77 ------------ .../main/scala/wes2cromwell/WesResponse.scala | 56 --------- .../main/scala/wes2cromwell/WesRunLog.scala | 40 ------- .../scala/wes2cromwell/WesRunRoutes.scala | 109 ----------------- .../main/scala/wes2cromwell/WesServer.scala | 40 ------- .../main/scala/wes2cromwell/WesState.scala | 38 ------ .../scala/wes2cromwell/WesSubmission.scala | 45 ------- 20 files changed, 206 insertions(+), 659 deletions(-) create mode 100644 engine/src/main/scala/cromwell/webservice/routes/wes/RunListResponse.scala create mode 100644 engine/src/main/scala/cromwell/webservice/routes/wes/WesRunRoutes.scala delete mode 100644 wes2cromwell/src/main/resources/application.conf delete mode 100644 wes2cromwell/src/main/resources/logback.xml delete mode 100644 wes2cromwell/src/main/resources/sentry.properties delete mode 100644 wes2cromwell/src/main/scala/wes2cromwell/CromwellMetadata.scala delete mode 100644 wes2cromwell/src/main/scala/wes2cromwell/RunListResponse.scala delete mode 100644 wes2cromwell/src/main/scala/wes2cromwell/Wes2CromwellInterface.scala delete mode 100644 wes2cromwell/src/main/scala/wes2cromwell/WesResponse.scala delete mode 100644 wes2cromwell/src/main/scala/wes2cromwell/WesRunLog.scala delete mode 100644 wes2cromwell/src/main/scala/wes2cromwell/WesRunRoutes.scala delete mode 100644 wes2cromwell/src/main/scala/wes2cromwell/WesServer.scala delete mode 100644 wes2cromwell/src/main/scala/wes2cromwell/WesState.scala delete mode 100644 wes2cromwell/src/main/scala/wes2cromwell/WesSubmission.scala diff --git a/CHANGELOG.md b/CHANGELOG.md index 241af16fb79..540131ad17a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,31 @@ # Cromwell Change Log +## 80 Release Notes + +### Direct WES support in Cromwell + +Cromwell 80 no longer supports the wes2cromwell project within the Cromwell repository. + +In the previous release, 3 Wes2Cromwell endpoints in the Cromwell project were implemented and documented in the Swagger API. Three new endpoints, +located within the wes2cromwell project, will also be moved, implemented, and documented within Cromwell. As a result of this, we can safely remove +and deprecate the wes2cromwell project from the repo. 
+
+Previous endpoints:
+
+| HTTP verb | Endpoint path | Description |
+| --------- | ------------- |---------------|
+| GET | /api/ga4gh/wes/v1/service-info | Server info |
+| POST | /api/ga4gh/wes/v1/runs/{run_id}/cancel | Abort workflow |
+| GET | /api/ga4gh/wes/v1/runs/{run_id}/status | Workflow status |
+
+Newly implemented endpoints:
+
+| HTTP verb | Endpoint path | Description |
+| --------- | ------------- |-----------------|
+| GET | /api/ga4gh/wes/v1/runs | List workflows |
+| POST | /api/ga4gh/wes/v1/runs | Submit workflow |
+| GET | /api/ga4gh/wes/v1/runs/{run_id} | Workflow details |
+
 ## 79 Release Notes
 
 ### Last release with CWL support
diff --git a/docs/api/RESTAPI.md b/docs/api/RESTAPI.md
index 5063b0114fe..bdc5b6a15ff 100644
--- a/docs/api/RESTAPI.md
+++ b/docs/api/RESTAPI.md
@@ -1,5 +1,5 @@
[Garbled in this copy of the patch: the hunk bodies for docs/api/RESTAPI.md, engine/src/main/resources/swagger/cromwell.yaml, engine/src/main/scala/cromwell/server/CromwellServer.scala, and the new engine/src/main/scala/cromwell/webservice/routes/wes sources listed in the diffstat above, together with the deleted wes2cromwell/src/main/resources/application.conf and logback.xml. Only logback fragments survive: the console pattern "%date %X{sourceThread} %-5level - %msg%n", rolling-file settings built from ${FILEROLLER_DIR}, ${FILEROLLER_NAME}, ${FILEROLLER_NAMEPATTERN}, ${FILEROLLER_MAXHISTORY} and ${FILEROLLER_SIZECAP}, the file pattern "%d{yyyy-MM-dd HH:mm:ss,SSS} [%thread] %-5level %logger{35} - %msg%n", and a WARN threshold filter.]
diff --git a/wes2cromwell/src/main/resources/sentry.properties b/wes2cromwell/src/main/resources/sentry.properties
deleted file mode 100644
index ebc5aa32687..00000000000
--- a/wes2cromwell/src/main/resources/sentry.properties
+++ /dev/null
@@ -1,2 +0,0 @@
-# Quiet warnings about missing sentry DSNs by providing an empty string
-dsn=
diff --git a/wes2cromwell/src/main/scala/wes2cromwell/CromwellMetadata.scala b/wes2cromwell/src/main/scala/wes2cromwell/CromwellMetadata.scala
deleted file mode 100644
index 80b1965d4ef..00000000000
--- a/wes2cromwell/src/main/scala/wes2cromwell/CromwellMetadata.scala
+++ /dev/null
@@ -1,111 +0,0 @@
-package wes2cromwell
-
-import spray.json.{DefaultJsonProtocol, JsObject, JsonFormat, JsonParser}
-
-final case class CromwellCallsMetadata(shardIndex: Option[Int],
-                                       commandLine: Option[String],
-                                       returnCode: Option[Int],
-                                       start: Option[String],
-                                       end: Option[String],
-                                       stdout: Option[String],
-                                       stderr: Option[String]
-                                      )
-
-object CromwellCallsMetadata {
-  import DefaultJsonProtocol._
-
-  implicit val cromwellCallsMetadataFormat: JsonFormat[CromwellCallsMetadata] = jsonFormat7(CromwellCallsMetadata.apply)
-}
-
-final case class CromwellSubmittedFiles(workflow: Option[String],
-                                        workflowType: Option[String],
-                                        workflowTypeVersion: Option[String],
-                                        options: Option[String],
-                                        inputs: Option[String],
-                                        labels: Option[String]
-                                       )
-
-object CromwellSubmittedFiles {
-  import DefaultJsonProtocol._
-
-  implicit val cromwellSubmittedFilesFormat: JsonFormat[CromwellSubmittedFiles] = jsonFormat6(CromwellSubmittedFiles.apply)
-}
-
-final case class CromwellMetadata(workflowName: Option[String],
-                                  id: String,
-                                  status: String,
-                                  start: Option[String],
-                                  end: Option[String],
-                                  submittedFiles: CromwellSubmittedFiles,
-                                  outputs: Option[JsObject],
-                                  calls: Option[Map[String, Seq[CromwellCallsMetadata]]]
-                                 ) {
-  import CromwellMetadata._
-
-  def wesRunLog: WesRunLog = {
-    val workflowParams = submittedFiles.inputs.map(JsonParser(_).asJsObject)
-    val workflowTags = submittedFiles.labels.map(JsonParser(_).asJsObject)
-    val workflowEngineParams = submittedFiles.options.map(JsonParser(_).asJsObject)
-
-    val workflowRequest = WesRunRequest(workflow_params = workflowParams,
-                                        workflow_type =
submittedFiles.workflowType.getOrElse("Unable to find workflow type"), - workflow_type_version = submittedFiles.workflowTypeVersion.getOrElse("Unable to find workflow version"), - tags = workflowTags, - workflow_engine_parameters = workflowEngineParams, - workflow_url = None - ) - - val workflowLogData = WesLog(name = workflowName, - cmd = None, - start_time = start, - end_time = end, - stdout = None, - stderr = None, - exit_code = None - ) - - val taskLogs = for { - callsArray <- calls.toList - (taskName, metadataEntries) <- callsArray - metadataEntry <- metadataEntries - logEntry = cromwellCallsMetadataEntryToLogEntry(taskName, metadataEntry) - } yield logEntry - - WesRunLog( - run_id = id, - request = workflowRequest, - state = WesState.fromCromwellStatus(status), - run_log = Option(workflowLogData), - task_logs = Option(taskLogs), - outputs = outputs - ) - } -} - -object CromwellMetadata { - import DefaultJsonProtocol._ - - implicit val cromwellMetadataFormat: JsonFormat[CromwellMetadata] = jsonFormat8(CromwellMetadata.apply) - - def fromJson(json: String): CromwellMetadata = { - val jsonAst = JsonParser(json) - jsonAst.convertTo[CromwellMetadata] - } - - def cromwellCallsMetadataEntryToLogEntry(taskName: String, callsMetadata: CromwellCallsMetadata): WesLog = { - val newTaskName = callsMetadata.shardIndex map { - case -1 => taskName - case notMinusOne => s"$taskName.$notMinusOne" - } getOrElse taskName - - WesLog( - name = Option(newTaskName), - cmd = callsMetadata.commandLine.map(c => List(c)), - start_time = callsMetadata.start, - end_time = callsMetadata.end, - stdout = callsMetadata.stdout, - stderr = callsMetadata.stderr, - exit_code = callsMetadata.returnCode - ) - } -} diff --git a/wes2cromwell/src/main/scala/wes2cromwell/RunListResponse.scala b/wes2cromwell/src/main/scala/wes2cromwell/RunListResponse.scala deleted file mode 100644 index a8eddd3f94e..00000000000 --- a/wes2cromwell/src/main/scala/wes2cromwell/RunListResponse.scala +++ /dev/null @@ -1,17 +0,0 @@ -package wes2cromwell - -import cromwell.api.model.CromwellQueryResults -import spray.json.JsonParser - -case class RunListResponse(runs: List[WesRunStatus], next_page_token: String) - -object RunListResponse { - def fromJson(json: String): RunListResponse = { - import cromwell.api.model.CromwellQueryResultJsonSupport._ - - val jsonAst = JsonParser(json) - val queryResults = jsonAst.convertTo[CromwellQueryResults] - val runs = queryResults.results.toList.map(q => WesRunStatus(q.id.toString, WesState.fromCromwellStatus(q.status.toString))) - RunListResponse(runs, "Not Yet Implemented") // FIXME: paging is still a known sore spot - } -} diff --git a/wes2cromwell/src/main/scala/wes2cromwell/Wes2CromwellInterface.scala b/wes2cromwell/src/main/scala/wes2cromwell/Wes2CromwellInterface.scala deleted file mode 100644 index 2ae0400a878..00000000000 --- a/wes2cromwell/src/main/scala/wes2cromwell/Wes2CromwellInterface.scala +++ /dev/null @@ -1,77 +0,0 @@ -package wes2cromwell - -import java.net.URL - -import akka.actor.ActorSystem -import akka.http.scaladsl.Http -import akka.http.scaladsl.model._ - -import scala.concurrent.{ExecutionContext, Future} -import akka.http.scaladsl.unmarshalling.Unmarshal -import akka.stream.ActorMaterializer -import wes2cromwell.Wes2CromwellInterface._ - -final class Wes2CromwellInterface(cromwellPath: URL)(implicit system: ActorSystem, mat: ActorMaterializer, ec: ExecutionContext) { - def runWorkflow(submission: WesSubmission, headers: List[HttpHeader]): Future[WesResponse] = { - // FIXME - Should be 
able to get away with these fromJsons by implementing the proper marshalling - // Because this request has the entity, it's not going through the standard forwardToCromwell method - val cromwellRequest = HttpRequest(method = HttpMethods.POST, uri = cromwellPath.toString, headers = headers, entity=submission.entity) - handleCromwellResponse(Http().singleRequest(cromwellRequest), (s: String) => WesRunId(WesRunStatus.fromJson(s).run_id)) - } - - def cancelRun(workflowId: String, headers: List[HttpHeader]): Future[WesResponse] = { - val cromwellUrl = s"$cromwellPath/$workflowId/abort" - forwardToCromwell(cromwellUrl, headers, HttpMethods.POST, (s: String) => WesRunId(WesRunStatus.fromJson(s).run_id)) - } - - def runStatus(workflowId: String, headers: List[HttpHeader]): Future[WesResponse] = { - val cromwellUrl = s"$cromwellPath/$workflowId/status" - forwardToCromwell(cromwellUrl, headers, HttpMethods.GET, (s: String) => WesRunStatus.fromJson(s)) - } - - def runLog(workflowId: String, headers: List[HttpHeader]): Future[WesResponse] = { - val cromwellUrl = s"$cromwellPath/$workflowId/metadata" - forwardToCromwell(cromwellUrl, headers, HttpMethods.GET, (s: String) => WesResponseWorkflowMetadata(WesRunLog.fromJson(s))) - } - - def listRuns(pageSize: Option[Int], pageToken: Option[String], headers: List[HttpHeader]): Future[WesResponse] = { - // FIXME: to handle - page_size, page_token - // FIXME: How to handle next_page_token in response? - val cromwellUrl = s"$cromwellPath/query" - forwardToCromwell(cromwellUrl, headers, HttpMethods.GET , (s: String) => WesResponseRunList(RunListResponse.fromJson(s).runs)) - } -} - -object Wes2CromwellInterface { - def forwardToCromwell(url: String, headers: List[HttpHeader], method: HttpMethod, - f: String => WesResponse)(implicit system: ActorSystem, mat: ActorMaterializer, ec: ExecutionContext): Future[WesResponse] = { - val cromwellRequest = HttpRequest(method = method, uri = url, headers = headers) - handleCromwellResponse(Http().singleRequest(cromwellRequest), f) - } - - def handleCromwellResponse(response: Future[HttpResponse], f: String => WesResponse)(implicit mat: ActorMaterializer, ec: ExecutionContext): Future[WesResponse] = { - response.flatMap({ cr => - cr.status match { - /* - Strictly speaking, this is a larger list than what Cromwell typically returns for most endpoints, however - leaving it here as things like Unauthorized/Forbidden start showing up a lot more in CromIAM which might - be underneath these requests instead of OG Cromwell - */ - case StatusCodes.OK | StatusCodes.Created => Unmarshal(cr.entity).to[String].map(s => f(s)) - case StatusCodes.BadRequest => Future.successful(BadRequestError) // In Cromwell, malformed workflow IDs also generate this and that's not supported in WES, but .... 
- case StatusCodes.Unauthorized => Future.successful(UnauthorizedError) - case StatusCodes.NotFound => Future.successful(NotFoundError) - case StatusCodes.Forbidden => Future.successful(ForbiddenError) - case StatusCodes.InternalServerError => Future.successful(InternalServerError) - case _ => Future.successful(InternalServerError) - } - }).recover({case _ => InternalServerError}) - } - - // We'll likely want to live in a world where we're giving more info than this, but that world isn't now - val BadRequestError = WesErrorResponse("The request is malformed", StatusCodes.BadRequest.intValue) - val InternalServerError = WesErrorResponse("Cromwell server error", StatusCodes.InternalServerError.intValue) - val UnauthorizedError = WesErrorResponse("The request is unauthorized", StatusCodes.Unauthorized.intValue) - val ForbiddenError = WesErrorResponse("The requester is not authorized to perform this action", StatusCodes.Forbidden.intValue) - val NotFoundError = WesErrorResponse("The requested workflow run wasn't found", StatusCodes.NotFound.intValue) -} diff --git a/wes2cromwell/src/main/scala/wes2cromwell/WesResponse.scala b/wes2cromwell/src/main/scala/wes2cromwell/WesResponse.scala deleted file mode 100644 index c7e1a3a1a6a..00000000000 --- a/wes2cromwell/src/main/scala/wes2cromwell/WesResponse.scala +++ /dev/null @@ -1,56 +0,0 @@ -package wes2cromwell - -import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport -import cromwell.api.model.CromwellStatus -import spray.json.{DefaultJsonProtocol, JsonParser, RootJsonFormat} - -sealed trait WesResponse extends Product with Serializable -final case class WesErrorResponse(msg: String, status_code: Int) extends WesResponse -final case class WesRunId(run_id: String) extends WesResponse -final case class WesRunStatus(run_id: String, state: WesState) extends WesResponse -final case class WesResponseRunList(runs: List[WesRunStatus]) extends WesResponse -final case class WesResponseWorkflowMetadata(workflowLog: WesRunLog) extends WesResponse - -object WesRunStatus { - def fromJson(json: String): WesRunStatus = { - import cromwell.api.model.CromwellStatusJsonSupport._ - val jsonAst = JsonParser(json) - val cromwellStatus = jsonAst.convertTo[CromwellStatus] - WesRunStatus(cromwellStatus.id, WesState.fromCromwellStatus(cromwellStatus.status)) - } -} - -object WesResponseJsonSupport extends SprayJsonSupport with DefaultJsonProtocol { - import WorkflowLogJsonSupport._ - import WesStateJsonSupport._ - - implicit val WesResponseErrorFormat = jsonFormat2(WesErrorResponse) - implicit val WesResponseRunIdFormat = jsonFormat1(WesRunId) - implicit val WesResponseStatusFormat = jsonFormat2(WesRunStatus.apply) - implicit val WesResponseRunListFormat = jsonFormat1(WesResponseRunList) - implicit val WesResponseRunMetadataFormat = jsonFormat1(WesResponseWorkflowMetadata) - - implicit object WesResponseFormat extends RootJsonFormat[WesResponse] { - import spray.json._ - - def write(r: WesResponse) = { - r match { - case r: WesRunId => r.toJson - case s: WesRunStatus => s.toJson - case l: WesResponseRunList => l.toJson - case e: WesErrorResponse => e.toJson - case m: WesResponseWorkflowMetadata => m.toJson - } - } - - def read(value: JsValue) = throw new UnsupportedOperationException("Reading WesResponse objects from JSON is not supported") - } - - implicit object WesRunStatusFormat extends RootJsonFormat[WesRunStatus] { - import spray.json._ - - def write(r: WesRunStatus) = r.toJson - def read(value: JsValue) = throw new UnsupportedOperationException("Reading 
WesRunStatus objects from JSON is not supported") - } -} - diff --git a/wes2cromwell/src/main/scala/wes2cromwell/WesRunLog.scala b/wes2cromwell/src/main/scala/wes2cromwell/WesRunLog.scala deleted file mode 100644 index 5a9c47e786c..00000000000 --- a/wes2cromwell/src/main/scala/wes2cromwell/WesRunLog.scala +++ /dev/null @@ -1,40 +0,0 @@ -package wes2cromwell - -import spray.json.{JsObject, JsonFormat} - -final case class WesLog(name: Option[String], - cmd: Option[Seq[String]], - start_time: Option[String], - end_time: Option[String], - stdout: Option[String], - stderr: Option[String], - exit_code: Option[Int] - ) - -final case class WesRunRequest(workflow_params: Option[JsObject], - workflow_type: String, - workflow_type_version: String, - tags: Option[JsObject], - workflow_engine_parameters: Option[JsObject], - workflow_url: Option[String] - ) - -final case class WesRunLog(run_id: String, - request: WesRunRequest, - state: WesState, - run_log: Option[WesLog], - task_logs: Option[List[WesLog]], - outputs: Option[JsObject] - ) - -object WesRunLog { - def fromJson(json: String): WesRunLog = CromwellMetadata.fromJson(json).wesRunLog -} - -object WorkflowLogJsonSupport { - import WesStateJsonSupport._ - - implicit val logFormat: JsonFormat[WesLog] = jsonFormat7(WesLog) - implicit val runRequestFormat: JsonFormat[WesRunRequest] = jsonFormat6(WesRunRequest) - implicit val runLogFormat: JsonFormat[WesRunLog] = jsonFormat6(WesRunLog.apply) -} diff --git a/wes2cromwell/src/main/scala/wes2cromwell/WesRunRoutes.scala b/wes2cromwell/src/main/scala/wes2cromwell/WesRunRoutes.scala deleted file mode 100644 index d5736857334..00000000000 --- a/wes2cromwell/src/main/scala/wes2cromwell/WesRunRoutes.scala +++ /dev/null @@ -1,109 +0,0 @@ -package wes2cromwell - -import java.net.URL - -import akka.actor.ActorSystem -import akka.event.LoggingAdapter -import akka.http.scaladsl.model.{HttpHeader, StatusCodes} -import akka.http.scaladsl.server.Directives._ -import akka.http.scaladsl.server.{Directive1, Route} -import akka.http.scaladsl.server.directives.MethodDirectives.post -import akka.http.scaladsl.server.directives.RouteDirectives.complete -import akka.util.Timeout -import com.typesafe.config.ConfigFactory -import net.ceedubs.ficus.Ficus._ -import cromiam.webservice.RequestSupport -import wes2cromwell.WesResponseJsonSupport._ -import WesRunRoutes._ -import akka.stream.ActorMaterializer - -import scala.concurrent.duration._ -import scala.concurrent.ExecutionContext.Implicits.global -import scala.concurrent.Future -import scala.util.{Failure, Success} - -trait WesRunRoutes extends RequestSupport { - implicit def system: ActorSystem - implicit def materializer: ActorMaterializer - - val log: LoggingAdapter - - def cromwellUrl: URL - def cromwellApiVersion: String - def cromwellPath: URL = new URL(cromwellUrl.toString + s"/api/workflows/$cromwellApiVersion") - - implicit lazy val duration: FiniteDuration = ConfigFactory.load().as[FiniteDuration]("akka.http.server.request-timeout") - implicit lazy val timeout: Timeout = duration - - lazy val wes2CromwellInterface = new Wes2CromwellInterface(cromwellPath) - - lazy val runRoutes: Route = - optionalHeaderValue(extractAuthorizationHeader) { authHeader => - val cromwellRequestHeaders = authHeader.toList - pathPrefix("ga4gh" / "wes" / "v1") { - concat( - pathPrefix("runs") { - concat( - pathEnd { - concat( - get { - parameters(("page_size".as[Int].?, "page_token".?)) { (pageSize, pageToken) => - completeCromwellResponse(wes2CromwellInterface.listRuns(pageSize, pageToken, 
cromwellRequestHeaders)) - } - }, - post { - extractStrictRequest { request => - extractSubmission() { submission => - completeCromwellResponse(wes2CromwellInterface.runWorkflow(submission, cromwellRequestHeaders)) - } - } - } - ) - }, - path(Segment) { workflowId => - concat( - get { - completeCromwellResponse(wes2CromwellInterface.runLog(workflowId, cromwellRequestHeaders)) - }, - delete { - completeCromwellResponse(wes2CromwellInterface.cancelRun(workflowId, cromwellRequestHeaders)) - } - ) - }, - path(Segment / "status") { workflowId => - get { - completeCromwellResponse(wes2CromwellInterface.runStatus(workflowId, cromwellRequestHeaders)) - } - } - ) - } - ) - } - } - - def extractSubmission(): Directive1[WesSubmission] = { - formFields(( - "workflow_params", - "workflow_type", - "workflow_type_version", - "tags".?, - "workflow_engine_parameters".?, - "workflow_url", - "workflow_attachment".as[String].* - )).as(WesSubmission) - } -} - -object WesRunRoutes { - def extractAuthorizationHeader: HttpHeader => Option[HttpHeader] = { - case h: HttpHeader if h.name() == "Authorization" => Option(h) - case _ => None - } - - def completeCromwellResponse(future: => Future[WesResponse]): Route = { - onComplete(future) { - case Success(a) => complete(a) - case Failure(e) => complete(WesErrorResponse(e.getMessage, StatusCodes.InternalServerError.intValue)) - } - } -} diff --git a/wes2cromwell/src/main/scala/wes2cromwell/WesServer.scala b/wes2cromwell/src/main/scala/wes2cromwell/WesServer.scala deleted file mode 100644 index 576306e6c6c..00000000000 --- a/wes2cromwell/src/main/scala/wes2cromwell/WesServer.scala +++ /dev/null @@ -1,40 +0,0 @@ -package wes2cromwell - -import java.net.URL - -import scala.concurrent.Await -import scala.concurrent.duration.Duration -import akka.actor.ActorSystem -import akka.event.Logging -import akka.http.scaladsl.Http -import akka.http.scaladsl.server.Route -import akka.stream.ActorMaterializer -import com.typesafe.config.ConfigFactory -import net.ceedubs.ficus.Ficus._ - -object WesServer extends App with WesRunRoutes { - val config = ConfigFactory.load() - - val port = config.as[Int]("wes2cromwell.port") - val interface = config.as[String]("wes2cromwell.interface") - - override implicit val system: ActorSystem = ActorSystem("wes2cromwell") - override implicit val materializer: ActorMaterializer = ActorMaterializer() - - override val log = Logging(system, getClass) - - lazy val cromwellScheme = config.as[String]("cromwell.scheme") - lazy val cromwellInterface = config.as[String]("cromwell.interface") - lazy val cromwellPort = config.as[Int]("cromwell.port") - - override lazy val cromwellUrl = new URL(s"$cromwellScheme://$cromwellInterface:$cromwellPort") - override val cromwellApiVersion = "v1" - - val routes: Route = runRoutes - - Http().bindAndHandle(routes, interface, port) - - println(s"Server online. 
Listening at port:$port") - - Await.result(system.whenTerminated, Duration.Inf) -} diff --git a/wes2cromwell/src/main/scala/wes2cromwell/WesState.scala b/wes2cromwell/src/main/scala/wes2cromwell/WesState.scala deleted file mode 100644 index a260df0d9f7..00000000000 --- a/wes2cromwell/src/main/scala/wes2cromwell/WesState.scala +++ /dev/null @@ -1,38 +0,0 @@ -package wes2cromwell - -import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport -import spray.json.{DefaultJsonProtocol, JsString, JsValue, RootJsonFormat} - -sealed trait WesState extends Product with Serializable -case object UNKNOWN extends WesState -case object QUEUED extends WesState -case object INITIALIZING extends WesState -case object RUNNING extends WesState -case object PAUSED extends WesState -case object COMPLETE extends WesState -case object EXECUTOR_ERROR extends WesState -case object SYSTEM_ERROR extends WesState -case object CANCELED extends WesState - -object WesState { - def fromCromwellStatus(cromwellStatus: String): WesState = { - cromwellStatus match { - case "On Hold" => PAUSED - case "Submitted" => QUEUED - case "Running" => RUNNING - case "Aborting" => CANCELED - case "Aborted" => CANCELED - case "Succeeded" => COMPLETE - case "Failed" => EXECUTOR_ERROR - case _ => UNKNOWN - } - } -} - -object WesStateJsonSupport extends SprayJsonSupport with DefaultJsonProtocol { - implicit object WesStateFormat extends RootJsonFormat[WesState] { - def write(obj: WesState): JsValue = JsString(obj.toString) - - def read(json: JsValue): WesState = throw new UnsupportedOperationException("Reading WesState unsupported") - } -} diff --git a/wes2cromwell/src/main/scala/wes2cromwell/WesSubmission.scala b/wes2cromwell/src/main/scala/wes2cromwell/WesSubmission.scala deleted file mode 100644 index 464d27a7e05..00000000000 --- a/wes2cromwell/src/main/scala/wes2cromwell/WesSubmission.scala +++ /dev/null @@ -1,45 +0,0 @@ -package wes2cromwell - -import akka.http.scaladsl.model.{HttpEntity, MediaTypes, MessageEntity, Multipart} -import cromiam.auth.Collection.LabelsKey -import cromiam.webservice.SubmissionSupport._ - -final case class WesSubmission(workflowParams: String, - workflowType: String, - workflowTypeVersion: String, - tags: Option[String], - workflowEngineParameters: Option[String], - workflowUrl: String, - workflowAttachment: Iterable[String] - ) { - val entity: MessageEntity = { - /* - FIXME: - - Super oversimplification going on here as Cromwell's API is expected to normalize w/ WES a bit over the course - of the quarter and it's not worth doing it here and throwing it out later (there's close to a 0% chance this will - be used between now and then) - - At the moment, just taking the head of workflowAttachment (if it exists) and dropping that into the workflow source, - and we're ignoring the possible existence of a zip bundle. Eventually the way this will work is that there'll be - an Iterable of workflow files and workflowUrl will point to which one is the source and the rest goes into the bundle. 
- - NB: I think we've already lost too much information for the above to happen as there's an optional use of - Content-Disposition headers on each of these files which can be used to describe directory structure and such - for relative import resolution - */ - val sourcePart = workflowAttachment.headOption map { a => Multipart.FormData.BodyPart(WorkflowSourceKey, HttpEntity(MediaTypes.`application/json`, a)) } - - val urlPart = Multipart.FormData.BodyPart(WorkflowUrlKey, HttpEntity(MediaTypes.`application/json`, workflowUrl)) - - val typePart = Multipart.FormData.BodyPart(WorkflowTypeKey, HttpEntity(MediaTypes.`application/json`, workflowType)) - val typeVersionPart = Multipart.FormData.BodyPart(WorkflowTypeVersionKey, HttpEntity(MediaTypes.`application/json`, workflowTypeVersion)) - val inputsPart = Multipart.FormData.BodyPart(WorkflowInputsKey, HttpEntity(MediaTypes.`application/json`, workflowParams)) - val optionsPart = workflowEngineParameters map { o => Multipart.FormData.BodyPart(WorkflowOptionsKey, HttpEntity(MediaTypes.`application/json`, o)) } - val labelsPart = tags map { t => Multipart.FormData.BodyPart(LabelsKey, HttpEntity(MediaTypes.`application/json`, t)) } - - val parts = List(sourcePart, Option(urlPart), Option(typePart), Option(typeVersionPart), Option(inputsPart), optionsPart, labelsPart).flatten - - Multipart.FormData(parts: _*).toEntity() - } -} From 7e067d569a52f1ec4d7e76e0c77307cb021d93ba Mon Sep 17 00:00:00 2001 From: mspector Date: Thu, 9 Jun 2022 09:06:55 -0400 Subject: [PATCH 28/58] [BT-504] cromwell graceful restarts (#6769) * implement reversed ordering for start and end keys * add unit test for new timestamp ordering * more coverage in unit test --- .../webservice/MetadataBuilderActorSpec.scala | 30 +++++++++++++++++++ .../impl/builder/MetadataComponent.scala | 13 ++++++-- 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala b/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala index ac7966f1da7..a520faee1f2 100644 --- a/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/MetadataBuilderActorSpec.scala @@ -227,6 +227,36 @@ class MetadataBuilderActorSpec extends TestKitSuite with AsyncFlatSpecLike with ) } + + it should "use reverse date ordering (oldest first) for event start and stop values" in { + val eventBuilderList = List( + ("start", "1990-12-20T12:30:00.000Z", OffsetDateTime.now), + ("start", "1990-12-20T12:30:01.000Z", OffsetDateTime.now.plusSeconds(1)), + ("end", "2018-06-02T12:30:00.000Z", OffsetDateTime.now.plusSeconds(2)), + ("end", "2018-06-02T12:30:01.000Z", OffsetDateTime.now.plusSeconds(3)), + ) + val workflowId = WorkflowId.randomId() + val expectedRes = + s""""calls": { + | "fqn": [{ + | "attempt": 1, + | "end": "2018-06-02T12:30:00.000Z", + | "start": "1990-12-20T12:30:00.000Z", + | "shardIndex": -1 + | }] + | }, + | "id": "$workflowId"""".stripMargin + + assertMetadataKeyStructure( + eventList = eventBuilderList, + expectedJson = expectedRes, + workflow = workflowId, + eventMaker = makeCallEvent, + metadataBuilderActorName = "mba-start-end-values", + ) + } + + it should "build JSON object structure from dotted key syntax" in { val eventBuilderList = List( ("a:b:c", "abc", OffsetDateTime.now), diff --git a/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala 
b/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala index 8f2697fe26a..428feafe1a1 100644 --- a/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala +++ b/services/src/main/scala/cromwell/services/metadata/impl/builder/MetadataComponent.scala @@ -11,6 +11,7 @@ import spray.json._ import scala.collection.immutable.TreeMap import scala.language.postfixOps import scala.util.{Random, Try} +import java.time.Instant object MetadataComponent { implicit val MetadataComponentMonoid: Monoid[MetadataComponent] = new Monoid[MetadataComponent] { @@ -88,8 +89,12 @@ object MetadataComponent { } private def customOrdering(event: MetadataEvent): Option[Ordering[MetadataPrimitive]] = event match { - case MetadataEvent(MetadataKey(_, Some(_), key), _, _) if key == CallMetadataKeys.ExecutionStatus => Option(MetadataPrimitive.ExecutionStatusOrdering) - case MetadataEvent(MetadataKey(_, None, key), _, _) if key == WorkflowMetadataKeys.Status => Option(MetadataPrimitive.WorkflowStateOrdering) + case MetadataEvent(MetadataKey(_, Some(_), key), _, _) + if key == CallMetadataKeys.ExecutionStatus => Option(MetadataPrimitive.ExecutionStatusOrdering) + case MetadataEvent(MetadataKey(_, _, key), _, _) + if key == CallMetadataKeys.Start || key == CallMetadataKeys.End => Option(MetadataPrimitive.TimestampOrdering) + case MetadataEvent(MetadataKey(_, None, key), _, _) + if key == WorkflowMetadataKeys.Status => Option(MetadataPrimitive.WorkflowStateOrdering) case _ => None } @@ -151,6 +156,10 @@ object MetadataPrimitive { val WorkflowStateOrdering: Ordering[MetadataPrimitive] = Ordering.by { primitive: MetadataPrimitive => WorkflowState.withName(primitive.v.value) } + + val TimestampOrdering: Ordering[MetadataPrimitive] = Ordering.by { primitive: MetadataPrimitive => + Instant.parse(primitive.v.value) + }.reverse } case class MetadataPrimitive(v: MetadataValue, customOrdering: Option[Ordering[MetadataPrimitive]] = None) extends MetadataComponent From cc0e240f742400ded89d9d9599ea335e30772c27 Mon Sep 17 00:00:00 2001 From: Chris Llanwarne Date: Thu, 9 Jun 2022 22:38:48 +0000 Subject: [PATCH 29/58] Update cromwell version from 80 to 81 --- project/Version.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Version.scala b/project/Version.scala index b894fde796c..557c47dd458 100644 --- a/project/Version.scala +++ b/project/Version.scala @@ -5,7 +5,7 @@ import sbt._ object Version { // Upcoming release, or current if we're on a master / hotfix branch - val cromwellVersion = "80" + val cromwellVersion = "81" /** * Returns true if this project should be considered a snapshot. From ded7e718e662d7dfb2f7ca6b0ae6d8c273f5c2b5 Mon Sep 17 00:00:00 2001 From: Katrina P <68349264+kpierre13@users.noreply.github.com> Date: Tue, 14 Jun 2022 12:10:44 -0400 Subject: [PATCH 30/58] BW-1256 /runs/{run id} endpoint_KP (#6777) * Removed Wes2Cromwell project, added 'runs' endpoint a necessary functions. * saving... * saving... * saving... * saving... * Fix missing JSON implicits * Added endpoint to cromwell.yaml and added api prefix * Getting rid of unnecessary implicits * Fixing and changing function name * Updated RESTAPI.md, cromwell.yaml, and RunListResponse.scala * Getting rid of comments * Omitting unexpected arguments * Removing headers, adding further details in cromwell.yaml * Updating description and RESTAPI.md * saving... * saving... * saving... * saving... * saving... 
* Moving functions to consolidate, updating CHANGELOG.md, deleting Wes2CromwellInterface.scala file * Updated CHANGELOG.md, passed error message to API * saving... * Update CHANGELOG.md Co-authored-by: Adam Nichols * Update engine/src/main/scala/cromwell/webservice/routes/wes/WesRunRoutes.scala Co-authored-by: Adam Nichols * Update engine/src/main/scala/cromwell/webservice/routes/wes/WesRunRoutes.scala Co-authored-by: Adam Nichols * Changes to CHANGELOG.md, updating routes * Unused imports * Cleanup * Cleanup * codecov * saving... * saving... * saving... * saving... * runLog method * saving... * RESTAPI.md * Code cleanup * Code cleanup + comment * Unused import correction Co-authored-by: Adam Nichols Co-authored-by: Adam Nichols Co-authored-by: Saloni Shah --- docs/api/RESTAPI.md | 36 ++++++- .../src/main/resources/swagger/cromwell.yaml | 37 +++++++ .../routes/wes/CromwellMetadata.scala | 101 ++++++++++++++++++ .../routes/wes/RunListResponse.scala | 3 +- .../webservice/routes/wes/WesResponse.scala | 9 +- .../webservice/routes/wes/WesRunLog.scala | 34 ++++++ .../webservice/routes/wes/WesRunRoutes.scala | 65 ++++++----- 7 files changed, 255 insertions(+), 30 deletions(-) create mode 100644 engine/src/main/scala/cromwell/webservice/routes/wes/CromwellMetadata.scala create mode 100644 engine/src/main/scala/cromwell/webservice/routes/wes/WesRunLog.scala diff --git a/docs/api/RESTAPI.md b/docs/api/RESTAPI.md index bdc5b6a15ff..b15072f6407 100644 --- a/docs/api/RESTAPI.md +++ b/docs/api/RESTAPI.md @@ -1,5 +1,5 @@ diff --git a/docs/developers/bitesize/ci/travis_centaur.md b/docs/developers/bitesize/ci/travis_centaur.md index 5d1bb836109..b56ec16c8af 100644 --- a/docs/developers/bitesize/ci/travis_centaur.md +++ b/docs/developers/bitesize/ci/travis_centaur.md @@ -7,7 +7,6 @@ Other backends run tests for any user. | Backend | Read-only users | Write/Admin users | |---------------|:---------------:|:-----------------:| | AWS | | ✅ | -| BCS | | ✅ | | Local | ✅ | ✅ | | PAPI V2alpha1 | | ✅ | | PAPI V2beta | | ✅ | @@ -64,7 +63,6 @@ or `papi_v2beta_centaur_application.conf` | Backend | MySQL | PostgreSQL | MariaDB | |---------|:------:|:-----------:|:--------:| | AWS | ✅ | | | -| BCS | ✅ | | | | Local | ✅ | ✅ | | | PAPI V2 | ✅ | | ⭕ | | SLURM | ✅ | | | diff --git a/docs/filesystems/Filesystems.md b/docs/filesystems/Filesystems.md index 641761fcfe1..d5a66de7eba 100644 --- a/docs/filesystems/Filesystems.md +++ b/docs/filesystems/Filesystems.md @@ -40,9 +40,6 @@ filesystems { gcs { class = "cromwell.filesystems.gcs.GcsPathBuilderFactory" } - oss { - class = "cromwell.filesystems.oss.OssPathBuilderFactory" - } s3 { class = "cromwell.filesystems.s3.S3PathBuilderFactory" } @@ -53,10 +50,10 @@ filesystems { ``` It defines the filesystems that can be accessed by Cromwell. -Those filesystems can be referenced by their name (`drs`, `gcs`, `oss`, `s3`, `http` and `local`) in other parts of the configuration. +Those filesystems can be referenced by their name (`drs`, `gcs`, `s3`, `http` and `local`) in other parts of the configuration. **Note:** -- **OSS and S3 filesystems are experimental.** +- **S3 filesystem is experimental.** - **DRS filesystem has initial support only. 
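The patch above adds `GET /api/ga4gh/wes/v1/runs/{run_id}`, whose response is the WES run log assembled by the new `CromwellMetadata.wesRunLog`. A minimal sketch of fetching it with nothing but the Scala standard library; the host, port, and run id are placeholder assumptions, not values the patch prescribes:

```scala
import scala.io.Source

object FetchWesRunLog extends App {
  // Assumptions: a locally running Cromwell and a placeholder run id.
  val runId = "00000000-0000-0000-0000-000000000000"
  val url   = s"http://localhost:8000/api/ga4gh/wes/v1/runs/$runId"

  // GET the run details and print the raw JSON body.
  val source = Source.fromURL(url)
  try println(source.mkString)
  finally source.close()
}
```

Note that `Source.fromURL` throws on non-2xx responses, so an unknown run id surfaces as an exception rather than as a WES error object; a real client would use an HTTP library and inspect the status code instead.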
Also, currently it works only with [GCS filesystem](../GoogleCloudStorage) in [PapiV2 backend](http://cromwell.readthedocs.io/en/develop/backends/Google).** @@ -197,8 +194,6 @@ The filesystem configuration used will be the one in the `config` section of the - Simple Storage Service (S3) - [Amazon Doc](https://aws.amazon.com/documentation/s3/) -- Object Storage Service (OSS) - [Alibaba Cloud Doc](https://www.alibabacloud.com/product/oss) - - HTTP - support for `http` or `https` URLs for [workflow inputs only](http://cromwell.readthedocs.io/en/develop/filesystems/HTTP) - File Transfer Protocol (FTP) - [Cromwell Doc](FileTransferProtocol.md) diff --git a/docs/tutorials/BCSIntro.md b/docs/tutorials/BCSIntro.md deleted file mode 100644 index 11b55075a6d..00000000000 --- a/docs/tutorials/BCSIntro.md +++ /dev/null @@ -1,122 +0,0 @@ -## Getting started on Alibaba Cloud with the Batch Compute Service - -### Prerequisites - -This tutorial page relies on completing the previous tutorials: - -- [Configuration Files](ConfigurationFiles.md) - -### Goals - -In this tutorial you'll learn to run the first workflow against the Batch Compute service on Alibaba Cloud. - -### Let's get started! - -#### - -#### Configuring Alibaba Cloud - -- Go to Alibaba Cloud and activate Alibaba Cloud OSS and Alibaba Cloud BatchCompute services. -- Follow AccessKey Guide to retrieve an access-id and access-key pair. We will refer to this pair as `` and ``, respectively. -- Log on to the OSS console and choose a region to create a new bucket. We will use `` and `` to refer the chosen region and bucket. -- Find the corresponding OSS endpoint in OSS region and endpoint. We will refer to it as ``. - -#### Preparing workflow source files - -Copy over the sample `echo.wdl` and `echo.inputs` files to the same directory as the Cromwell jar. -This workflow takes a string value as an output file name and writes "Hello World!" to the file. - -***echo.wdl*** - -``` -task echo { - String out - - command { - echo Hello World! > ${out} - } - - output { - File outFile = "${out}" - Array[String] content = read_lines(outFile) - } -} - -workflow wf_echo { - call echo - output { - echo.outFile - echo.content - } -} -``` - -***echo.inputs*** - -``` -{ - "wf_echo.echo.out": "output" -} -``` - -#### Configuration file for Alibaba Cloud - -Copy over the sample `bcs.conf` file to the same directory that contains your sample WDL, inputs and the Cromwell jar. Replace ``, ``, ``, ``, `` in the configuration file with actual values. - -***bcs.conf*** - -``` -include required(classpath("application")) - -backend { - default = "BCS" - - providers { - BCS { - actor-factory = "cromwell.backend.impl.bcs.BcsBackendLifecycleActorFactory" - config { - root = "oss:///cromwell-dir" - region = "" - access-id = "" - access-key = "" - - filesystems { - oss { - auth { - endpoint = "" - access-id = "" - access-key = "" - } - } - } - - default-runtime-attributes { - failOnStderr: false - continueOnReturnCode: 0 - cluster: "OnDemand ecs.sn1ne.large img-ubuntu" - vpc: "192.168.0.0/16" - } - } - } - } -} -``` - -#### Run workflow - -`java -Dconfig.file=bcs.conf -jar cromwell.jar run echo.wdl --inputs echo.inputs` - -#### Outputs - -The end of your workflow logs should report the workflow outputs. - -``` -[info] SingleWorkflowRunnerActor workflow finished with status 'Succeeded'. 
-{ - "outputs": { - "wf_echo.echo.outFile": "oss:///cromwell-dir/wf_echo/38b088b2-5131-4ea0-a161-4cf2ca8d15ac/call-echo/output", - "wf_echo.echo.content": ["Hello World!"] - }, - "id": "38b088b2-5131-4ea0-a161-4cf2ca8d15ac" -} -``` diff --git a/docs/tutorials/Containers.md b/docs/tutorials/Containers.md index f565ced2332..3934f7a4579 100644 --- a/docs/tutorials/Containers.md +++ b/docs/tutorials/Containers.md @@ -509,8 +509,6 @@ Congratulations for improving the reproducibility of your workflows! You might f - [Getting started with AWS Batch](AwsBatch101.md) - [Getting started on Google Pipelines API](PipelinesApi101.md) -- [Getting started on Alibaba Cloud](BCSIntro/) - [cromwell-examples-conf]: https://www.github.com/broadinstitute/cromwell/tree/develop/cromwell.example.backends/cromwell.examples.conf [cromwell-examples-folder]: https://www.github.com/broadinstitute/cromwell/tree/develop/cromwell.example.backends diff --git a/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala b/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala index a67df146abe..0598e154f2d 100644 --- a/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala +++ b/engine/src/main/scala/cromwell/engine/io/nio/NioFlow.scala @@ -14,7 +14,6 @@ import cromwell.engine.io.RetryableRequestSupport.{isInfinitelyRetryable, isRetr import cromwell.engine.io.{IoAttempts, IoCommandContext, IoCommandStalenessBackpressuring} import cromwell.filesystems.drs.DrsPath import cromwell.filesystems.gcs.GcsPath -import cromwell.filesystems.oss.OssPath import cromwell.filesystems.s3.S3Path import cromwell.util.TryWithResource._ import net.ceedubs.ficus.Ficus._ @@ -166,9 +165,6 @@ class NioFlow(parallelism: Int, case s3Path: S3Path => IO { FileHash(HashType.S3Etag, s3Path.eTag) } - case ossPath: OssPath => IO { - FileHash(HashType.OssEtag, ossPath.eTag) - } case path => getMd5FileHashForPath(path) } } diff --git a/engine/src/test/scala/cromwell/webservice/routes/wes/ServiceInfoSpec.scala b/engine/src/test/scala/cromwell/webservice/routes/wes/ServiceInfoSpec.scala index e6055822ea9..b4d775f7389 100644 --- a/engine/src/test/scala/cromwell/webservice/routes/wes/ServiceInfoSpec.scala +++ b/engine/src/test/scala/cromwell/webservice/routes/wes/ServiceInfoSpec.scala @@ -24,7 +24,7 @@ class ServiceInfoSpec extends AsyncFlatSpec with ScalatestRouteTest with Matcher val expectedResponse = WesStatusInfoResponse(Map("CWL" -> Set("v1.0"), "WDL" -> Set("draft-2", "1.0", "biscayne")), List("1.0"), - Set("ftp", "s3", "drs", "gcs", "oss", "http"), + Set("ftp", "s3", "drs", "gcs", "http"), Map("Cromwell" -> CromwellApiService.cromwellVersion), List(), Map(WesState.Running -> 5, WesState.Queued -> 3, WesState.Canceling -> 2), diff --git a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilder.scala b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilder.scala index bb9959ae29f..f22023c87fd 100644 --- a/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilder.scala +++ b/filesystems/drs/src/main/scala/cromwell/filesystems/drs/DrsPathBuilder.scala @@ -58,7 +58,7 @@ case class DrsPathBuilder(fileSystemProvider: DrsCloudNioFileSystemProvider, } val gcsPathOption = for { - // Right now, only pre-resolving GCS. In the future, could pull others like FTP, HTTP, S3, OSS, SRA, etc. + // Right now, only pre-resolving GCS. In the future, could pull others like FTP, HTTP, S3, SRA, etc. 
gsUriOption <- logAttempt( "resolve the uri through Martha", DrsResolver.getSimpleGsUri(pathAsString, fileSystemProvider.drsPathResolver).unsafeRunSync() diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/OssPathBuilder.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/OssPathBuilder.scala deleted file mode 100644 index 8792194f804..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/OssPathBuilder.scala +++ /dev/null @@ -1,157 +0,0 @@ -package cromwell.filesystems.oss - -import java.net.URI - -import com.google.common.net.UrlEscapers -import com.typesafe.config.Config -import net.ceedubs.ficus.Ficus._ -import cats.syntax.apply._ -import com.aliyun.oss.OSSClient -import common.validation.Validation._ -import cromwell.core.WorkflowOptions -import cromwell.core.path.{NioPath, Path, PathBuilder} -import cromwell.filesystems.oss.OssPathBuilder._ -import cromwell.filesystems.oss.nio._ - -import scala.language.postfixOps -import scala.util.matching.Regex -import scala.util.{Failure, Try} - -object OssPathBuilder { - - val URI_SCHEME = OssStorageFileSystem.URI_SCHEMA - - val OssBucketPattern:Regex = - """ - (?x) # Turn on comments and whitespace insensitivity - ^oss:// - ( # Begin capturing group for oss bucket name - [a-z0-9][a-z0-9-_\\.]+[a-z0-9] # Regex for bucket name - soft validation, see comment above - ) # End capturing group for gcs bucket name - (?: - /.* # No validation here - )? - """.trim.r - - sealed trait OssPathValidation - - case class ValidFullOssPath(bucket: String, path: String) extends OssPathValidation - - case object PossiblyValidRelativeOssPath extends OssPathValidation - - sealed trait InvalidOssPath extends OssPathValidation { - def pathString: String - def errorMessage: String - } - - final case class InvalidScheme(pathString: String) extends InvalidOssPath { - def errorMessage = s"OSS URIs must have 'oss' scheme: $pathString" - } - - final case class InvalidFullOssPath(pathString: String) extends InvalidOssPath { - def errorMessage = { - s""" - |The path '$pathString' does not seem to be a valid OSS path. - |Please check that it starts with oss:// and that the bucket and object follow OSS naming guidelines. 
- """.stripMargin.replaceAll("\n", " ").trim - } - } - - final case class UnparseableOssPath(pathString: String, throwable: Throwable) extends InvalidOssPath { - def errorMessage: String = - List(s"The specified OSS path '$pathString' does not parse as a URI.", throwable.getMessage).mkString("\n") - } - - private def softBucketParsing(string: String): Option[String] = string match { - case OssBucketPattern(bucket) => Option(bucket) - case _ => None - } - - def validateOssPath(string: String): OssPathValidation = { - Try { - val uri = URI.create(UrlEscapers.urlFragmentEscaper().escape(string)) - if (uri.getScheme == null) PossiblyValidRelativeOssPath - else if (uri.getScheme.equalsIgnoreCase(URI_SCHEME)) { - if (uri.getHost == null) { - softBucketParsing(string) map { ValidFullOssPath(_, uri.getPath) } getOrElse InvalidFullOssPath(string) - } else ValidFullOssPath(uri.getHost, uri.getPath) - } else InvalidScheme(string) - } recover { case t => UnparseableOssPath(string, t) } get - } - - def isOssPath(nioPath: NioPath): Boolean = { - nioPath.getFileSystem.provider().getScheme.equalsIgnoreCase(URI_SCHEME) - } - - def fromConfiguration(configuration: OssStorageConfiguration, - options: WorkflowOptions): OssPathBuilder = { - OssPathBuilder(configuration) - } - - def fromConfig(config: Config, options: WorkflowOptions): OssPathBuilder = { - val refresh = config.as[Option[Long]](TTLOssStorageConfiguration.RefreshInterval) - - val (endpoint, accessId, accessKey, securityToken) = ( - validate { config.as[String]("auth.endpoint") }, - validate { config.as[String]("auth.access-id") }, - validate { config.as[String]("auth.access-key") }, - validate { config.as[Option[String]]("auth.security-token") } - ).tupled.unsafe("OSS filesystem configuration is invalid") - - refresh match { - case None => - val cfg = DefaultOssStorageConfiguration(endpoint, accessId, accessKey, securityToken) - fromConfiguration(cfg, options) - case Some(_) => - val cfg = TTLOssStorageConfiguration(config) - fromConfiguration(cfg, options) - } - } -} - -final case class OssPathBuilder(ossStorageConfiguration: OssStorageConfiguration) extends PathBuilder { - def build(string: String): Try[OssPath] = { - validateOssPath(string) match { - case ValidFullOssPath(bucket, path) => - Try { - val ossStorageFileSystem = OssStorageFileSystem(bucket, ossStorageConfiguration) - OssPath(ossStorageFileSystem.getPath(path), ossStorageFileSystem.provider.ossClient) - } - case PossiblyValidRelativeOssPath => Failure(new IllegalArgumentException(s"$string does not have a oss scheme")) - case invalid: InvalidOssPath => Failure(new IllegalArgumentException(invalid.errorMessage)) - } - } - - override def name: String = "Object Storage Service" -} - -final case class BucketAndObj(bucket: String, obj: String) - -final case class OssPath private[oss](nioPath: NioPath, - ossClient: OSSClient) extends Path { - - override protected def newPath(path: NioPath): OssPath = { - OssPath(path, ossClient) - } - - override def pathAsString: String = ossStoragePath.pathAsString - - override def pathWithoutScheme: String = { - ossStoragePath.bucket + ossStoragePath.toAbsolutePath.toString - } - - def bucket: String = { - ossStoragePath.bucket - } - - def key: String = { - ossStoragePath.key - } - - lazy val eTag = ossClient.getSimplifiedObjectMeta(bucket, key).getETag - - def ossStoragePath: OssStoragePath = nioPath match { - case ossPath: OssStoragePath => ossPath - case _ => throw new RuntimeException(s"Internal path was not a cloud storage path: $nioPath") - } -} 
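For readers tracing the deletion above: `validateOssPath` sorted a raw string into one of three buckets (valid full `oss://` path, possibly valid relative path, invalid path). Below is a distilled sketch of that classification, using a hypothetical `classify` helper and deliberately omitting the URL escaping and soft bucket-name fallback of the real code:

```scala
import java.net.URI
import scala.util.Try

object OssPathClassification extends App {
  // Hypothetical helper mirroring the removed validateOssPath cases:
  // no scheme -> possibly relative, "oss" scheme -> full path, anything else -> invalid.
  def classify(raw: String): String =
    Try(URI.create(raw)).map { uri =>
      Option(uri.getScheme) match {
        case None => "possibly a valid relative OSS path"
        case Some(s) if s.equalsIgnoreCase("oss") =>
          s"full OSS path: bucket '${uri.getHost}', key '${uri.getPath}'"
        case Some(other) => s"invalid: OSS URIs must use the 'oss' scheme, not '$other'"
      }
    }.getOrElse("unparseable as a URI")

  List("oss://my-bucket/inputs/reads.bam", "inputs/reads.bam", "gs://bucket/x")
    .foreach(p => println(s"$p -> ${classify(p)}"))
}
```

The deleted implementation additionally URL-escaped the input before parsing and fell back to the `OssBucketPattern` regex when `URI.getHost` came back null (for example, for bucket names containing underscores).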
diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/OssPathBuilderFactory.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/OssPathBuilderFactory.scala deleted file mode 100644 index 666ba1c4849..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/OssPathBuilderFactory.scala +++ /dev/null @@ -1,14 +0,0 @@ -package cromwell.filesystems.oss - -import akka.actor.ActorSystem -import com.typesafe.config.Config -import cromwell.core.WorkflowOptions -import cromwell.core.path.PathBuilderFactory - -import scala.concurrent.{ExecutionContext, Future} - -final case class OssPathBuilderFactory(globalConfig: Config, instanceConfig: Config) extends PathBuilderFactory { - def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext) = { - Future.successful(OssPathBuilder.fromConfig(instanceConfig, options)) - } -} diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/batch/OssBatchCommandBuilder.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/batch/OssBatchCommandBuilder.scala deleted file mode 100644 index 8e1fc9a8135..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/batch/OssBatchCommandBuilder.scala +++ /dev/null @@ -1,35 +0,0 @@ -package cromwell.filesystems.oss.batch - -import cromwell.core.io._ -import cromwell.core.path.Path -import cromwell.filesystems.oss.OssPath - -import scala.util.Try - -private case object PartialOssBatchCommandBuilder extends PartialIoCommandBuilder { - override def sizeCommand: PartialFunction[Path, Try[IoSizeCommand]] = { - case ossPath: OssPath => Try(OssBatchSizeCommand(ossPath)) - } - - override def deleteCommand: PartialFunction[(Path, Boolean), Try[IoDeleteCommand]] = { - case (ossPath: OssPath, swallowIoExceptions) => Try(OssBatchDeleteCommand(ossPath, swallowIoExceptions)) - } - - override def copyCommand: PartialFunction[(Path, Path), Try[IoCopyCommand]] = { - case (ossSrc: OssPath, ossDest: OssPath) => Try(OssBatchCopyCommand(ossSrc, ossDest)) - } - - override def hashCommand: PartialFunction[Path, Try[IoHashCommand]] = { - case ossPath: OssPath => Try(OssBatchEtagCommand(ossPath)) - } - - override def touchCommand: PartialFunction[Path, Try[IoTouchCommand]] = { - case ossPath: OssPath => Try(OssBatchTouchCommand(ossPath)) - } - - override def existsCommand: PartialFunction[Path, Try[IoExistsCommand]] = { - case ossPath: OssPath => Try(OssBatchExistsCommand(ossPath)) - } -} - -case object OssBatchCommandBuilder extends IoCommandBuilder(List(PartialOssBatchCommandBuilder)) diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/batch/OssBatchIoCommand.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/batch/OssBatchIoCommand.scala deleted file mode 100644 index 625a068a126..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/batch/OssBatchIoCommand.scala +++ /dev/null @@ -1,98 +0,0 @@ -package cromwell.filesystems.oss.batch - -import com.aliyun.oss.OSSException -import com.aliyun.oss.model._ -import com.google.api.client.http.HttpHeaders -import cromwell.core.io._ -import cromwell.filesystems.oss._ - -/** - * Io commands with OSS paths and some logic enabling batching of request. 
- * @tparam T Return type of the IoCommand - * @tparam U Return type of the OSS response - */ -sealed trait OssBatchIoCommand[T, U] extends IoCommand[T] { - /** - * StorageRequest operation to be executed by this command - */ - def operation: Any - - /** - * Maps the Oss response of type U to the Cromwell Io response of type T - */ - protected def mapOssResponse(response: U): T - - /** - * Method called in the success callback of a batched request to decide what to do next. - * Returns an `Either[T, OssBatchIoCommand[T, U]]` - * Left(value) means the command is complete, and the result can be sent back to the sender. - * Right(newCommand) means the command is not complete and needs another request to be executed. - * Most commands will reply with Left(value). - */ - def onSuccess(response: U, httpHeaders: HttpHeaders): Either[T, OssBatchIoCommand[T, U]] = { - Left(mapOssResponse(response)) - } - - /** - * Override to handle a failure differently and potentially return a successful response. - */ - def onFailure(ossError: OSSException): Option[Either[T, OssBatchIoCommand[T, U]]] = None -} - -case class OssBatchCopyCommand( - override val source: OssPath, - override val destination: OssPath, - ) - extends IoCopyCommand(source, destination) with OssBatchIoCommand[Unit, CopyObjectResult] { - override def operation: GenericResult = { - val getObjectRequest = new CopyObjectRequest(source.bucket, source.key, destination.bucket, destination.key) - // TODO: Copy other attributes (encryption, metadata, etc.) - source.ossClient.copyObject(getObjectRequest) - } - override def mapOssResponse(response: CopyObjectResult): Unit = () - override def commandDescription: String = s"OssBatchCopyCommand source '$source' destination '$destination'" -} - -case class OssBatchDeleteCommand( - override val file: OssPath, - override val swallowIOExceptions: Boolean - ) extends IoDeleteCommand(file, swallowIOExceptions) with OssBatchIoCommand[Unit, Void] { - def operation: Unit = { - file.ossClient.deleteObject(file.bucket, file.key) - () - } - override protected def mapOssResponse(response: Void): Unit = () - override def commandDescription: String = s"OssBatchDeleteCommand file '$file' swallowIOExceptions '$swallowIOExceptions'" -} - -/** - * Base trait for commands that use the headObject() operation. (e.g: size, crc32, ...) 
- */ -sealed trait OssBatchHeadCommand[T] extends OssBatchIoCommand[T, ObjectMetadata] { - def file: OssPath - - override def operation: ObjectMetadata = file.ossClient.getObjectMetadata(file.bucket, file.key) -} - -case class OssBatchSizeCommand(override val file: OssPath) extends IoSizeCommand(file) with OssBatchHeadCommand[Long] { - override def mapOssResponse(response: ObjectMetadata): Long = response.getContentLength - override def commandDescription: String = s"OssBatchSizeCommand file '$file'" -} - -case class OssBatchEtagCommand(override val file: OssPath) extends IoHashCommand(file) with OssBatchHeadCommand[String] { - override def mapOssResponse(response: ObjectMetadata): String = response.getETag - override def commandDescription: String = s"OssBatchEtagCommand file '$file'" -} - -case class OssBatchTouchCommand(override val file: OssPath) extends IoTouchCommand(file) with OssBatchHeadCommand[Unit] { - override def mapOssResponse(response: ObjectMetadata): Unit = () - override def commandDescription: String = s"OssBatchTouchCommand file '$file'" -} - -case class OssBatchExistsCommand(override val file: OssPath) extends IoExistsCommand(file) with OssBatchIoCommand[Boolean, Boolean] { - override def operation: Boolean = { - file.ossClient.doesObjectExist(file.bucket, file.key) - } - override def mapOssResponse(response: Boolean): Boolean = response - override def commandDescription: String = s"OssBatchExistsCommand file '$file'" -} diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssAppendOutputStream.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssAppendOutputStream.scala deleted file mode 100644 index bca8d16a609..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssAppendOutputStream.scala +++ /dev/null @@ -1,86 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.io.{ByteArrayInputStream, OutputStream} - -import com.aliyun.oss.OSSClient -import com.aliyun.oss.model.{AppendObjectRequest, GenericRequest} - -import scala.util.Try - -final case class OssAppendOutputStream(ossClient: OSSClient, path: OssStoragePath, deleteIfExists: Boolean) extends OutputStream { - - var position: Long = { - val exist = OssStorageRetry.fromTry( - () => Try{ - val request = new GenericRequest(path.bucket, path.key) - request.setLogEnabled(false) - ossClient.doesObjectExist(request) - } - ) - - var len: Long = 0 - if (exist && deleteIfExists) { - OssStorageRetry.from( - () => ossClient.deleteObject(path.bucket, path.key) - ) - } - else if (exist) { - len = OssStorageRetry.from( - () => ossClient.getObjectMetadata(path.bucket, path.key).getContentLength - ) - } - - len - } - - override def write(b: Int): Unit = { - val arr = Array[Byte]((b & 0xFF).toByte) - - val appendObjectRequest: AppendObjectRequest = new AppendObjectRequest(path.bucket, path.key, new ByteArrayInputStream(arr)) - this.synchronized { - appendObjectRequest.setPosition(position) - val appendObjectResult = OssStorageRetry.fromTry( - () => Try{ - ossClient.appendObject(appendObjectRequest) - } - ) - - position = appendObjectResult.getNextPosition() - } - } - - override def write(b: Array[Byte]): Unit = { - val appendObjectRequest: AppendObjectRequest = new AppendObjectRequest(path.bucket, path.key, new ByteArrayInputStream(b)) - this.synchronized { - appendObjectRequest.setPosition(position) - val appendObjectResult = OssStorageRetry.fromTry( - () => Try{ - ossClient.appendObject(appendObjectRequest) - } - ) - position = 
appendObjectResult.getNextPosition() - } - } - - override def write(b: Array[Byte], off: Int, len: Int): Unit = { - if (b == null) { - throw new NullPointerException - } else if ((off < 0) || (off > b.length) || (len < 0) || ((off + len) > b.length) || ((off + len) < 0)) { - throw new IndexOutOfBoundsException - } else if (len == 0) { - return - } - - val s = b.slice(off, off+len) - val appendObjectRequest: AppendObjectRequest = new AppendObjectRequest(path.bucket, path.key, new ByteArrayInputStream(s)) - this.synchronized { - appendObjectRequest.setPosition(position) - val appendObjectResult = OssStorageRetry.fromTry( - () => Try{ - ossClient.appendObject(appendObjectRequest) - } - ) - position = appendObjectResult.getNextPosition() - } - } -} diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssFileChannel.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssFileChannel.scala deleted file mode 100644 index e5851c9133e..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssFileChannel.scala +++ /dev/null @@ -1,19 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.nio.ByteBuffer -import java.nio.channels.SeekableByteChannel - - -trait OssFileChannel extends SeekableByteChannel { - - override def isOpen: Boolean = true - - override def close(): Unit = {} - - override def read(dst: ByteBuffer): Int = throw new UnsupportedOperationException() - - override def write(src: ByteBuffer): Int = throw new UnsupportedOperationException() - - override def truncate(size: Long): SeekableByteChannel = throw new UnsupportedOperationException() - -} diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssFileReadChannel.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssFileReadChannel.scala deleted file mode 100644 index 1fb283b85e0..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssFileReadChannel.scala +++ /dev/null @@ -1,80 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.nio.ByteBuffer -import java.nio.channels.{Channels, SeekableByteChannel} - -import com.aliyun.oss.OSSClient -import com.aliyun.oss.model.{GenericRequest, GetObjectRequest} - -import scala.util.Try - -final case class OssFileReadChannel(ossClient: OSSClient, pos: Long, path: OssStoragePath) extends OssFileChannel { - var internalPosition = pos - - override def position(): Long = { - synchronized { - internalPosition - } - } - - override def position(newPosition: Long): SeekableByteChannel = { - if (newPosition < 0) { - throw new IllegalArgumentException(newPosition.toString) - } - - synchronized { - if (newPosition != internalPosition) { - internalPosition = newPosition - } - - return this - } - } - - override def read(dst: ByteBuffer): Int = { - dst.mark() - - dst.reset() - - val want = dst.capacity - val begin: Long = position() - var end: Long = position + want - 1 - if (begin < 0 || end < 0 || begin > end) { - throw new IllegalArgumentException(s"being $begin or end $end invalid") - } - - if (begin >= size) { - return -1 - } - - if (end >= size()) { - end = size() - 1 - } - - val getObjectRequest = new GetObjectRequest(path.bucket, path.key) - getObjectRequest.setRange(begin, end) - - OssStorageRetry.fromTry( - () => Try{ - val ossObject = ossClient.getObject(getObjectRequest) - val in = ossObject.getObjectContent - val channel = Channels.newChannel(in) - - val amt = channel.read(dst) - channel.close() - internalPosition += amt - amt - } - ) - } - - override def size(): 
Long = { - OssStorageRetry.fromTry( - () => Try { - val request = new GenericRequest(path.bucket, path.key) - request.setLogEnabled(false) - ossClient.getSimplifiedObjectMeta(request).getSize - } - ) - } -} diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageDirectoryAttributes.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageDirectoryAttributes.scala deleted file mode 100644 index 84ab82deab9..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageDirectoryAttributes.scala +++ /dev/null @@ -1,38 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.nio.file.attribute.FileTime - -import scala.collection.mutable.Map - -final case class OssStorageDirectoryAttributes(path: OssStoragePath) extends OssStorageFileAttributes { - override def creationTime(): FileTime = FileTime.fromMillis(0) - - override def lastAccessTime(): FileTime = FileTime.fromMillis(0) - - override def lastModifiedTime(): FileTime = creationTime() - - override def isRegularFile: Boolean = false - - override def isDirectory: Boolean = true - - override def isSymbolicLink: Boolean = false - - override def isOther: Boolean = false - - override def size(): Long = 0 - - override def fileKey(): AnyRef = path.pathAsString - - override def expires: FileTime = FileTime.fromMillis(0) - - override def cacheControl(): Option[String] = None - - override def contentDisposition: Option[String] = None - - override def contentEncoding: Option[String] = None - - override def etag: Option[String] = None - - override def userMeta: Map[String, String] = Map.empty[String, String] - -} diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributes.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributes.scala deleted file mode 100644 index ab53b4f8d40..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributes.scala +++ /dev/null @@ -1,19 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.nio.file.attribute.{BasicFileAttributes, FileTime} - -import scala.collection.mutable.Map - -trait OssStorageFileAttributes extends BasicFileAttributes { - def cacheControl(): Option[String] - - def contentDisposition: Option[String] - - def contentEncoding: Option[String] - - def expires: FileTime - - def etag: Option[String] - - def userMeta: Map[String, String] -} diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributesView.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributesView.scala deleted file mode 100644 index cba65649a98..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributesView.scala +++ /dev/null @@ -1,45 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.nio.file.NoSuchFileException -import java.nio.file.attribute.{BasicFileAttributeView, FileTime} -import java.util.Date - -import com.aliyun.oss.OSSClient -import com.aliyun.oss.model.{CopyObjectRequest, CopyObjectResult, GenericRequest} - -import scala.util.Try - -final case class OssStorageFileAttributesView(ossClient: OSSClient, path: OssStoragePath) extends BasicFileAttributeView { - override def name(): String = OssStorageFileSystem.URI_SCHEMA - - override def readAttributes(): OssStorageFileAttributes = { - val ossPath = OssStoragePath.checkPath(path) - - if (ossPath.seemsLikeDirectory) { - return OssStorageDirectoryAttributes(path) - } 
- - val request = new GenericRequest(ossPath.bucket, ossPath.key) - request.setLogEnabled(false) - if (!ossClient.doesObjectExist(request)) { - throw new NoSuchFileException(path.toString) - } - - val objectMeta = OssStorageRetry.fromTry( - () => Try{ - ossClient.getObjectMetadata(path.bucket, path.key) - } - ) - - OssStorageObjectAttributes(objectMeta, path) - } - - override def setTimes(lastModifiedTime: FileTime, lastAccessTime: FileTime, createTime: FileTime): Unit = { - val meta = ossClient.getObjectMetadata(path.bucket, path.key) - meta.setLastModified(new Date(lastModifiedTime.toMillis)) - - val copyReq = new CopyObjectRequest(path.bucket, path.key, path.bucket, path.key) - copyReq.setNewObjectMetadata(meta) - val _: CopyObjectResult = ossClient.copyObject(copyReq) - } -} diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystem.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystem.scala deleted file mode 100644 index 3d27cecf296..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystem.scala +++ /dev/null @@ -1,161 +0,0 @@ -package cromwell.filesystems.oss.nio - - -import java.nio.file._ -import java.nio.file.attribute.UserPrincipalLookupService -import java.util.Objects -import java.{lang, util} - -import com.aliyun.oss.common.auth.DefaultCredentialProvider -import com.aliyun.oss.{ClientConfiguration, OSSClient} -import cromwell.filesystems.oss.nio.OssStorageConfiguration.{ACCESS_ID_KEY, ACCESS_KEY_KEY, ENDPOINT_KEY, SECURITY_TOKEN_KEY} - -import scala.jdk.CollectionConverters._ - - -object OssStorageFileSystem { - val SEPARATOR: String = "/" - val URI_SCHEMA: String = "oss" - val OSS_VIEW = "oss" - val BASIC_VIEW = "basic" - - def apply(provider: OssStorageFileSystemProvider, bucket: String, config: OssStorageConfiguration): OssStorageFileSystem = { - val res = new OssStorageFileSystem(bucket, config) - res.internalProvider = provider - res - } -} - -object OssStorageConfiguration { - val ENDPOINT_KEY = "endpoint" - val ACCESS_ID_KEY = "access-id" - val ACCESS_KEY_KEY = "access-key" - val SECURITY_TOKEN_KEY = "security-token" - - import scala.collection.immutable.Map - def parseMap(map: Map[String, Any]): OssStorageConfiguration = { - val endpoint = map.get(ENDPOINT_KEY) match { - case Some(endpoint: String) if !endpoint.isEmpty => endpoint - case _ => throw new IllegalArgumentException(s"endpoint is mandatory and must be a non-empty string") - } - val accessId = map.get(ACCESS_ID_KEY) match { - case Some(id: String) if !id.isEmpty => id - case _ => throw new IllegalArgumentException(s"access-id is mandatory and must be a non-empty string") - } - - val accessKey = map.get(ACCESS_KEY_KEY) match { - case Some(key: String) if !key.isEmpty => key - case _ => throw new IllegalArgumentException(s"access-key is mandatory and must be a non-empty string") - } - - val securityToken = map.get(SECURITY_TOKEN_KEY) match { - case Some(token: String) if !token.isEmpty => Some(token) - case _ => None - } - - new DefaultOssStorageConfiguration(endpoint, accessId, accessKey, securityToken) - } - - def getClient(map: Map[String, String]): OSSClient = { - parseMap(map).newOssClient() - } - - def getClient(endpoint: String, - accessId: String, - accessKey: String, - stsToken: Option[String]): OSSClient = { - DefaultOssStorageConfiguration(endpoint, accessId, accessKey, stsToken).newOssClient() - } - -} - -trait OssStorageConfiguration { - def endpoint: String - - def accessId: String - - def
accessKey: String - - def securityToken: Option[String] - - def toMap: Map[String, String] = { - val ret = Map(ENDPOINT_KEY -> endpoint, ACCESS_ID_KEY -> accessId, ACCESS_KEY_KEY -> accessKey) - val token = securityToken map {token => SECURITY_TOKEN_KEY -> token} - ret ++ token - } - - def newOssClient() = { - val credentialsProvider = securityToken match { - case Some(token: String) => - new DefaultCredentialProvider(accessId, accessKey, token) - case None => - new DefaultCredentialProvider(accessId, accessKey) - } - val clientConfiguration = new ClientConfiguration - new OSSClient(endpoint, credentialsProvider, clientConfiguration) - } - -} - -case class DefaultOssStorageConfiguration(endpoint: String, accessId: String, accessKey: String, securityToken: Option[String] = None) extends OssStorageConfiguration {} - -case class OssStorageFileSystem(bucket: String, config: OssStorageConfiguration) extends FileSystem { - - var internalProvider: OssStorageFileSystemProvider = OssStorageFileSystemProvider(config) - - override def provider: OssStorageFileSystemProvider = internalProvider - - override def getPath(first: String, more: String*): OssStoragePath = OssStoragePath.getPath(this, first, more: _*) - - override def close(): Unit = { - // do nothing currently. - } - - override def isOpen: Boolean = { - true - } - - override def isReadOnly: Boolean = { - false - } - - override def getSeparator: String = { - OssStorageFileSystem.SEPARATOR - } - - override def getRootDirectories: lang.Iterable[Path] = { - Set[Path](OssStoragePath.getPath(this, UnixPath.ROOT_PATH)).asJava - } - - override def getFileStores: lang.Iterable[FileStore] = { - Set.empty[FileStore].asJava - } - - override def getPathMatcher(syntaxAndPattern: String): PathMatcher = { - FileSystems.getDefault.getPathMatcher(syntaxAndPattern) - } - - override def getUserPrincipalLookupService: UserPrincipalLookupService = { - throw new UnsupportedOperationException() - } - - override def newWatchService(): WatchService = { - throw new UnsupportedOperationException() - } - - override def supportedFileAttributeViews(): util.Set[String] = { - Set(OssStorageFileSystem.OSS_VIEW, OssStorageFileSystem.BASIC_VIEW).asJava - } - - override def equals(obj: scala.Any): Boolean = { - this == obj || - obj.isInstanceOf[OssStorageFileSystem] && - obj.asInstanceOf[OssStorageFileSystem].config.equals(config) && - obj.asInstanceOf[OssStorageFileSystem].bucket.equals(bucket) - } - - override def hashCode(): Int = Objects.hash(bucket) - - override def toString: String = OssStorageFileSystem.URI_SCHEMA + "://" + bucket -} - diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProvider.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProvider.scala deleted file mode 100644 index c820e66f411..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProvider.scala +++ /dev/null @@ -1,323 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.io.{BufferedOutputStream, OutputStream} -import java.net.URI -import java.nio.channels.SeekableByteChannel -import java.nio.file._ -import java.nio.file.attribute.{BasicFileAttributeView, BasicFileAttributes, FileAttribute, FileAttributeView} -import java.nio.file.spi.FileSystemProvider -import java.util - -import com.aliyun.oss.OSSClient -import com.aliyun.oss.model.{GenericRequest, ListObjectsRequest} -import com.google.common.collect.AbstractIterator - -import scala.jdk.CollectionConverters._ 
-import scala.collection.immutable.Set -import collection.mutable.ArrayBuffer - - -final case class OssStorageFileSystemProvider(config: OssStorageConfiguration) extends FileSystemProvider { - def ossClient: OSSClient = config.newOssClient() - - class PathIterator(ossClient: OSSClient, prefix: OssStoragePath, filter: DirectoryStream.Filter[_ >: Path]) extends AbstractIterator[Path] { - var nextMarker: Option[String] = None - - var iterator: Iterator[String] = Iterator() - - override def computeNext(): Path = { - if (!iterator.hasNext) { - nextMarker match { - case None => iterator = listNext("") - case Some(marker: String) if !marker.isEmpty => iterator = listNext(marker) - case Some(marker: String) if marker.isEmpty => iterator = Iterator() - case Some(null) => iterator = Iterator() - case oh => throw new RuntimeException(s"Programmer Error! Unexpected case match: $oh") - } - } - - - if (iterator.hasNext) { - val path = OssStoragePath.getPath(prefix.getFileSystem, iterator.next()) - if (filter.accept(path)) { - path - } else { - computeNext() - } - } else { - endOfData() - } - } - - private[this] def listNext(marker: String): Iterator[String] = { - val objectListing = OssStorageRetry.from( - () => { - val listObjectRequest = new ListObjectsRequest(prefix.bucket) - listObjectRequest.setDelimiter(UnixPath.SEPARATOR.toString) - listObjectRequest.setPrefix(prefix.key) - listObjectRequest.setMarker(marker) - - ossClient.listObjects(listObjectRequest) - } - ) - - val result = ArrayBuffer.empty[String] - - objectListing.getObjectSummaries.asScala.filterNot(_.equals(prefix.key)).foreach(obj => {result append obj.getKey.stripPrefix(prefix.key)}) - objectListing.getCommonPrefixes.asScala.filterNot(_.equals(prefix.key)).foreach(obj => {result append obj.stripPrefix(prefix.key)}) - - nextMarker = Some(objectListing.getNextMarker) - result.iterator - } - } - - class OssStorageDirectoryStream(ossClient: OSSClient, prefix: OssStoragePath, filter: DirectoryStream.Filter[_ >: Path]) extends DirectoryStream[Path] { - - override def iterator(): util.Iterator[Path] = new PathIterator(ossClient, prefix, filter) - - override def close(): Unit = {} - - } - - override def getScheme: String = OssStorageFileSystem.URI_SCHEMA - - override def newFileSystem(uri: URI, env: util.Map[String, _]): OssStorageFileSystem = { - if (uri.getScheme != getScheme) { - throw new IllegalArgumentException(s"Schema ${uri.getScheme} not match") - } - - val bucket = uri.getHost - if (bucket.isEmpty) { - throw new IllegalArgumentException(s"Bucket is empty") - } - - if (uri.getPort != -1) { - throw new IllegalArgumentException(s"Port is not permitted") - } - - OssStorageFileSystem(this, bucket, OssStorageConfiguration.parseMap(env.asScala.toMap)) - } - - override def getFileSystem(uri: URI): OssStorageFileSystem = { - newFileSystem(uri, config.toMap.asJava) - } - - override def getPath(uri: URI): OssStoragePath = { - OssStoragePath.getPath(getFileSystem(uri), uri.getPath) - } - - override def newOutputStream(path: Path, options: OpenOption*): OutputStream = { - if (!path.isInstanceOf[OssStoragePath]) { - throw new ProviderMismatchException(s"Not a oss storage path $path") - } - - val len = options.length - - var opts = Set[OpenOption]() - if (len == 0) { - opts = Set(StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING) - } else { - for (opt <- options) { - if (opt == StandardOpenOption.READ) { - throw new IllegalArgumentException("READ not allowed") - } - - opts += opt - } - } - - opts += StandardOpenOption.WRITE - val 
ossStream = OssAppendOutputStream(ossClient, path.asInstanceOf[OssStoragePath], true) - - new BufferedOutputStream(ossStream, 256*1024) - } - - override def newByteChannel(path: Path, options: util.Set[_ <: OpenOption], attrs: FileAttribute[_]*): SeekableByteChannel = { - if (!path.isInstanceOf[OssStoragePath]) { - throw new ProviderMismatchException(s"Not an oss storage path $path") - } - - for (opt <- options.asScala) { - opt match { - case StandardOpenOption.READ => - case StandardOpenOption.WRITE => throw new IllegalArgumentException(s"WRITE byte channel not allowed currently, $path") - case StandardOpenOption.SPARSE | StandardOpenOption.TRUNCATE_EXISTING => - case StandardOpenOption.APPEND | StandardOpenOption.CREATE | StandardOpenOption.DELETE_ON_CLOSE | - StandardOpenOption.CREATE_NEW | StandardOpenOption.DSYNC | StandardOpenOption.SYNC => throw new UnsupportedOperationException() - } - } - - OssFileReadChannel(ossClient, 0, path.asInstanceOf[OssStoragePath]) - } - - def doesObjectExist(bucket: String, name: String): Boolean = { - val req = new GenericRequest(bucket, name) - req.setLogEnabled(false) - ossClient.doesObjectExist(req) - } - - override def createDirectory(dir: Path, attrs: FileAttribute[_]*): Unit = {} - - override def deleteIfExists(path: Path): Boolean = { - val ossPath = OssStoragePath.checkPath(path) - - if (ossPath.seemsLikeDirectory) { - if (headPrefix(ossPath)) { - throw new UnsupportedOperationException("Cannot delete a non-empty directory") - } - - return true - } - - val exist = OssStorageRetry.from( - () => { - val request = new GenericRequest(ossPath.bucket, ossPath.key) - request.setLogEnabled(false) - ossClient.doesObjectExist(request) - } - ) - - if (!exist) { - return false - } - - OssStorageRetry.from( - () => ossClient.deleteObject(ossPath.bucket, ossPath.key) - ) - - true - } - - override def delete(path: Path): Unit = { - if (!deleteIfExists(path)) { - throw new NoSuchFileException(s"File $path does not exist") - } - } - - /* - * XXX: Can only copy files whose size is below 1GB currently. - */ - - override def copy(source: Path, target: Path, options: CopyOption*): Unit = { - val srcOssPath = OssStoragePath.checkPath(source) - val targetOssPath = OssStoragePath.checkPath(target) - - // ignore all options currently. - if (srcOssPath == targetOssPath) { - return - } - - val _ = OssStorageRetry.from( - () => ossClient.copyObject(srcOssPath.bucket, srcOssPath.key, targetOssPath.bucket, targetOssPath.key) - ) - - } - - override def move(source: Path, target: Path, options: CopyOption*): Unit = { - copy(source, target, options: _*) - - val _ = deleteIfExists(source) - } - - override def isSameFile(path: Path, path2: Path): Boolean = { - OssStoragePath.checkPath(path).equals(OssStoragePath.checkPath(path2)) - } - - override def isHidden(path: Path): Boolean = { - false - } - - override def getFileStore(path: Path): FileStore = throw new UnsupportedOperationException() - - override def checkAccess(path: Path, modes: AccessMode*): Unit = { - for (mode <- modes) { - mode match { - case AccessMode.READ | AccessMode.WRITE => - case AccessMode.EXECUTE => throw new AccessDeniedException(mode.toString) - } - } - - val ossPath = OssStoragePath.checkPath(path) - // directory always exists.
- if (ossPath.seemsLikeDirectory) { - return - } - - val exist = OssStorageRetry.from( - () => { - val request = new GenericRequest(ossPath.bucket, ossPath.key) - request.setLogEnabled(false) - ossClient.doesObjectExist(request) - } - ) - - if (!exist) { - throw new NoSuchFileException(path.toString) - } - } - - override def getFileAttributeView[V <: FileAttributeView](path: Path, `type`: Class[V], options: LinkOption*): V = { - if (`type` != classOf[OssStorageFileAttributesView] && `type` != classOf[BasicFileAttributeView] ) { - throw new UnsupportedOperationException(`type`.getSimpleName) - } - - val ossPath = OssStoragePath.checkPath(path) - - OssStorageFileAttributesView(ossClient, ossPath).asInstanceOf[V] - } - - override def readAttributes(path: Path, attributes: String, options: LinkOption*): util.Map[String, AnyRef] = { - throw new UnsupportedOperationException() - } - - override def readAttributes[A <: BasicFileAttributes](path: Path, `type`: Class[A], options: LinkOption*): A = { - if (`type` != classOf[OssStorageFileAttributes] && `type` != classOf[BasicFileAttributes] ) { - throw new UnsupportedOperationException(`type`.getSimpleName) - } - - val ossPath = OssStoragePath.checkPath(path) - - if (ossPath.seemsLikeDirectory) { - return new OssStorageDirectoryAttributes(ossPath).asInstanceOf[A] - } - - val exists = OssStorageRetry.from( - () => { - val request = new GenericRequest(ossPath.bucket, ossPath.key) - request.setLogEnabled(false) - ossClient.doesObjectExist(request) - } - ) - - if (!exists) { - throw new NoSuchFileException(ossPath.toString) - } - - val objectMeta = OssStorageRetry.from( - () => ossClient.getObjectMetadata(ossPath.bucket, ossPath.key) - ) - - OssStorageObjectAttributes(objectMeta, ossPath).asInstanceOf[A] - } - - override def newDirectoryStream(dir: Path, filter: DirectoryStream.Filter[_ >: Path]): DirectoryStream[Path] = { - val ossPath = OssStoragePath.checkPath(dir) - - new OssStorageDirectoryStream(ossClient, ossPath, filter) - } - - override def setAttribute(path: Path, attribute: String, value: scala.Any, options: LinkOption*): Unit = throw new UnsupportedOperationException() - - private[this] def headPrefix(path: Path): Boolean = { - val ossPath = OssStoragePath.checkPath(path) - - val listRequest = new ListObjectsRequest(ossPath.bucket) - listRequest.setPrefix(ossPath.key) - - val listResult = OssStorageRetry.from( - () => ossClient.listObjects(listRequest) - ) - - listResult.getObjectSummaries.iterator().hasNext - } -} diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributes.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributes.scala deleted file mode 100644 index 376aca7f2f8..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributes.scala +++ /dev/null @@ -1,44 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.nio.file.attribute.FileTime - -import com.aliyun.oss.model.ObjectMetadata - -import scala.jdk.CollectionConverters._ -import scala.collection.mutable.Map -import scala.util.Try - -final case class OssStorageObjectAttributes(objectMeta: ObjectMetadata, path: OssStoragePath) extends OssStorageFileAttributes { - override def creationTime(): FileTime = { - FileTime.fromMillis(objectMeta.getLastModified.getTime) - } - - override def lastAccessTime(): FileTime = FileTime.fromMillis(0) - - override def lastModifiedTime(): FileTime = creationTime() - - override def isRegularFile: Boolean = true - - override 
def isDirectory: Boolean = false - - override def isSymbolicLink: Boolean = false - - override def isOther: Boolean = false - - override def size(): Long = objectMeta.getContentLength - - override def fileKey(): AnyRef = path.pathAsString - - // The OSS SDK has an issue: it throws a NullPointerException when no expiration time exists. - override def expires: FileTime = FileTime.fromMillis(Try{objectMeta.getExpirationTime.getTime} getOrElse (0)) - - override def cacheControl(): Option[String] = Option(objectMeta.getCacheControl) - - override def contentDisposition: Option[String] = Option(objectMeta.getContentDisposition) - - override def contentEncoding: Option[String] = Option(objectMeta.getContentEncoding) - - override def etag: Option[String] = Option(objectMeta.getETag) - - override def userMeta: Map[String, String] = objectMeta.getUserMetadata.asScala -} diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStoragePath.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStoragePath.scala deleted file mode 100644 index 02cd06b090e..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStoragePath.scala +++ /dev/null @@ -1,252 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.io.File -import java.net.URI -import java.nio.file._ -import java.util -import java.util.Objects - -import com.google.common.collect.UnmodifiableIterator - -object OssStoragePath { - def checkOssStoragePath(other: Path): OssStoragePath = { - if (!other.isInstanceOf[OssStoragePath]) { - throw new ProviderMismatchException(s"Not an oss storage path $other") - } - - other.asInstanceOf[OssStoragePath] - } - - def getPath(filesystem: OssStorageFileSystem, path: UnixPath) = new OssStoragePathImpl(filesystem, path) - - def getPath(filesystem: OssStorageFileSystem, first: String, more: String*) = new OssStoragePathImpl(filesystem, UnixPath.getPath(first, more: _*)) - - def checkPath(path: Path): OssStoragePath = { - if (!path.isInstanceOf[OssStoragePath]) { - throw new ProviderMismatchException(s"Not an oss storage path $path") - } - - path.asInstanceOf[OssStoragePath] - } - -} - -trait OssStoragePath extends Path { - def bucket = "" - - def key = "" - - def path: UnixPath = UnixPath.EMPTY_PATH - - def seemsLikeDirectory = false - - def pathAsString: String = "" - - override def getFileSystem: OssStorageFileSystem = throw new UnsupportedOperationException - - override def isAbsolute: Boolean = throw new UnsupportedOperationException - - override def getRoot: OssStoragePath = throw new UnsupportedOperationException - - override def getFileName: OssStoragePath = throw new UnsupportedOperationException - - override def getParent: OssStoragePath = throw new UnsupportedOperationException - - override def getNameCount: Int = throw new UnsupportedOperationException - - override def getName(index: Int): OssStoragePath = throw new UnsupportedOperationException - - override def subpath(beginIndex: Int, endIndex: Int): OssStoragePath = throw new UnsupportedOperationException - - override def startsWith(other: Path): Boolean = throw new UnsupportedOperationException - - override def startsWith(other: String): Boolean = throw new UnsupportedOperationException - - override def endsWith(other: Path): Boolean = throw new UnsupportedOperationException - - override def endsWith(other: String): Boolean = throw new UnsupportedOperationException - - override def normalize(): OssStoragePath = throw new UnsupportedOperationException - - override def resolve(other: Path):
OssStoragePath = throw new UnsupportedOperationException - - override def resolve(other: String): OssStoragePath = throw new UnsupportedOperationException - - override def resolveSibling(other: Path): OssStoragePath = throw new UnsupportedOperationException - - override def resolveSibling(other: String): OssStoragePath = throw new UnsupportedOperationException - - override def relativize(other: Path): OssStoragePath = throw new UnsupportedOperationException - - override def toAbsolutePath: OssStoragePath = throw new UnsupportedOperationException - - override def toRealPath(options: LinkOption*): OssStoragePath = throw new UnsupportedOperationException - - override def toFile: File = throw new UnsupportedOperationException - - override def register(watcher: WatchService, events: WatchEvent.Kind[_]*): WatchKey = throw new UnsupportedOperationException - - override def register(watcher: WatchService, events: Array[WatchEvent.Kind[_]], modifiers: WatchEvent.Modifier*): WatchKey = throw new UnsupportedOperationException - - override def iterator(): util.Iterator[Path] = throw new UnsupportedOperationException - - override def compareTo(other: Path): Int = throw new UnsupportedOperationException - - override def toUri: URI = throw new UnsupportedOperationException -} - -final case class OssStoragePathImpl(filesystem: OssStorageFileSystem, override val path: UnixPath = UnixPath.EMPTY_PATH) extends OssStoragePath { - - override def pathAsString: String = toUri.toString - - override def bucket: String = filesystem.bucket - - override def key: String = toAbsolutePath.toString.stripPrefix("/") - - override def getFileSystem: OssStorageFileSystem = filesystem - - override def isAbsolute: Boolean = path.isAbsolute - - override def getRoot: OssStoragePath = path.getRoot map {path => newPath(path)} getOrElse NullOssStoragePath(filesystem) - - override def getFileName: OssStoragePath = path.getFileName map {path => newPath(path)} getOrElse NullOssStoragePath(filesystem) - - override def getParent: OssStoragePath = path.getParent map {path => newPath(path)} getOrElse NullOssStoragePath(filesystem) - - override def getNameCount: Int = path.getNameCount - - override def getName(index: Int): OssStoragePath = path.getName(index) map {path => newPath(path)} getOrElse NullOssStoragePath(filesystem) - - override def subpath(beginIndex: Int, endIndex: Int): OssStoragePath = path.subPath(beginIndex, endIndex) map {path => newPath(path)} getOrElse NullOssStoragePath(filesystem) - - override def startsWith(other: Path): Boolean = { - if (!other.isInstanceOf[OssStoragePath]) { - return false - } - - val that = other.asInstanceOf[OssStoragePath] - if (bucket != that.bucket) { - return false - } - - path.startsWith(that.path) - } - - override def startsWith(other: String): Boolean = { - path.startsWith(UnixPath.getPath(other)) - } - - override def endsWith(other: Path): Boolean = { - if (!other.isInstanceOf[OssStoragePath]) { - return false - } - val that = other.asInstanceOf[OssStoragePath] - if (bucket != that.bucket) { - return false - } - - path.endsWith(that.path) - } - - override def endsWith(other: String): Boolean = { - path.endsWith(UnixPath.getPath(other)) - } - - override def normalize(): OssStoragePath = newPath(path.normalize()) - - override def resolve(other: Path): OssStoragePath = { - val that = OssStoragePath.checkOssStoragePath(other) - - newPath(path.resolve(that.path)) - } - - override def resolve(other: String): OssStoragePath = { - newPath(path.resolve(UnixPath.getPath(other))) - } - - override def 
resolveSibling(other: Path): OssStoragePath = { - val that = OssStoragePath.checkOssStoragePath(other) - - newPath(path.resolveSibling(that.path)) - } - - override def resolveSibling(other: String): OssStoragePath = { - newPath(path.resolveSibling(UnixPath.getPath(other))) - } - - override def relativize(other: Path): OssStoragePath = { - val that = OssStoragePath.checkOssStoragePath(other) - - newPath(path.relativize(that.path)) - } - - /** - * Currently a mocked implementation. - */ - override def toAbsolutePath: OssStoragePath = { - newPath(path.toAbsolutePath()) - } - - override def toRealPath(options: LinkOption*): OssStoragePath = toAbsolutePath - - override def toFile: File = throw new UnsupportedOperationException - - override def register(watcher: WatchService, events: WatchEvent.Kind[_]*): WatchKey = throw new UnsupportedOperationException - - override def register(watcher: WatchService, events: Array[WatchEvent.Kind[_]], modifiers: WatchEvent.Modifier*): WatchKey = throw new UnsupportedOperationException - - override def iterator(): util.Iterator[Path] = { - if (path.isEmpty() || path.isRoot) { - return util.Collections.emptyIterator() - } - - new PathIterator() - } - - override def compareTo(other: Path): Int = { - if (!other.isInstanceOf[OssStoragePath]) { - return -1 - } - - val that = other.asInstanceOf[OssStoragePath] - val res: Int = bucket.compareTo(that.bucket) - if (res != 0) { - return res - } - - path.compareTo(that.path) - } - - override def seemsLikeDirectory = path.seemsLikeDirectory() - - override def equals(obj: scala.Any): Boolean = { - (this eq obj.asInstanceOf[AnyRef]) || obj.isInstanceOf[OssStoragePath] && obj.asInstanceOf[OssStoragePath].bucket.equals(bucket) && obj.asInstanceOf[OssStoragePath].path.equals(path) - } - - override def hashCode(): Int = { - Objects.hash(bucket, toAbsolutePath.toString) - } - - override def toString: String = path.toString - - override def toUri: URI = new URI("oss", bucket, toAbsolutePath.toString, None.orNull) - - private[this] def newPath(unixPath: UnixPath): OssStoragePath = { - if (unixPath == path) { - this - } else { - OssStoragePathImpl(filesystem, unixPath) - } - } - - - class PathIterator extends UnmodifiableIterator[Path] { - val delegate = path.split() - - override def next(): OssStoragePath = newPath(UnixPath.getPath(delegate.next())) - - override def hasNext: Boolean = delegate.hasNext - } -} - -final case class NullOssStoragePath(filesystem: OssStorageFileSystem) extends OssStoragePath diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageRetry.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageRetry.scala deleted file mode 100644 index 105c5458b5d..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/OssStorageRetry.scala +++ /dev/null @@ -1,71 +0,0 @@ -package cromwell.filesystems.oss.nio - -import com.aliyun.oss.{ClientException, OSSException} -import common.util.Backoff -import cromwell.core.retry.SimpleExponentialBackoff - -import scala.concurrent.duration._ -import scala.util.{Failure, Success, Try} - -object OssStorageRetry { - def fromTry[A](f: () => Try[A], - maxRetries: Option[Int] = Some(DEFAULT_MAX_RETRIES), - backoff: Backoff = SimpleExponentialBackoff(1.second, 60.seconds, 1.1), - isTransient: Throwable => Boolean = transient, - isFatal: Throwable => Boolean = fatal - ): A = { - val delay = backoff.backoffMillis - - f() match { - case Success(ret) => ret - case Failure(e) if isFatal(e) => throw e - case Failure(e) if !isFatal(e) => - 
val retriesLeft = if (isTransient(e)) maxRetries else maxRetries map { _ - 1 } - if (retriesLeft.forall(_ > 0)) { - Thread.sleep(delay) - fromTry(f, retriesLeft, backoff, isTransient, isFatal) - } else { - throw e - } - case oh => throw new RuntimeException(s"Programmer Error! Unexpected case match: $oh") - } - } - - def from[A](f: () => A, - maxRetries: Option[Int] = Some(DEFAULT_MAX_RETRIES), - backoff: Backoff = SimpleExponentialBackoff(1.second, 60.seconds, 1.1), - isTransient: Throwable => Boolean = transient, - isFatal: Throwable => Boolean = fatal - ): A = { - fromTry[A]( - () => Try{ - f() - }, - maxRetries, - backoff, - isTransient, - isFatal - ) - } - - def transient(t: Throwable): Boolean = t match { - case oss: OSSException if TRANSIENT_ERROR_CODES contains oss.getErrorCode => true - case _ => false - } - - def fatal(t: Throwable): Boolean = t match { - case oss: OSSException if oss.getErrorCode.startsWith("Invalid") || oss.getErrorCode.startsWith("NoSuch") => true - case _: OSSException | _: ClientException => false - case _ => true - } - - val DEFAULT_MAX_RETRIES = 10 - - val TRANSIENT_ERROR_CODES = Array("InternalError", - "RequestTimeout", - "RecvFlowLimitExceeded", - "SendFlowLimitExceeded", - "UploadTrafficRateLimitExceeded", - "DownloadTrafficRateLimitExceeded" - ) -} diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/TTLOssStorageConfiguration.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/TTLOssStorageConfiguration.scala deleted file mode 100644 index e87f59c25bb..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/TTLOssStorageConfiguration.scala +++ /dev/null @@ -1,47 +0,0 @@ -package cromwell.filesystems.oss.nio - -import com.aliyun.oss.OSSClient -import com.typesafe.config.Config -import net.ceedubs.ficus.Ficus._ - -object TTLOssStorageConfiguration { - def currentTimestamp = System.currentTimeMillis / 1000 - - def defaultRefreshInterval: Long = 30 * 60 - - val RefreshInterval = "refresh-interval" - - def apply(config: Config): TTLOssStorageConfiguration = new TTLOssStorageConfiguration(config) -} - -/* Unsupported. For test purposes only. 
*/ -class TTLOssStorageConfiguration(config: Config) extends OssStorageConfiguration { - - override def endpoint: String = config.as[Option[String]](authPath(OssStorageConfiguration.ENDPOINT_KEY)) getOrElse("") - - override def accessId: String = config.as[Option[String]](authPath(OssStorageConfiguration.ACCESS_ID_KEY)) getOrElse("") - - override def accessKey: String = config.as[Option[String]](authPath(OssStorageConfiguration.ACCESS_KEY_KEY)) getOrElse("") - - override def securityToken: Option[String] = config.as[Option[String]](authPath(OssStorageConfiguration.SECURITY_TOKEN_KEY)) - - def refreshInterval: Long = config.as[Option[Long]](TTLOssStorageConfiguration.RefreshInterval).getOrElse(TTLOssStorageConfiguration.defaultRefreshInterval) - - private def authPath(key: String): String = s"auth.$key" - private var lastClientUpdateTime: Long = 0 - - private var oldClient: Option[OSSClient] = None - - override def newOssClient(): OSSClient = { - val current = TTLOssStorageConfiguration.currentTimestamp - synchronized { - if (lastClientUpdateTime == 0 || current - lastClientUpdateTime > refreshInterval) { - oldClient = Option(super.newOssClient()) - lastClientUpdateTime = current - } - } - - oldClient getOrElse(throw new IllegalArgumentException("Non oss client")) - } -} - diff --git a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/UnixPath.scala b/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/UnixPath.scala deleted file mode 100644 index fc640680a26..00000000000 --- a/filesystems/oss/src/main/scala/cromwell/filesystems/oss/nio/UnixPath.scala +++ /dev/null @@ -1,347 +0,0 @@ -package cromwell.filesystems.oss.nio - - -import scala.collection.mutable -import scala.math.Ordering.Implicits._ -import scala.util.control.Breaks._ -import scala.util.{Failure, Success, Try} - - -object UnixPath { - val DOT: Char = '.' 
- val SEPARATOR: Char = '/' - val ROOT: String = SEPARATOR.toString - val CURRENT_DIR: String = DOT.toString - val PARENT_DIR: String = DOT.toString + DOT.toString - val EMPTY_PATH: UnixPath = new UnixPath("") - val ROOT_PATH: UnixPath = new UnixPath("/") - - private def isRoot(path: String) = path.length() == 1 && path.charAt(0) == SEPARATOR - private def isAbsolute(path: String) = !path.isEmpty() && path.charAt(0) == SEPARATOR - private def hasTrailingSeparator(path: String) = !path.isEmpty() && path.charAt(path.length - 1) == SEPARATOR - - def getPath(path: String): UnixPath = { - if (path.isEmpty()) { - return EMPTY_PATH - } else if (isRoot(path)) { - return ROOT_PATH - } else { - UnixPath(path) - } - } - - def getPath(first: String, more: String*): UnixPath = { - if (more.length == 0) { - return new UnixPath(first) - } - - val builder = new StringBuilder(first) - for ((part, index) <- more.view.zipWithIndex) { - if (part.isEmpty()) { - // do nothing - } else if (isAbsolute(part)) { - if (index == more.length - 1) { - return new UnixPath(part) - } else { - builder.replace(0, builder.length, part) - } - } else if (hasTrailingSeparator(part)) { - builder.append(part) - } else { - builder.append(SEPARATOR) - builder.append(part) - } - } - - UnixPath(builder.toString) - } - -} - -final case class UnixPath(path: String) extends CharSequence -{ - lazy val parts = initParts() - - def isRoot = UnixPath.isRoot(path) - - def isAbsolute = UnixPath.isAbsolute(path) - - def isEmpty() = path.isEmpty() - - def hasTrailingSeparator = UnixPath.hasTrailingSeparator(path) - - def seemsLikeDirectory() = path.isEmpty() || hasTrailingSeparator || path.endsWith(".") && (length == 1 || path.charAt(length - 2) == UnixPath.SEPARATOR) || path.endsWith("..") && (length == 2 || path.charAt(length - 3) == UnixPath.SEPARATOR) - - def getFileName: Option[UnixPath] = { - if (path.isEmpty() || isRoot) { - None - } else { - if (parts.size == 1 && parts.last == path) { - Some(this) - } else { - Some(UnixPath(parts.last)) - } - } - } - - def getParent: Option[UnixPath] = { - if (path.isEmpty() || isRoot) { - return None - } - - val index = if (hasTrailingSeparator) path.lastIndexOf(UnixPath.SEPARATOR.toInt, path.length -2) else path.lastIndexOf(UnixPath.SEPARATOR.toInt) - index match { - case -1 => if (isAbsolute) Some(UnixPath.ROOT_PATH) else None - case pos => Some(UnixPath(path.substring(0, pos + 1))) - } - } - - def getRoot: Option[UnixPath] = if (isAbsolute) Some(UnixPath.ROOT_PATH) else None - - def subPath(beginIndex: Int, endIndex: Int): Try[UnixPath] = { - if (path.isEmpty() && beginIndex == 0 && endIndex == 1) { - return Success(this) - } - - if (beginIndex < 0 || endIndex < beginIndex) { - return Failure(new IllegalArgumentException(s"begin index or end index is invalid")) - } - - Try(UnixPath(parts.slice(beginIndex, endIndex).mkString(UnixPath.SEPARATOR.toString))) - } - - def getNameCount: Int = { - if (path.isEmpty()) { - 1 - } else if (isRoot) { - 0 - } else { - parts.size - } - } - - def getName(index: Int): Try[UnixPath] = { - if (path.isEmpty()){ - return Failure(new IllegalArgumentException("cannot get name from an empty path")) - } - - if (index > parts.length - 1) { - return Failure(new IndexOutOfBoundsException(s"index ${index} out of name count ${parts.length}")) - } - - return Success(UnixPath(parts(index))) - } - - def resolve(other: UnixPath): UnixPath = { - if (other.path.isEmpty()){ - this - } else if (other.isAbsolute) { - other - } else if (hasTrailingSeparator) { - new UnixPath(path + other.path) - } 
else { - new UnixPath(path + UnixPath.SEPARATOR.toString + other.path) - } - } - - def resolveSibling(other: UnixPath): UnixPath = { - getParent match { - case Some(parent: UnixPath) => - parent.resolve(other) - case None => other - } - } - - def relativize(other: UnixPath): UnixPath = { - if (path.isEmpty()){ - return other - } - - val left = split().buffered - val right = other.split().buffered - breakable( - while (left.hasNext && right.hasNext){ - if (!(left.head == right.head)){ - break() - } - - left.next() - right.next() - } - ) - - val result = new StringBuilder(path.length + other.path.length) - while (left.hasNext){ - result.append(UnixPath.PARENT_DIR) - result.append(UnixPath.SEPARATOR) - left.next() - } - - while (right.hasNext) { - result.append(right.next()) - result.append(UnixPath.SEPARATOR) - } - - if (result.length > 0 && !other.hasTrailingSeparator) { - result.deleteCharAt(result.length - 1) - } - - return new UnixPath(result.toString) - } - - def normalize(): UnixPath = { - val parts = mutable.ArrayBuffer[String]() - var mutated = false - var resultLength = 0 - var mark = 0 - var index = 0 - val current = UnixPath.CURRENT_DIR + UnixPath.SEPARATOR.toString - val parent = UnixPath.PARENT_DIR + UnixPath.SEPARATOR - do { - index = path.indexOf(UnixPath.SEPARATOR.toInt, mark) - val part = path.substring(mark, if (index == -1) path.length else index + 1) - part match { - case UnixPath.CURRENT_DIR | `current` => mutated = true - case UnixPath.PARENT_DIR | `parent` => - mutated = true - if (!parts.isEmpty){ - resultLength -= parts.remove(parts.length -1).length - } - case _ => - if (index != mark || index == 0) { - parts.append(part) - resultLength += part.length - } else { - mutated = true - } - } - mark = index + 1 - } while (index != -1) - - if (!mutated){ - return this - } - - val result = new StringBuilder(resultLength) - - parts.foreach {part => result.append(part)} - - return new UnixPath(result.toString) - } - - def split(): Iterator[String] = parts.iterator - - def splitReverse(): Iterator[String] = parts.reverseIterator - - def removeBeginningSeparator(): UnixPath = { - if (isAbsolute) new UnixPath(path.substring(1)) else this - } - - def addTrailingSeparator(): UnixPath = { - if (hasTrailingSeparator) this else new UnixPath(path + UnixPath.SEPARATOR) - } - - def removeTrailingSeparator()(): UnixPath = { - if (!isRoot && hasTrailingSeparator) { - new UnixPath(path.substring(0, length -1)) - } else { - this - } - } - - def startsWith(other: UnixPath): Boolean = { - val me = removeTrailingSeparator()() - val oth = other.removeTrailingSeparator()() - - if (oth.path.length > me.path.length) { - return false - } else if (me.isAbsolute != oth.isAbsolute) { - return false - } else if (!me.path.isEmpty() && oth.path.isEmpty()) { - return false - } - - return startsWith(split(), other.split()) - } - - def startsWith(left: Iterator[String], right: Iterator[String]): Boolean = { - while (right.hasNext){ - if (!left.hasNext || right.next() != left.next()) { - return false - } - } - return true - } - - def endsWith(other: UnixPath): Boolean = { - val me = removeTrailingSeparator()() - val oth = other.removeTrailingSeparator()() - - if (oth.path.length > me.path.length) { - return false - } else if (!me.path.isEmpty() && oth.path.isEmpty()) { - return false - } else if (oth.isAbsolute) { - return me.isAbsolute && me.path == other.path - } - - startsWith(me.splitReverse(), other.splitReverse()) - } - - def toAbsolutePath(currentWorkingDirectory: UnixPath): Try[UnixPath] = { - if 
(!currentWorkingDirectory.isAbsolute) { - return Failure(new IllegalArgumentException(s"Not an absolute path ${currentWorkingDirectory}")) - } - - if (isAbsolute) Success(this) else Success(currentWorkingDirectory.resolve(this)) - } - - def toAbsolutePath(): UnixPath = { - if (isAbsolute) this else UnixPath.ROOT_PATH.resolve(this) - } - - def compareTo(other: UnixPath): Int = { - val me = parts.toList - val that = other.parts.toList - - if (me == that) { - return 0 - } else if (me < that) { - return -1 - } else { - return 1 - } - } - - override def equals(obj: scala.Any): Boolean = { - (this eq obj.asInstanceOf[AnyRef]) || { obj.isInstanceOf[UnixPath] && obj.asInstanceOf[UnixPath].path.equals(path)} - } - - override def length(): Int = { - path.length - } - - override def charAt(index: Int): Char = { - path.charAt(index) - } - - override def subSequence(start: Int, end: Int): CharSequence = { - path.subSequence(start, end) - } - - override def toString: String = { - path - } - - def initParts(): Array[String] = { - if (path.isEmpty()) { - Array.empty[String] - } else { - if (path.charAt(0) == UnixPath.SEPARATOR){ - path.substring(1).split(UnixPath.SEPARATOR) - } else { - path.split(UnixPath.SEPARATOR) - } - } - } -} diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/OssPathBuilderSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/OssPathBuilderSpec.scala deleted file mode 100644 index 10e664504f1..00000000000 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/OssPathBuilderSpec.scala +++ /dev/null @@ -1,82 +0,0 @@ -package cromwell.filesystems.oss - -import com.typesafe.config.ConfigFactory -import cromwell.core.TestKitSuite -import cromwell.filesystems.oss.nio.OssNioUtilSpec -import org.scalatest.BeforeAndAfter -import org.scalatest.TryValues._ -import org.scalatest.flatspec.AnyFlatSpecLike -import org.scalatest.matchers.should.Matchers - -object OssPathBuilderSpec { - - val BcsBackendConfigWithRefreshString = - s""" - | refresh-interval = 1800 - | auth { - | endpoint = "oss-cn-shanghai.aliyuncs.com" - | access-id = "test-access-id" - | access-key = "test-access-key" - | security-token = "test-security-token" - | } - | caching { - | duplication-strategy = "reference" - | } - """.stripMargin - - val BcsBackendConfigWithRefresh = ConfigFactory.parseString(BcsBackendConfigWithRefreshString) - - val BcsBackendConfigWithoutRefreshString = - s""" - | auth { - | endpoint = "oss-cn-shanghai.aliyuncs.com" - | access-id = "test-access-id" - | access-key = "test-access-key" - | } - | caching { - | duplication-strategy = "reference" - | } - """.stripMargin - - val BcsBackendConfigWithoutRefresh = ConfigFactory.parseString(BcsBackendConfigWithoutRefreshString) -} - -class OssPathBuilderSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with OssNioUtilSpec with BeforeAndAfter { - - behavior of "OssPathBuilerSpec" - val testPathBuiler = OssPathBuilder(mockOssConf) - - it should "throw when no bucket in URI" in { - testPathBuiler.build("oss:").failed.get shouldBe an[IllegalArgumentException] - testPathBuiler.build("oss://").failed.get shouldBe an[IllegalArgumentException] - } - - it should "throw when path has an invalid schema" in { - testPathBuiler.build(s"gcs://$bucket$fileName").failed.get shouldBe an[IllegalArgumentException] - } - - it should "has an empty key when no path specified" in { - testPathBuiler.build(s"oss://$bucket").success.value.bucket shouldBe bucket - testPathBuiler.build(s"oss://$bucket").success.value.key shouldBe empty - } 
- - it should "start with separator when path specified" in { - val path = testPathBuiler.build(s"oss://$bucket$fileName").success.value - path.bucket shouldBe bucket - path.nioPath.toString shouldBe fileName - path.key shouldBe fileName.stripPrefix("/") - path.pathAsString shouldBe s"oss://$bucket$fileName" - path.pathWithoutScheme shouldBe s"$bucket$fileName" - } - - it should "success from config" in { - val ossPathBuilder = OssPathBuilder.fromConfig(OssPathBuilderSpec.BcsBackendConfigWithRefresh, null) - ossPathBuilder.build(s"oss://$bucket").success.value.bucket shouldBe bucket - ossPathBuilder.build(s"oss://$bucket").success.value.key shouldBe empty - - val ossPathBuilderWithoutRefresh = OssPathBuilder.fromConfig(OssPathBuilderSpec.BcsBackendConfigWithoutRefresh, null) - ossPathBuilderWithoutRefresh.build(s"oss://$bucket").success.value.bucket shouldBe bucket - ossPathBuilderWithoutRefresh.build(s"oss://$bucket").success.value.key shouldBe empty - } - - } diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssAppendOutputStreamSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssAppendOutputStreamSpec.scala deleted file mode 100644 index ea72793dcdb..00000000000 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssAppendOutputStreamSpec.scala +++ /dev/null @@ -1,42 +0,0 @@ -package cromwell.filesystems.oss.nio - -import cromwell.core.TestKitSuite - -class OssAppendOutputStreamSpec extends TestKitSuite with OssNioUtilSpec { - - behavior of s"OssAppendOutputStream" - - "write batch" should "work" taggedAs NeedAK in { - val path = OssStoragePath.getPath(ossFileSystem, "/test-oss-append") - val stream = OssAppendOutputStream(ossClient, path, true) - - val content: String = "haha" - stream.write(content.getBytes) - - contentAsString(path) shouldEqual content - stream.position shouldEqual content.length - } - - "write single" should "work" taggedAs NeedAK in { - val c: Char = 'c' - val path = OssStoragePath.getPath(ossFileSystem, "/test-oss-append") - val stream = OssAppendOutputStream(ossClient, path, true) - - stream.write(c.toInt) - - contentAsString(path) shouldEqual c.toString - stream.position shouldEqual 1 - } - - "write range" should "work" taggedAs NeedAK in { - val path = OssStoragePath.getPath(ossFileSystem, "/test-oss-append") - val stream = OssAppendOutputStream(ossClient, path, true) - - val content: String = "haha" - stream.write(content.getBytes, 1, 1) - - contentAsString(path) shouldEqual 'a'.toString - stream.position shouldEqual 1 - } - -} diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssFileReadChannelSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssFileReadChannelSpec.scala deleted file mode 100644 index 1c57460ef51..00000000000 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssFileReadChannelSpec.scala +++ /dev/null @@ -1,80 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.nio.charset.Charset - -import cromwell.core.TestKitSuite -import org.scalatest.{BeforeAndAfter} - -import scala.util.Try -import scala.util.control.Breaks - -object OssFileReadChannelSpec { - val FILENAME = "/test-oss-read-file" - val CONTENT = "Hello World!" 
- - implicit class Crossable[X](xs: Iterable[X]) { - def cross[Y](ys: Iterable[Y]) = for { x <- xs; y <- ys } yield (x, y) - } -} - -class OssFileReadChannelSpec extends TestKitSuite with OssNioUtilSpec with BeforeAndAfter { - behavior of s"OssFileReadChannelSpec" - - import OssFileReadChannelSpec._ - - - def getPath = OssStoragePath.getPath(ossFileSystem, FILENAME) - - before { - Try(OssAppendOutputStream(ossClient, getPath, true)) foreach {_.write(CONTENT.getBytes("UTF-8"))} - } - - after { - Try(deleteObject(getPath)) - } - - it should "has the right size" taggedAs NeedAK in { - val channel = OssFileReadChannel(ossClient, 0L, getPath) - channel.size shouldEqual(CONTENT.length) - } - - it should "has the right content" taggedAs NeedAK in { - List.range(1, CONTENT.length + 1) foreach { bufferSize =>verifySameContent(bufferSize)} - for (bufferSize <- List.range(1, CONTENT.length + 1); position <- List.range(0, CONTENT.length)) { - verifySameContent(bufferSize, position.toLong) - } - } - - it should "has the right position after seeking" taggedAs NeedAK in { - val channel = OssFileReadChannel(ossClient, 0L, getPath) - channel.size shouldEqual(CONTENT.length) - - channel.position(1) - - channel.position shouldEqual(1) - } - - def verifySameContent(bufferSize: Int, position: Long = 0) = { - val channel = OssFileReadChannel(ossClient, position, getPath) - - import java.nio.ByteBuffer - val buf = ByteBuffer.allocate(bufferSize) - - val loop = new Breaks - val builder = new StringBuilder - - var bytesRead = channel.read(buf) - loop.breakable { - while (bytesRead != -1) { - buf.flip() - val charset = Charset.forName("UTF-8"); - - builder.append(charset.decode(buf).toString()) - buf.clear - bytesRead = channel.read(buf) - } - } - - builder.toString shouldEqual CONTENT.substring(position.toInt) - } -} diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssNioUtilSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssNioUtilSpec.scala deleted file mode 100644 index b4ebb84b7fa..00000000000 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssNioUtilSpec.scala +++ /dev/null @@ -1,110 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.io.ByteArrayInputStream - -import com.aliyun.oss.OSSClient -import common.assertion.CromwellTimeoutSpec -import org.scalatest._ -import org.scalatest.flatspec.AnyFlatSpecLike -import org.scalatest.matchers.should.Matchers - -import scala.util.control.Breaks -import scala.util.{Failure, Success, Try} - -object NeedAK extends Tag("this test need oss storage access id and key") - -object OssNioUtilSpec { - val DEFAULT_BUCKET = "bcs-bucket" - - val DEFAULT_FILE_NAME = "/bcs-dir/bcs-file" - - val DEFAULT_CONTENT = "Hello World!" 
- - val ossInfo: Map[String, String] = Map( - "endpoint" -> "", - "access-id" -> "", - "access-key" -> "", - "bucket" -> DEFAULT_BUCKET - ) -} - -trait OssNioUtilSpec extends AnyFlatSpecLike with CromwellTimeoutSpec with Matchers { - - override def withFixture(test: NoArgTest): Outcome = { - if (test.tags.contains(NeedAK.name)) { - Try(ossConf) match { - case Success(_) => super.withFixture(test) - case Failure(_) => cancel(NeedAK.name) - } - } else { - super.withFixture(test) - } - } - - import OssNioUtilSpec._ - - lazy val bucket: String = { - val bucket = ossInfo.getOrElse("bucket", "mock-bucket") - if (bucket.isEmpty) { - throw new IllegalArgumentException("test bucket can not be empty") - } - - bucket - } - - lazy val ossConf: OssStorageConfiguration = Try{ - OssStorageConfiguration.parseMap(ossInfo) - } getOrElse(throw new IllegalArgumentException("you should supply oss info before testing oss related operation")) - - lazy val mockOssConf: OssStorageConfiguration = - DefaultOssStorageConfiguration("mock-endpoint", "mock-id", "mock-key", None) - - lazy val ossProvider: OssStorageFileSystemProvider = OssStorageFileSystemProvider(ossConf) - lazy val mockProvider: OssStorageFileSystemProvider = OssStorageFileSystemProvider(mockOssConf) - lazy val ossFileSystem: OssStorageFileSystem = OssStorageFileSystem(bucket, ossConf) - lazy val mockFileSystem: OssStorageFileSystem = OssStorageFileSystem(bucket, mockOssConf) - val fileName: String = DEFAULT_FILE_NAME - val fileContent: String = DEFAULT_CONTENT - - lazy val ossClient: OSSClient = mockOssConf.newOssClient() - - def contentAsString(path: OssStoragePath): String = { - val ossObject = ossClient.getObject(path.bucket, path.key) - - val in = OssStorageRetry.from( - () => ossObject.getObjectContent - ) - - val maxLen = 1024 - val loop = new Breaks - val result = new StringBuilder - loop.breakable { - while(true) { - val b = new Array[Byte](maxLen) - val got = OssStorageRetry.from( - () => in.read(b, 0, maxLen) - ) - if (got <= 0) { - loop.break() - } - result.append(new String(b, 0, got)) - } - } - - result.toString() - } - - def deleteObject(path: OssStoragePath): Unit = { - OssStorageRetry.from( - () => ossClient.deleteObject(path.bucket, path.key) - ) - () - } - - def writeObject(path: OssStoragePath): Unit = { - OssStorageRetry.from{ - () => ossClient.putObject(path.bucket, path.key, new ByteArrayInputStream(fileContent.getBytes())) - } - () - } -} diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributesViewSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributesViewSpec.scala deleted file mode 100644 index 7825a369076..00000000000 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileAttributesViewSpec.scala +++ /dev/null @@ -1,39 +0,0 @@ -package cromwell.filesystems.oss.nio - -import com.aliyun.oss.OSSClient -import com.aliyun.oss.model.GenericRequest -import org.mockito.Mockito._ -import org.mockito.ArgumentMatchers._ - - -class OssStorageFileAttributesViewSpec extends OssNioUtilSpec { - behavior of "OssStorageFileAttributesView" - - import OssStorageObjectAttributesSpec._ - - private def getObject = { - OssStoragePath.getPath(mockFileSystem, fileName) - } - - private def getDir = { - OssStoragePath.getPath(mockFileSystem, "/bcs-dir/") - } - - it should "return an object attr" in { - val ossClient = mock[OSSClient] - when(ossClient.doesObjectExist(any[GenericRequest]())).thenReturn(true) - val meta = getObjectMeta - 
when(ossClient.getObjectMetadata(anyString(), anyString())).thenReturn(meta) - - val view = OssStorageFileAttributesView(ossClient, getObject) - view.readAttributes shouldBe an [OssStorageObjectAttributes] - } - - it should "return an dir attr" in { - val ossClient = mock[OSSClient] - when(ossClient.doesObjectExist(any[GenericRequest]())).thenReturn(true) - val view = OssStorageFileAttributesView(ossClient, getDir) - view.readAttributes shouldBe a [OssStorageDirectoryAttributes] - } - -} diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProviderSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProviderSpec.scala deleted file mode 100644 index 59638e36878..00000000000 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemProviderSpec.scala +++ /dev/null @@ -1,194 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.net.URI -import java.nio.charset.Charset -import java.nio.file.{DirectoryStream, NoSuchFileException, Path, StandardOpenOption} - -import cromwell.core.TestKitSuite -import org.scalatest.BeforeAndAfter - -import scala.jdk.CollectionConverters._ -import scala.collection.mutable.ArrayBuffer -import scala.util.control.Breaks - -class OssStorageFileSystemProviderSpec extends TestKitSuite with OssNioUtilSpec with BeforeAndAfter { - behavior of "OssStorageFileSystemProviderSpec" - - it should "has right schema" in { - mockProvider.getScheme shouldEqual OssStorageFileSystem.URI_SCHEMA - } - - it should "work when creating new file system" in { - val fs = mockProvider.newFileSystem(URI.create(s"oss://$bucket"), mockOssConf.toMap.asJava) - fs.bucket shouldEqual bucket - - an [IllegalArgumentException] should be thrownBy ossProvider.newFileSystem(URI.create(s"oss://"), mockOssConf.toMap.asJava) - an [IllegalArgumentException] should be thrownBy ossProvider.newFileSystem(URI.create(s"oss://$bucket:8812"), mockOssConf.toMap.asJava) - - val fs1 = mockProvider.getFileSystem(URI.create(s"oss://$bucket")) - fs1.bucket shouldEqual bucket - } - - it should "work when getting a new oss path" in { - val path = mockProvider.getPath(URI.create(s"oss://$bucket$fileName")) - path.bucket shouldEqual bucket - path.key shouldEqual fileName.stripPrefix(OssStorageFileSystem.SEPARATOR) - } - - it should "work when creating an output stream" taggedAs NeedAK in { - val path = ossProvider.getPath(URI.create(s"oss://$bucket$fileName")) - - val outS = ossProvider.newOutputStream(path) - outS.write(fileContent.getBytes) - - contentAsString(path) shouldEqual fileContent - outS.asInstanceOf[OssAppendOutputStream].position shouldEqual fileContent.length - } - - it should "work when creating an byte channel" taggedAs NeedAK in { - val path = ossProvider.getPath(URI.create(s"oss://$bucket$fileName")) - - val outS = ossProvider.newOutputStream(path) - outS.write(fileContent.getBytes) - - val inC = ossProvider.newByteChannel(path, Set(StandardOpenOption.READ).asJava) - - import java.nio.ByteBuffer - val buf = ByteBuffer.allocate(1) - - val loop = new Breaks - val builder = new StringBuilder - - var bytesRead = inC.read(buf) - loop.breakable { - while (bytesRead != -1) { - buf.flip() - val charset = Charset.forName("UTF-8") - - builder.append(charset.decode(buf).toString) - buf.clear - bytesRead = inC.read(buf) - } - } - - builder.toString shouldEqual fileContent - } - - it should "delete file if it exists" taggedAs NeedAK in { - val path = ossProvider.getPath(URI.create(s"oss://$bucket$fileName")) 
- - val outS = ossProvider.newOutputStream(path) - outS.write(fileContent.getBytes) - outS.close() - - ossProvider.deleteIfExists(path) shouldEqual true - ossProvider.deleteIfExists(path) shouldEqual false - an [NoSuchFileException] should be thrownBy ossProvider.delete(path) - } - - it should "work when copying an object" taggedAs NeedAK in { - val src = ossProvider.getPath(URI.create(s"oss://$bucket$fileName")) - val target = ossProvider.getPath(URI.create(s"oss://$bucket${fileName}1")) - ossProvider.deleteIfExists(src) - - writeObject(src) - - ossProvider.copy(src, target) - - ossProvider.deleteIfExists(target) shouldEqual true - } - - it should "work when moving an object" taggedAs NeedAK in { - val src = ossProvider.getPath(URI.create(s"oss://$bucket$fileName")) - val target = ossProvider.getPath(URI.create(s"oss://$bucket${fileName}1")) - ossProvider.deleteIfExists(src) - - writeObject(src) - - ossProvider.move(src, target) - - ossProvider.deleteIfExists(target) shouldEqual true - ossProvider.deleteIfExists(src) shouldEqual false - - } - - it should "work for some basic operations" taggedAs NeedAK in { - val path = ossProvider.getPath(URI.create(s"oss://$bucket$fileName")) - val path1 = ossProvider.getPath(URI.create(s"oss://$bucket$fileName")) - - ossProvider.isHidden(path) shouldEqual false - ossProvider.isSameFile(path, path1) - - an [UnsupportedOperationException] should be thrownBy ossProvider.getFileStore(path) - - an [NoSuchFileException] should be thrownBy ossProvider.checkAccess(path) - - val dir = ossProvider.getPath(URI.create(s"oss://$bucket${fileName}/")) - noException should be thrownBy ossProvider.checkAccess(dir) - } - - it should "work for attribute view" taggedAs NeedAK in { - val path = ossProvider.getPath(URI.create(s"oss://$bucket$fileName")) - ossProvider.deleteIfExists(path) - - writeObject(path) - val view = ossProvider.getFileAttributeView(path, classOf[OssStorageFileAttributesView]) - view shouldBe an [OssStorageFileAttributesView] - - val attr = view.readAttributes() - attr shouldBe an [OssStorageObjectAttributes] - - val dir = ossProvider.getPath(URI.create(s"oss://$bucket${fileName}/")) - val dirView = ossProvider.getFileAttributeView(dir, classOf[OssStorageFileAttributesView]) - dirView shouldBe an [OssStorageFileAttributesView] - - val dirAttr = dirView.readAttributes() - dirAttr shouldBe an [OssStorageDirectoryAttributes] - } - - it should "work for reading attrs" taggedAs NeedAK in { - val path = ossProvider.getPath(URI.create(s"oss://$bucket$fileName")) - ossProvider.deleteIfExists(path) - - writeObject(path) - val attr = ossProvider.readAttributes(path, classOf[OssStorageFileAttributes]) - attr shouldBe an [OssStorageObjectAttributes] - - ossProvider.deleteIfExists(path) - a [NoSuchFileException] should be thrownBy ossProvider.readAttributes(path, classOf[OssStorageFileAttributes]) - - val dir = ossProvider.getPath(URI.create(s"oss://$bucket${fileName}/")) - val dirAttr = ossProvider.readAttributes(dir, classOf[OssStorageFileAttributes]) - dirAttr shouldBe an [OssStorageDirectoryAttributes] - } - - - it should "work for reading dirs" taggedAs NeedAK in { - val count = 10 - val testDir = "/test-read-dir" - val filePrefix = "test-file" - val expectedFileNames = ArrayBuffer.empty[String] - val dir = ossProvider.getPath(URI.create(s"oss://$bucket$testDir/")) - for (i <- 0 to count) { - val fileName = filePrefix + i.toString - expectedFileNames.append(fileName) - - val path = dir.resolve(fileName) - - ossProvider.deleteIfExists(path) - writeObject(path) 
- } - - val dirStream = ossProvider.newDirectoryStream(dir, new DirectoryStream.Filter[Path] { - override def accept(entry: Path): Boolean = { - true - } - }) - - val files = ArrayBuffer.empty[String] - dirStream.iterator.asScala foreach(file => files.append(file.toString)) - - files should contain allElementsOf(expectedFileNames) - } - -} diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemSpec.scala deleted file mode 100644 index ea05cfa8bf9..00000000000 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageFileSystemSpec.scala +++ /dev/null @@ -1,30 +0,0 @@ -package cromwell.filesystems.oss.nio - -import cromwell.core.TestKitSuite - -class OssStorageFileSystemSpec extends TestKitSuite with OssNioUtilSpec { - behavior of s"OssStorageFileSystemSpec" - - it should "get right path" in { - val ossPath = mockFileSystem.getPath("/test-file-system") - ossPath.bucket shouldEqual(bucket) - ossPath.key shouldEqual("test-file-system") - } - - it should "has right view name" in { - val fs = mockFileSystem - - fs.supportedFileAttributeViews should contain (OssStorageFileSystem.BASIC_VIEW) - fs.supportedFileAttributeViews should contain (OssStorageFileSystem.OSS_VIEW) - } - - it should "do not support some method" in { - an [UnsupportedOperationException] should be thrownBy mockFileSystem.newWatchService - } - - it should "return some expected simple mocked result" in { - mockFileSystem.isOpen shouldBe true - mockFileSystem.isReadOnly shouldBe false - mockFileSystem.getSeparator shouldBe OssStorageFileSystem.SEPARATOR - } -} diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributesSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributesSpec.scala deleted file mode 100644 index 4b2ab275b4d..00000000000 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageObjectAttributesSpec.scala +++ /dev/null @@ -1,92 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.nio.file.attribute.FileTime -import com.aliyun.oss.model.ObjectMetadata -import common.mock.MockSugar -import cromwell.core.TestKitSuite -import org.mockito.Mockito._ - -import java.text.SimpleDateFormat -import java.util.{Date, Locale} - -object OssStorageObjectAttributesSpec extends MockSugar { - val DEFAULT_BUCKET = "bcs-bucket" - - val DEFAULT_FILE_NAME = "/bcs-dir/bcs-file" - - val DEFAULT_LENGTH: Long = 2102784 - - val DEFAULT_MODIFIED: Date = { - val target = "Thu Dec 21 15:19:27 CST 2017" - val df = new SimpleDateFormat("EEE MMM dd kk:mm:ss z yyyy", Locale.ENGLISH) - df.parse(target) - } - - val DEFAULT_ETAG = "F80066F040BDA4F991DB5F8AEC9905FB" - - val DEFAULT_CONTENT_DISPOSITION: String = null - - val DEFAULT_CACHE_CONTROL: String = null - - val DEFAULT_CONTENT_ENCODING: String = null - - val DEFAULT_CONTENT_TYPE = "application/x-msdownload" - - def getObjectMeta: ObjectMetadata = { - val meta = mock[ObjectMetadata] - - when(meta.getContentDisposition).thenReturn(DEFAULT_CONTENT_DISPOSITION) - when(meta.getContentEncoding).thenReturn(DEFAULT_CONTENT_ENCODING) - when(meta.getCacheControl).thenReturn(DEFAULT_CACHE_CONTROL) - when(meta.getLastModified).thenReturn(DEFAULT_MODIFIED) - when(meta.getETag).thenReturn(DEFAULT_ETAG) - when(meta.getContentType).thenReturn(DEFAULT_CONTENT_TYPE) - when(meta.getContentLength).thenReturn(DEFAULT_LENGTH) - 
when(meta.getExpirationTime).thenThrow(new NullPointerException()) - - meta - } -} - -class OssStorageObjectAttributesSpec extends TestKitSuite with OssNioUtilSpec { - - behavior of s"OssStorageObjectAttributes" - - import OssStorageObjectAttributesSpec._ - - def getObject: OssStoragePathImpl = { - OssStoragePath.getPath(mockFileSystem, fileName) - } - - def getDir: OssStoragePathImpl = { - OssStoragePath.getPath(mockFileSystem, "/bcs-dir/") - } - - "an oss object attr" should "be an right" in { - val attr = OssStorageObjectAttributes(getObjectMeta, getObject) - - attr.fileKey shouldEqual getObject.pathAsString - - attr.creationTime shouldEqual attr.lastModifiedTime() - attr.lastAccessTime shouldEqual FileTime.fromMillis(0) - attr.cacheControl() shouldBe empty - attr.contentDisposition shouldBe empty - attr.contentEncoding shouldBe empty - attr.etag shouldBe Option(DEFAULT_ETAG) - attr.size shouldBe DEFAULT_LENGTH - } - - "an oss directory attr" should "be an right" in { - val attr = OssStorageDirectoryAttributes(getDir) - - attr.fileKey shouldEqual getDir.pathAsString - - attr.creationTime shouldEqual attr.lastModifiedTime() - attr.lastAccessTime shouldEqual FileTime.fromMillis(0) - attr.cacheControl() shouldBe empty - attr.contentDisposition shouldBe empty - attr.contentEncoding shouldBe empty - attr.etag shouldBe empty - attr.size shouldBe 0 - } -} diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStoragePathSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStoragePathSpec.scala deleted file mode 100644 index 74008bdeb27..00000000000 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStoragePathSpec.scala +++ /dev/null @@ -1,65 +0,0 @@ -package cromwell.filesystems.oss.nio - -import cromwell.core.TestKitSuite -import scala.jdk.CollectionConverters._ - - -class OssStoragePathSpec extends TestKitSuite with OssNioUtilSpec { - behavior of s"OssStoragePath" - - - it should s"has the same bucket with file system" in { - - val path = OssStoragePath.getPath(mockFileSystem, fileName) - - path.bucket shouldBe bucket - - path.toAbsolutePath.toString shouldBe fileName - } - - it should s"has a separator-removed key" in { - val path = OssStoragePath.getPath(mockFileSystem, fileName) - - path.key shouldBe fileName.stripPrefix(UnixPath.SEPARATOR.toString) - } - - "a not absolute oss path" should s"has a NullOssStoragePath root path" in { - val path = OssStoragePath.getPath(mockFileSystem, fileName.stripPrefix(UnixPath.SEPARATOR.toString)) - - path.getRoot shouldBe a [NullOssStoragePath] - } - - "an absolute oss path" should s"has a OssStoragePathImpl root path" in { - val path = OssStoragePath.getPath(mockFileSystem, fileName) - - path.getRoot shouldBe an [OssStoragePathImpl] - } - - it should s"has right iterator" in { - val path = OssStoragePath.getPath(mockFileSystem, fileName) - - var subs = List.empty[String] - path.iterator().asScala foreach(p => subs = subs :+ p.toString) - - subs.head shouldBe "bcs-dir" - subs(1) shouldBe "bcs-file" - } - - it should s"has right relativize" in { - val path = OssStoragePath.getPath(mockFileSystem, fileName) - - val path1 = OssStoragePath.getPath(mockFileSystem, "/bcs-dir/bcs-file1") - - path.relativize(path1).toString shouldEqual "../bcs-file1" - - val path2 = OssStoragePath.getPath(mockFileSystem, "/bcs-dir1/bcs-file2") - path.relativize(path2).toString shouldEqual "../../bcs-dir1/bcs-file2" - } - - it should s"has right pathAsString" in { - val path = OssStoragePath.getPath(mockFileSystem, 
fileName) - - path.pathAsString shouldEqual s"oss://$bucket$fileName" - } - -} diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageRetrySpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageRetrySpec.scala deleted file mode 100644 index ea2f03e0605..00000000000 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/OssStorageRetrySpec.scala +++ /dev/null @@ -1,126 +0,0 @@ -package cromwell.filesystems.oss.nio - -import cromwell.core.TestKitSuite - -import scala.util.{Success, Failure, Try} - -case class FatalError(message: String = "", cause: Throwable = None.orNull) extends Exception(message, cause) -case class TransientError(message: String = "", cause: Throwable = None.orNull) extends Exception(message, cause) -case class RetryableError(message: String = "", cause: Throwable = None.orNull) extends Exception(message, cause) - -class RetryContext { - var retried = 0 - - def doSth(f: Int => Try[Int]): Unit = { - f(retried) match { - case Success(_) => retried += 1 - case Failure(e: RetryableError) => - retried += 1 - throw e - case Failure(e: TransientError) => - retried += 1 - throw e - case Failure(e) => throw e - } - } -} - -object OssStorageRetrySpec { - def isFatal(t: Throwable): Boolean = t match { - case _: FatalError => true - case _ => false - } - - def isTransient(t: Throwable): Boolean = t match { - case _: TransientError => true - case _ => false - } -} - -class OssStorageRetrySpec extends TestKitSuite with OssNioUtilSpec { - - import OssStorageRetrySpec._ - - behavior of s"OssStorageRetrySpec" - - it should "retry throw immediately when fatal error occours" in { - val f = (x: Int) => if (x == 0) Failure(new FatalError) else Success(x) - val ctx = new RetryContext() - an [FatalError] should be thrownBy OssStorageRetry.fromTry( - () => Try{ - ctx.doSth(f) - }, - isFatal = isFatal, - isTransient = isTransient - ) - - ctx.retried shouldEqual(0) - } - - it should "retry if non-fatal error occurs" in { - val needRetry = 5 - val f = (x: Int) => { - if (x < needRetry) { - Failure(new RetryableError()) - } else { - Failure(new FatalError()) - } - } - - val ctx = new RetryContext() - an [FatalError] should be thrownBy OssStorageRetry.fromTry( - () => Try{ - ctx.doSth(f) - }, - isFatal = isFatal, - isTransient = isTransient - ) - - ctx.retried shouldEqual(needRetry) - } - - it should "success after retry max retries " in { - - val needRetry = 5 - val f = (x: Int) => { - if (x < needRetry) { - Failure(new RetryableError()) - } else { - Success(x) - } - } - - val ctx = new RetryContext() - OssStorageRetry.fromTry( - () => Try{ - ctx.doSth(f) - }, - isFatal = isFatal, - isTransient = isTransient - ) - - ctx.retried shouldEqual(needRetry + 1) - } - - it should "retry at most max retry times" in { - val needRetry = OssStorageRetry.DEFAULT_MAX_RETRIES + 1 - val f = (x: Int) => { - if (x < needRetry) { - Failure(new RetryableError()) - } else { - Success(x) - } - } - - val ctx = new RetryContext() - an [RetryableError] should be thrownBy OssStorageRetry.fromTry( - () => Try{ - ctx.doSth(f) - }, - isFatal = isFatal, - isTransient = isTransient - ) - - ctx.retried shouldEqual(OssStorageRetry.DEFAULT_MAX_RETRIES) - } -} diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/TTLOssStorageConfigurationSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/TTLOssStorageConfigurationSpec.scala deleted file mode 100644 index 739760fea42..00000000000 --- 
a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/TTLOssStorageConfigurationSpec.scala +++ /dev/null @@ -1,50 +0,0 @@ -package cromwell.filesystems.oss.nio - -import java.net.URI -import com.typesafe.config.{Config, ConfigFactory} -import cromwell.core.TestKitSuite -import org.scalatest.BeforeAndAfter -import org.scalatest.flatspec.AnyFlatSpecLike -import org.scalatest.matchers.should.Matchers - -object TTLOssStorageConfigurationSpec { - - val BcsBackendConfigString: String = - s""" - | auth { - | endpoint = "oss-cn-shanghai.aliyuncs.com" - | access-id = "test-access-id" - | access-key = "test-access-key" - | security-token = "test-security-token" - | } - | caching { - | duplication-strategy = "reference" - | } - """.stripMargin - - val BcsBackendConfig: Config = ConfigFactory.parseString(BcsBackendConfigString) -} - -class TTLOssStorageConfigurationSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with BeforeAndAfter { - val expectedEndpoint = "oss-cn-shanghai.aliyuncs.com" - val expectedAccessId = "test-access-id" - val expectedAccessKey = "test-access-key" - val expectedToken: Option[String] = Option("test-security-token") - val expectedFullEndpoint: URI = URI.create("http://oss-cn-shanghai.aliyuncs.com") - - behavior of "TTLOssStorageConfiguration" - - - it should "have correct OSS credential info" in { - - val ossConfig = TTLOssStorageConfiguration(TTLOssStorageConfigurationSpec.BcsBackendConfig) - - ossConfig.endpoint shouldEqual expectedEndpoint - ossConfig.accessId shouldEqual expectedAccessId - ossConfig.accessKey shouldEqual expectedAccessKey - ossConfig.securityToken shouldEqual expectedToken - - ossConfig.newOssClient().getEndpoint shouldEqual expectedFullEndpoint - - } -} diff --git a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/UnixPathSpec.scala b/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/UnixPathSpec.scala deleted file mode 100644 index de4ccf3bb17..00000000000 --- a/filesystems/oss/src/test/scala/cromwell/filesystems/oss/nio/UnixPathSpec.scala +++ /dev/null @@ -1,335 +0,0 @@ -package cromwell.filesystems.oss.nio - -import cromwell.core.TestKitSuite -import org.scalatest.TryValues._ - -import scala.util.{Failure, Success, Try} - -case class ValidPath(pathAsString: String, - parent: Option[String], - fileName: Option[String], - nameCount: Int, - root: Option[String] = Some(UnixPath.ROOT_PATH.toString), - isRoot: Boolean = false, - isAbsolute: Boolean = true, - hasTrailingSeparator: Boolean = false, - likeDir: Boolean = false, - ) - -case class SubPath(pathAsString: String, - beginIndex: Int, - endIndex: Int, - subPath: Try[String], - description: String = "" - ) - -case class ResolvePath(pathAsString: String, - other: String, - resolved: String, - description: String = "" - ) - -case class ResolveSiblingPath(pathAsString: String, - other: String, - resolved: String, - description: String = "" - ) - -case class NormalizePath(pathAsString: String, - normalized: String - ) - -case class RelativizePath(pathAsString: String, - other: String, - relative: String - ) - -class UnixPathSpec extends TestKitSuite with OssNioUtilSpec { - - validPaths foreach { path => - it should behave like verifyValidPath(path) - } - - subPaths foreach { path => - it should behave like verifySubPath(path) - } - - resolvePaths foreach { path => - it should behave like verifyResolvePath(path) - } - - resolveSiblingPaths foreach { path => - it should behave like verifyResolveSiblingPath(path) - } - - normalizePaths foreach { path => - it should 
behave like verifyNormalizePath(path) - } - - relativizePaths foreach { path => - it should behave like verifyRelativePath(path) - } - - - def verifyValidPath(path: ValidPath) = { - behavior of s"Verify a valid UnixPath ${path.pathAsString}" - - val clue = s"pathAsString: ${path.pathAsString}" - - val unixPath = UnixPath(path.pathAsString) - - it should "match expected parent" in - withClue(clue) { - unixPath.getParent map {_.toString} shouldEqual path.parent - } - - it should "match expected name count" in - withClue(clue) { - unixPath.getNameCount shouldEqual path.nameCount - } - - it should "match expected file name" in - withClue(clue) { - unixPath.getFileName map {_.toString} shouldEqual path.fileName - } - - it should "match expected root" in - withClue(clue) { - unixPath.getRoot map {_.toString} shouldEqual path.root - } - - it should "match expected isRoot" in - withClue(clue) { - unixPath.isRoot shouldBe path.isRoot - } - - it should "match expected isAbsolute" in - withClue(clue) { - unixPath.isAbsolute shouldBe path.isAbsolute - } - - it should "match expected hasTrailingSeparator" in - withClue(clue) { - unixPath.hasTrailingSeparator shouldBe path.hasTrailingSeparator - } - - it should "match expected seemsLikeDirectory" in - withClue(clue) { - unixPath.seemsLikeDirectory() shouldBe path.likeDir - } - } - - def verifySubPath(path: SubPath) = { - val clue = s"path ${path.pathAsString} beginIndex ${path.beginIndex} endIndex ${path.endIndex}" - - behavior of s"Verify a unix path's sub path ${clue}" - - val unixPath = UnixPath(path.pathAsString) - - it should "match sub path" in - withClue(clue) { - val maybeRes = unixPath.subPath(path.beginIndex, path.endIndex) - path.subPath match { - case Success(sub: String) => - maybeRes.success.value.toString shouldEqual(sub) - case Failure(_) => - maybeRes.failure - } - } - } - - def verifyResolvePath(path: ResolvePath) = { - val clue = s"path ${path.pathAsString} other ${path.other}" - - behavior of s"Verify resolving a path on a UnixPath ${clue}" - - val me = UnixPath(path.pathAsString) - val other = UnixPath(path.other) - - it should "match resolved path" in - withClue(clue) { - me.resolve(other).toString shouldEqual(path.resolved) - } - } - - def verifyResolveSiblingPath(path: ResolveSiblingPath) = { - val clue = s"path ${path.pathAsString} other ${path.other}" - - behavior of s"Verify resolving sibling on a UnixPath ${clue}" - - val me = UnixPath(path.pathAsString) - val other = UnixPath(path.other) - - it should "match expected sibling path" in - withClue(clue) { - me.resolveSibling(other).toString shouldEqual(path.resolved) - } - } - - - def verifyNormalizePath(path: NormalizePath) = { - val clue = s"path ${path.pathAsString}" - - behavior of s"Verify normalize a UnixPath ${clue}" - - val me = UnixPath(path.pathAsString) - - it should "match expected normalized path" in - withClue(clue) { - me.normalize().toString shouldEqual(path.normalized) - } - } - - - def verifyRelativePath(path: RelativizePath) = { - val clue = s"path ${path.pathAsString} other ${path.other}" - - behavior of s"Verify resolving relativize on a UnixPath ${clue}" - - val me = UnixPath(path.pathAsString) - val other = UnixPath(path.other) - - it should "match resolved path" in - withClue(clue) { - me.relativize(other).toString shouldEqual(path.relative) - } - } - - private[this] def validPaths = Seq( - ValidPath( - pathAsString = "/bcs-dir/bcs-file", - parent = Some("/bcs-dir/"), - fileName = Some("bcs-file"), - nameCount = 2 - ), - ValidPath( - pathAsString = 
"/bcs-dir/bcs-dir1/", - parent = Some("/bcs-dir/"), - fileName = Some("bcs-dir1"), - nameCount = 2, - hasTrailingSeparator = true, - likeDir = true - ), - ValidPath( - pathAsString = "bcs-file", - parent = None, - fileName = Some("bcs-file"), - nameCount = 1, - root = None, - isAbsolute = false, - ) - ) - - private[this] def subPaths = Seq( - SubPath( - pathAsString = "/bcs-dir/bcs-dir1/bcs-dir2", - beginIndex = 0, - endIndex = 1, - subPath = Success("bcs-dir"), - description = "valid slice" - ), - SubPath( - pathAsString = "/bcs-dir/bcs-dir1/bcs-dir2", - beginIndex = 1, - endIndex = 0, - subPath = Failure(new IllegalArgumentException()), - description = "invalid index" - ), - SubPath( - pathAsString = "/bcs-dir/bcs-dir1/bcs-dir2", - beginIndex = 1, - endIndex = 10, - subPath = Success("bcs-dir1/bcs-dir2"), - description = "valid index" - ), - SubPath( - pathAsString = "/bcs-dir/bcs-dir1/bcs-dir2", - beginIndex = 3, - endIndex = 10, - subPath = Success(""), - description = "valid index" - ) - ) - - private[this] def resolvePaths = Seq( - ResolvePath( - pathAsString = "/bcs-dir/bcs-dir1", - other = "", - resolved = "/bcs-dir/bcs-dir1" - ), - ResolvePath( - pathAsString = "/bcs-dir/bcs-dir1", - other = "/bcs-dir2/bcs-dir3", - resolved = "/bcs-dir2/bcs-dir3" - ), - ResolvePath( - pathAsString = "/bcs-dir/bcs-dir1/", - other = "bcs-file", - resolved = "/bcs-dir/bcs-dir1/bcs-file" - ), - ResolvePath( - pathAsString = "/bcs-dir/bcs-dir1", - other = "bcs-file", - resolved = "/bcs-dir/bcs-dir1/bcs-file" - ), - ) - - private[this] def resolveSiblingPaths = Seq( - ResolveSiblingPath( - pathAsString = "/bcs-dir/bcs-file1", - other = "bcs-file2", - resolved = "/bcs-dir/bcs-file2" - ), - ResolveSiblingPath( - pathAsString = "/", - other = "bcs-file2", - resolved = "bcs-file2" - ), - ResolveSiblingPath( - pathAsString = "", - other = "bcs-file2", - resolved = "bcs-file2" - ), - ResolveSiblingPath( - pathAsString = "/bcs-dir/bcs-file1", - other = "/bcs-file2", - resolved = "/bcs-file2" - ), - ) - - private[this] def normalizePaths = Seq( - NormalizePath( - "/bcs-dir/.", - "/bcs-dir/" - ), - NormalizePath( - "/bcs-dir/../bcs-file", - "/bcs-file" - ), - NormalizePath( - "/bcs-dir/./bcs-file", - "/bcs-dir/bcs-file" - ), - NormalizePath( - "/bcs-dir/./bcs-dir1/", - "/bcs-dir/bcs-dir1/" - ), - NormalizePath( - "../bcs-dir/bcs-dir1/", - "bcs-dir/bcs-dir1/" - ) - ) - - private[this] def relativizePaths = Seq( - RelativizePath( - "/bcs-dir1/bcs-file1", - "/bcs-dir1/bcs-file2", - "../bcs-file2" - ), - RelativizePath( - "/bcs-dir1/bcs-file1", - "/bcs-file2", - "../../bcs-file2" - ) - ) -} diff --git a/mkdocs.yml b/mkdocs.yml index fb06a7c544c..68e73560774 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -12,7 +12,6 @@ nav: - Quick Introduction: tutorials/FiveMinuteIntro.md - How to Configure Cromwell: tutorials/ConfigurationFiles.md - Getting started with Google Cloud: tutorials/PipelinesApi101.md - - Getting started with Alibaba Cloud: tutorials/BCSIntro.md - Getting started with AWS (beta): tutorials/AwsBatch101.md - View the Timing Diagrams: tutorials/TimingDiagrams.md - Persisting data between restarts: tutorials/PersistentServer.md @@ -56,7 +55,6 @@ nav: - Local: backends/Local.md - Google Cloud: backends/Google.md - AWS Batch: backends/AWSBatch.md - - Alibaba Cloud: backends/BCS.md - AWS Batch (beta): backends/AWS.md - GA4GH TES: backends/TES.md - HPC: backends/HPC.md diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 2b3bc3fb574..0dbf9b4325f 100644 --- a/project/Dependencies.scala +++ 
b/project/Dependencies.scala @@ -4,10 +4,6 @@ object Dependencies { private val akkaHttpCirceIntegrationV = "1.39.2" private val akkaHttpV = "10.1.15" // (CROM-6619) private val akkaV = "2.5.32" // scala-steward:off (CROM-6637) - private val aliyunBcsV = "6.2.4" - private val aliyunCoreV = "4.6.0" - private val aliyunCrV = "4.1.4" - private val aliyunOssV = "3.14.0" private val ammoniteOpsV = "2.4.1" private val apacheHttpClientV = "4.5.13" private val awsSdkV = "2.17.152" @@ -322,24 +318,6 @@ object Dependencies { exclude("com.google.guava", "guava-jdk5") ) ++ googleGenomicsV2Alpha1Dependency ++ googleLifeSciencesV2BetaDependency - private val aliyunOssDependencies = List( - "com.aliyun.oss" % "aliyun-sdk-oss" % aliyunOssV - exclude("com.sun.activation", "jakarta.activation") - ) - - private val aliyunBatchComputeDependencies = List( - "com.aliyun" % "aliyun-java-sdk-batchcompute" % aliyunBcsV, - "com.aliyun" % "aliyun-java-sdk-core" % aliyunCoreV - exclude("com.sun.activation", "jakarta.activation") - ) - - private val aliyunCrDependencies = List( - "com.aliyun" % "aliyun-java-sdk-cr" % aliyunCrV, - "com.aliyun" % "aliyun-java-sdk-core" % aliyunCoreV - exclude("com.sun.activation", "jakarta.activation"), - "com.typesafe.akka" %% "akka-http-spray-json" % akkaHttpV - ) - private val dbmsDependencies = List( "org.hsqldb" % "hsqldb" % hsqldbV, "org.mariadb.jdbc" % "mariadb-java-client" % mariadbV, @@ -419,10 +397,6 @@ object Dependencies { val httpFileSystemDependencies: List[ModuleID] = akkaHttpDependencies - val ossFileSystemDependencies: List[ModuleID] = googleCloudDependencies ++ aliyunOssDependencies ++ List( - "com.github.pathikrit" %% "better-files" % betterFilesV - ) - val womDependencies: List[ModuleID] = List( "com.typesafe.scala-logging" %% "scala-logging" % scalaLoggingV, "io.spray" %% "spray-json" % sprayJsonV, @@ -497,7 +471,7 @@ object Dependencies { val databaseMigrationDependencies: List[ModuleID] = liquibaseDependencies ++ dbmsDependencies - val dockerHashingDependencies: List[ModuleID] = http4sDependencies ++ circeDependencies ++ aliyunCrDependencies + val dockerHashingDependencies: List[ModuleID] = http4sDependencies ++ circeDependencies val cromwellApiClientDependencies: List[ModuleID] = List( "org.typelevel" %% "cats-effect" % catsEffectV, @@ -544,8 +518,6 @@ object Dependencies { "co.fs2" %% "fs2-io" % fs2V ) ++ scalacheckDependencies - val bcsBackendDependencies: List[ModuleID] = commonDependencies ++ refinedTypeDependenciesList ++ aliyunBatchComputeDependencies - val tesBackendDependencies: List[ModuleID] = akkaHttpDependencies val sfsBackendDependencies = List ( @@ -578,7 +550,6 @@ object Dependencies { val allProjectDependencies: List[ModuleID] = backendDependencies ++ - bcsBackendDependencies ++ centaurCwlRunnerDependencies ++ centaurDependencies ++ cloudSupportDependencies ++ @@ -589,7 +560,6 @@ object Dependencies { cwlDependencies ++ databaseMigrationDependencies ++ databaseSqlDependencies ++ - dockerHashingDependencies ++ draft2LanguageFactoryDependencies ++ drsLocalizerDependencies ++ engineDependencies ++ @@ -598,7 +568,6 @@ object Dependencies { implDrsDependencies ++ implFtpDependencies ++ languageFactoryDependencies ++ - ossFileSystemDependencies ++ perfDependencies ++ serverDependencies ++ sfsBackendDependencies ++ diff --git a/src/ci/bin/testCentaurBcs.sh b/src/ci/bin/testCentaurBcs.sh deleted file mode 100755 index 270da840432..00000000000 --- a/src/ci/bin/testCentaurBcs.sh +++ /dev/null @@ -1,146 +0,0 @@ -#!/usr/bin/env bash - -set -o errexit -o 
nounset -o pipefail -export CROMWELL_BUILD_REQUIRES_SECURE=true -# import in shellcheck / CI / IntelliJ compatible ways -# shellcheck source=/dev/null -source "${BASH_SOURCE%/*}/test.inc.sh" || source test.inc.sh -# shellcheck source=/dev/null -source "${BASH_SOURCE%/*}/test_bcs.inc.sh" || source test_bcs.inc.sh - -cromwell::build::setup_common_environment - -cromwell::build::bcs::setup_bcs_environment - -cromwell::build::setup_centaur_environment - -cromwell::build::assemble_jars - -# Instead of excluding tests, only include a fixed list of tests. This is because due to the -# numerous issues below, contributors did not like having to constantly update the exclude lists. -# https://github.com/broadinstitute/cromwell/issues/3522 -# https://github.com/broadinstitute/cromwell/issues/3523 -# https://github.com/broadinstitute/cromwell/issues/3524 -# https://github.com/broadinstitute/cromwell/issues/3518 -# https://github.com/broadinstitute/cromwell/issues/3519 -include_tests=( \ - -i abort.instant_abort \ - -i abort.sub_workflow_abort \ - -i aliased_subworkflows \ - -i array_io \ - -i array_literal_locations \ - -i arrays_scatters_ifs \ - -i bad_docker_name \ - -i bad_workflow_failure_mode \ - -i cacheBetweenWF \ - -i cacheWithinWF \ - -i chainfail \ - -i complex_types_files \ - -i composedenginefunctions \ - -i cwl_input_binding_expression \ - -i cwl_optionals \ - -i declarations \ - -i declarations_as_nodes \ - -i declarations_in_ifs \ - -i default_runtime_attributes \ - -i defined_function \ - -i dont_strip_line_prefix \ - -i dot_dir_stuck_running \ - -i draft3_declaration_chain \ - -i draft3_default_input_overrides \ - -i draft3_empty \ - -i draft3_import_structs \ - -i draft3_lots_of_nesting \ - -i draft3_nested_scatter \ - -i draft3_nested_struct \ - -i draft3_passthrough_value \ - -i draft3_sizeenginefunction \ - -i draft3_struct_output \ - -i draft3_taskless_engine_functions \ - -i empty_scatter \ - -i empty_string \ - -i exit \ - -i expression_lib_cwl \ - -i failures.terminal_status \ - -i filearrayoutput \ - -i floating_tags \ - -i forkjoin \ - -i hello_cwl \ - -i if_then_else_expressions \ - -i ifs_upstream_and_downstream \ - -i input_mirror \ - -i invalid_inputs_json \ - -i invalid_labels \ - -i invalid_options_json \ - -i invalid_runtime_attributes \ - -i invalid_wdl \ - -i length \ - # -i long_cmd \ # 2019-08-05 consistently timing out trying to read a < 100KB file in 60 seconds - -i lots_of_nesting \ - -i member_access \ - -i missing_imports \ - -i missing_sub_inputs \ - -i multiline_command_line \ - -i multiplesourcedarray \ - -i nested_lookups \ - -i null_input_values \ - -i object_access \ - -i optional_declarations \ - -i optional_parameter \ - -i output_filename_interpolation \ - -i output_redirection \ - -i passingfiles \ - -i prefix \ - -i public_http_import \ - -i readFromCacheFalse \ - -i read_tsv \ - -i read_write_json \ - -i read_write_map \ - -i referencingpreviousinputsandoutputs \ - -i runtime_attribute_expressions \ - -i runtime_failOnStderr \ - -i scatterchain \ - -i scattergather \ - -i scatters_in_ifs \ - -i select_functions \ - -i simple_if \ - -i simple_if_workflow_outputs \ - -i single_to_array_coercion \ - -i sizeenginefunction \ - -i square \ - -i stdout_stderr_passing \ - -i string_interpolation \ - -i sub_function \ - -i sub_workflow_decls \ - -i sub_workflow_hello_world \ - -i sub_workflow_interactions \ - -i sub_workflow_interactions_scatter \ - -i sub_workflow_no_output \ - -i sub_workflow_var_refs \ - -i subdirectory \ - -i subworkflows_in_ifs \ - -i 
taskless_engine_functions \
-    -i test_file_outputs_from_input \
-    -i three_step__subwf_cwl \
-    -i unexpected_call_input_failure \
-    -i unexpected_subworkflow_call_input_failure \
-    -i valid_labels \
-    -i variable_scoping \
-    -i wdl_function_locations \
-    -i workflow_output_declarations \
-    -i workflow_type_and_version_default \
-    -i workflow_type_and_version_wdl \
-    -i workflowenginefunctions \
-    -i writeToCache \
-    -i write_lines \
-    -i write_lines_files \
-    -i write_tsv \
-)
-
-cromwell::build::run_centaur \
-    -p 100 \
-    -t 1m \
-    -e localdockertest \
-    "${include_tests[@]}" \
-
-cromwell::build::generate_code_coverage
diff --git a/src/ci/bin/test_bcs.inc.sh b/src/ci/bin/test_bcs.inc.sh
deleted file mode 100644
index a415d0a49d2..00000000000
--- a/src/ci/bin/test_bcs.inc.sh
+++ /dev/null
@@ -1,130 +0,0 @@
-#!/usr/bin/env bash
-
-set -o errexit -o nounset -o pipefail
-# import in shellcheck / CI / IntelliJ compatible ways
-# shellcheck source=/dev/null
-source "${BASH_SOURCE%/*}/test.inc.sh" || source test.inc.sh
-
-# A set of common BCS functions for use in other scripts.
-#
-# Functions:
-#
-# - cromwell::build::bcs::*
-# Functions for use in other BCS scripts
-#
-# - cromwell::private::bcs::*
-# Functions for use only within this file by cromwell::build::bcs::* functions
-#
-
-cromwell::private::bcs::bcs_install() {
-    cromwell::build::pip_install batchcompute-cli==1.7.1 --upgrade
-}
-
-cromwell::private::bcs::bcs_run() {
-    local bcs_command
-    local bcs_command_result
-
-    bcs_command="${1:?bcs_run called without a command}"; shift
-    bcs_command_result="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/${bcs_command}_result.txt"
-
-    # Login failures print out the access key so send output to a file.
-    bcs "${bcs_command}" "$@" 2>&1 | tee "${bcs_command_result}"
-
-    # bcs commands always exit with zero. make sure the result text does not contain "error". 
- if grep -q -i error "${bcs_command_result}"; then - echo "bcs ${bcs_command} failed" >&2 - grep -i error "${bcs_command_result}" >&2 - return 1 - else - return 0 - fi -} - -cromwell::private::bcs::try_bcs_login() { - local bcs_login_include - bcs_login_include="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/bcs_login.inc.sh" - if [[ -f "${bcs_login_include}" ]]; then - # shellcheck source=/dev/null - source "${bcs_login_include}" - fi -} - -cromwell::private::bcs::try_bcs_create_cluster() { - local cluster_name - - cluster_name=$(echo "cromwell_build_${CROMWELL_BUILD_PROVIDER}_${CROMWELL_BUILD_NUMBER}" | tr -c a-zA-Z0-9_- _) - - echo "Creating BCS cluster name: '${cluster_name}'" - - CROMWELL_BUILD_BCS_CLUSTER_ID=$( \ - cromwell::private::bcs::bcs_run \ - create_cluster \ - "${cluster_name}" \ - --image img-ubuntu \ - --type ecs.sn1ne.large \ - --nodes 8 \ - --vpc_cidr_block 192.168.1.0/24 \ - --no_cache_support \ - | grep 'Cluster created:' \ - | awk '{print $NF}' \ - ) - - echo "Created BCS cluster id: '${CROMWELL_BUILD_BCS_CLUSTER_ID}'" - - export CROMWELL_BUILD_BCS_CLUSTER_ID -} - -cromwell::private::bcs::try_bcs_delete_cluster() { - cromwell::private::bcs::bcs_run delete_cluster --yes "${CROMWELL_BUILD_BCS_CLUSTER_ID}" -} - -cromwell::private::bcs::bcs_login() { - cromwell::build::exec_retry_function cromwell::private::bcs::try_bcs_login -} - -cromwell::private::bcs::bcs_config() { - cromwell::private::bcs::bcs_run config --god true -} - -cromwell::private::bcs::bcs_create_cluster() { - cromwell::build::exec_retry_function cromwell::private::bcs::try_bcs_create_cluster - cromwell::build::add_exit_function cromwell::private::bcs::bcs_delete_cluster -} - -cromwell::private::bcs::bcs_delete_cluster() { - if [[ -n "${CROMWELL_BUILD_BCS_CLUSTER_ID}" ]]; then - cromwell::build::exec_retry_function cromwell::private::bcs::try_bcs_delete_cluster || true - fi -} - -cromwell::private::bcs::bcs_delete_old_resources() { - # Clean up assuming that all BCS jobs and clusters that are older than 3 hours are orphans to be deleted. jq 1.5 - # date functions all use UTC. https://stedolan.github.io/jq/manual/v1.5/#Dates Set the timezone environment variable - # to UTC before running the command just in case this script/bcs are run on a different zone outside of UTC. 
- echo "Please wait, removing old jobs…" - - TZ=utc \ - bcs job --all --show_json \ - | jq \ - -L "${CROMWELL_BUILD_RESOURCES_DIRECTORY}" \ - --raw-output 'include "bcs"; printIdsMoreThanSecondsOld(.; 3 * 60 * 60)' \ - | xargs -n 1 -I '{}' bash -c 'bcs delete_job --yes {} || true' - - echo "Please wait, removing old clusters…" - TZ=utc \ - bcs cluster --show_json \ - | jq \ - -L "${CROMWELL_BUILD_RESOURCES_DIRECTORY}" \ - --raw-output 'include "bcs"; printIdsMoreThanSecondsOld(.; 3 * 60 * 60)' \ - | xargs -n 1 -I '{}' bash -c 'bcs delete_cluster --yes {} || true' -} - -cromwell::build::bcs::setup_bcs_environment() { - cromwell::private::bcs::bcs_install - cromwell::build::exec_silent_function cromwell::private::bcs::bcs_login - cromwell::private::bcs::bcs_config - cromwell::private::bcs::bcs_delete_old_resources - - # Create the BCS cluster before sbt assembly as cluster creation takes a few minutes - cromwell::private::bcs::bcs_create_cluster -} diff --git a/src/ci/docker-compose/docker-compose-horicromtal.yml b/src/ci/docker-compose/docker-compose-horicromtal.yml index fcdbda8d432..bc2a20a4a91 100644 --- a/src/ci/docker-compose/docker-compose-horicromtal.yml +++ b/src/ci/docker-compose/docker-compose-horicromtal.yml @@ -21,7 +21,6 @@ services: -Dsystem.max-workflow-launch-count=0 -Dsystem.new-workflow-poll-rate=999999 -Dservices.MetadataService.config.metadata-summary-refresh-interval=Inf - - CROMWELL_BUILD_BCS_CLUSTER_ID - CROMWELL_BUILD_CENTAUR_256_BITS_KEY - CROMWELL_BUILD_CENTAUR_JDBC_DRIVER - CROMWELL_BUILD_CENTAUR_JDBC_URL @@ -56,7 +55,6 @@ services: JAVA_OPTS=-Dconfig.file=${CROMWELL_BUILD_CENTAUR_MANAGED_CONFIG} -Dwebservice.port=8000 -Dsystem.cromwell_id=summarizer - - CROMWELL_BUILD_BCS_CLUSTER_ID - CROMWELL_BUILD_CENTAUR_256_BITS_KEY - CROMWELL_BUILD_CENTAUR_JDBC_DRIVER - CROMWELL_BUILD_CENTAUR_JDBC_URL @@ -99,7 +97,6 @@ services: -Dwebservice.port=${CROMWELL_BUILD_CENTAUR_MANAGED_PORT} -Dsystem.cromwell_id=frontend -Dservices.MetadataService.config.metadata-summary-refresh-interval=Inf - - CROMWELL_BUILD_BCS_CLUSTER_ID - CROMWELL_BUILD_CENTAUR_256_BITS_KEY - CROMWELL_BUILD_CENTAUR_JDBC_DRIVER - CROMWELL_BUILD_CENTAUR_JDBC_URL diff --git a/src/ci/resources/bcs.jq b/src/ci/resources/bcs.jq deleted file mode 100644 index ef76bc3a628..00000000000 --- a/src/ci/resources/bcs.jq +++ /dev/null @@ -1,31 +0,0 @@ -# Convert a bcs date to an jq compatible date. -# The bcs date should already be in UTC. -# https://stedolan.github.io/jq/manual/v1.5/#Dates -def bcsToEpochSeconds(bcs_date): - bcs_date - | sub(" "; "T") - | sub("\\..*$"; "Z") - | fromdateiso8601; - -# Returns true if the jq function `now` is more than `seconds` ahead of `epoch_date_seconds`. -# https://stedolan.github.io/jq/manual/v1.5/#Dates -def isMoreThanSecondsOld(epoch_date_seconds; seconds): - now - epoch_date_seconds > seconds; - -# Filters the bcs date `key` if it is more than `seconds` old. -# https://stedolan.github.io/jq/manual/v1.5/#select(boolean_expression) -def filterMoreThanSecondsOld(key; seconds): - map(select(isMoreThanSecondsOld( - bcsToEpochSeconds(key); - seconds - ))); - -# Returns items under `key` that were created more than `seconds` ago. -# For bcs jobs and clusters the key is usually `.`. 
-# Expects `key` to be able to parse `{ Items: [ .Items[] | {Id, CreationTime} ] }` -def printIdsMoreThanSecondsOld(key; seconds): - key - | .Items - | filterMoreThanSecondsOld(.CreationTime; seconds) - | .[] - | .Id; diff --git a/src/ci/resources/bcs_application.conf.ctmpl b/src/ci/resources/bcs_application.conf.ctmpl deleted file mode 100644 index 16440f70634..00000000000 --- a/src/ci/resources/bcs_application.conf.ctmpl +++ /dev/null @@ -1,81 +0,0 @@ -include required(classpath("application.conf")) -include "build_application.inc.conf" - -{{with $cromwellBcs := secret (printf "secret/dsde/cromwell/common/cromwell-bcs")}} -backend { - default = "BCS" - - providers { - BCS { - actor-factory = "cromwell.backend.impl.bcs.BcsBackendLifecycleActorFactory" - - config { - root = "oss://cloud-cromwell-dev-self-cleaning/cromwell-dir" - region = "us-east-1" - access-id = "{{$cromwellBcs.Data.access_id}}" - access-key = "{{$cromwellBcs.Data.access_key}}" - - concurrent-job-limit = 50 - - filesystems { - oss { - auth { - endpoint = "oss-us-east-1.aliyuncs.com" - access-id = "{{$cromwellBcs.Data.access_id}}" - access-key = "{{$cromwellBcs.Data.access_key}}" - } - } - } - - default-runtime-attributes { - # Not 100% sure of the instance types, but as of April 2018 according to heshan.lhs@alibaba-inc.com the BCS - # itself needs some compute resources on the spun up VM. So we're using medium instances. - # - https://www.alibabacloud.com/help/doc-detail/25378.htm - # TODO: Is there an even smaller/faster image that we can use for BCS - #cluster: "OnDemand ecs.sn1.medium img-ubuntu" - # Alternatively leave a fixed cluster spun up via: - # bcs cc cromwell_test_cluster -i img-ubuntu -t ecs.sn1.medium -n 1 -d 'cromwell test cluster' - cluster: "Error: BA-6546 The environment variable CROMWELL_BUILD_BCS_CLUSTER_ID must be set/export pointing to a valid cluster id" - cluster: ${?CROMWELL_BUILD_BCS_CLUSTER_ID} - - # TODO: We should continue to allow users and our CI tests to cache images in their own OSS bucket - # BUT we should also be able to check the hash of the image in OSS using the config supplied credentials - # - https://www.alibabacloud.com/help/doc-detail/50452.htm?spm=a3c0i.l31815en.a3.109.50db5139VKk1FK - # - https://www.alibabacloud.com/product/oss?spm=a3c0i.7911826.1023975.dproductb1.454c737bJknGYt#resources - # Downloading from DockerHub is incredibly slow in Hangzhou. However for portability of WDL, we should use a - # new BCS runtime attribute for a `ossDockerRegistry` instead of this format? - # docker: "ubuntu/latest oss://broad-test/registry/ubuntu/" - - # If we do allow the above then we should NOT be trying to hash against DockerHub for the BCS backend. - # We should test if we can ping the OSS registry using our existing client code. - # TODO: https://github.com/broadinstitute/cromwell/issues/3518 For now, ignore docker. - ignoreDocker: true - - timeout: 3000 # None of our test workflow calls should be running longer than 3000 seconds - # Saw heshan.lhs@alibaba-inc.com set this. Not sure how it is used internally / if it is necessary - vpc: "192.168.1.0/24" - - # TODO: Embed the worker as a compiled resource - # TODO: Include the python as source code and not in the tar - workerPath: ${user.dir}/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/worker.tar.gz - } - } - - # Have the engine authenticate to docker.io. See BT-141 for more info. 
- include "dockerhub_provider_config_v1.inc.conf" - } - } -} - -engine { - filesystems { - oss { - auth { - endpoint = "oss-us-east-1.aliyuncs.com" - access-id = "{{$cromwellBcs.Data.access_id}}" - access-key = "{{$cromwellBcs.Data.access_key}}" - } - } - } -} -{{end}} diff --git a/src/ci/resources/bcs_login.inc.sh.ctmpl b/src/ci/resources/bcs_login.inc.sh.ctmpl deleted file mode 100644 index b3366087027..00000000000 --- a/src/ci/resources/bcs_login.inc.sh.ctmpl +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash - -# Used to login bcs from the command line. If/when there is a better way to bcs login please update testCentaurBcs.sh -# and remove all traces of this include. - -set -o errexit -o nounset -o pipefail - -local bcs_access_id -local bcs_access_key - -{{with $cromwellBcs := secret (printf "secret/dsde/cromwell/common/cromwell-bcs")}} -bcs_access_id="{{$cromwellBcs.Data.access_id}}" -bcs_access_key="{{$cromwellBcs.Data.access_key}}" -{{end}} - -cromwell::build::exec_silent_function \ - cromwell::private::bcs::bcs_run login us-east-1 "${bcs_access_id}" "${bcs_access_key}" >/dev/null diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsAsyncBackendJobExecutionActor.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsAsyncBackendJobExecutionActor.scala deleted file mode 100644 index 73d622eaecd..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsAsyncBackendJobExecutionActor.scala +++ /dev/null @@ -1,424 +0,0 @@ -package cromwell.backend.impl.bcs - -import better.files.File.OpenOptions -import com.aliyuncs.batchcompute.main.v20151111.BatchComputeClient -import com.aliyuncs.exceptions.{ClientException, ServerException} -import common.collections.EnhancedCollections._ -import common.util.StringUtil._ -import cromwell.backend._ -import cromwell.backend.async.{ExecutionHandle, FailedNonRetryableExecutionHandle, PendingExecutionHandle} -import cromwell.backend.impl.bcs.RunStatus.{Finished, TerminalRunStatus} -import cromwell.backend.standard.{StandardAsyncExecutionActor, StandardAsyncExecutionActorParams, StandardAsyncJob} -import cromwell.core.path.{DefaultPathBuilder, Path, PathFactory} -import cromwell.core.retry.SimpleExponentialBackoff -import cromwell.core.ExecutionEvent -import cromwell.filesystems.oss.OssPath -import wom.callable.Callable.OutputDefinition -import wom.callable.RuntimeEnvironment -import wom.core.FullyQualifiedName -import wom.expression.NoIoFunctionSet -import wom.types.WomSingleFileType -import wom.values._ -import mouse.all._ - -import scala.annotation.nowarn -import scala.concurrent.Future -import scala.concurrent.duration._ -import scala.util.{Success, Try} - -object BcsAsyncBackendJobExecutionActor { - val JobIdKey = "__bcs_job_id" -} - -final class BcsAsyncBackendJobExecutionActor(override val standardParams: StandardAsyncExecutionActorParams) - extends BackendJobLifecycleActor with StandardAsyncExecutionActor with BcsJobCachingActorHelper { - - type BcsPendingExecutionHandle = PendingExecutionHandle[StandardAsyncJob, BcsJob, RunStatus] - - override type StandardAsyncRunInfo = BcsJob - - override type StandardAsyncRunState = RunStatus - - def statusEquivalentTo(thiz: StandardAsyncRunState)(that: StandardAsyncRunState): Boolean = thiz == that - - override lazy val pollBackOff = SimpleExponentialBackoff(1.second, 5.minutes, 1.1) - - override lazy val executeOrRecoverBackOff = SimpleExponentialBackoff(3.seconds, 30.seconds, 1.1) - - // override lazy val dockerImageUsed: 
Option[String] = runtimeAttributes.docker map {docker => docker.image} - override lazy val dockerImageUsed: Option[String] = None - override lazy val commandDirectory: Path = BcsJobPaths.BcsCommandDirectory.resolve(bcsJobPaths.callExecutionRoot.pathWithoutScheme) - - private[bcs] lazy val userTag = runtimeAttributes.tag.getOrElse("cromwell") - private[bcs] lazy val jobName: String = - List(userTag, jobDescriptor.workflowDescriptor.id.shortString, jobDescriptor.taskCall.identifier.localName.value) - .mkString("_") - // Avoid "Name ... must only contain characters within [a-zA-Z0-9_-] and not start with [0-9]." - .replaceAll("[^a-zA-Z0-9_-]", "_") - - override lazy val jobTag: String = jobDescriptor.key.tag - - private lazy val bcsWorkflowInputMount: BcsMount = bcsWorkflowPaths.getWorkflowInputMounts - private lazy val userDefinedMounts: List[BcsMount] = runtimeAttributes.mounts.toList.flatten :+ bcsWorkflowInputMount - // TODO: With a bit of refactoring this mutable var can be converted to a def or lazy val - private var inputMounts: List[BcsMount] = List.empty - - private[bcs] def ossPathToMount(ossPath: OssPath): BcsInputMount = { - val tmp = DefaultPathBuilder.get("/" + ossPath.pathWithoutScheme) - val dir = tmp.getParent - val local = BcsJobPaths.BcsTempInputDirectory.resolve(dir.pathAsString.md5SumShort).resolve(tmp.getFileName) - val ret = BcsInputMount(Left(ossPath), Left(local), writeSupport = false) - if (!inputMounts.exists(mount => mount.src == Left(ossPath) && mount.dest == Left(local))) { - inputMounts :+= ret - } - - ret - } - - private[bcs] def womFileToMount(file: WomFile): Option[BcsInputMount] = file match { - case path if userDefinedMounts exists(bcsMount => path.valueString.startsWith(BcsMount.toString(bcsMount.src))) => None - case path => PathFactory.buildPath(path.valueString, initializationData.pathBuilders) match { - case ossPath: OssPath => Some(ossPathToMount(ossPath)) - case _ => None - } - } - - private def bcsInputsFromWomFiles(prefix: String, - remotePathArray: Seq[WomFile], - jobDescriptor: BackendJobDescriptor): Iterable[BcsInputMount] = { - remotePathArray flatMap { remotePath => womFileToMount(remotePath) match { - case Some(mount) => Seq(mount) - case None => Seq.empty - } - } - } - - private[bcs] def getInputFiles(jobDescriptor: BackendJobDescriptor): Map[FullyQualifiedName, Seq[WomFile]] = { - val writeFunctionFiles = instantiatedCommand.createdFiles map { f => f.file.value.md5SumShort -> Seq(f.file) } - - val writeFunctionInputs = writeFunctionFiles map { - case (name, files) => name -> files - } - - // Collect all WomFiles from inputs to the call. 
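- // collectAsSeq walks each input WomValue recursively and keeps only the
- // WomFile leaves, so files nested inside arrays, maps and pairs are found too.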
- val callInputFiles: Map[FullyQualifiedName, Seq[WomFile]] = jobDescriptor.fullyQualifiedInputs safeMapValues { - _.collectAsSeq { case w: WomFile => w } - } - - callInputFiles ++ writeFunctionInputs - } - - private[bcs] def generateBcsInputs(jobDescriptor: BackendJobDescriptor): Unit = { - val _ = getInputFiles(jobDescriptor) flatMap { - case (name, files) => bcsInputsFromWomFiles(name, files, jobDescriptor) - } - } - - private def relativePath(path: String): Path = { - val absolutePath = DefaultPathBuilder.get(path) match { - case p if !p.isAbsolute => commandDirectory.resolve(p) - case p => p - } - - absolutePath - } - - private[bcs] lazy val callRawOutputFiles: List[WomFile] = { - import cats.syntax.validated._ - def evaluateFiles(output: OutputDefinition): List[WomFile] = { - Try ( - output.expression.evaluateFiles(jobDescriptor.localInputs, NoIoFunctionSet, output.womType).map(_.toList map { _.file }) - ).getOrElse(List.empty[WomFile].validNel) - .getOrElse(List.empty) - } - - // val womFileOutputs = call.task.findOutputFiles(jobDescriptor.fullyQualifiedInputs, PureStandardLibraryFunctions) - - jobDescriptor.taskCall.callable.outputs.flatMap(evaluateFiles) - } - - private[bcs] def isOutputOssFileString(s: String): Boolean = { - callRawOutputFiles.exists({ - case file: WomSingleFile if file.value == s => true - case _ => false - }) - } - - private[bcs] def generateBcsOutputs(jobDescriptor: BackendJobDescriptor): Seq[BcsMount] = { - callRawOutputFiles.flatMap(_.flattenFiles).distinct flatMap { womFile => - womFile match { - case singleFile: WomSingleFile => List(generateBcsSingleFileOutput(singleFile)) - case globFile: WomGlobFile => generateBcsGlobFileOutputs(globFile) - case unlistedDirectory: WomUnlistedDirectory => generateUnlistedDirectoryOutputs(unlistedDirectory) - } - } - } - - private def generateBcsSingleFileOutput(wdlFile: WomSingleFile): BcsOutputMount = { - val destination = getPath(wdlFile.valueString) match { - case Success(ossPath: OssPath) => ossPath - case Success(path: Path) if !path.isAbsolute => relativeOutputPath(path) - case _ => callRootPath.resolve(wdlFile.value.stripPrefix("/")) - } - - val src = relativePath(wdlFile.valueString) - - BcsOutputMount(Left(src), Left(destination), writeSupport = false) - } - - protected def generateBcsGlobFileOutputs(womFile: WomGlobFile): List[BcsOutputMount] = { - val globName = GlobFunctions.globName(womFile.value) - val globDirectory = globName + "/" - val globListFile = globName + ".list" - val bcsGlobDirectoryDestinationPath = callRootPath.resolve(globDirectory) - val bcsGlobListFileDestinationPath = callRootPath.resolve(globListFile) - - // We need both the glob directory and the glob list: - List( - BcsOutputMount(Left(relativePath(globDirectory)), Left(bcsGlobDirectoryDestinationPath), writeSupport = false), - BcsOutputMount(Left(relativePath(globListFile)), Left(bcsGlobListFileDestinationPath), writeSupport = false) - ) - } - - private def generateUnlistedDirectoryOutputs(womFile: WomUnlistedDirectory): List[BcsOutputMount] = { - val directoryPath = womFile.value.ensureSlashed - val directoryListFile = womFile.value.ensureUnslashed + ".list" - val bcsDirDestinationPath = callRootPath.resolve(directoryPath) - val bcsListDestinationPath = callRootPath.resolve(directoryListFile) - - // We need both the collection directory and the collection list: - List( - BcsOutputMount(Left(relativePath(directoryPath)), Left(bcsDirDestinationPath), writeSupport = false), - BcsOutputMount(Left(relativePath(directoryListFile)), 
Left(bcsListDestinationPath), writeSupport = false) - ) - } - - private[bcs] def getOssFileName(ossPath: OssPath): String = { - getPath(ossPath.pathWithoutScheme) match { - case Success(path) => path.getFileName.pathAsString - case _ => ossPath.pathWithoutScheme - } - } - - private[bcs] def localizeOssPath(ossPath: OssPath): String = { - if (isOutputOssFileString(ossPath.pathAsString) && !ossPath.isAbsolute) { - if (ossPath.exists) { - ossPathToMount(ossPath).dest match { - case Left(p) => p.normalize().pathAsString - case _ => throw new RuntimeException("only support oss") - } - } else { - commandDirectory.resolve(getOssFileName(ossPath)).normalize().pathAsString - } - } else { - userDefinedMounts collectFirst { - case bcsMount: BcsMount if ossPath.pathAsString.startsWith(BcsMount.toString(bcsMount.src)) => - bcsMount.dest match { - case Left(p) => p.resolve(ossPath.pathAsString.stripPrefix(BcsMount.toString(bcsMount.src))).pathAsString - case _ => throw new RuntimeException("only support oss") - } - } getOrElse { - val mount = ossPathToMount(ossPath) - BcsMount.toString(mount.dest) - } - } - } - - private[bcs] def relativeOutputPath(path: Path): Path = { - if (isOutputOssFileString(path.pathAsString)) { - bcsJobPaths.callRoot.resolve(path.pathAsString).normalize() - } else { - path - } - } - - private[bcs] def mapWomFile(womFile: WomFile): WomFile = { - getPath(womFile.valueString) match { - case Success(ossPath: OssPath) => - WomFile(WomSingleFileType, localizeOssPath(ossPath)) - case Success(path: Path) if !path.isAbsolute => - WomFile(WomSingleFileType, relativeOutputPath(path).pathAsString) - case _ => womFile - } - } - - override def preProcessWomFile(womFile: WomFile): WomFile = mapWomFile(womFile) - - override def mapCommandLineWomFile(womFile: WomFile): WomFile = mapWomFile(womFile) - - override def runtimeEnvironmentPathMapper(env: RuntimeEnvironment): RuntimeEnvironment = { - def localize(path: String): String = (WomSingleFile(path) |> mapRuntimeEnvs).valueString - env.copy(outputPath = env.outputPath |> localize, tempPath = env.tempPath |> localize) - } - - private[bcs] def mapRuntimeEnvs(womFile: WomSingleFile): WomFile = { - getPath(womFile.valueString) match { - case Success(ossPath: OssPath) => - WomFile(WomSingleFileType, BcsJobPaths.BcsCommandDirectory.resolve(ossPath.pathWithoutScheme).pathAsString) - case _ => womFile - } - - } - - override def isTerminal(runStatus: RunStatus): Boolean = { - runStatus match { - case _ : TerminalRunStatus => true - case _ => false - } - } - - override def getTerminalEvents(runStatus: RunStatus): Seq[ExecutionEvent] = { - runStatus match { - case successStatus: Finished => successStatus.eventList - case unknown => - throw new RuntimeException(s"handleExecutionSuccess not called with RunStatus.Success. 
Instead got $unknown") - } - } - - override def handleExecutionFailure(runStatus: RunStatus, - returnCode: Option[Int]): Future[ExecutionHandle] = { - runStatus match { - case RunStatus.Failed(jobId, Some(errorMessage), _) => - val exception = new Exception(s"Job id $jobId failed: '$errorMessage'") - Future.successful(FailedNonRetryableExecutionHandle(exception, returnCode, None)) - case _ => super.handleExecutionFailure(runStatus, returnCode) - } - } - - override def isDone(runStatus: RunStatus): Boolean = { - runStatus match { - case _: Finished => - runtimeAttributes.autoReleaseJob match { - case Some(true) | None => - bcsClient.deleteJob(runStatus.jobId) - case _ => - } - true - case _ => false - } - } - - private[bcs] lazy val rcBcsOutput = BcsOutputMount( - Left(commandDirectory.resolve(bcsJobPaths.returnCodeFilename)), Left(bcsJobPaths.returnCode), writeSupport = false) - - private[bcs] lazy val stdoutBcsOutput = BcsOutputMount( - Left(commandDirectory.resolve(bcsJobPaths.defaultStdoutFilename)), Left(standardPaths.output), writeSupport = false) - private[bcs] lazy val stderrBcsOutput = BcsOutputMount( - Left(commandDirectory.resolve(bcsJobPaths.defaultStderrFilename)), Left(standardPaths.error), writeSupport = false) - - private[bcs] lazy val uploadBcsWorkerPackage = { - bcsJobPaths.workerPath.writeByteArray(BcsJobCachingActorHelper.workerScript.getBytes)(OpenOptions.default) - - bcsJobPaths.workerPath - } - - @nowarn("msg=a type was inferred to be `Object`; this may indicate a programming error.") - override def executeAsync(): Future[ExecutionHandle] = { - commandScriptContents.fold( - errors => Future.failed(new RuntimeException(errors.toList.mkString(", "))), - bcsJobPaths.script.write) - - - setBcsVerbose() - - val envs = bcsEnvs - - val bcsJob = new BcsJob( - jobName, - jobTag, - bcsCommandLine, - uploadBcsWorkerPackage, - bcsMounts, - envs, - runtimeAttributes, - Some(bcsJobPaths.bcsStdoutPath), - Some(bcsJobPaths.bcsStderrPath), - bcsClient) - - for { - jobId <- Future.fromTry(bcsJob.submit()) - } yield PendingExecutionHandle(jobDescriptor, StandardAsyncJob(jobId), Option(bcsJob), previousState = None) - } - - override def recoverAsync(jobId: StandardAsyncJob) = executeAsync() - - override def pollStatusAsync(handle: BcsPendingExecutionHandle): Future[RunStatus] = { - val jobId = handle.pendingJob.jobId - val bcsJob: BcsJob = handle.runInfo.getOrElse(throw new RuntimeException("empty run job info ")) - - Future.fromTry(bcsJob.getStatus(jobId)) - } - - override def mapOutputWomFile(wdlFile: WomFile): WomFile = { - wdlFileToOssPath(generateBcsOutputs(jobDescriptor))(wdlFile) - } - - private[bcs] def wdlFileToOssPath(bcsOutputs: Seq[BcsMount])(wdlFile: WomFile): WomFile = { - bcsOutputs collectFirst { - case bcsOutput if BcsMount.toString(bcsOutput.src).endsWith(wdlFile.valueString) => WomFile(WomSingleFileType, BcsMount.toString(bcsOutput.dest)) - } getOrElse wdlFile - } - - override def tryAbort(job: StandardAsyncJob): Unit = { - for { - client <- Try(initializationData.bcsConfiguration.bcsClient getOrElse(throw new RuntimeException("empty run job info "))) - resp <- Try(client.getJob(job.jobId)) - status <- RunStatusFactory.getStatus(job.jobId, resp.getJob.getState) - } yield { - status match { - case _: RunStatus.TerminalRunStatus => - for { - _ <- Try(client.deleteJob(job.jobId)) - } yield job - case _ => - for { - _ <- Try(client.stopJob(job.jobId)) - _ <- Try(client.deleteJob(job.jobId)) - } yield job - } - } - () - } - - override def isFatal(throwable: Throwable): 
Boolean = super.isFatal(throwable) || isFatalBcsException(throwable) - - private[bcs] def isFatalBcsException(throwable: Throwable) = { - throwable match { - case e: ClientException if e.getErrCode.startsWith("Invalid") => true - case _ => false - } - } - - override def isTransient(throwable: Throwable): Boolean = { - throwable match { - case _: ServerException => true - case e: ClientException if e.getErrCode == "InternalError" => true - case e: ClientException if e.getErrCode.startsWith("Throttling") => true - case _ => false - } - } - - private[bcs] def setBcsVerbose(): Unit = { - runtimeAttributes.verbose match { - case Some(verbose) => BatchComputeClient.verbose = verbose - case None => BatchComputeClient.verbose = false - } - } - - private[bcs] lazy val bcsEnvs: Map[String, String] = { - val mount = ossPathToMount(bcsJobPaths.script.asInstanceOf[OssPath]) - - Map( - BcsJobPaths.BcsEnvCwdKey -> commandDirectory.pathAsString, - BcsJobPaths.BcsEnvExecKey -> BcsMount.toString(mount.dest), - BcsJobPaths.BcsEnvStdoutKey -> commandDirectory.resolve(bcsJobPaths.defaultStdoutFilename).pathAsString, - BcsJobPaths.BcsEnvStderrKey -> commandDirectory.resolve(bcsJobPaths.defaultStderrFilename).pathAsString - ) - } - - private[bcs] lazy val bcsMounts: Seq[BcsMount] ={ - generateBcsInputs(jobDescriptor) - runtimeAttributes.mounts.getOrElse(Seq.empty) ++ inputMounts ++ - generateBcsOutputs(jobDescriptor) :+ rcBcsOutput :+ stdoutBcsOutput :+ stderrBcsOutput :+ bcsWorkflowInputMount - } -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsBackendInitializationData.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsBackendInitializationData.scala deleted file mode 100644 index 89a97b98f90..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsBackendInitializationData.scala +++ /dev/null @@ -1,14 +0,0 @@ -package cromwell.backend.impl.bcs - -import cromwell.backend.io.WorkflowPaths -import cromwell.backend.standard.{StandardInitializationData, StandardValidatedRuntimeAttributesBuilder} -import cromwell.core.path.PathBuilder - -final case class BcsBackendInitializationData -( - override val workflowPaths: WorkflowPaths, - override val runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder, - bcsConfiguration: BcsConfiguration, - pathBuilders: List[PathBuilder] -) extends StandardInitializationData(workflowPaths, runtimeAttributesBuilder, classOf[BcsExpressionFunctions]) - diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsBackendLifecycleActorFactory.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsBackendLifecycleActorFactory.scala deleted file mode 100644 index 053b3b4cadb..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsBackendLifecycleActorFactory.scala +++ /dev/null @@ -1,41 +0,0 @@ -package cromwell.backend.impl.bcs - -import akka.actor.ActorRef -import cromwell.backend.{BackendConfigurationDescriptor, BackendWorkflowDescriptor} -import cromwell.backend.standard._ -import cromwell.backend.BackendInitializationData -import cromwell.backend.impl.bcs.callcaching.BcsBackendCacheHitCopyingActor -import cromwell.backend.standard.callcaching.StandardCacheHitCopyingActor -import wom.graph.CommandCallNode - -import scala.util.{Success, Try} - - -final case class BcsBackendLifecycleActorFactory(val name: String, val configurationDescriptor: BackendConfigurationDescriptor) - extends StandardLifecycleActorFactory { - override 
lazy val initializationActorClass: Class[_ <: StandardInitializationActor] = classOf[BcsInitializationActor] - override lazy val asyncExecutionActorClass: Class[_ <: StandardAsyncExecutionActor] = classOf[BcsAsyncBackendJobExecutionActor] - - override def jobIdKey: String = BcsAsyncBackendJobExecutionActor.JobIdKey - - val bcsConfiguration = new BcsConfiguration(configurationDescriptor) - - override def workflowInitializationActorParams(workflowDescriptor: BackendWorkflowDescriptor, ioActor: ActorRef, calls: Set[CommandCallNode], serviceRegistryActor: ActorRef, restarting: Boolean): StandardInitializationActorParams = { - BcsInitializationActorParams(workflowDescriptor, calls, bcsConfiguration, serviceRegistryActor) - } - - override lazy val cacheHitCopyingActorClassOption: Option[Class[_ <: StandardCacheHitCopyingActor]] = { - Option(classOf[BcsBackendCacheHitCopyingActor]) - } - - override def dockerHashCredentials(workflowDescriptor: BackendWorkflowDescriptor, initializationData: Option[BackendInitializationData]) = { - Try(BackendInitializationData.as[BcsBackendInitializationData](initializationData)) match { - case Success(bcsData) => - bcsData.bcsConfiguration.dockerHashEndpoint match { - case Some(endpoint) => List(bcsData.bcsConfiguration.dockerCredentials, Option(endpoint)).flatten - case None => List(bcsData.bcsConfiguration.dockerCredentials).flatten - } - case _ => List.empty[Any] - } - } -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsClusterIdOrConfiguration.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsClusterIdOrConfiguration.scala deleted file mode 100644 index 047af4dabf2..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsClusterIdOrConfiguration.scala +++ /dev/null @@ -1,56 +0,0 @@ -package cromwell.backend.impl.bcs - - -import scala.util.{Try, Success, Failure} -import scala.util.matching.Regex - -final case class AutoClusterConfiguration(resourceType: String, - instanceType: String, - imageId: String, - spotStrategy: Option[String] = None, - spotPriceLimit: Option[Float] = None, - clusterId: Option[String] = None) - - -object BcsClusterIdOrConfiguration { - type BcsClusterIdOrConfiguration = Either[String, AutoClusterConfiguration] - - val idPattern: Regex = """(cls-[^\s]+)""".r - val resourceTypePattern = """(OnDemand|Spot)""".r - val defaultResourceType = "OnDemand" - - val imageIdPattern = """([^\s]+)""".r - - val spotStrategyPattern = """(SpotWithPriceLimit|SpotAsPriceGo)""".r - - val spotPriceLimitPattern = """([01]\.\d{1,3})""".r - - // no suitable default instance type availabe - val instanceTypePattern = """([be]cs[^\s]+)""".r - - val instanceAndImagePattern = s"""$instanceTypePattern\\s+$imageIdPattern""".r - - val resourceAndInstanceAndImagePattern = s"""$resourceTypePattern\\s+$instanceTypePattern\\s+$imageIdPattern""".r - - val spotPattern = s"""$resourceAndInstanceAndImagePattern\\s+$spotStrategyPattern\\s+$spotPriceLimitPattern""".r - - val attachClusterSimplePattern = s"""$instanceAndImagePattern\\s+$idPattern""".r - - val attachClusterPattern = s"""$resourceAndInstanceAndImagePattern\\s+$idPattern""".r - - val attachClusterSpotPattern = s"""$spotPattern\\s+$idPattern""".r - - - def parse(cluster: String): Try[BcsClusterIdOrConfiguration] = { - cluster match { - case idPattern(clusterId) => Success(Left(clusterId)) - case instanceAndImagePattern(instanceType, imageId) => Success(Right(AutoClusterConfiguration(defaultResourceType, instanceType, imageId))) - 
case attachClusterSimplePattern(instanceType, imageId, clusterId) =>Success(Right(AutoClusterConfiguration(defaultResourceType, instanceType, imageId, clusterId=Option(clusterId)))) - case resourceAndInstanceAndImagePattern(resourceType, instanceType, imageId) => Success(Right(AutoClusterConfiguration(resourceType, instanceType, imageId))) - case attachClusterPattern(resourceType, instanceType, imageId, clusterId) => Success(Right(AutoClusterConfiguration(resourceType, instanceType, imageId, clusterId = Option(clusterId)))) - case spotPattern(resourceType, instanceType, imageId, spotStrategy, spotPriceLimit) => Success(Right(AutoClusterConfiguration(resourceType, instanceType, imageId, Option(spotStrategy), Option(spotPriceLimit.toFloat)))) - case attachClusterSpotPattern(resourceType, instanceType, imageId, spotStrategy, spotPriceLimit, clusterId) => Success(Right(AutoClusterConfiguration(resourceType, instanceType, imageId, Option(spotStrategy), Option(spotPriceLimit.toFloat), Option(clusterId)))) - case _ => Failure(new IllegalArgumentException("must be some string like 'cls-xxxx' or 'OnDemand ecs.s1.large img-ubuntu' or 'OnDemand ecs.s1.large img-ubuntu cls-xxxx'")) - } - } -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsConfiguration.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsConfiguration.scala deleted file mode 100644 index c3119079211..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsConfiguration.scala +++ /dev/null @@ -1,71 +0,0 @@ -package cromwell.backend.impl.bcs - -import com.aliyuncs.auth.BasicCredentials -import com.aliyuncs.batchcompute.main.v20151111.BatchComputeClient -import cromwell.backend.BackendConfigurationDescriptor -import net.ceedubs.ficus.Ficus._ -import cromwell.backend.impl.bcs.callcaching.{CopyCachedOutputs, UseOriginalCachedOutputs} -import cromwell.core.DockerConfiguration - -object BcsConfiguration{ - val OssEndpointKey = "ossEndpoint" - val OssIdKey = "ossId" - val OssSecretKey = "ossSecret" - val OssTokenKey = "ossToken" -} - -final class BcsConfiguration(val configurationDescriptor: BackendConfigurationDescriptor) { - val runtimeConfig = configurationDescriptor.backendRuntimeAttributesConfig - val bcsRegion: Option[String] = configurationDescriptor.backendConfig.as[Option[String]]("region") - - val bcsUserDefinedRegion: Option[String] = configurationDescriptor.backendConfig.as[Option[String]]("user-defined-region") - - val bcsUserDefinedDomain: Option[String] = configurationDescriptor.backendConfig.as[Option[String]]("user-defined-domain") - - val bcsAccessId: Option[String] = configurationDescriptor.backendConfig.as[Option[String]]("access-id") - - val bcsAccessKey: Option[String] = configurationDescriptor.backendConfig.as[Option[String]]("access-key") - - val bcsSecurityToken: Option[String] = configurationDescriptor.backendConfig.as[Option[String]]("security-token") - - val ossEndpoint = configurationDescriptor.backendConfig.as[Option[String]]("filesystems.oss.auth.endpoint").getOrElse("") - val ossAccessId = configurationDescriptor.backendConfig.as[Option[String]]("filesystems.oss.auth.access-id").getOrElse("") - val ossAccessKey = configurationDescriptor.backendConfig.as[Option[String]]("filesystems.oss.auth.access-key").getOrElse("") - val ossSecurityToken = configurationDescriptor.backendConfig.as[Option[String]]("filesystems.oss.auth.security-token").getOrElse("") - - val duplicationStrategy = { - 
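- // "copy" duplicates a cached output into the new call's directory; "reference"
- // (the default) points the new call at the original OSS object without copying,
- // e.g. filesystems.oss.caching.duplication-strategy = "copy" in the backend config.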
configurationDescriptor.backendConfig.as[Option[String]]("filesystems.oss.caching.duplication-strategy").getOrElse("reference") match { - case "copy" => CopyCachedOutputs - case "reference" => UseOriginalCachedOutputs - case other => throw new IllegalArgumentException(s"Unrecognized caching duplication strategy: $other. Supported strategies are copy and reference. See reference.conf for more details.") - } - } - - lazy val dockerHashAccessId = DockerConfiguration.dockerHashLookupConfig.as[Option[String]]("alibabacloudcr.auth.access-id") - lazy val dockerHashAccessKey = DockerConfiguration.dockerHashLookupConfig.as[Option[String]]("alibabacloudcr.auth.access-key") - lazy val dockerHashSecurityToken = DockerConfiguration.dockerHashLookupConfig.as[Option[String]]("alibabacloudcr.auth.security-token") - lazy val dockerHashEndpoint = DockerConfiguration.dockerHashLookupConfig.as[Option[String]]("alibabacloudcr.auth.endpoint") - - val dockerCredentials = { - for { - id <- dockerHashAccessId - key <- dockerHashAccessKey - } yield new BasicCredentials(id, key) - } - - val bcsClient: Option[BatchComputeClient] = { - val userDefinedRegion = for { - region <- bcsUserDefinedRegion - domain <- bcsUserDefinedDomain - } yield { - BatchComputeClient.addEndpoint(region, domain) - region - } - - for { - region <- userDefinedRegion orElse bcsRegion - id <- bcsAccessId - key <- bcsAccessKey - } yield new BatchComputeClient(region, id, key) - } -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsDisk.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsDisk.scala deleted file mode 100644 index d2d52e2521c..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsDisk.scala +++ /dev/null @@ -1,24 +0,0 @@ -package cromwell.backend.impl.bcs - -import scala.util.{Try, Success, Failure} - -trait BcsDisk { - val diskType: String - val sizeInGB: Int -} - -final case class BcsSystemDisk(diskType: String, sizeInGB: Int) extends BcsDisk -final case class BcsDataDisk(diskType: String, sizeInGB: Int, mountPoint: String) extends BcsDisk - -object BcsDisk{ - val systemDiskPattern = s"""(\\S+)\\s+(\\d+)""".r - val dataDiskPattern = s"""(\\S+)\\s+(\\d+)\\s+(\\S+)""".r - - def parse(s: String): Try[BcsDisk] = { - s match { - case systemDiskPattern(diskType, sizeInGB) => Success(BcsSystemDisk(diskType, sizeInGB.toInt)) - case dataDiskPattern(diskType, sizeInGB, mountPoint) => Success(BcsDataDisk(diskType, sizeInGB.toInt, mountPoint)) - case _ => Failure(new IllegalArgumentException("disk must be 'cloud 40' or 'cloud 200 /home/input/'")) - } - } -} \ No newline at end of file diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsDocker.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsDocker.scala deleted file mode 100644 index 91250956b26..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsDocker.scala +++ /dev/null @@ -1,24 +0,0 @@ -package cromwell.backend.impl.bcs - -import scala.util.{Failure, Success, Try} - -trait BcsDocker { - val image: String -} - -final case class BcsDockerWithoutPath(image: String) extends BcsDocker -final case class BcsDockerWithPath(image: String, path: String) extends BcsDocker - - -object BcsDocker{ - val dockerWithPathPattern = s"""(\\S+)\\s+(\\S+)""".r - val dockerWithoutPathPatter = s"""(\\S+)""".r - - def parse(s: String): Try[BcsDocker] = { - s match { - case dockerWithoutPathPatter(dockerImage) => 
Success(BcsDockerWithoutPath(dockerImage)) - case dockerWithPathPattern(dockerImage, dockerPath) => Success(BcsDockerWithPath(dockerImage, dockerPath)) - case _ => Failure(new IllegalArgumentException("must be 'ubuntu/latest oss://docker-reg/ubuntu/'")) - } - } -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsExpressionFunctions.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsExpressionFunctions.scala deleted file mode 100644 index 0e47bf9faa1..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsExpressionFunctions.scala +++ /dev/null @@ -1,42 +0,0 @@ -package cromwell.backend.impl.bcs - -import common.validation.ErrorOr.ErrorOr -import cromwell.backend.BackendJobDescriptor -import cromwell.backend.standard.{StandardExpressionFunctions, StandardExpressionFunctionsParams} -import cromwell.filesystems.oss.OssPathBuilder -import cromwell.filesystems.oss.OssPathBuilder.{InvalidOssPath, PossiblyValidRelativeOssPath, ValidFullOssPath} -import wom.graph.CommandCallNode -import wom.values.WomGlobFile - -import scala.concurrent.Future - -final case class BcsExpressionFunctions(override val standardParams: StandardExpressionFunctionsParams) - extends StandardExpressionFunctions(standardParams) { - - override def preMapping(str: String) = { - OssPathBuilder.validateOssPath(str) match { - case _: ValidFullOssPath => str - case PossiblyValidRelativeOssPath => callContext.root.resolve(str.stripPrefix("/")).pathAsString - case invalid: InvalidOssPath => throw new IllegalArgumentException(invalid.errorMessage) - } - } - - // TODO: BCS: When globs are supported this override should be removed. - // https://github.com/broadinstitute/cromwell/issues/3519 - // See usages of cwl.CommandLineTool.CwlOutputJson - override def glob(pattern: String): Future[Seq[String]] = { - if (pattern == "cwl.output.json") { - Future.successful(Nil) - } else { - super.glob(pattern) - } - } - - // TODO: BCS: When globs are supported this override should be removed. 
- // https://github.com/broadinstitute/cromwell/issues/3519 - // See usages of cwl.CommandLineTool.CwlOutputJson - override def findGlobOutputs(call: CommandCallNode, jobDescriptor: BackendJobDescriptor): ErrorOr[List[WomGlobFile]] = { - val base = super.findGlobOutputs(call, jobDescriptor) - base.map(_.filterNot(_.value == "cwl.output.json")) - } -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsInitializationActor.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsInitializationActor.scala deleted file mode 100644 index d93b7076420..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsInitializationActor.scala +++ /dev/null @@ -1,45 +0,0 @@ -package cromwell.backend.impl.bcs - -import akka.actor.ActorRef -import cromwell.backend.standard.{StandardInitializationActor, StandardInitializationActorParams, StandardValidatedRuntimeAttributesBuilder} -import cromwell.backend.{BackendConfigurationDescriptor, BackendInitializationData, BackendWorkflowDescriptor} -import cromwell.core.path.PathBuilder - -import scala.concurrent.Future -import wom.graph.CommandCallNode - - -final case class BcsInitializationActorParams -( - workflowDescriptor: BackendWorkflowDescriptor, - calls: Set[CommandCallNode], - bcsConfiguration: BcsConfiguration, - serviceRegistryActor: ActorRef -) extends StandardInitializationActorParams { - override val configurationDescriptor: BackendConfigurationDescriptor = bcsConfiguration.configurationDescriptor -} - -final class BcsInitializationActor(params: BcsInitializationActorParams) - extends StandardInitializationActor(params) { - - private val bcsConfiguration = params.bcsConfiguration - - override lazy val pathBuilders: Future[List[PathBuilder]] = - standardParams.configurationDescriptor.pathBuildersWithDefault(workflowDescriptor.workflowOptions) - - override lazy val workflowPaths: Future[BcsWorkflowPaths] = pathBuilders map { - BcsWorkflowPaths(workflowDescriptor, bcsConfiguration.configurationDescriptor.backendConfig, _) - } - - override lazy val runtimeAttributesBuilder: StandardValidatedRuntimeAttributesBuilder = - BcsRuntimeAttributes.runtimeAttributesBuilder(bcsConfiguration.runtimeConfig) - - override def beforeAll(): Future[Option[BackendInitializationData]] = { - pathBuilders map { builders => BcsMount.pathBuilders = builders} - - for { - paths <- workflowPaths - builders <- pathBuilders - } yield Option(BcsBackendInitializationData(paths, runtimeAttributesBuilder, bcsConfiguration, builders)) - } -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJob.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJob.scala deleted file mode 100644 index c19e1228864..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJob.scala +++ /dev/null @@ -1,226 +0,0 @@ -package cromwell.backend.impl.bcs - -import com.aliyuncs.batchcompute.main.v20151111.BatchComputeClient -import com.aliyuncs.batchcompute.model.v20151111._ -import com.aliyuncs.batchcompute.pojo.v20151111._ -import cromwell.core.ExecutionEvent -import cromwell.core.path.Path - -import scala.jdk.CollectionConverters._ -import scala.util.{Failure, Success, Try} - -object BcsJob{ - val BcsDockerImageEnvKey = "BATCH_COMPUTE_DOCKER_IMAGE" - val BcsDockerPathEnvKey = "BATCH_COMPUTE_DOCKER_REGISTRY_OSS_PATH" -} - -final case class BcsJob(name: String, - description: String, - commandString: String, - packagePath: Path, - mounts: Seq[BcsMount], - envs: 
Map[String, String], - runtime: BcsRuntimeAttributes, - stdoutPath: Option[Path], - stderrPath: Option[Path], - batchCompute: BatchComputeClient) { - - lazy val lazyDisks = new Disks - lazy val lazyConfigs = new Configs - lazy val lazyVpc = new VPC - lazy val lazyTask = new TaskDescription - lazy val lazyJob = new JobDescription - lazy val lazyCmd = new Command - - def submit(): Try[String] = Try{ - val request: CreateJobRequest = new CreateJobRequest - request.setJobDescription(jobDesc) - val response: CreateJobResponse = batchCompute.createJob(request) - val jobId = response.getJobId - jobId - } - - def getStatus(jobId: String): Try[RunStatus] = Try{ - val request: GetJobRequest = new GetJobRequest - request.setJobId(jobId) - val response: GetJobResponse = batchCompute.getJob(request) - val job = response.getJob - val status = job.getState - val message = job.getMessage - val eventList = Seq[ExecutionEvent]() - RunStatusFactory.getStatus(jobId, status, Some(message), Some(eventList)) match { - case Success(status) => status - case Failure(e) => throw e - } - } - - def cancel(jobId: String): Unit = { - // XXX: Do nothing currently. - } - - private[bcs] def systemDisk: Option[SystemDisk] = runtime.systemDisk map { disk => - val systemDisk = new SystemDisk() - systemDisk.setType(disk.diskType) - systemDisk.setSize(disk.sizeInGB) - systemDisk - } - - private[bcs] def dataDisk: Option[DataDisk] = runtime.dataDisk map { disk => - val dataDisk = new DataDisk - dataDisk.setType(disk.diskType) - dataDisk.setSize(disk.sizeInGB) - dataDisk.setMountPoint(disk.mountPoint) - dataDisk - } - - // XXX: maybe more elegant way to reduce two options? - private[bcs] def disks: Option[Disks] = { - (systemDisk, dataDisk) match { - case (Some(sys), Some(data)) => - lazyDisks.setSystemDisk(sys) - lazyDisks.setDataDisk(data) - Some(lazyDisks) - case (Some(sys), None) => - lazyDisks.setSystemDisk(sys) - Some(lazyDisks) - case (None, Some(data)) => - lazyDisks.setDataDisk(data) - Some(lazyDisks) - case (None, None) => None - } - } - - private[bcs] def vpc: Option[VPC] = { - (runtime.vpc flatMap {v => v.cidrBlock}, runtime.vpc flatMap {v => v.vpcId}) match { - case (Some(cidr), Some(id)) => - lazyVpc.setCidrBlock(cidr) - lazyVpc.setVpcId(id) - Some(lazyVpc) - case (Some(cidr), None) => - lazyVpc.setCidrBlock(cidr) - Some(lazyVpc) - case (None, Some(vpc)) => - lazyVpc.setVpcId(vpc) - Some(lazyVpc) - case (None, None) => None - } - } - - private[bcs] def configs: Option[Configs] = { - (vpc, disks) match { - case (Some(bcsVpc), Some(bcsDisks)) => - lazyConfigs.setDisks(bcsDisks) - val networks = new Networks - networks.setVpc(bcsVpc) - lazyConfigs.setNetworks(networks) - Some(lazyConfigs) - case (Some(bcsVpc), None) => - val networks = new Networks - networks.setVpc(bcsVpc) - lazyConfigs.setNetworks(networks) - Some(lazyConfigs) - case (None, Some(bcsDisks)) => - lazyConfigs.setDisks(bcsDisks) - Some(lazyConfigs) - case (None, None) => None - } - } - - private[bcs] def params: Parameters = { - val parames = new Parameters - lazyCmd.setPackagePath(packagePath.pathAsString) - lazyCmd.setEnvVars(environments.asJava) - lazyCmd.setCommandLine(commandString) - - dockers foreach {docker => lazyCmd.setDocker(docker)} - stdoutPath foreach {path => parames.setStdoutRedirectPath(path.normalize().pathAsString + "/")} - stderrPath foreach {path => parames.setStderrRedirectPath(path.normalize().pathAsString + "/")} - - parames.setCommand(lazyCmd) - parames - } - - private[bcs] def environments: Map[String, String] = { - 
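- // When no native `docker` attribute is configured, fall back to `dockerTag`:
- // the image (and its optional OSS registry path) reach the worker through the
- // BATCH_COMPUTE_DOCKER_IMAGE / BATCH_COMPUTE_DOCKER_REGISTRY_OSS_PATH env vars.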
runtime.docker match { - case None => - runtime.dockerTag match { - case Some(docker: BcsDockerWithoutPath) => envs + (BcsJob.BcsDockerImageEnvKey -> docker.image) - case Some(docker: BcsDockerWithPath) => envs + (BcsJob.BcsDockerPathEnvKey -> docker.path) + (BcsJob.BcsDockerImageEnvKey -> docker.image) - case _ => envs - } - case _ => envs - } - } - - val dockers: Option[Command.Docker] = { - runtime.docker match { - case Some(docker: BcsDockerWithoutPath) => - val dockers = new Command.Docker - dockers.setImage(docker.image) - Some(dockers) - case _ => None - } - } - - private[bcs] def jobDesc: JobDescription = { - lazyJob.setName(name) - lazyJob.setDescription(description) - lazyJob.setType("DAG") - - val dag = new DAG - dag.addTask("cromwell", taskDesc) - lazyJob.setDag(dag) - - // NOTE: Do NOT set auto release here or we will not be able to get status after the job completes. - lazyJob.setAutoRelease(false) - - lazyJob - } - - private[bcs] def taskDesc: TaskDescription = { - lazyTask.setParameters(params) - lazyTask.setInstanceCount(1) - - runtime.timeout foreach {timeout => lazyTask.setTimeout(timeout.toLong)} - - val cluster = runtime.cluster getOrElse(throw new IllegalArgumentException("cluster id or auto cluster configuration is mandatory")) - cluster.fold(handleClusterId, handleAutoCluster) - - val mnts = new Mounts - mounts foreach { - case input: BcsInputMount => - mnts.addEntries(input.toBcsMountEntry) - case output: BcsOutputMount => - var srcStr = BcsMount.toString(output.src) - if (BcsMount.toString(output.dest).endsWith("/") && !srcStr.endsWith("/")) { - srcStr += "/" - } - lazyTask.addOutputMapping(srcStr, BcsMount.toString(output.dest)) - } - - lazyTask.setMounts(mnts) - - lazyTask - } - - private def handleAutoCluster(config: AutoClusterConfiguration): Unit = { - val autoCluster = new AutoCluster - autoCluster.setImageId(runtime.imageId.getOrElse(config.imageId)) - autoCluster.setInstanceType(config.instanceType) - autoCluster.setResourceType(config.resourceType) - - config.spotStrategy foreach {strategy => autoCluster.setSpotStrategy(strategy)} - config.spotPriceLimit foreach {priceLimit => autoCluster.setSpotPriceLimit(priceLimit)} - config.clusterId foreach {clusterId => autoCluster.setClusterId(clusterId)} - runtime.reserveOnFail foreach {reserve => autoCluster.setReserveOnFail(reserve)} - val userData = runtime.userData map {datas => Map(datas map {data => data.key -> data.value}: _*)} - userData foreach {datas => autoCluster.setUserData(datas.asJava)} - - configs foreach (bcsConfigs => autoCluster.setConfigs(bcsConfigs)) - runtime.isv foreach(isv => autoCluster.setDependencyIsvService(isv)) - - lazyTask.setAutoCluster(autoCluster) - } - - private def handleClusterId(clusterId: String): Unit = lazyTask.setClusterId(clusterId) -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJobCachingActorHelper.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJobCachingActorHelper.scala deleted file mode 100644 index e4aa2413f73..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJobCachingActorHelper.scala +++ /dev/null @@ -1,52 +0,0 @@ -package cromwell.backend.impl.bcs - -import akka.actor.Actor -import cromwell.backend.standard.StandardCachingActorHelper -import cromwell.core.logging.JobLogging -import cromwell.core.path.Path - -object BcsJobCachingActorHelper { - val workerScript: String = - s"""|#!/bin/bash - |export script=$$cwd/$$(basename $$exec) - |export rc=$$cwd/rc - | - |( - 
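- |# Copy the task script into the call's working directory and run it there;
- |# the subshell keeps the cd from leaking into the wrapper.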
|mkdir -p $$cwd - |cp -rf $$exec $$script - |cd $$cwd - |/bin/bash -c $$script - |) - """.stripMargin -} - -trait BcsJobCachingActorHelper extends StandardCachingActorHelper { - this: Actor with JobLogging => - - bcsWorkflowPaths.tag = runtimeAttributes.tag.getOrElse("") - - lazy val initializationData: BcsBackendInitializationData = { - backendInitializationDataAs[BcsBackendInitializationData] - } - - def bcsClient = initializationData.bcsConfiguration.bcsClient.getOrElse(throw new RuntimeException("no bcs client available")) - - lazy val bcsWorkflowPaths: BcsWorkflowPaths = workflowPaths.asInstanceOf[BcsWorkflowPaths] - - lazy val bcsJobPaths: BcsJobPaths = jobPaths.asInstanceOf[BcsJobPaths] - - lazy val bcsConfiguration: BcsConfiguration = initializationData.bcsConfiguration - - lazy val runtimeAttributes = BcsRuntimeAttributes(validatedRuntimeAttributes, bcsConfiguration.runtimeConfig) - - lazy val callRootPath: Path = bcsJobPaths.callExecutionRoot - - lazy val returnCodeFilename: String = bcsJobPaths.returnCodeFilename - lazy val returnCodeGcsPath: Path = bcsJobPaths.returnCode - lazy val standardPaths = bcsJobPaths.standardPaths - lazy val bcsStdoutFile: Path = standardPaths.output - lazy val bcsStderrFile: Path = standardPaths.error - - //lazy val bcsCommandLine = "bash -c $(pwd)/cromwell_bcs && sync" - lazy val bcsCommandLine = "./worker" -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJobPaths.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJobPaths.scala deleted file mode 100644 index 6353f51e3e1..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsJobPaths.scala +++ /dev/null @@ -1,29 +0,0 @@ -package cromwell.backend.impl.bcs - -import cromwell.backend.BackendJobDescriptorKey -import cromwell.backend.io.JobPaths -import cromwell.core.path.{DefaultPathBuilder, Path} - -object BcsJobPaths { - val BcsLogPathKey = "bcsLog" - val BcsEnvExecKey = "exec" - val BcsEnvCwdKey = "cwd" - val BcsEnvStdoutKey = "stdout" - val BcsEnvStderrKey = "stderr" - val BcsCommandDirectory: Path = DefaultPathBuilder.get("/cromwell_root") - val BcsTempInputDirectory: Path = DefaultPathBuilder.get("/cromwell_inputs") - val BcsStdoutRedirectPath = "bcs-stdout" - val BcsStderrRedirectPath = "bcs-stderr" -} - -final case class BcsJobPaths(workflowPaths: BcsWorkflowPaths, jobKey: BackendJobDescriptorKey, override val isCallCacheCopyAttempt: Boolean = false) extends JobPaths { - - import BcsJobPaths._ - - val workerFileName = "worker" - val workerPath = callRoot.resolve(workerFileName) - val bcsStdoutPath = callRoot.resolve(BcsStdoutRedirectPath) - val bcsStderrPath = callRoot.resolve(BcsStderrRedirectPath) - - override def forCallCacheCopyAttempts: JobPaths = this.copy(isCallCacheCopyAttempt = true) -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsMount.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsMount.scala deleted file mode 100644 index 757405d23fb..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsMount.scala +++ /dev/null @@ -1,127 +0,0 @@ -package cromwell.backend.impl.bcs - -import cats.data.Validated._ -import cats.syntax.apply._ -import cats.syntax.validated._ -import com.aliyuncs.batchcompute.pojo.v20151111.MountEntry -import common.exception.MessageAggregation -import common.validation.ErrorOr._ -import cromwell.backend.impl.bcs.BcsMount.PathType -import cromwell.core.path.{Path, PathBuilder, PathFactory} - 
-import scala.util.{Success, Try} -import scala.util.matching.Regex - -object BcsMount { - type PathType = Either[Path, String] - - def toString(p: PathType): String = { - p match { - case Left(p) => - p.pathAsString - case Right(s) => - return s - } - } - - val supportFileSystemTypes = List("oss", "nas", "smb", "lustre").mkString("|") - - var pathBuilders: List[PathBuilder] = List() - - val remotePrefix = s"""(?:$supportFileSystemTypes)""" + """://[^\s]+""" - val localPath = """/[^\s]+""" - val writeSupport = """true|false""" - - val inputMountPattern: Regex = s"""($remotePrefix)\\s+($localPath)\\s+($writeSupport)""".r - val outputMountPattern: Regex = s"""($localPath)\\s+($remotePrefix)\\s+($writeSupport)""".r - - def parse(s: String): Try[BcsMount] = { - val validation: ErrorOr[BcsMount] = s match { - case inputMountPattern(remote, local, writeSupport) => - (validateRemote(remote), validateLocal(remote, local), validateBoolean(writeSupport)) mapN { (src, dest, ws) => new BcsInputMount(src, dest, ws)} - case outputMountPattern(local, oss, writeSupport) => - (validateLocal(oss, local), validateRemote(oss), validateBoolean(writeSupport)) mapN { (src, dest, ws) => new BcsOutputMount(src, dest, ws)} - case _ => s"Mount strings should be of the format 'oss://my-bucket/inputs/ /home/inputs/ true' or '/home/outputs/ oss://my-bucket/outputs/ false'".invalidNel - } - - Try(validation match { - case Valid(mount) => mount - case Invalid(nels) => - throw new UnsupportedOperationException with MessageAggregation { - val exceptionContext = "" - val errorMessages: List[String] = nels.toList - } - }) - } - - private def validateRemote(value: String): ErrorOr[PathType] = { - Try(PathFactory.buildPath(value, pathBuilders)) match { - case Success(p) => - Left(p).validNel - case _ => - Right(value).validNel - } - } - private def validateLocal(remote: String, local: String): ErrorOr[PathType] = { - if (remote.endsWith("/") == local.endsWith("/")) { - Try(PathFactory.buildPath(local, pathBuilders)) match { - case Success(p) => - Left(p).validNel - case _=> - Right(local).validNel - } - } else { - "oss and local path type not match".invalidNel - } - } - - private def validateBoolean(value: String): ErrorOr[Boolean] = { - try { - value.toBoolean.validNel - } catch { - case _: IllegalArgumentException => s"$value not convertible to a Boolean".invalidNel - } - } -} - -trait BcsMount { - import BcsMount._ - var src: PathType - var dest: PathType - var writeSupport: Boolean - - def toBcsMountEntry: MountEntry -} - -final case class BcsInputMount(var src: PathType, var dest: PathType, var writeSupport: Boolean) extends BcsMount { - def toBcsMountEntry: MountEntry = { - var destStr = BcsMount.toString(dest) - if (BcsMount.toString(src).endsWith("/") && !destStr.endsWith("/")) { - destStr += "/" - } - - val entry = new MountEntry - entry.setSource(BcsMount.toString(src)) - entry.setDestination(destStr) - entry.setWriteSupport(writeSupport) - - entry - } - -} -final case class BcsOutputMount(var src: PathType, var dest: PathType, var writeSupport: Boolean) extends BcsMount { - def toBcsMountEntry: MountEntry = { - var srcStr = BcsMount.toString(src) - if (BcsMount.toString(dest).endsWith("/") && !srcStr.endsWith("/")) { - srcStr += "/" - } - - - val entry = new MountEntry - entry.setSource(srcStr) - entry.setDestination(BcsMount.toString(dest)) - entry.setWriteSupport(writeSupport) - - entry - } -} \ No newline at end of file diff --git 
a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsRuntimeAttributes.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsRuntimeAttributes.scala deleted file mode 100644 index da73c3ef747..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsRuntimeAttributes.scala +++ /dev/null @@ -1,394 +0,0 @@ -package cromwell.backend.impl.bcs - -import cats.data.Validated._ -import cats.syntax.apply._ -import cats.syntax.validated._ -import com.typesafe.config.Config -import common.validation.ErrorOr._ -import cromwell.backend.impl.bcs.BcsClusterIdOrConfiguration.BcsClusterIdOrConfiguration -import cromwell.backend.standard.StandardValidatedRuntimeAttributesBuilder -import cromwell.backend.validation._ -import net.ceedubs.ficus.Ficus._ -import wom.types._ -import wom.values._ - -import scala.util.{Failure, Success, Try} - - -trait OptionalWithDefault[A] { - this: RuntimeAttributesValidation[A] => - protected val config: Option[Config] - - override protected def staticDefaultOption: Option[WomValue] = { - Try(this.configDefaultWomValue(config)) match { - case Success(value: Option[WomValue]) => value - case Failure(_) => None - } - } -} - -final case class BcsRuntimeAttributes(continueOnReturnCode: ContinueOnReturnCode, - dockerTag: Option[BcsDocker], - docker: Option[BcsDocker], - failOnStderr: Boolean, - mounts: Option[Seq[BcsMount]], - userData: Option[Seq[BcsUserData]], - cluster: Option[BcsClusterIdOrConfiguration], - imageId: Option[String], - systemDisk: Option[BcsSystemDisk], - dataDisk: Option[BcsDataDisk], - reserveOnFail: Option[Boolean], - autoReleaseJob: Option[Boolean], - timeout: Option[Int], - verbose: Option[Boolean], - vpc: Option[BcsVpcConfiguration], - tag: Option[String], - isv:Option[String]) - -object BcsRuntimeAttributes { - - val MountsKey = "mounts" - val UserDataKey = "userData" - val MountsDefaultValue = WomString("") - val ReserveOnFailKey = "reserveOnFail" - val ReserveOnFailDefault = false - val AutoReleaseJobKey = "autoReleaseJob" - val AutoReleaseJobDefault = WomBoolean(true) - val TimeoutKey = "timeout" - val TimeoutDefault = WomInteger(21600) - val VerboseKey = "verbose" - val ClusterKey = "cluster" - val DockerKey = "docker" - val SystemDiskKey = "systemDisk" - val DataDiskKey = "dataDisk" - val VpcKey = "vpc" - val TagKey = "tag" - - private def failOnStderrValidation(runtimeConfig: Option[Config]) = FailOnStderrValidation.default(runtimeConfig) - - private def continueOnReturnCodeValidation(runtimeConfig: Option[Config]) = ContinueOnReturnCodeValidation.default(runtimeConfig) - - private def clusterValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[BcsClusterIdOrConfiguration] = ClusterValidation.optionalWithDefault(runtimeConfig) - - private def dockerTagValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[BcsDocker] = DockerTagValidation.optionalWithDefault(runtimeConfig) - private def dockerValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[BcsDocker] = DockerValidation.optionalWithDefault(runtimeConfig) - - private def userDataValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Seq[BcsUserData]] = UserDataValidation.optionalWithDefault(runtimeConfig) - - private def systemDiskValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[BcsSystemDisk] = SystemDiskValidation.optionalWithDefault(runtimeConfig) - private def dataDiskValidation(runtimeConfig: 
Option[Config]): OptionalRuntimeAttributesValidation[BcsDataDisk] = DataDiskValidation.optionalWithDefault(runtimeConfig) - - private def reserveOnFailValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Boolean] = ReserveOnFailValidation.optionalWithDefault(runtimeConfig) - - private def autoReleaseJobValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Boolean] = AutoReleaseJobValidation.optionalWithDefault(runtimeConfig) - - private def mountsValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Seq[BcsMount]] = MountsValidation.optionalWithDefault(runtimeConfig) - - private def timeoutValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Int] = TimeoutValidation.optionalWithDefault(runtimeConfig) - - private def verboseValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[Boolean] = VerboseValidation.optionalWithDefault(runtimeConfig) - - private def vpcValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[BcsVpcConfiguration] = VpcValidation.optionalWithDefault(runtimeConfig) - - private def tagValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = TagValidation.optionalWithDefault(runtimeConfig) - - private def imageIdValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = ImageIdValidation.optionalWithDefault(runtimeConfig) - - private def isvValidation(runtimeConfig: Option[Config]): OptionalRuntimeAttributesValidation[String] = IsvValidation.optionalWithDefault(runtimeConfig) - - def runtimeAttributesBuilder(backendRuntimeConfig: Option[Config]): StandardValidatedRuntimeAttributesBuilder = { - val defaults = StandardValidatedRuntimeAttributesBuilder.default(backendRuntimeConfig).withValidation( - mountsValidation(backendRuntimeConfig), - userDataValidation(backendRuntimeConfig), - clusterValidation(backendRuntimeConfig), - systemDiskValidation(backendRuntimeConfig), - dataDiskValidation(backendRuntimeConfig), - reserveOnFailValidation(backendRuntimeConfig), - autoReleaseJobValidation(backendRuntimeConfig), - timeoutValidation(backendRuntimeConfig), - verboseValidation(backendRuntimeConfig), - vpcValidation(backendRuntimeConfig), - tagValidation(backendRuntimeConfig), - imageIdValidation(backendRuntimeConfig), - isvValidation(backendRuntimeConfig), - ) - - // TODO: docker trips up centaur testing, for now https://github.com/broadinstitute/cromwell/issues/3518 - if (backendRuntimeConfig.exists(_.getOrElse("ignoreDocker", false))) { - defaults - } else { - defaults.withValidation( - dockerTagValidation(backendRuntimeConfig), - dockerValidation(backendRuntimeConfig) - ) - } - } - - def apply(validatedRuntimeAttributes: ValidatedRuntimeAttributes, backendRuntimeConfig: Option[Config]): BcsRuntimeAttributes = { - val failOnStderr: Boolean = - RuntimeAttributesValidation.extract(failOnStderrValidation(backendRuntimeConfig), validatedRuntimeAttributes) - val continueOnReturnCode: ContinueOnReturnCode = - RuntimeAttributesValidation.extract(continueOnReturnCodeValidation(backendRuntimeConfig), validatedRuntimeAttributes) - val mounts: Option[Seq[BcsMount]] = RuntimeAttributesValidation.extractOption(mountsValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val userData: Option[Seq[BcsUserData]] = RuntimeAttributesValidation.extractOption(userDataValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - - val cluster: 
Option[BcsClusterIdOrConfiguration] = RuntimeAttributesValidation.extractOption(clusterValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val imageId: Option[String] = RuntimeAttributesValidation.extractOption(imageIdValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val dockerTag: Option[BcsDocker] = RuntimeAttributesValidation.extractOption(dockerTagValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val docker: Option[BcsDocker] = RuntimeAttributesValidation.extractOption(dockerValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val systemDisk: Option[BcsSystemDisk] = RuntimeAttributesValidation.extractOption(systemDiskValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val dataDisk: Option[BcsDataDisk] = RuntimeAttributesValidation.extractOption(dataDiskValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - - val reserveOnFail: Option[Boolean] = RuntimeAttributesValidation.extractOption(reserveOnFailValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val autoReleaseJob: Option[Boolean] = RuntimeAttributesValidation.extractOption(autoReleaseJobValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val timeout: Option[Int] = RuntimeAttributesValidation.extractOption(timeoutValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val verbose: Option[Boolean] = RuntimeAttributesValidation.extractOption(verboseValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val vpc: Option[BcsVpcConfiguration] = RuntimeAttributesValidation.extractOption(vpcValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val tag: Option[String] = RuntimeAttributesValidation.extractOption(tagValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - val isv: Option[String] = RuntimeAttributesValidation.extractOption(isvValidation(backendRuntimeConfig).key, validatedRuntimeAttributes) - - new BcsRuntimeAttributes( - continueOnReturnCode, - dockerTag, - docker, - failOnStderr, - mounts, - userData, - cluster, - imageId, - systemDisk, - dataDisk, - reserveOnFail, - autoReleaseJob, - timeout, - verbose, - vpc, - tag, - isv - ) - } -} - -object MountsValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[Seq[BcsMount]] = new MountsValidation(config).optional -} - -class MountsValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[Seq[BcsMount]] with OptionalWithDefault[Seq[BcsMount]] { - override def key: String = BcsRuntimeAttributes.MountsKey - - override def coercion: Iterable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) - - override protected def validateValue: PartialFunction[WomValue, ErrorOr[Seq[BcsMount]]] = { - case WomString(value) => validateMounts(value.split(",\\s*").toSeq) - case WomArray(wdlType, values) if wdlType.memberType == WomStringType => - validateMounts(values.map(_.valueString)) - } - - private def validateMounts(mounts: Seq[String]): ErrorOr[Seq[BcsMount]] = { - val mountNels: Seq[ErrorOr[BcsMount]] = mounts filter { s => !s.trim().isEmpty } map validateMounts - val sequenced: ErrorOr[Seq[BcsMount]] = sequenceNels(mountNels) - sequenced - } - - private def validateMounts(mount: String): ErrorOr[BcsMount] = { - BcsMount.parse(mount) match { - case scala.util.Success(mnt) => mnt.validNel - case scala.util.Failure(ex) => ex.getMessage.invalidNel - } - } - - private def sequenceNels(nels: Seq[ErrorOr[BcsMount]]): ErrorOr[Seq[BcsMount]] = { - 
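- // Applicative fold: mapN accumulates every invalid mount string into a single
- // ErrorOr instead of short-circuiting on the first bad entry.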
val emptyMountNel: ErrorOr[Vector[BcsMount]] = Vector.empty[BcsMount].validNel - val mountsNel: ErrorOr[Vector[BcsMount]] = nels.foldLeft(emptyMountNel) { - (acc, v) => (acc, v) mapN { (a, v) => a :+ v } - } - mountsNel - } - - override protected def missingValueMessage: String = - s"Expecting $key runtime attribute to be a comma separated String or Array[String]" -} - -object UserDataValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[Seq[BcsUserData]] = new UserDataValidation(config).optional -} - -class UserDataValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[Seq[BcsUserData]] with OptionalWithDefault[Seq[BcsUserData]]{ - override def key: String = BcsRuntimeAttributes.UserDataKey - - override def usedInCallCaching: Boolean = true - - override def coercion: Iterable[WomType] = Set(WomStringType, WomArrayType(WomStringType)) - - override protected def validateValue: PartialFunction[WomValue, ErrorOr[Seq[BcsUserData]]] = { - case WomString(value) => validateUserData(value.split(",\\s*").toSeq) - case WomArray(wdlType, values) if wdlType.memberType == WomStringType => - validateUserData(values.map(_.valueString)) - } - - private def validateUserData(mounts: Seq[String]): ErrorOr[Seq[BcsUserData]] = { - val userDataNels: Seq[ErrorOr[BcsUserData]] = mounts filter { s => !s.trim().isEmpty } map validateUserData - val sequenced: ErrorOr[Seq[BcsUserData]] = sequenceNels(userDataNels) - sequenced - } - - private def validateUserData(data: String): ErrorOr[BcsUserData] = { - BcsUserData.parse(data) match { - case scala.util.Success(userData) => userData.validNel - case scala.util.Failure(ex) => ex.getMessage.invalidNel - } - } - - private def sequenceNels(nels: Seq[ErrorOr[BcsUserData]]): ErrorOr[Seq[BcsUserData]] = { - val emptyDataNel: ErrorOr[Vector[BcsUserData]] = Vector.empty[BcsUserData].validNel - val datasNel: ErrorOr[Vector[BcsUserData]] = nels.foldLeft(emptyDataNel) { - (acc, v) => (acc, v) mapN { (a, v) => a :+ v } - } - datasNel - } - - override protected def missingValueMessage: String = - s"Expecting $key runtime attribute to be a comma separated String or Array[String]" -} - -object ReserveOnFailValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[Boolean] = new ReserveOnFailValidation(config).optional -} - -class ReserveOnFailValidation(override val config: Option[Config]) extends BooleanRuntimeAttributesValidation(BcsRuntimeAttributes.ReserveOnFailKey) with OptionalWithDefault[Boolean] - -object AutoReleaseJobValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[Boolean] = new AutoReleaseJobValidation(config).optional -} - -class AutoReleaseJobValidation(override val config: Option[Config]) extends BooleanRuntimeAttributesValidation(BcsRuntimeAttributes.AutoReleaseJobKey) with OptionalWithDefault[Boolean] - -object TimeoutValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[Int] = new TimeoutValidation(config).optional -} - -class TimeoutValidation(override val config: Option[Config]) extends IntRuntimeAttributesValidation(BcsRuntimeAttributes.TimeoutKey) with OptionalWithDefault[Int] - -object VerboseValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[Boolean] = new VerboseValidation(config).optional -} - -class VerboseValidation(override val config: Option[Config]) extends 
BooleanRuntimeAttributesValidation(BcsRuntimeAttributes.VerboseKey) with OptionalWithDefault[Boolean] - - -object ClusterValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[BcsClusterIdOrConfiguration] = new ClusterValidation(config).optional -} - -class ClusterValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[BcsClusterIdOrConfiguration] with OptionalWithDefault[BcsClusterIdOrConfiguration] -{ - override def key: String = "cluster" - - override def coercion: Iterable[WomType] = Set(WomStringType) - - override def validateValue: PartialFunction[WomValue, ErrorOr[BcsClusterIdOrConfiguration]] = { - case WomString(s) => BcsClusterIdOrConfiguration.parse(s.toString) match { - case Success(cluster) => cluster.validNel - case Failure(t) => t.getMessage.invalidNel - } - } -} - -object SystemDiskValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[BcsSystemDisk] = new SystemDiskValidation(config).optional -} - -class SystemDiskValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[BcsSystemDisk] with OptionalWithDefault[BcsSystemDisk] -{ - override def key: String = "systemDisk" - override def coercion: Iterable[WomType] = Set(WomStringType) - override def validateValue: PartialFunction[WomValue, ErrorOr[BcsSystemDisk]] = { - case WomString(s) => BcsDisk.parse(s.toString) match { - case Success(disk: BcsSystemDisk) => disk.validNel - case _ => s"system disk should be string like 'cloud 40'".invalidNel - } - } -} - -object DataDiskValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[BcsDataDisk] = new DataDiskValidation(config).optional -} - -class DataDiskValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[BcsDataDisk] with OptionalWithDefault[BcsDataDisk] -{ - override def key: String = "dataDisk" - override def coercion: Iterable[WomType] = Set(WomStringType) - override def validateValue: PartialFunction[WomValue, ErrorOr[BcsDataDisk]] = { - case WomString(s) => BcsDisk.parse(s.toString) match { - case Success(disk: BcsDataDisk) => disk.validNel - case _ => s"system disk should be string like 'cloud 40 /home/data/'".invalidNel - } - } -} - -object DockerTagValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[BcsDocker] = new DockerTagValidation(config).optional -} - -class DockerTagValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[BcsDocker] with OptionalWithDefault[BcsDocker] -{ - override def key: String = "dockerTag" - override def coercion: Iterable[WomType] = Set(WomStringType) - override def validateValue: PartialFunction[WomValue, ErrorOr[BcsDocker]] = { - case WomString(s) => BcsDocker.parse(s.toString) match { - case Success(docker: BcsDocker) => docker.validNel - case _ => s"docker must be 'dockerImage dockerPath' like".invalidNel - } - } -} - -object DockerValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[BcsDocker] = new DockerValidation(config).optional -} - -class DockerValidation(override val config: Option[Config]) extends DockerTagValidation(config) -{ - override def key: String = "docker" - override def usedInCallCaching: Boolean = true -} - -object VpcValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[BcsVpcConfiguration] = new VpcValidation(config).optional -} - 
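// Aside: a minimal, self-contained sketch (not part of the deleted file) of the
// parse-then-lift shape shared by ClusterValidation, SystemDiskValidation,
// DataDiskValidation, DockerTagValidation and VpcValidation above: a Try from
// the parser is folded into cats' accumulating ErrorOr. The names LiftSketch
// and liftTry are invented for this illustration; cats is already a dependency.
import cats.data.ValidatedNel
import cats.syntax.validated._
import scala.util.{Failure, Success, Try}

object LiftSketch {
  type ErrorOr[A] = ValidatedNel[String, A]

  // A Success becomes Valid; a Failure becomes a one-element error list, so a
  // downstream mapN or sequence can report several bad attributes at once.
  def liftTry[A](parsed: Try[A]): ErrorOr[A] = parsed match {
    case Success(a)  => a.validNel
    case Failure(ex) => ex.getMessage.invalidNel
  }

  def main(args: Array[String]): Unit = {
    println(liftTry(Try("vpc-xxxx")))                   // Valid(vpc-xxxx)
    println(liftTry(Try(sys.error("bad vpc string"))))  // Invalid(NonEmptyList(bad vpc string))
  }
}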
-class VpcValidation(override val config: Option[Config]) extends RuntimeAttributesValidation[BcsVpcConfiguration] with OptionalWithDefault[BcsVpcConfiguration] -{ - override def key: String = "vpc" - override def coercion: Iterable[WomType] = Set(WomStringType) - override def validateValue: PartialFunction[WomValue, ErrorOr[BcsVpcConfiguration]] = { - case WomString(s) => BcsVpcConfiguration.parse(s.toString) match { - case Success(vpc: BcsVpcConfiguration) => vpc.validNel - case _ => s"vpc must be '192.168.0.0/16 vpc-xxxx' like".invalidNel - } - } -} - -object TagValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[String] = new TagValidation(config).optional -} - -class TagValidation(override val config: Option[Config]) extends StringRuntimeAttributesValidation("tag") with OptionalWithDefault[String] - -object ImageIdValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[String] = new ImageIdValidation(config).optional -} - -class ImageIdValidation(override val config: Option[Config]) extends StringRuntimeAttributesValidation("imageId") with OptionalWithDefault[String] -{ - override def usedInCallCaching: Boolean = true -} - -object IsvValidation { - def optionalWithDefault(config: Option[Config]): OptionalRuntimeAttributesValidation[String] = new IsvValidation(config).optional -} - -class IsvValidation(override val config: Option[Config]) extends StringRuntimeAttributesValidation("isv") with OptionalWithDefault[String] -{ - override def usedInCallCaching: Boolean = true -} - diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsUserData.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsUserData.scala deleted file mode 100644 index 375ae943d49..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsUserData.scala +++ /dev/null @@ -1,43 +0,0 @@ -package cromwell.backend.impl.bcs - -import cats.data.Validated._ -import cats.syntax.apply._ -import common.validation.ErrorOr._ -import cats.syntax.validated._ -import common.exception.MessageAggregation - -import scala.util.Try -import scala.util.matching.Regex - -object BcsUserData { - val keyPattern = """[^\s]+""" - val valuePattern = """[^\s]+""" - val inputMountPattern: Regex = s"""($keyPattern)\\s+($valuePattern)""".r - - def parse(s: String): Try[BcsUserData] = { - val validation: ErrorOr[BcsUserData] = s match { - case inputMountPattern(key, value) => (validateKey(key), validateValue(value)) mapN ((k, v) => new BcsUserData(k, v)) - case _ => s"error user data entry".invalidNel - } - - Try(validation match { - case Valid(userData) => userData - case Invalid(nels) => - throw new UnsupportedOperationException with MessageAggregation { - val exceptionContext = "" - val errorMessages: List[String] = nels.toList - } - }) - } - - private def validateKey(key: String): ErrorOr[String] = { - key.validNel - } - - private def validateValue(value: String): ErrorOr[String] = { - value.validNel - } - -} - -final case class BcsUserData(key: String, value: String) diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsVpcConfiguration.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsVpcConfiguration.scala deleted file mode 100644 index eac121ad745..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsVpcConfiguration.scala +++ /dev/null @@ -1,25 +0,0 @@ -package cromwell.backend.impl.bcs - - -import 
scala.util.{Failure, Success, Try} - - -final case class BcsVpcConfiguration(cidrBlock: Option[String] = None, - vpcId: Option[String] = None) - - -object BcsVpcConfiguration { - val cidrBlockPattern = """(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/([0-9]|[1-2][0-9]|3[0-2]){1,2})""".r - val vpcIdPattern = """(vpc-[^\s]+)""".r - - def parse(s: String): Try[BcsVpcConfiguration] = { - val cidrBlock = cidrBlockPattern findFirstIn s - val vpcId = vpcIdPattern findFirstIn s - - if (cidrBlock.isEmpty && vpcId.isEmpty) { - Failure(new IllegalArgumentException("vpc configuration must be a string like '192.168.0.0/16 vpc-xxxx' ")) - } else { - Success(BcsVpcConfiguration(cidrBlock, vpcId)) - } - } -} \ No newline at end of file diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsWorkflowPaths.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsWorkflowPaths.scala deleted file mode 100644 index 1082acaa0f9..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/BcsWorkflowPaths.scala +++ /dev/null @@ -1,38 +0,0 @@ -package cromwell.backend.impl.bcs - -import com.typesafe.config.Config -import cromwell.backend.io.WorkflowPaths -import cromwell.backend.{BackendJobDescriptorKey, BackendWorkflowDescriptor} -import cromwell.core.path.{Path, PathBuilder} - -object BcsWorkflowPaths { - val WorkFlowTagKey = "bcs_workflow_tag" -} - -case class BcsWorkflowPaths(override val workflowDescriptor: BackendWorkflowDescriptor, - override val config: Config, - override val pathBuilders: List[PathBuilder] = WorkflowPaths.DefaultPathBuilders) extends WorkflowPaths { - - import BcsWorkflowPaths._ - override def toJobPaths(workflowPaths: WorkflowPaths, jobKey: BackendJobDescriptorKey): BcsJobPaths = { - new BcsJobPaths(workflowPaths.asInstanceOf[BcsWorkflowPaths], jobKey) - } - - override protected def withDescriptor(workflowDescriptor: BackendWorkflowDescriptor): WorkflowPaths = this.copy(workflowDescriptor = workflowDescriptor) - - override protected def workflowPathBuilder(root: Path): Path = { - workflowDescriptor.breadCrumbs.foldLeft(root)((acc, breadCrumb) => { - breadCrumb.toPath(acc) - }).resolve(workflowDescriptor.callable.name).resolve(tag).resolve(workflowDescriptor.id.toString + "/") - } - - var tag: String = { - workflowDescriptor.workflowOptions.get(WorkFlowTagKey).getOrElse("") - } - - private[bcs] def getWorkflowInputMounts: BcsInputMount = { - val src = workflowRoot - val dest = BcsJobPaths.BcsTempInputDirectory.resolve(src.pathWithoutScheme) - BcsInputMount(Left(src), Left(dest), true) - } -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/RunStatus.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/RunStatus.scala deleted file mode 100644 index fbfa3149e0f..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/RunStatus.scala +++ /dev/null @@ -1,85 +0,0 @@ -package cromwell.backend.impl.bcs - -import cromwell.core.ExecutionEvent - -import scala.util.{Failure, Success, Try} - -sealed trait RunStatus { - import RunStatus._ - val jobId: String - val status: String - - def isTerminated: Boolean - - def isRunningOrComplete = this match { - case _: Running | _: TerminalRunStatus => true - case _ => false - } - - override def toString = status -} - -object RunStatus { - final case class Waiting(override val jobId: String) extends RunStatus { - override val status = "Waiting" - - override def 
isTerminated: Boolean = false - } - - final case class Running(override val jobId: String) extends RunStatus { - override val status = "Running" - - override def isTerminated: Boolean = false - } - - sealed trait TerminalRunStatus extends RunStatus { - def eventList: Seq[ExecutionEvent] - - override def isTerminated: Boolean = true - } - - sealed trait UnsuccessfulRunStatus extends TerminalRunStatus { - val errorMessage: Option[String] - lazy val prettyPrintedError: String = errorMessage map { e => s" Message: $e" } getOrElse "" - } - - final case class Finished(override val jobId: String, eventList: Seq[ExecutionEvent]) extends TerminalRunStatus { - override val status = "Finished" - } - - object UnsuccessfulRunStatus { - def apply(jobId: String, status: String, errorMessage: Option[String], eventList: Seq[ExecutionEvent]): UnsuccessfulRunStatus = { - if (status == "Stopped") { - Stopped(jobId, errorMessage, eventList) - } else { - Failed(jobId, errorMessage, eventList) - } - } - } - - final case class Failed(override val jobId: String, - errorMessage: Option[String], - eventList: Seq[ExecutionEvent]) extends UnsuccessfulRunStatus { - override val status = "Failed" - } - - final case class Stopped(override val jobId: String, - errorMessage: Option[String], - eventList: Seq[ExecutionEvent]) extends UnsuccessfulRunStatus { - override val status = "Stopped" - } -} - -object RunStatusFactory { - def getStatus(jobId: String, status: String, errorMessage: Option[String] = None, eventList: Option[Seq[ExecutionEvent]] = None): Try[RunStatus] = { - import RunStatus._ - status match { - case "Waiting" => Success(Waiting(jobId)) - case "Running" => Success(Running(jobId)) - case "Stopped" => Success(Stopped(jobId, errorMessage, eventList.getOrElse(Seq.empty))) - case "Failed" => Success(Failed(jobId, errorMessage, eventList.getOrElse(Seq.empty))) - case "Finished" => Success(Finished(jobId, eventList.getOrElse(Seq.empty))) - case _ => Failure(new RuntimeException(s"job {$jobId} turns to an invalid batchcompue status: {$status}")) - } - } -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/callcaching/BcsBackendCacheHitCopyingActor.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/callcaching/BcsBackendCacheHitCopyingActor.scala deleted file mode 100644 index 0285edf3e0a..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/callcaching/BcsBackendCacheHitCopyingActor.scala +++ /dev/null @@ -1,83 +0,0 @@ -package cromwell.backend.impl.bcs.callcaching - -import com.google.cloud.storage.contrib.nio.CloudStorageOptions -import common.util.TryUtil -import cromwell.backend.BackendInitializationData -import cromwell.backend.impl.bcs.BcsBackendInitializationData -import cromwell.backend.io.JobPaths -import cromwell.backend.standard.callcaching.{StandardCacheHitCopyingActor, StandardCacheHitCopyingActorParams} -import cromwell.core.CallOutputs -import cromwell.core.io.{IoCommand, IoTouchCommand} -import cromwell.core.path.Path -import cromwell.core.simpleton.{WomValueBuilder, WomValueSimpleton} -import cromwell.filesystems.oss.batch.OssBatchCommandBuilder -import wom.values.WomFile - -import scala.language.postfixOps -import scala.util.Try - -class BcsBackendCacheHitCopyingActor(standardParams: StandardCacheHitCopyingActorParams) extends StandardCacheHitCopyingActor(standardParams) { - override protected val commandBuilder: OssBatchCommandBuilder.type = OssBatchCommandBuilder - private val cachingStrategy = BackendInitializationData - 
.as[BcsBackendInitializationData](standardParams.backendInitializationDataOption) - .bcsConfiguration.duplicationStrategy - - override def processSimpletons(womValueSimpletons: Seq[WomValueSimpleton], - sourceCallRootPath: Path, - ): Try[(CallOutputs, Set[IoCommand[_]])] = cachingStrategy match { - case CopyCachedOutputs => super.processSimpletons(womValueSimpletons, sourceCallRootPath) - case UseOriginalCachedOutputs => - val touchCommands: Seq[Try[IoTouchCommand]] = womValueSimpletons collect { - case WomValueSimpleton(_, wdlFile: WomFile) => getPath(wdlFile.value) flatMap OssBatchCommandBuilder.touchCommand - } - - TryUtil.sequence(touchCommands) map { - WomValueBuilder.toJobOutputs(jobDescriptor.taskCall.outputPorts, womValueSimpletons) -> _.toSet - } - } - - override def processDetritus(sourceJobDetritusFiles: Map[String, String] - ): Try[(Map[String, Path], Set[IoCommand[_]])] = cachingStrategy match { - case CopyCachedOutputs => super.processDetritus(sourceJobDetritusFiles) - case UseOriginalCachedOutputs => - // apply getPath on each detritus string file - val detritusAsPaths = detritusFileKeys(sourceJobDetritusFiles).toSeq map { key => - key -> getPath(sourceJobDetritusFiles(key)) - } toMap - - // Don't forget to re-add the CallRootPathKey that has been filtered out by detritusFileKeys - TryUtil.sequenceMap(detritusAsPaths, "Failed to make paths out of job detritus") flatMap { newDetritus => - Try { - // PROD-444: Keep It Short and Simple: Throw on the first error and let the outer Try catch-and-re-wrap - (newDetritus + (JobPaths.CallRootPathKey -> destinationCallRootPath)) -> - newDetritus.values.map(OssBatchCommandBuilder.touchCommand(_).get).toSet - } - } - } - - override protected def additionalIoCommands(sourceCallRootPath: Path, - originalSimpletons: Seq[WomValueSimpleton], - newOutputs: CallOutputs, - originalDetritus: Map[String, String], - newDetritus: Map[String, Path]): Try[List[Set[IoCommand[_]]]] = Try { - cachingStrategy match { - case UseOriginalCachedOutputs => - val content = - s""" - |This directory does not contain any output files because this job matched an identical job that was previously run, thus it was a cache-hit. - |Cromwell is configured to not copy outputs during call caching. To change this, edit the filesystems.gcs.caching.duplication-strategy field in your backend configuration. 
- |The original outputs can be found at this location: ${sourceCallRootPath.pathAsString} - """.stripMargin - - // PROD-444: Keep It Short and Simple: Throw on the first error and let the outer Try catch-and-re-wrap - List(Set( - OssBatchCommandBuilder.writeCommand( - path = jobPaths.forCallCacheCopyAttempts.callExecutionRoot / "call_caching_placeholder.txt", - content = content, - options = Seq(CloudStorageOptions.withMimeType("text/plain")), - ).get - )) - case CopyCachedOutputs => List.empty - } - } -} diff --git a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/callcaching/BcsCacheHitDuplicationStrategy.scala b/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/callcaching/BcsCacheHitDuplicationStrategy.scala deleted file mode 100644 index 55ba639d6fd..00000000000 --- a/supportedBackends/bcs/src/main/scala/cromwell/backend/impl/bcs/callcaching/BcsCacheHitDuplicationStrategy.scala +++ /dev/null @@ -1,6 +0,0 @@ -package cromwell.backend.impl.bcs.callcaching - -sealed trait BcsCacheHitDuplicationStrategy - -case object CopyCachedOutputs extends BcsCacheHitDuplicationStrategy -case object UseOriginalCachedOutputs extends BcsCacheHitDuplicationStrategy diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsClusterIdOrConfigurationSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsClusterIdOrConfigurationSpec.scala deleted file mode 100644 index b57fa60344b..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsClusterIdOrConfigurationSpec.scala +++ /dev/null @@ -1,106 +0,0 @@ -package cromwell.backend.impl.bcs - -import org.scalatest.prop.Tables.Table -import org.scalatest.prop.TableDrivenPropertyChecks._ -import org.scalatest.TryValues._ - -import scala.util.Failure - - -class BcsClusterIdOrConfigurationSpec extends BcsTestUtilSpec { - behavior of s"BcsClusterIdOrConfiguration" - - val clusterIdTable = Table( - ("unparsed", "parsed"), - ("cls-xxxx", Option("cls-xxxx")), - ("job-xxxx", None) - ) - - it should "parse correct cluster id" in { - forAll(clusterIdTable) { (unparsed, parsed) => - BcsClusterIdOrConfiguration.idPattern.findFirstIn(unparsed) shouldEqual(parsed) - } - } - - val resourceTypeTable = Table( - ("unparsed", "parsed"), - ("OnDemand", Option("OnDemand")), - ("Spot", Option("Spot")), - ("Other", None) - ) - - it should "parse correct resource type" in { - forAll(resourceTypeTable) { (unparsed, parsed) => - BcsClusterIdOrConfiguration.resourceTypePattern.findFirstIn(unparsed) shouldEqual(parsed) - } - } - - - val instanceTypeTable = Table( - ("unparsed", "parsed"), - ("ecs.s1.large", Option("ecs.s1.large")), - ("bcs.s1.large", Option("bcs.s1.large")) - ) - - it should "parse correct instance type" in { - forAll(instanceTypeTable) { (unparsed, parsed) => - BcsClusterIdOrConfiguration.instanceTypePattern.findFirstIn(unparsed) shouldEqual parsed - } - } - - val spotStrategyTable = Table( - ("unparsed", "parsed"), - ("SpotWithPriceLimit", Option("SpotWithPriceLimit")), - ("SpotAsPriceGo", Option("SpotAsPriceGo")) - ) - - - it should "parse correct spot strategy" in { - forAll(spotStrategyTable) { (unparsed, parsed) => - BcsClusterIdOrConfiguration.spotStrategyPattern.findFirstIn(unparsed) shouldEqual parsed - } - } - - val spotPriceLimitTable = Table( - ("unparsed", "parsed"), - ("1.0", Option(1.0.toFloat)), - ("0.1", Option(0.1.toFloat)), - ("0.12", Option(0.12.toFloat)), - ("0.123", Option(0.123.toFloat)) - ) - - it should "parse correct spot price limit" in { - 
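    // findFirstIn yields the matched substring, if any, so the expected values
    // in the table are simply that match converted with toFloat.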
forAll(spotPriceLimitTable) { (unparsed, parsed) => - BcsClusterIdOrConfiguration.spotPriceLimitPattern.findFirstIn(unparsed) map {limit => limit.toFloat} shouldEqual parsed - } - } - - val validClusterInfoTable = Table( - ("unparsed", "parsed"), - ("cls-id", Left("cls-id")), - ("OnDemand ecs.s1.large img-test", Right(AutoClusterConfiguration("OnDemand", "ecs.s1.large", "img-test"))), - ("OnDemand ecs.s1.large img-test cls-test", Right(AutoClusterConfiguration("OnDemand", "ecs.s1.large", "img-test", clusterId = Option("cls-test")))), - ("ecs.s1.large img-test", Right(AutoClusterConfiguration("OnDemand", "ecs.s1.large", "img-test"))), - ("ecs.s1.large img-test cls-test", Right(AutoClusterConfiguration("OnDemand", "ecs.s1.large", "img-test", clusterId = Option("cls-test")))), - ("Spot ecs.s1.large img-test SpotWithPriceLimit 0.1", Right(AutoClusterConfiguration("Spot", "ecs.s1.large", "img-test", Option("SpotWithPriceLimit"), Option(0.1.toFloat)))), - ("Spot ecs.s1.large img-test SpotWithPriceLimit 0.1 cls-test", Right(AutoClusterConfiguration("Spot", "ecs.s1.large", "img-test", Option("SpotWithPriceLimit"), Option(0.1.toFloat), Option("cls-test")))), - ("Spot ecs.s1.large img-test SpotAsPriceGo 0.1", Right(AutoClusterConfiguration("Spot", "ecs.s1.large", "img-test", Option("SpotAsPriceGo"), Option(0.1.toFloat)))), - ("Spot ecs.s1.large img-test SpotAsPriceGo 0.1 cls-test", Right(AutoClusterConfiguration("Spot", "ecs.s1.large", "img-test", Option("SpotAsPriceGo"), Option(0.1.toFloat), Option("cls-test")))), - - ) - - - it should "parse correct cluster id or cluster configuration" in { - forAll(validClusterInfoTable) { (unparsed, parsed) => - BcsClusterIdOrConfiguration.parse(unparsed).success.value shouldEqual parsed - } - } - - val invalidClusterInfos = List("OnDemand", "", "OnDemand other", "other ecs.s1.large") - - invalidClusterInfos foreach { unparsed => - it should s"throw when parsing $unparsed" in { - BcsClusterIdOrConfiguration.parse(unparsed) shouldBe a [Failure[_]] - } - } -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsConfigurationSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsConfigurationSpec.scala deleted file mode 100644 index ad32eec7cce..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsConfigurationSpec.scala +++ /dev/null @@ -1,48 +0,0 @@ -package cromwell.backend.impl.bcs - -import com.typesafe.config.ConfigValueFactory -import cromwell.backend.impl.bcs.callcaching.UseOriginalCachedOutputs - -class BcsConfigurationSpec extends BcsTestUtilSpec { - behavior of "BcsConfiguration" - type ValueOrDelete = Either[Boolean, AnyRef] - - def backendConfiguration = BcsTestUtilSpec.BcsBackendConfigurationDescriptor - def defaultBackendConfig = BcsTestUtilSpec.BcsBackendConfigurationDescriptor.backendConfig - - it should "have correct bcs region" in { - val region = "cn-hangzhou" - val configs = Map("region" -> Right(region)) - val conf = withConfig(configs) - conf.bcsRegion shouldEqual Some(region) - } - - it should "have correct bcs access id and key" in { - val id = "test-access-id" - val key = "test-access-key" - val configs = Map("access-id" -> Right(id), "access-key" -> Right(key)) - val conf = withConfig(configs) - conf.bcsAccessId shouldEqual Some(id) - conf.bcsAccessKey shouldEqual Some(key) - } - - it should "have correct bcs callcaching strategy" in { - val region = "cn-hangzhou" - val configs = Map("region" -> Right(region)) - val conf = withConfig(configs) - 
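    // The shared test config in BcsTestUtilSpec sets
    // filesystems.oss.caching.duplication-strategy = "reference", which the
    // configuration layer resolves to UseOriginalCachedOutputs.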
conf.duplicationStrategy shouldEqual UseOriginalCachedOutputs - } - - - private def withConfig(configs: Map[String, ValueOrDelete]) = { - var descriptor = BcsTestUtilSpec.BcsBackendConfigurationDescriptor.copy() - for ((key, value) <- configs) { - value match { - case Left(_) => descriptor = BcsTestUtilSpec.BcsBackendConfigurationDescriptor.copy(backendConfig = descriptor.backendConfig.withoutPath(key)) - case Right(v) => descriptor = BcsTestUtilSpec.BcsBackendConfigurationDescriptor.copy(backendConfig = descriptor.backendConfig.withValue(key, ConfigValueFactory.fromAnyRef(v))) - } - } - new BcsConfiguration(descriptor) - } - -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsDiskSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsDiskSpec.scala deleted file mode 100644 index 98f6b1b3449..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsDiskSpec.scala +++ /dev/null @@ -1,36 +0,0 @@ -package cromwell.backend.impl.bcs - -import org.scalatest.prop.Tables.Table -import org.scalatest.prop.TableDrivenPropertyChecks._ -import org.scalatest.TryValues._ - -import scala.util.Failure - -class BcsDiskSpec extends BcsTestUtilSpec { - behavior of s"BcsDisk" - - val validDiskTable = Table( - ("unparsed", "parsed"), - ("cloud 40", BcsSystemDisk("cloud", 40)), - ("cloud 200 /home/inputs/", BcsDataDisk("cloud", 200, "/home/inputs/")) - ) - - it should "parse correct disk" in { - forAll(validDiskTable) { (unparsed, parsed)=> - BcsDisk.parse(unparsed).success.value shouldEqual(parsed) - } - } - - val invalidDiskTable = List( - "", - "cloud", - "40", - "cloud 40GB", - "cloud /home/inputs/", - "cloud /home/inputs 40" - ) - - invalidDiskTable foreach { unparsed => - BcsDisk.parse(unparsed) shouldBe a [Failure[_]] - } -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsDockerSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsDockerSpec.scala deleted file mode 100644 index 1a3217111b5..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsDockerSpec.scala +++ /dev/null @@ -1,22 +0,0 @@ -package cromwell.backend.impl.bcs - -import org.scalatest.prop.Tables.Table -import org.scalatest.prop.TableDrivenPropertyChecks._ -import org.scalatest.TryValues._ - - -class BcsDockerSpec extends BcsTestUtilSpec { - behavior of s"BcsDocker" - - val validDockerTable = Table( - ("unparsed", "parsed"), - ("ubuntu/latest oss://bcs-reg/ubuntu/", BcsDockerWithPath("ubuntu/latest", "oss://bcs-reg/ubuntu/")), - ("ubuntu/latest", BcsDockerWithoutPath("ubuntu/latest")) - ) - - it should "parse correct docker configuration" in { - forAll(validDockerTable) { (unparsed, parsed) => - BcsDocker.parse(unparsed).success.value shouldEqual(parsed) - } - } -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobPathsSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobPathsSpec.scala deleted file mode 100644 index 15148f39a35..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobPathsSpec.scala +++ /dev/null @@ -1,25 +0,0 @@ -package cromwell.backend.impl.bcs - -import common.mock.MockSugar -import cromwell.filesystems.oss.OssPath -import org.mockito.Mockito._ - -class BcsJobPathsSpec extends BcsTestUtilSpec with MockSugar { - behavior of s"BcsJobPathsSpec" - - var root: OssPath = mockPathBuilder.build("oss://bcs-test/root/").getOrElse(throw new IllegalArgumentException()) - - var 
workflowPath: BcsWorkflowPaths = { - val workflowPaths = mock[BcsWorkflowPaths] - - when(workflowPaths.workflowRoot).thenReturn(root) - workflowPaths - } - - def name = "test" - - it should "have right package name" in { - val jobPath = BcsJobPaths(workflowPath, jobKey) - jobPath.workerPath shouldEqual jobPath.callRoot.resolve(s"${jobPath.workerFileName}") - } -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobSpec.scala deleted file mode 100644 index ad4ec85b64f..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsJobSpec.scala +++ /dev/null @@ -1,137 +0,0 @@ -package cromwell.backend.impl.bcs - -import com.aliyuncs.batchcompute.main.v20151111.BatchComputeClient -import com.aliyuncs.batchcompute.pojo.v20151111.TaskDescription -import common.mock.MockSugar -import wom.values._ - - -class BcsJobSpec extends BcsTestUtilSpec with MockSugar { - - behavior of s"BcsJob" - - val mockBcsClient = mock[BatchComputeClient] - val name = "cromwell" - val description = name - val command = "python main.py" - val packagePath = mockPathBuilder.build("oss://bcs-test/worker.tar.gz").get - val mounts = Seq.empty[BcsMount] - val envs = Map.empty[String, String] - - it should "have correct name and other basic info" in { - val job = withRuntime() - job.jobDesc.getName shouldEqual name - job.jobDesc.getDescription shouldEqual description - job.jobDesc.getType shouldEqual "DAG" - - val task = taskWithRuntime() - task.getParameters.getCommand.getCommandLine shouldEqual command - task.getParameters.getCommand.getPackagePath shouldEqual packagePath.pathAsString - - } - - it should "have correct auto release option" in { - val runtime = Map("autoReleasejob" -> WomBoolean(false)) - withRuntime(runtime).jobDesc.isAutoRelease shouldBe false - } - - it should "have correct timeout" in { - val timeout = 3000 - val runtime = Map("timeout" -> WomInteger(timeout)) - taskWithRuntime(runtime).getTimeout shouldEqual timeout - } - - it should "have correct mounts" in { - val src = "oss://bcs-job/dir/" - val dest = "/home/inputs/" - val writeSupport = false - val runtime = Map("mounts" -> WomString(s"$src $dest $writeSupport")) - taskWithRuntime(runtime).getMounts().getEntries should have size(1) - taskWithRuntime(runtime).getMounts().getEntries.get(0).getSource shouldBe src - taskWithRuntime(runtime).getMounts().getEntries.get(0).getDestination shouldBe dest - taskWithRuntime(runtime).getMounts().getEntries.get(0).isWriteSupport shouldBe writeSupport - } - - it should "have correct cluster id" in { - val clusterId = "cls-bcs" - val runtime = Map("cluster" -> WomString(clusterId)) - taskWithRuntime(runtime).getClusterId shouldEqual clusterId - } - - it should "have correct docker option" in { - val dockerImage = "ubuntu/latest" - val dockerPath = "oss://bcs-reg/ubuntu/".toLowerCase() - val runtime = Map("dockerTag" -> WomString(s"$dockerImage $dockerPath")) - taskWithRuntime(runtime).getParameters.getCommand.getEnvVars.get(BcsJob.BcsDockerImageEnvKey) shouldEqual null - taskWithRuntime(runtime).getParameters.getCommand.getEnvVars.get(BcsJob.BcsDockerPathEnvKey) shouldEqual null - } - - it should "have correct auto cluster configuration" in { - val resourceType = "Spot" - val instanceType = "ecs.c1.large" - val imageId = "img-centos" - val spotStrategy = "SpotWithPriceLimit" - val spotPriceLimit = 0.12 - val cluster = s"$resourceType $instanceType $imageId $spotStrategy 
$spotPriceLimit" - val imageIdForCallCaching = "img-ubuntu-vpc" - val reserveOnFail = true - val cidr = "172.16.16.0/20" - val vpcId = "vpc-test" - val systemDiskType = "cloud" - val systemDiskSize = 40 - val dataDiskType = "cloud_efficiency" - val dataDiskSize = 250 - val dataDiskMountPoint = "/home/data/" - val userDataKey = "key" - val userDataValue = "value" - - val runtime = Map( - "cluster" -> WomString(cluster), - "reserveOnFail" -> WomBoolean(reserveOnFail), - "vpc" -> WomString(s"$cidr $vpcId"), - "systemDisk" -> WomString(s"$systemDiskType $systemDiskSize"), - "dataDisk" -> WomString(s"$dataDiskType $dataDiskSize $dataDiskMountPoint"), - "userData" -> WomString(s"$userDataKey $userDataValue"), - "imageId" -> WomString(s"$imageIdForCallCaching") - ) - - val task = taskWithRuntime(runtime) - a [NullPointerException] should be thrownBy task.getClusterId.isEmpty - - val autoCluster = task.getAutoCluster - autoCluster.isReserveOnFail shouldEqual reserveOnFail - autoCluster.getImageId shouldEqual imageIdForCallCaching - autoCluster.getResourceType shouldEqual resourceType - autoCluster.getInstanceType shouldEqual instanceType - autoCluster.getSpotStrategy shouldEqual spotStrategy - autoCluster.getSpotPriceLimit shouldEqual spotPriceLimit.toFloat - - val vpc = autoCluster.getConfigs.getNetworks.getVpc - vpc.getVpcId shouldEqual vpcId - vpc.getCidrBlock shouldEqual cidr - - val systemDisk = autoCluster.getConfigs.getDisks.getSystemDisk - systemDisk.getType shouldEqual systemDiskType - systemDisk.getSize shouldEqual systemDiskSize - - val dataDisk = autoCluster.getConfigs.getDisks.getDataDisk - dataDisk.getType shouldEqual dataDiskType - dataDisk.getSize shouldEqual dataDiskSize - dataDisk.getMountPoint shouldEqual dataDiskMountPoint - - val userData = autoCluster.getUserData - userData.get(userDataKey) shouldEqual userDataValue - } - - - private def withRuntime(runtime: Map[String, WomValue] = Map.empty[String, WomValue]): BcsJob = { - val runtimeAttributes = createBcsRuntimeAttributes(runtime) - BcsJob(name, description, command, packagePath, runtimeAttributes.mounts.getOrElse(mounts), envs, runtimeAttributes, None, None, mockBcsClient) - } - - private def taskWithRuntime(runtime: Map[String, WomValue] = Map.empty[String, WomValue]): TaskDescription = { - val job = withRuntime(runtime) - job.jobDesc.getDag.getTasks.get("cromwell") - } - -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsMountSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsMountSpec.scala deleted file mode 100644 index c27d6488965..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsMountSpec.scala +++ /dev/null @@ -1,60 +0,0 @@ -package cromwell.backend.impl.bcs - -import org.scalatest.TryValues._ - -class BcsMountSpec extends BcsTestUtilSpec { - behavior of s"BcsMountSpec" - val ossObject = "oss://bcs-test/bcs-dir/" - val localFile = "/home/admin/local-dir/" - - it should "be an input mount if src starts with oss://" in { - var writeSupport = true - var entryString = s"$ossObject $localFile $writeSupport" - var entry = BcsMount.parse(entryString).success.value - - - entry shouldBe a [BcsInputMount] - BcsMount.toString(entry.src) shouldEqual ossObject - BcsMount.toString(entry.dest) shouldEqual localFile - entry.writeSupport shouldEqual writeSupport - - writeSupport = false - - entryString = s"$ossObject $localFile $writeSupport" - entry = BcsMount.parse(entryString).success.value - entry shouldBe a [BcsInputMount] - 
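    // src and dest are Either[Path, String] values; BcsMount.toString renders
    // either side as a plain string so it can be compared directly here.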
BcsMount.toString(entry.src) shouldEqual ossObject - BcsMount.toString(entry.dest) shouldEqual localFile - entry.writeSupport shouldEqual writeSupport - } - - it should "be an output mount if dest starts with oss://" in { - var writeSupport = true - var entryString = s"$localFile $ossObject $writeSupport" - var entry = BcsMount.parse(entryString).success.value - - - entry shouldBe a [BcsOutputMount] - BcsMount.toString(entry.src) shouldEqual localFile - BcsMount.toString(entry.dest) shouldEqual ossObject - entry.writeSupport shouldEqual writeSupport - - writeSupport = false - - entryString = s"$localFile $ossObject $writeSupport" - entry = BcsMount.parse(entryString).success.value - entry shouldBe a [BcsOutputMount] - BcsMount.toString(entry.src) shouldEqual localFile - BcsMount.toString(entry.dest) shouldEqual ossObject - entry.writeSupport shouldEqual writeSupport - } - - it should "throw if src and dest are both local files or oss files" in { - val writeSupport = true - var entryString = s"$localFile $localFile $writeSupport" - a [UnsupportedOperationException] should be thrownBy BcsMount.parse(entryString).failure.get - - entryString = s"$ossObject $ossObject $writeSupport" - a [UnsupportedOperationException] should be thrownBy BcsMount.parse(entryString).failure.get - } -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsRuntimeAttributesSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsRuntimeAttributesSpec.scala deleted file mode 100644 index 012c6a15b3b..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsRuntimeAttributesSpec.scala +++ /dev/null @@ -1,148 +0,0 @@ -package cromwell.backend.impl.bcs - -import wom.values._ - -class BcsRuntimeAttributesSpec extends BcsTestUtilSpec { - behavior of "BcsRuntimeAttributes" - - it should "build correct default runtime attributes from config string" in { - val runtime = Map.empty[String, WomValue] - val defaults = createBcsRuntimeAttributes(runtime) - defaults shouldEqual expectedRuntimeAttributes - } - - it should "parse dockerTag without docker path" in { - val runtime = Map("dockerTag" -> WomString("ubuntu/latest")) - val expected = expectedRuntimeAttributes.copy(dockerTag = Option(BcsDockerWithoutPath("ubuntu/latest"))) - createBcsRuntimeAttributes(runtime) shouldEqual(expected) - } - - it should "parse dockerTag with path" in { - val runtime = Map("dockerTag" -> WomString("centos/latest oss://bcs-dir/registry/")) - val expected = expectedRuntimeAttributes.copy(dockerTag = Option(BcsDockerWithPath("centos/latest", "oss://bcs-dir/registry/"))) - createBcsRuntimeAttributes(runtime) shouldEqual(expected) - } - - it should "parse dockerTag fail if an empty string value" in { - val runtime = Map("dockerTag" -> WomString("")) - an [Exception] should be thrownBy createBcsRuntimeAttributes(runtime) - } - - it should "parse docker" in { - val runtime = Map("docker" -> WomString("registry.cn-beijing.aliyuncs.com/test/testubuntu:0.2")) - val expected = expectedRuntimeAttributes.copy(docker = Option(BcsDockerWithoutPath("registry.cn-beijing.aliyuncs.com/test/testubuntu:0.2"))) - createBcsRuntimeAttributes(runtime) shouldEqual(expected) - } - - it should "parse correct user data" in { - val runtime = Map("userData" -> WomString("key value1")) - val expected = expectedRuntimeAttributes.copy(userData = Option(Vector(BcsUserData("key", "value1")))) - createBcsRuntimeAttributes(runtime) shouldEqual(expected) - } - - it should "throw if user data is invalid" in { - 
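    // "keyvalue" has no whitespace separator, so BcsUserData's "key value"
    // pattern cannot match and building the runtime attributes throws.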
val runtime = Map("userData" -> WomString("keyvalue")) - an [Exception] should be thrownBy createBcsRuntimeAttributes(runtime) - } - - it should "parse correct input mount" in { - val runtime = Map("mounts" -> WomString("oss://bcs-dir/bcs-file /home/inputs/input_file false")) - val expected = expectedRuntimeAttributes.copy(mounts = Option(Vector(BcsInputMount(Left(mockPathBuilder.build("oss://bcs-dir/bcs-file").get), Right("/home/inputs/input_file"), false)))) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "parse correct out mount" in { - val runtime = Map("mounts" -> WomString("/home/outputs/ oss://bcs-dir/outputs/ true")) - val expected = expectedRuntimeAttributes.copy(mounts = Option(Vector(BcsOutputMount(Right("/home/outputs/"), Left(mockPathBuilder.build("oss://bcs-dir/outputs/").get), true)))) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "throw if mounts is invalid" in { - val runtime = Map("mounts" -> WomString("invalid mounts")) - an [Exception] should be thrownBy createBcsRuntimeAttributes(runtime) - } - - it should "parse correct cluster id" in { - val runtime = Map("cluster" -> WomString("cls-1")) - val expected = expectedRuntimeAttributes.copy(cluster = Option(Left("cls-1"))) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "parse correct ondemand auto cluster configuration" in { - val runtime = Map("cluster" -> WomString("OnDemand ecs.s1.large img-ubuntu")) - val expected = expectedRuntimeAttributes.copy(cluster = Option(Right(AutoClusterConfiguration("OnDemand", "ecs.s1.large", "img-ubuntu")))) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "parse correct spot auto cluster configuration" in { - val runtime = Map("cluster" -> WomString("Spot ecs.s1.large img-ubuntu")) - val expected = expectedRuntimeAttributes.copy(cluster = Option(Right(AutoClusterConfiguration("Spot", "ecs.s1.large", "img-ubuntu")))) - createBcsRuntimeAttributes(runtime) shouldEqual expected - - } - - it should "parse correct spot auto cluster price option" in { - val runtime = Map("cluster" -> WomString("Spot ecs.s1.large img-ubuntu SpotWithPriceLimit 0.1")) - val expected = expectedRuntimeAttributes.copy(cluster = Option(Right(AutoClusterConfiguration("Spot", "ecs.s1.large", "img-ubuntu", Option("SpotWithPriceLimit"), Some(0.1.toFloat))))) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "parse correct vpc cidr block" in { - val runtime = Map("vpc" -> WomString("172.16.16.0/20")) - val expected = expectedRuntimeAttributes.copy(vpc = Option(BcsVpcConfiguration(Option("172.16.16.0/20")))) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "parse correct vpc id" in { - val runtime = Map("vpc" -> WomString("vpc-xxxx")) - val expected = expectedRuntimeAttributes.copy(vpc = Option(BcsVpcConfiguration(vpcId = Option("vpc-xxxx")))) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "parse correct system disk" in { - val runtime = Map("systemDisk" -> WomString("cloud_efficiency 250")) - val expected = expectedRuntimeAttributes.copy(systemDisk = Option(BcsSystemDisk("cloud_efficiency", 250))) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "throw when parsing invalid system disk" in { - val runtime = Map("systemDisk" -> WomString("cloud_efficiency 250 /home/data/")) - an [Exception] should be thrownBy createBcsRuntimeAttributes(runtime) - } - - it should "parse correct data disk" in { - val 
runtime = Map("dataDisk" -> WomString("cloud 400 /home/data/")) - val expected = expectedRuntimeAttributes.copy(dataDisk = Option(BcsDataDisk("cloud", 400, "/home/data/"))) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "throw when parsing invalid data disk" in { - val runtime = Map("dataDisk" -> WomString("cloud_efficiency 250")) - an [Exception] should be thrownBy createBcsRuntimeAttributes(runtime) - } - - it should "parse correct reserve on fail option" in { - val runtime = Map("reserveOnFail" -> WomBoolean(false)) - val expected = expectedRuntimeAttributes.copy(reserveOnFail = Option(false)) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "parse correct auto release option" in { - val runtime = Map("autoReleaseJob" -> WomBoolean(false)) - val expected = expectedRuntimeAttributes.copy(autoReleaseJob = Option(false)) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "parse correct verbose option" in { - val runtime = Map("verbose" -> WomBoolean(false)) - val expected = expectedRuntimeAttributes.copy(verbose = Option(false)) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - - it should "parse correct time out" in { - val runtime = Map("timeout" -> WomInteger(3000)) - val expected = expectedRuntimeAttributes.copy(timeout = Option(3000)) - createBcsRuntimeAttributes(runtime) shouldEqual expected - } - -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsTestUtilSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsTestUtilSpec.scala deleted file mode 100644 index 61da2f6aba0..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsTestUtilSpec.scala +++ /dev/null @@ -1,183 +0,0 @@ -package cromwell.backend.impl.bcs - -import com.typesafe.config.{Config, ConfigFactory} -import common.collections.EnhancedCollections._ -import cromwell.backend.BackendSpec.buildWdlWorkflowDescriptor -import cromwell.backend.validation.ContinueOnReturnCodeSet -import cromwell.backend.{BackendConfigurationDescriptor, BackendJobDescriptorKey, BackendWorkflowDescriptor, RuntimeAttributeDefinition} -import cromwell.core.{TestKitSuite, WorkflowOptions} -import cromwell.filesystems.oss.OssPathBuilder -import cromwell.filesystems.oss.nio.DefaultOssStorageConfiguration -import cromwell.util.SampleWdl -import org.scalatest.BeforeAndAfter -import org.scalatest.flatspec.AnyFlatSpecLike -import org.scalatest.matchers.should.Matchers -import org.slf4j.helpers.NOPLogger -import spray.json.{JsObject, JsString} -import wom.values.WomValue - -object BcsTestUtilSpec { - - val DefaultRunAttributesString: String = - """ - |default-runtime-attributes { - | failOnStderr: false - | continueOnReturnCode: 0 - | cluster: "cls-mycluster" - | mounts: "oss://bcs-bucket/bcs-dir/ /home/inputs/ false" - | dockerTag: "ubuntu/latest oss://bcs-reg/ubuntu/" - | docker: "registry.cn-beijing.aliyuncs.com/test/testubuntu:0.1" - | userData: "key value" - | reserveOnFail: true - | autoReleaseJob: true - | verbose: false - | systemDisk: "cloud 50" - | dataDisk: "cloud 250 /home/data/" - | timeout: 3000 - | vpc: "192.168.0.0/16 vpc-xxxx" - | tag: "jobTag" - | imageId: "img-ubuntu-vpc" - | isv: "test-isv" - |} - """.stripMargin - - val BcsBackendConfigString: String = - s""" - |root = "oss://your-bucket/cromwell-exe" - |dockerRoot = "/cromwell-executions" - |region = "" - | - |access-id = "" - |access-key = "" - |security-token = "" - | - |filesystems { - | oss { - | auth { 
- | endpoint = "" - | access-id = "" - | access-key = "" - | security-token = "" - | } - | caching { - | duplication-strategy = "reference" - | } - | } - |} - | - |$DefaultRunAttributesString - | - |""".stripMargin - - val BcsBackendConfigWithoutDefaultString: String = - s""" - |root = "oss://your-bucket/cromwell-exe" - |dockerRoot = "/cromwell-executions" - |region = "" - | - |access-id = "" - |access-key = "" - |security-token = "" - | - |filesystems { - | oss { - | auth { - | endpoint = "" - | access-id = "" - | access-key = "" - | security-token = "" - | } - | } - |} - | - |""".stripMargin - - val BcsGlobalConfigString: String = - s""" - |backend { - | default = "BCS" - | providers { - | BCS { - | actor-factory = "cromwell.backend.impl.bcs.BcsBackendLifecycleActorFactory" - | config { - | $BcsBackendConfigString - | } - | } - | } - |} - | - |""".stripMargin - - val BcsBackendConfig: Config = ConfigFactory.parseString(BcsBackendConfigString) - val BcsGlobalConfig: Config = ConfigFactory.parseString(BcsGlobalConfigString) - val BcsBackendConfigWithoutDefault: Config = ConfigFactory.parseString(BcsBackendConfigWithoutDefaultString) - val BcsBackendConfigurationDescriptor: BackendConfigurationDescriptor = - BackendConfigurationDescriptor(BcsBackendConfig, BcsGlobalConfig) - val BcsBackendConfigurationWithoutDefaultDescriptor: BackendConfigurationDescriptor = - BackendConfigurationDescriptor(BcsBackendConfigWithoutDefault, BcsGlobalConfig) - val EmptyWorkflowOption: WorkflowOptions = WorkflowOptions.fromMap(Map.empty).get -} - -trait BcsTestUtilSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with BeforeAndAfter { - - before { - BcsMount.pathBuilders = List(mockPathBuilder) - } - - val jobId = "test-bcs-job" - val mockOssConf: DefaultOssStorageConfiguration = - DefaultOssStorageConfiguration("oss.aliyuncs.com", "test-id", "test-key") - lazy val mockPathBuilder: OssPathBuilder = OssPathBuilder(mockOssConf) - val mockPathBuilders = List(mockPathBuilder) - lazy val workflowDescriptor: BackendWorkflowDescriptor = buildWdlWorkflowDescriptor( - SampleWdl.HelloWorld.workflowSource(), - inputFileAsJson = Option(JsObject(SampleWdl.HelloWorld.rawInputs.safeMapValues(JsString.apply)).compactPrint) - ) - lazy val jobKey: BackendJobDescriptorKey = { - val call = workflowDescriptor.callable.taskCallNodes.head - BackendJobDescriptorKey(call, None, 1) - } - - - val expectedContinueOnReturn: ContinueOnReturnCodeSet = ContinueOnReturnCodeSet(Set(0)) - val expectedDockerTag: Option[BcsDockerWithPath] = - Option(BcsDockerWithPath("ubuntu/latest", "oss://bcs-reg/ubuntu/")) - val expectedDocker: Option[BcsDockerWithoutPath] = - Option(BcsDockerWithoutPath("registry.cn-beijing.aliyuncs.com/test/testubuntu:0.1")) - val expectedFailOnStderr = false - val expectedUserData: Option[Vector[BcsUserData]] = Option(Vector(new BcsUserData("key", "value"))) - val expectedMounts: Option[Vector[BcsInputMount]] = - Option(Vector( - BcsInputMount( - src = Left(mockPathBuilder.build("oss://bcs-bucket/bcs-dir/").get), - dest = Right("/home/inputs/"), - writeSupport = false, - ) - )) - val expectedCluster: Option[Left[String, Nothing]] = Option(Left("cls-mycluster")) - val expectedImageId: Option[String] = Option("img-ubuntu-vpc") - val expectedSystemDisk: Option[BcsSystemDisk] = Option(BcsSystemDisk("cloud", 50)) - val expectedDataDisk: Option[BcsDataDisk] = Option(BcsDataDisk("cloud", 250, "/home/data/")) - - val expectedReserveOnFail: Option[Boolean] = Option(true) - val expectedAutoRelease: Option[Boolean] = 
Option(true) - val expectedTimeout: Option[Int] = Option(3000) - val expectedVerbose: Option[Boolean] = Option(false) - val expectedVpc: Option[BcsVpcConfiguration] = - Option(BcsVpcConfiguration(Option("192.168.0.0/16"), Option("vpc-xxxx"))) - val expectedTag: Option[String] = Option("jobTag") - val expectedIsv: Option[String] = Option("test-isv") - - - val expectedRuntimeAttributes = new BcsRuntimeAttributes(expectedContinueOnReturn, expectedDockerTag, expectedDocker, expectedFailOnStderr, expectedMounts, expectedUserData, expectedCluster, - expectedImageId, expectedSystemDisk, expectedDataDisk, expectedReserveOnFail, expectedAutoRelease, expectedTimeout, expectedVerbose, expectedVpc, expectedTag, expectedIsv) - - - protected def createBcsRuntimeAttributes(runtimeAttributes: Map[String, WomValue]): BcsRuntimeAttributes = { - val builder = BcsRuntimeAttributes.runtimeAttributesBuilder(BcsTestUtilSpec.BcsBackendConfigurationDescriptor.backendRuntimeAttributesConfig) - val default = RuntimeAttributeDefinition.addDefaultsToAttributes( - builder.definitions.toSet, BcsTestUtilSpec.EmptyWorkflowOption)(runtimeAttributes) - val validated = builder.build(default, NOPLogger.NOP_LOGGER) - BcsRuntimeAttributes(validated, BcsTestUtilSpec.BcsBackendConfigurationDescriptor.backendRuntimeAttributesConfig) - } -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsUserDataSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsUserDataSpec.scala deleted file mode 100644 index 6392f60dd35..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsUserDataSpec.scala +++ /dev/null @@ -1,18 +0,0 @@ -package cromwell.backend.impl.bcs - -import org.scalatest.TryValues._ - -class BcsUserDataSpec extends BcsTestUtilSpec { - behavior of s"BcsUserDataSpec" - - it should "work for right user data" in { - val key = "key" - val value = "value" - - val userData = BcsUserData.parse(s"$key $value") - userData.success.value.key shouldEqual key - userData.success.value.value shouldEqual value - - BcsUserData.parse(s"$key$value").isFailure shouldBe true - } -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsVpcConfigurationSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsVpcConfigurationSpec.scala deleted file mode 100644 index 94bc48ecbf2..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsVpcConfigurationSpec.scala +++ /dev/null @@ -1,23 +0,0 @@ -package cromwell.backend.impl.bcs - -import org.scalatest.prop.Tables.Table -import org.scalatest.prop.TableDrivenPropertyChecks._ -import org.scalatest.TryValues._ - -class BcsVpcConfigurationSpec extends BcsTestUtilSpec { - behavior of s"BcsVpcConfiguration" - - val validVpcTable = Table( - ("unparsed", "parsed"), - ("192.168.0.0/16", BcsVpcConfiguration(Some("192.168.0.0/16"), None)), - ("vpc-xxxx", BcsVpcConfiguration(None, Some("vpc-xxxx"))), - ("192.168.0.0/16 vpc-xxxx", BcsVpcConfiguration(Some("192.168.0.0/16"), Some("vpc-xxxx"))), - ("vpc-xxxx 192.168.0.0/16 ", BcsVpcConfiguration(Some("192.168.0.0/16"), Some("vpc-xxxx"))), - ) - - it should "parse correct vpc configuration" in { - forAll(validVpcTable) { (unparsed, parsed) => - BcsVpcConfiguration.parse(unparsed).success.value shouldEqual parsed - } - } -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsWorkflowPathsSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsWorkflowPathsSpec.scala 
deleted file mode 100644 index 4222055ab1a..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/BcsWorkflowPathsSpec.scala +++ /dev/null @@ -1,31 +0,0 @@ -package cromwell.backend.impl.bcs - -class BcsWorkflowPathsSpec extends BcsTestUtilSpec { - - behavior of s"BcsWorkflowPaths" - - it should "have correct input workflow mapping" in { - - import BcsTestUtilSpec._ - - val paths = BcsWorkflowPaths(workflowDescriptor, BcsBackendConfig, mockPathBuilders) - - val workflowInput = paths.getWorkflowInputMounts - workflowInput shouldBe a[BcsInputMount] - workflowInput.src shouldEqual(Left(paths.workflowRoot)) - BcsMount.toString(workflowInput.dest).startsWith(BcsJobPaths.BcsTempInputDirectory.pathAsString) shouldBe true - // DefaultPathBuilder always remove ending '/' from directory path. - BcsMount.toString(workflowInput.dest).endsWith(paths.workflowRoot.pathWithoutScheme.stripSuffix("/")) shouldBe true - } - - it should "have correct job paths" in { - - import BcsTestUtilSpec._ - - val paths = BcsWorkflowPaths(workflowDescriptor, BcsBackendConfig, mockPathBuilders) - - val jobPaths = paths.toJobPaths(paths, jobKey) - jobPaths shouldBe a [BcsJobPaths] - } - -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/RunStatusFactorySpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/RunStatusFactorySpec.scala deleted file mode 100644 index bb19a965971..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/RunStatusFactorySpec.scala +++ /dev/null @@ -1,34 +0,0 @@ -package cromwell.backend.impl.bcs - -import org.scalatest.TryValues._ - - - -final class RunStatusFactorySpec extends BcsTestUtilSpec { - behavior of s"RunStatusFactorySpec" - - private case class Status(str: String, - isRunningOrComplete: Boolean, - terminated: Boolean) - - strToClasses foreach { status => - it should behave like verifyStatus(status) - } - - def verifyStatus(status: Status) = { - it should s"have correct status: ${status.str}" in withClue(status.str) { - val s = RunStatusFactory.getStatus(jobId, status.str).success.value - s.status shouldEqual status.str - s.isRunningOrComplete shouldEqual status.isRunningOrComplete - s.isTerminated shouldEqual status.terminated - } - } - - private def strToClasses = Seq( - Status("Waiting", false, false), - Status("Running", true, false), - Status("Finished", true, true), - Status("Stopped", true, true), - Status("Failed", true, true) - ) -} diff --git a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/callcaching/BcsBackendCacheHitCopyingActorSpec.scala b/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/callcaching/BcsBackendCacheHitCopyingActorSpec.scala deleted file mode 100644 index dc06332aa22..00000000000 --- a/supportedBackends/bcs/src/test/scala/cromwell/backend/impl/bcs/callcaching/BcsBackendCacheHitCopyingActorSpec.scala +++ /dev/null @@ -1,83 +0,0 @@ -package cromwell.backend.impl.bcs.callcaching - - -import akka.actor.Props -import akka.testkit.TestActorRef -import com.typesafe.config.ConfigValueFactory -import common.mock.MockSugar -import cromwell.backend.impl.bcs.{BcsBackendInitializationData, BcsConfiguration, BcsRuntimeAttributes, BcsTestUtilSpec, BcsWorkflowPaths} -import cromwell.backend.standard.callcaching.StandardCacheHitCopyingActorParams -import cromwell.core.path.Path -import wom.values._ -import cromwell.backend.impl.bcs.BcsTestUtilSpec.BcsBackendConfig -import 
cromwell.backend.standard.callcaching.DefaultStandardCacheHitCopyingActorParams -import cromwell.core.simpleton.WomValueSimpleton -import cromwell.filesystems.oss.OssPath -import org.mockito.Mockito._ - -import scala.util.Try - - -class BcsBackendCacheHitCopyingActorSpec extends BcsTestUtilSpec with MockSugar { - behavior of "BcsBackendCacheHitCopyingActor" - type ValueOrDelete = Either[Boolean, AnyRef] - - private val workflowPaths = BcsWorkflowPaths(workflowDescriptor, BcsBackendConfig, mockPathBuilders) - - private def buildInitializationData(configuration: BcsConfiguration) = { - - val runtimeAttributesBuilder = BcsRuntimeAttributes.runtimeAttributesBuilder(BcsTestUtilSpec.BcsBackendConfigurationDescriptor.backendRuntimeAttributesConfig) - BcsBackendInitializationData(workflowPaths, runtimeAttributesBuilder, configuration, null) - } - - private def withConfig(configs: Map[String, ValueOrDelete]) = { - var descriptor = BcsTestUtilSpec.BcsBackendConfigurationDescriptor.copy() - for ((key, value) <- configs) { - value match { - case Left(_) => descriptor = BcsTestUtilSpec.BcsBackendConfigurationDescriptor.copy(backendConfig = descriptor.backendConfig.withoutPath(key)) - case Right(v) => descriptor = BcsTestUtilSpec.BcsBackendConfigurationDescriptor.copy(backendConfig = descriptor.backendConfig.withValue(key, ConfigValueFactory.fromAnyRef(v))) - } - } - new BcsConfiguration(descriptor) - } - - - private val cacheHitCopyingActorParams = { - val mockCacheHitCopyingActorParams = mock[DefaultStandardCacheHitCopyingActorParams] - val id = "test-access-id" - val key = "test-access-key" - val configs = Map("access-id" -> Right(id), "access-key" -> Right(key)) - val conf = withConfig(configs) - when(mockCacheHitCopyingActorParams.backendInitializationDataOption).thenReturn(Option(buildInitializationData(conf))) - mockCacheHitCopyingActorParams - } - - class TestableBcsCacheHitCopyingActor(params: StandardCacheHitCopyingActorParams) - extends BcsBackendCacheHitCopyingActor(params) { - - val id = "test-access-id" - val key = "test-access-key" - val configs = Map("access-id" -> Right(id), "access-key" -> Right(key)) - - def this() = { - this(cacheHitCopyingActorParams) - } - - override def getPath(str: String): Try[Path] = mockPathBuilder.build("oss://bcs-dir/outputs/") - override def destinationJobDetritusPaths: Map[String, Path] = Map("stdout" - -> mockPathBuilder.build("oss://my-bucket/cromwell_dir/wf_echo/14e5dcd2-0c94-4035-aa7b-b90d7008202c/call-echo/stdout.log").get) - } - - it should "process simpleton and detritus correctly" in { - val simpleton = WomValueSimpleton("txt_files", WomSingleFile("oss://my-bucket/cromwell_dir/wf_echo/14e5dcd2-0c94-4035-aa7b-b90d7008202c/call-echo/abc.log")) - val detritus = Map("stdout" -> "oss://my-bucket/cromwell_dir/wf_echo/14e5dcd2-0c94-4035-aa7b-b90d7008202c/call-echo/stdout.log") - val sourceCallRootPath: OssPath = mockPathBuilder.build("oss://bcs-test/root/abc.log").getOrElse(throw new IllegalArgumentException()) - - val props = Props(new TestableBcsCacheHitCopyingActor()) - val cacheHitActor = TestActorRef[TestableBcsCacheHitCopyingActor]( - props, "TestableBcsCacheHitCopyingActor") - - noException should be thrownBy cacheHitActor.underlyingActor.processSimpletons(List(simpleton), sourceCallRootPath) - noException should be thrownBy cacheHitActor.underlyingActor.processDetritus(detritus) - } -} From b432e640e6978030c8a111119a267bdfc564c36c Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Tue, 5 Jul 2022 14:08:26 -0400 Subject: [PATCH 44/58] BW-1312 Fix 
incorrect documentation (#6790) --- docs/Imports.md | 4 ++-- src/ci/bin/test.inc.sh | 18 ++++-------------- 2 files changed, 6 insertions(+), 16 deletions(-) diff --git a/docs/Imports.md b/docs/Imports.md index 8cf8d930123..f929ada5883 100644 --- a/docs/Imports.md +++ b/docs/Imports.md @@ -10,7 +10,7 @@ There are two types of resources that are supported in imports: *http(s)* and *f ```wdl import "http://mywdlrepository/my.wdl" as http_import1 -import "https://github.com/broadinstitute/cromwell/blob/master/engine/src/main/resources/3step.wdl" as http_import2 +import "https://raw.githubusercontent.com/broadinstitute/cromwell/master/engine/src/main/resources/3step.wdl" as http_import2 ``` To use a file-based import resource, provide a ZIP bundle of your resources and then use a path relative to that ZIP in your import statement. For example: @@ -30,7 +30,7 @@ Here's a complete example showing both http(s) and file-based imports workflow i _workflow.wdl_ ```wdl -import "https://github.com/broadinstitute/cromwell/blob/master/engine/src/main/resources/3step.wdl" as http_import +import "https://raw.githubusercontent.com/broadinstitute/cromwell/master/engine/src/main/resources/3step.wdl" as http_import import "imports/imported.wdl" as provided_import workflow my_workflow { diff --git a/src/ci/bin/test.inc.sh b/src/ci/bin/test.inc.sh index eb52ab58ad1..95820513dfe 100644 --- a/src/ci/bin/test.inc.sh +++ b/src/ci/bin/test.inc.sh @@ -158,8 +158,10 @@ cromwell::private::create_build_variables() { # will be the name of the branch targeted by the pull request, and for push builds it will be the name of the # branch. So, in case of push builds `git diff` will always return empty result. This is why we only use this short # circuiting logic for pull request builds - cromwell::private::set_variable_if_only_some_files_changed "^mkdocs.yml|^docs/|^CHANGELOG.md" "CROMWELL_BUILD_ONLY_DOCS_CHANGED" - cromwell::private::set_variable_if_only_some_files_changed "^src/ci/bin/testMetadataComparisonPython.sh|^scripts/" "CROMWELL_BUILD_ONLY_SCRIPTS_CHANGED" + + # PR #6790 disabled the conditional that skips tests for documentation-only PRs, because + # those PRs (and only those PRs) were uniformly failing tests with a nondescript error. + # https://broadinstitute.slack.com/archives/GHYJZ2ZE0/p1656625952888149?thread_ts=1656620572.975059&cid=GHYJZ2ZE0 case "${CROMWELL_BUILD_PROVIDER}" in "${CROMWELL_BUILD_PROVIDER_TRAVIS}") @@ -208,15 +210,9 @@ cromwell::private::create_build_variables() { # This allows quick sanity checks before starting PRs *and* publishing after merges into develop. if [[ "${travis_force_tests}" == "true" ]]; then CROMWELL_BUILD_RUN_TESTS=true - elif [[ "${CROMWELL_BUILD_ONLY_DOCS_CHANGED}" == "true" ]] && \ - [[ "${BUILD_TYPE}" != "checkPublish" ]]; then - CROMWELL_BUILD_RUN_TESTS=false elif [[ "${travis_minimal_tests}" == "true" ]] && \ [[ "${TRAVIS_EVENT_TYPE}" != "push" ]]; then CROMWELL_BUILD_RUN_TESTS=false - elif [[ "${CROMWELL_BUILD_ONLY_SCRIPTS_CHANGED}" == "true" ]] && \ - [[ "${BUILD_TYPE}" != "metadataComparisonPython" ]]; then - CROMWELL_BUILD_RUN_TESTS=false elif [[ "${TRAVIS_EVENT_TYPE}" == "push" ]] && \ [[ "${BUILD_TYPE}" != "sbt" ]]; then CROMWELL_BUILD_RUN_TESTS=false @@ -294,15 +290,9 @@ cromwell::private::create_build_variables() { # This allows quick sanity checks before starting PRs *and* publishing after merges into develop. 
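        # (Editor's illustrative note, an assumption rather than part of this patch: with the
        # changed-files short-circuits gone, the chain below gates test runs in precedence
        # order — forced tests first, then minimal tests on non-push builds, then push builds
        # restricted to the sbt BUILD_TYPE.)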
if [[ "${circle_force_tests}" == "true" ]]; then CROMWELL_BUILD_RUN_TESTS=true - elif [[ "${CROMWELL_BUILD_ONLY_DOCS_CHANGED}" == "true" ]] && \ - [[ "${BUILD_TYPE}" != "checkPublish" ]]; then - CROMWELL_BUILD_RUN_TESTS=false elif [[ "${circle_minimal_tests}" == "true" ]] && \ [[ "${CROMWELL_BUILD_EVENT}" != "push" ]]; then CROMWELL_BUILD_RUN_TESTS=false - elif [[ "${CROMWELL_BUILD_ONLY_SCRIPTS_CHANGED}" == "true" ]] && \ - [[ "${BUILD_TYPE}" != "metadataComparisonPython" ]]; then - CROMWELL_BUILD_RUN_TESTS=false elif [[ "${CROMWELL_BUILD_EVENT}" == "push" ]] && \ [[ "${BUILD_TYPE}" != "sbt" ]]; then CROMWELL_BUILD_RUN_TESTS=false From 3e9aec2922f8a4b039dc729a9fd454178e4fb3f8 Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Tue, 5 Jul 2022 20:10:48 -0400 Subject: [PATCH 45/58] BW-1228 Upgrade `protobuf-java` to non-vulnerable version (#6793) --- project/Dependencies.scala | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 0dbf9b4325f..3664f533ff7 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -79,6 +79,10 @@ object Dependencies { private val mockitoV = "3.11.2" private val mockserverNettyV = "5.11.2" private val mouseV = "1.0.10" + /* + Newer version 8.0.29 fails `Control characters should work with metadata` Centaur tests, has charset changes mentioned in release notes + https://dev.mysql.com/doc/relnotes/mysql/8.0/en/news-8-0-29.html#mysqld-8-0-29-charset + */ private val mysqlV = "8.0.28" private val nettyV = "4.1.72.Final" private val owlApiV = "5.1.19" @@ -689,6 +693,10 @@ object Dependencies { "org.bouncycastle" % "bcprov-jdk15on" % "1.70", ) + private val protobufJavaOverrides = List( + "com.google.protobuf" % "protobuf-java" % "3.21.2", + ) + /* If we use a version in one of our projects, that's the one we want all the libraries to use ...plus other groups of transitive dependencies shared across multiple projects @@ -702,5 +710,6 @@ object Dependencies { scalaCollectionCompatOverrides ++ asyncHttpClientOverrides ++ nimbusdsOverrides ++ - bouncyCastleOverrides + bouncyCastleOverrides ++ + protobufJavaOverrides } From f5d0921a6bc8aadd1ffde30ad510981ef6a2c164 Mon Sep 17 00:00:00 2001 From: Janet Gainer-Dewar Date: Thu, 7 Jul 2022 18:02:22 +0000 Subject: [PATCH 46/58] Update cromwell version from 81 to 82 --- project/Version.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Version.scala b/project/Version.scala index 557c47dd458..a31e3506cf7 100644 --- a/project/Version.scala +++ b/project/Version.scala @@ -5,7 +5,7 @@ import sbt._ object Version { // Upcoming release, or current if we're on a master / hotfix branch - val cromwellVersion = "81" + val cromwellVersion = "82" /** * Returns true if this project should be considered a snapshot. From 30874d95c4d03eb96f4c17a11ea9fa37bdbb59a0 Mon Sep 17 00:00:00 2001 From: Christian Freitas Date: Fri, 8 Jul 2022 15:27:14 -0400 Subject: [PATCH 47/58] BT-637 Upgrade GoogleCloudStorage to 2.9.2 (#6794) Updated GoogleCloudStorage to 2.9.2 --- project/Dependencies.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 3664f533ff7..fd0bb021bd9 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -43,7 +43,7 @@ object Dependencies { // BW-808 Pinning googleCloudNioV to this tried-and-true old version and quieting Scala Steward. // 0.121.2 is the most recent version currently known to work. 
private val googleCloudNioV = "0.61.0-alpha" // scala-steward:off - private val googleCloudStorageV = "2.1.10" + private val googleCloudStorageV = "2.9.2" private val googleGaxGrpcV = "2.12.2" // latest date via: https://mvnrepository.com/artifact/com.google.apis/google-api-services-genomics private val googleGenomicsServicesV2Alpha1ApiV = "v2alpha1-rev20210811-1.32.1" From d776727ac9cbacd169a87005b1c9d2c0ba5c49d4 Mon Sep 17 00:00:00 2001 From: Venkat Malladi Date: Mon, 11 Jul 2022 12:15:38 -0500 Subject: [PATCH 48/58] CROM-6893 Add in Azure into documentation as TES backend. (#6773) --- README.md | 2 +- docs/backends/Azure.md | 9 +++++++++ docs/backends/TES.md | 17 ++++++++++------- 3 files changed, 20 insertions(+), 8 deletions(-) create mode 100644 docs/backends/Azure.md diff --git a/README.md b/README.md index 419aeb42c5d..fac1541cf8e 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ Many users today run their WDL workflows in [Terra](https://app.terra.bio/), a m Users with specialized needs who wish to install and maintain their own Cromwell instances can [download](https://github.com/broadinstitute/cromwell/releases) a JAR or Docker image. The development team accepts reproducible bug reports from self-managed instances, but cannot feasibly provide direct support. -[Cromwell's backends](https://cromwell.readthedocs.io/en/stable/backends/Backends/) receive development resources proportional to user demand. The team is actively developing for Google Cloud and Microsoft Azure. Maintenance of other backends is primarily community-based. +[Cromwell's backends](https://cromwell.readthedocs.io/en/stable/backends/Backends/) receive development resources proportional to user demand. The team is actively developing for Google Cloud and Microsoft Azure (see [Cromwell on Azure](https://github.com/microsoft/CromwellOnAzure)). Maintenance of other backends is primarily community-based. Cromwell [supports](https://cromwell.readthedocs.io/en/stable/LanguageSupport/) the WDL workflow language. Cromwell version 80 and above no longer support CWL. diff --git a/docs/backends/Azure.md b/docs/backends/Azure.md new file mode 100644 index 00000000000..44461e0a1b6 --- /dev/null +++ b/docs/backends/Azure.md @@ -0,0 +1,9 @@ +**Azure** + +[Cromwell on Azure](https://github.com/microsoft/CromwellOnAzure) configures all Azure resources needed to run +workflows through Cromwell on the Azure cloud, and uses the [GA4GH TES](TES) backend for +orchestrating the tasks that create a workflow. + +Check out the [getting started guide](https://github.com/microsoft/CromwellOnAzure#Deploy-your-instance-of-Cromwell-on-Azure) for how to setup your environment. + +Have an existing WDL file that you want to run on Azure? [Modify your existing WDL with these adaptations for Azure](https://github.com/microsoft/CromwellOnAzure/blob/main/docs/change-existing-WDL-for-Azure.md/#How-to-modify-an-existing-WDL-file-to-run-on-Cromwell-on-Azure) diff --git a/docs/backends/TES.md b/docs/backends/TES.md index da13116e893..e19917ec126 100644 --- a/docs/backends/TES.md +++ b/docs/backends/TES.md @@ -21,7 +21,7 @@ echo $? > rc **Configuring** -Configuring the TES backend is straightforward; one must only provide the TES API endpoint for the service. +Configuring the TES backend is straightforward; one must only provide the TES API endpoint for the service. ```hocon backend { @@ -42,7 +42,7 @@ backend { **Supported File Systems** -Currently this backend only works with files on a Local or Shared File System. 
+Currently this backend only works with files on a Local or Shared File System. **Docker** @@ -51,22 +51,25 @@ This backend supports the following optional [Runtime Attributes](../RuntimeAttr * `docker`: Docker image to use such as "Ubuntu". * `dockerWorkingDir`: defines the working directory in the container. -**CPU, Memory and Disk** +**CPU, Memory and Disk** This backend supports CPU, memory and disk size configuration through the use of the following [Runtime Attributes](../RuntimeAttributes) and [Workflow Options](../wf_options/Overview/): -* `cpu` defines the amount of CPU to use. +* `cpu` defines the amount of CPU to use. * Type: Integer (ex: 4) -* `memory` defines the amount of memory to use. +* `memory` defines the amount of memory to use. * Type: String (ex: "4 GB" or "4096 MB") -* `disk` defines the amount of disk to use. +* `disk` defines the amount of disk to use. * Type: String (ex: "1 GB" or "1024 MB") -* `preemptible` defines whether or not to use preemptible VMs. +* `preemptible` defines whether or not to use preemptible VMs. * Type: Boolean (ex: "true" or "false") * Integers are accepted and will be converted to boolean (true if > 0) If they are not set, the TES backend may use default values. +**Azure** +[Azure](Azure) is an implementation of Cromwell that uses the TES interface for orchestrating the tasks on Azure. + **TESK** [TESK](https://github.com/EMBL-EBI-TSI/TESK) is an implementation of the TES interface that uses Kubernetes and FTP. From 0325524222be6e1af977aa355eb24fa566303ae4 Mon Sep 17 00:00:00 2001 From: Christian Freitas Date: Tue, 12 Jul 2022 09:25:04 -0400 Subject: [PATCH 49/58] BT-689 Restore and update cromwell.examples.conf (#6800) --- .../cromwell.examples.conf | 701 ++++++++++++++++++ 1 file changed, 701 insertions(+) create mode 100644 cromwell.example.backends/cromwell.examples.conf diff --git a/cromwell.example.backends/cromwell.examples.conf b/cromwell.example.backends/cromwell.examples.conf new file mode 100644 index 00000000000..49b676a2c7c --- /dev/null +++ b/cromwell.example.backends/cromwell.examples.conf @@ -0,0 +1,701 @@ +# This is a "default" Cromwell example that is intended for you you to start with +# and edit for your needs. Specifically, you will be interested to customize +# the configuration based on your preferred backend (see the backends section +# below in the file). For backend-specific examples for you to copy paste here, +# please see the cromwell.backend.examples folder in the repository. The files +# there also include links to online documentation (if it exists) + +# This line is required. It pulls in default overrides from the embedded cromwell +# `reference.conf` (in core/src/main/resources) needed for proper performance of cromwell. +include required(classpath("application")) + +# Cromwell HTTP server settings +webservice { + #port = 8000 + #interface = 0.0.0.0 + #binding-timeout = 5s + #instance.name = "reference" +} + +akka { + # Optionally set / override any akka settings + http { + server { + # Increasing these timeouts allow rest api responses for very large jobs + # to be returned to the user. 
When the timeout is reached the server would respond + # `The server was not able to produce a timely response to your request.` + # https://gatkforums.broadinstitute.org/wdl/discussion/10209/retrieving-metadata-for-large-workflows + # request-timeout = 20s + # idle-timeout = 20s + } + } +} + +# Cromwell "system" settings +system { + # If 'true', a SIGINT will trigger Cromwell to attempt to abort all currently running jobs before exiting + #abort-jobs-on-terminate = false + + # If 'true', a SIGTERM or SIGINT will trigger Cromwell to attempt to gracefully shutdown in server mode, + # in particular clearing up all queued database writes before letting the JVM shut down. + # The shutdown is a multi-phase process, each phase having its own configurable timeout. See the Dev Wiki for more details. + #graceful-server-shutdown = true + + # Cromwell will cap the number of running workflows at N + #max-concurrent-workflows = 5000 + + # Cromwell will launch up to N submitted workflows at a time, regardless of how many open workflow slots exist + #max-workflow-launch-count = 50 + + # Number of seconds between workflow launches + #new-workflow-poll-rate = 20 + + # Since the WorkflowLogCopyRouter is initialized in code, this is the number of workers + #number-of-workflow-log-copy-workers = 10 + + # Default number of cache read workers + #number-of-cache-read-workers = 25 + + io { + # throttle { + # # Global Throttling - This is mostly useful for GCS and can be adjusted to match + # # the quota availble on the GCS API + # #number-of-requests = 100000 + # #per = 100 seconds + # } + + # Number of times an I/O operation should be attempted before giving up and failing it. + #number-of-attempts = 5 + } + + # Maximum number of input file bytes allowed in order to read each type. + # If exceeded a FileSizeTooBig exception will be thrown. + input-read-limits { + + #lines = 128000 + + #bool = 7 + + #int = 19 + + #float = 50 + + #string = 128000 + + #json = 128000 + + #tsv = 128000 + + #map = 128000 + + #object = 128000 + } + + abort { + # These are the default values in Cromwell, in most circumstances there should not be a need to change them. + + # How frequently Cromwell should scan for aborts. + scan-frequency: 30 seconds + + # The cache of in-progress aborts. Cromwell will add entries to this cache once a WorkflowActor has been messaged to abort. + # If on the next scan an 'Aborting' status is found for a workflow that has an entry in this cache, Cromwell will not ask + # the associated WorkflowActor to abort again. + cache { + enabled: true + # Guava cache concurrency. + concurrency: 1 + # How long entries in the cache should live from the time they are added to the cache. + ttl: 20 minutes + # Maximum number of entries in the cache. + size: 100000 + } + } + + # Cromwell reads this value into the JVM's `networkaddress.cache.ttl` setting to control DNS cache expiration + dns-cache-ttl: 3 minutes +} + +workflow-options { + # These workflow options will be encrypted when stored in the database + #encrypted-fields: [] + + # AES-256 key to use to encrypt the values in `encrypted-fields` + #base64-encryption-key: "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=" + + # Directory where to write per workflow logs + #workflow-log-dir: "cromwell-workflow-logs" + + # When true, per workflow logs will be deleted after copying + #workflow-log-temporary: true + + # Workflow-failure-mode determines what happens to other calls when a call fails. Can be either ContinueWhilePossible or NoNewCalls. 
+ # Can also be overridden in workflow options. Defaults to NoNewCalls. Uncomment to change: + #workflow-failure-mode: "ContinueWhilePossible" + + default { + # When a workflow type is not provided on workflow submission, this specifies the default type. + #workflow-type: WDL + + # When a workflow type version is not provided on workflow submission, this specifies the default type version. + #workflow-type-version: "draft-2" + + # To set a default hog group rather than defaulting to workflow ID: + #hogGroup: "static" + } +} + +# Optional call-caching configuration. +call-caching { + # Allows re-use of existing results for jobs you've already run + # (default: false) + #enabled = false + + # Whether to invalidate a cache result forever if we cannot reuse them. Disable this if you expect some cache copies + # to fail for external reasons which should not invalidate the cache (e.g. auth differences between users): + # (default: true) + #invalidate-bad-cache-results = true + + # The maximum number of times Cromwell will attempt to copy cache hits before giving up and running the job. + #max-failed-copy-attempts = 1000000 + + # blacklist-cache { + # # The call caching blacklist cache is off by default. This cache is used to blacklist cache hits based on cache + # # hit ids or buckets of cache hit paths that Cromwell has previously failed to copy for permissions reasons. + # enabled: true + # + # # A blacklist grouping can be specified in workflow options which will inform the blacklister which workflows + # # should share a blacklist cache. + # groupings { + # workflow-option: call-cache-blacklist-group + # concurrency: 10000 + # ttl: 2 hours + # size: 1000 + # } + # + # buckets { + # # Guava cache concurrency. + # concurrency: 10000 + # # How long entries in the cache should live from the time of their last access. + # ttl: 20 minutes + # # Maximum number of entries in the cache. + # size: 1000 + # } + # + # hits { + # # Guava cache concurrency. + # concurrency: 10000 + # # How long entries in the cache should live from the time of their last access. + # ttl: 20 minutes + # # Maximum number of entries in the cache. + # size: 100000 + # } + # + # } +} + +# Google configuration +google { + + #application-name = "cromwell" + + # Default: just application default + #auths = [ + + # Application default + #{ + # name = "application-default" + # scheme = "application_default" + #}, + + # Use a static service account + #{ + # name = "service-account" + # scheme = "service_account" + # Choose between PEM file and JSON file as a credential format. They're mutually exclusive. + # PEM format: + # service-account-id = "my-service-account" + # pem-file = "/path/to/file.pem" + # JSON format: + # json-file = "/path/to/file.json" + #} + + # Use service accounts provided through workflow options + #{ + # name = "user-service-account" + # scheme = "user_service_account" + #} + #] +} + +docker { + hash-lookup { + # Set this to match your available quota against the Google Container Engine API + #gcr-api-queries-per-100-seconds = 1000 + + # Time in minutes before an entry expires from the docker hashes cache and needs to be fetched again + #cache-entry-ttl = "20 minutes" + + # Maximum number of elements to be kept in the cache. If the limit is reached, old elements will be removed from the cache + #cache-size = 200 + + # How should docker hashes be looked up. 
Possible values are "local" and "remote" + # "local": Lookup hashes on the local docker daemon using the cli + # "remote": Lookup hashes on docker hub, gcr, gar, quay + #method = "remote" + } +} + +engine { + # This instructs the engine which filesystems are at its disposal to perform any IO operation that it might need. + # For instance, WDL variables declared at the Workflow level will be evaluated using the filesystems declared here. + # If you intend to be able to run workflows with this kind of declarations: + # workflow { + # String str = read_string("gs://bucket/my-file.txt") + # } + # You will need to provide the engine with a gcs filesystem + # Note that the default filesystem (local) is always available. + filesystems { + # gcs { + # auth = "application-default" + # # Google project which will be billed for the requests + # project = "google-billing-project" + # } + local { + #enabled: true + } + } +} + +# You probably don't want to override the language factories here, but the strict-validation and enabled fields might be of interest: +# +# `enabled`: Defaults to `true`. Set to `false` to disallow workflows of this language/version from being run. +# `strict-validation`: Specifies whether workflows fail if the inputs JSON (or YAML) file contains values which the workflow did not ask for (and will therefore have no effect). +languages { + WDL { + versions { + "draft-2" { + # language-factory = "languages.wdl.draft2.WdlDraft2LanguageFactory" + # config { + # strict-validation: true + # enabled: true + # caching { + # # WDL Draft 2 namespace caching is off by default, this value must be set to true to enable it. + # enabled: false + # # Guava cache concurrency + # concurrency: 2 + # # How long entries in the cache should live from the time of their last access. + # ttl: 20 minutes + # # Maximum number of entries in the cache (i.e. the number of workflow source + imports => namespace entries). + # size: 1000 + # } + # } + } + # draft-3 is the same as 1.0 so files should be able to be submitted to Cromwell as 1.0 + # "draft-3" { + # language-factory = "languages.wdl.draft3.WdlDraft3LanguageFactory" + # config { + # strict-validation: true + # enabled: true + # } + # } + "1.0" { + # 1.0 is just a rename of draft-3, so yes, they really do use the same factory: + # language-factory = "languages.wdl.draft3.WdlDraft3LanguageFactory" + # config { + # strict-validation: true + # enabled: true + # } + } + } + } + CWL { + versions { + "v1.0" { + # language-factory = "languages.cwl.CwlV1_0LanguageFactory" + # config { + # strict-validation: false + # enabled: true + # } + } + } + } +} + +# Here is where you can define the backend providers that Cromwell understands. +# The default is a local provider. +# To add additional backend providers, you should copy paste additional backends +# of interest that you can find in the cromwell.example.backends folder +# folder at https://www.github.com/broadinstitute/cromwell +# Other backend providers include SGE, SLURM, Docker, udocker, Singularity. etc. +# Don't forget you will need to customize them for your particular use case. +backend { + # Override the default backend. + #default = "LocalExample" + + # The list of providers. 
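  # (Editor's illustrative sketch, an assumption rather than one of the shipped examples: a
  # minimal cluster provider reuses the same config backend as LocalExample below, swapping
  # only the submit string.)
  #
  # MyCluster {
  #   actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory"
  #   config {
  #     submit = "qsub -V -wd ${cwd} -o ${out} -e ${err} /usr/bin/env bash ${script}"
  #   }
  # }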
+ providers { + # Copy paste the contents of a backend provider in this section + # Examples in cromwell.example.backends include: + # LocalExample: What you should use if you want to define a new backend provider + # AWS: Amazon Web Services + # TES: protocol defined by GA4GH + # TESK: the same, with kubernetes support + # Google Pipelines, v2 (PAPIv2) + # Docker + # Singularity: a container safe for HPC + # Singularity+Slurm: and an example on Slurm + # udocker: another rootless container solution + # udocker+slurm: also exemplified on slurm + # HtCondor: workload manager at UW-Madison + # LSF: the Platform Load Sharing Facility backend + # SGE: Sun Grid Engine + # SLURM: workload manager + + # Note that these other backend examples will need tweaking and configuration. + # Please open an issue https://www.github.com/broadinstitute/cromwell if you have any questions + + # The local provider is included by default. This is an example. + # Define a new backend provider. + LocalExample { + # The actor that runs the backend. In this case, it's the Shared File System (SFS) ConfigBackend. + actor-factory = "cromwell.backend.impl.sfs.config.ConfigBackendLifecycleActorFactory" + + # The backend custom configuration. + config { + + # Optional limits on the number of concurrent jobs + #concurrent-job-limit = 5 + + # If true submits scripts to the bash background using "&". Only usefull for dispatchers that do NOT submit + # the job and then immediately return a scheduled job id. + run-in-background = true + + # `temporary-directory` creates the temporary directory for commands. + # + # If this value is not set explicitly, the default value creates a unique temporary directory, equivalent to: + # temporary-directory = "$(mktemp -d \"$PWD\"/tmp.XXXXXX)" + # + # The expression is run from the execution directory for the script. The expression must create the directory + # if it does not exist, and then return the full path to the directory. + # + # To create and return a non-random temporary directory, use something like: + # temporary-directory = "$(mkdir -p /tmp/mydir && echo /tmp/mydir)" + + # `script-epilogue` configures a shell command to run after the execution of every command block. + # + # If this value is not set explicitly, the default value is `sync`, equivalent to: + # script-epilogue = "sync" + # + # To turn off the default `sync` behavior set this value to an empty string: + # script-epilogue = "" + + # `glob-link-command` specifies command used to link glob outputs, by default using hard-links. + # If filesystem doesn't allow hard-links (e.g., beeGFS), change to soft-links as follows: + # glob-link-command = "ln -sL GLOB_PATTERN GLOB_DIRECTORY" + + # The list of possible runtime custom attributes. + runtime-attributes = """ + String? docker + String? docker_user + """ + + # Submit string when there is no "docker" runtime attribute. + submit = "/usr/bin/env bash ${script}" + + # Submit string when there is a "docker" runtime attribute. + submit-docker = """ + docker run \ + --rm -i \ + ${"--user " + docker_user} \ + --entrypoint ${job_shell} \ + -v ${cwd}:${docker_cwd} \ + ${docker} ${docker_script} + """ + + # Root directory where Cromwell writes job results. This directory must be + # visible and writeable by the Cromwell process as well as the jobs that Cromwell + # launches. + root = "cromwell-executions" + + # Root directory where Cromwell writes job results in the container. This value + # can be used to specify where the execution folder is mounted in the container. 
+ # it is used for the construction of the docker_cwd string in the submit-docker + # value above. + dockerRoot = "/cromwell-executions" + + # File system configuration. + filesystems { + + # For SFS backends, the "local" configuration specifies how files are handled. + local { + + # Try to hard link (ln), then soft-link (ln -s), and if both fail, then copy the files. + localization: [ + "hard-link", "soft-link", "copy" + ] + # An experimental localization strategy called "cached-copy" is also available for SFS backends. + # This will copy a file to a cache and then hard-link from the cache. It will copy the file to the cache again + # when the maximum number of hardlinks for a file is reached. The maximum number of hardlinks can be set with: + # max-hardlinks: 950 + + # Call caching strategies + caching { + # When copying a cached result, what type of file duplication should occur. Attempted in the order listed below: + duplication-strategy: [ + "hard-link", "soft-link", "copy" + ] + + # Possible values: file, path, path+modtime + # "file" will compute an md5 hash of the file content. + # "path" will compute an md5 hash of the file path. This strategy will only be effective if the duplication-strategy (above) is set to "soft-link", + # in order to allow for the original file path to be hashed. + # "path+modtime" will compute an md5 hash of the file path and the last modified time. The same conditions as for "path" apply here. + # Default: file + hashing-strategy: "file" + + # When true, will check if a sibling file with the same name and the .md5 extension exists, and if it does, use the content of this file as a hash. + # If false or the md5 does not exist, will proceed with the above-defined hashing strategy. + check-sibling-md5: false + } + } + } + + # The defaults for runtime attributes if not provided. + default-runtime-attributes { + failOnStderr: false + continueOnReturnCode: 0 + } + } + } + } +} + +services { + MetadataService { + + # This class is the "default" database backed implementation: + # class = "cromwell.services.metadata.impl.MetadataServiceActor" + # config { + # # For the standard MetadataService implementation, cromwell.services.metadata.impl.MetadataServiceActor: + # # Set this value to "Inf" to turn off metadata summary refresh. The default value is currently "1 second". + # metadata-summary-refresh-interval = "1 second" + # + # # Set this value to the maximum number of metadata rows to be considered per summarization cycle. + # metadata-summary-refresh-limit = 5000 + # + # # For higher scale environments, e.g. many workflows and/or jobs, DB write performance for metadata events + # # can improved by writing to the database in batches. Increasing this value can dramatically improve overall + # # performance but will both lead to a higher memory usage as well as increase the risk that metadata events + # # might not have been persisted in the event of a Cromwell crash. + # # + # # For normal usage the default value of 200 should be fine but for larger/production environments we recommend a + # # value of at least 500. There'll be no one size fits all number here so we recommend benchmarking performance and + # # tuning the value to match your environment. + # db-batch-size = 200 + # + # # Periodically the stored metadata events will be forcibly written to the DB regardless of if the batch size + # # has been reached. This is to prevent situations where events wind up never being written to an incomplete batch + # # with no new events being generated. 
The default value is currently 5 seconds + # db-flush-rate = 5 seconds + # + # # Kill metadata SQL queries that run so long that the associated request will likely already have timed out. + # # The intention is to return resources to the system within a reasonable timeframe to avoid OOM incidents. + # # See also `akka.http.server.request-timeout`. + # metadata-read-query-timeout = "Inf" + # + # # Limit the number of rows from METADATA_ENTRY that will be fetched to produce metadata responses. + # # This limit takes into account the effects of `includeKey`, `excludeKey` and `includeSubworkflows` + # # request parameters; only the rows required to be retrieved from the database to compose the response + # # count against this limit. + # metadata-read-row-number-safety-threshold = 1000000 + # + # metadata-write-statistics { + # # Not strictly necessary since the 'metadata-write-statistics' section itself is enough for statistics to be recorded. + # # However, this can be set to 'false' to disable statistics collection without deleting the section. + # enabled: true + # + # # How many workflows to maintain statistics for concurrently. At ~4x "max-concurrent-workflows", this would be + # # *relatively* resilient to large scatters of subworkflows without risking an uncapped expansion in memory usage. + # # Note that cache entries expire after 4h of not being accessed, regardless of whether this limit is reached or not. + # cache-size = 20000 + # + # # How many metadata rows to alert at each increment of. At 100k, there will be alert messages every time a + # # workflow publishes another 100k rows of metadata. + # metadata-row-alert-interval = 100000 + # + # # Whether to include subworkflow rows in both individual counts and also include them in their parent counts + # # (and their parent's parent, and so on up to the root) + # sub-workflow-bundling = true + # } + # } + + # Alternative 1: Pub sub implementation: + # class = "cromwell.services.metadata.impl.MetadataServiceActor" + # config { + # # For the Google PubSub MetadataService implementation: cromwell.services.metadata.impl.pubsub.PubSubMetadataServiceActor: + # # Google project + # project = "my-project" + # # The auth *must* be a service-account auth with JSON auth. + # auth = "service-account" + # # The PubSub topic to write to. Will be created if it doesn't already exist. Defaults to "cromwell-metadata" + # topic = "cromwell-metadata" + # # An optional PubSub subscription name. If supplied and if it doesn't already exist, it will be created and + # # subscribed to the topic + # # subscription = "optional-subscription" + # # An application name to set on your PubSub interaction. + # appName = "cromwell" + # } + } + + Instrumentation { + # StatsD - Send metrics to a StatsD server over UDP + # class = "cromwell.services.instrumentation.impl.statsd.StatsDInstrumentationServiceActor" + # config { + # hostname = "localhost" + # port = 8125 + # prefix = "" # can be used to prefix all metrics with an api key for example + # flush-rate = 1 second # rate at which aggregated metrics will be sent to statsd + # } + + # Stackdriver - Send metrics to Google's monitoring API + # class = "cromwell.services.instrumentation.impl.stackdriver.StackdriverInstrumentationServiceActor" + # config { + # # auth scheme can be `application_default` or `service_account` + # auth = "service-account" + # google-project = "my-project" + # # rate at which aggregated metrics will be sent to Stackdriver API, must be 1 minute or more. 
+ # flush-rate = 1 minute + # # below 3 keys are attached as labels to each metric. `cromwell-perf-test-case` is specifically meant for perf env. + # cromwell-instance-identifier = "cromwell-101" + # cromwell-instance-role = "role" + # cromwell-perf-test-case = "perf-test-1" + # } + } + HealthMonitor { + config { + + ##### + # Choose what to monitor: + ##### + + # If you want to check the availability of the PAPI or PAPIv2 services, list them here. + # If provided, all values here *MUST* be valid PAPI or PAPIv2 backend names in the Backends stanza. + # NB: requires 'google-auth-name' to be set + # check-papi-backends: [ PAPIv2 ] + + # If you want to check connection to GCS (NB: requires 'google-auth-name' and 'gcs-bucket-to-check' to be set): + # check-gcs: true + + # If you want to check database connectivity: + # check-engine-database: true + + # If you want to check dockerhub availability: + # check-dockerhub: true + + ##### + # General health monitor configuration: + ##### + + # How long to wait between status check sweeps + # check-refresh-time = 5 minutes + + # For any given status check, how long to wait before assuming failure + # check-timeout = 1 minute + + # For any given status datum, the maximum time a value will be kept before reverting back to "Unknown" + # status-ttl = 15 minutes + + # For any given status check, how many times to retry a failure before setting status to failed. Note this + # is the number of retries before declaring failure, not the total number of tries which is 1 more than + # the number of retries. + # check-failure-retry-count = 3 + + # For any given status check, how long to wait between failure retries. + # check-failure-retry-interval = 30 seconds + + ##### + # GCS- and PAPI-specific configuration options: + ##### + + # The name of an authentication scheme to use for e.g. pinging PAPI and GCS. This should be either an application + # default or service account auth, otherwise things won't work as there'll not be a refresh token where you need + # them. + # google-auth-name = application-default + + # A *bucket* in GCS to periodically stat to check for connectivity. This must be accessible by the auth mode + # specified by google-auth-name + # NB: This is a *bucket name*, not a URL and not an *object*. With 'some-bucket-name', Cromwell would ping gs://some-bucket-name + # gcs-bucket-to-check = some-bucket-name + } + } + LoadController { + config { + # The load controller service will periodically look at the status of various metrics its collecting and make an + # assessment of the system's load. If necessary an alert will be sent to the rest of the system. + # This option sets how frequently this should happen + # To disable load control, set this to "Inf" + # control-frequency = 5 seconds + } + } +} + +database { + # mysql example + #driver = "slick.driver.MySQLDriver$" + + # see all possible parameters and default values here: + # http://slick.lightbend.com/doc/3.2.0/api/index.html#slick.jdbc.JdbcBackend$DatabaseFactoryDef@forConfig(String,Config,Driver):Database + #db { + # driver = "com.mysql.jdbc.Driver" + # url = "jdbc:mysql://host/cromwell?rewriteBatchedStatements=true" + # user = "user" + # password = "pass" + # connectionTimeout = 5000 + #} + + # For batch inserts the number of inserts to send to the DB at a time + # insert-batch-size = 2000 + + migration { + # For databases with a very large number of symbols, selecting all the rows at once can generate a variety of + # problems. In order to avoid any issue, the selection is paginated. 
This value sets how many rows should be + # retrieved and processed at a time, before asking for the next chunk. + #read-batch-size = 100000 + + # Because a symbol row can contain any arbitrary wdl value, the amount of metadata rows to insert from a single + # symbol row can vary from 1 to several thousands (or more). To keep the size of the insert batch from growing out + # of control we monitor its size and execute/commit when it reaches or exceeds writeBatchSize. + #write-batch-size = 100000 + } + + # To customize the metadata database connection, create a block under `database` with the metadata database settings. + # + # For example, the default database stores all data in memory. This commented out block would store `metadata` in an + # hsqldb file, without modifying the internal engine database connection. + # + # The value `${uniqueSchema}` is always replaced with a unqiue UUID on each cromwell startup. + # + # This feature should be considered experimental and likely to change in the future. + + #metadata { + # profile = "slick.jdbc.HsqldbProfile$" + # db { + # driver = "org.hsqldb.jdbcDriver" + # url = "jdbc:hsqldb:file:metadata-${uniqueSchema};shutdown=false;hsqldb.tx=mvcc" + # connectionTimeout = 3000 + # } + #} + + # Postgresql example + #database { + # profile = "slick.jdbc.PostgresProfile$" + # db { + # driver = "org.postgresql.Driver" + # url = "jdbc:postgresql://localhost:5432/cromwell" + # user = "" + # password = "" + # port = 5432 + # connectionTimeout = 5000 + # } + #} +} \ No newline at end of file From e28dcb12257f82f340d5db4edb4e7d1ca6e6dbc9 Mon Sep 17 00:00:00 2001 From: Janet Gainer-Dewar Date: Tue, 12 Jul 2022 09:33:53 -0400 Subject: [PATCH 50/58] CROM-6865 Fix changelog parsing, update release doc (#6799) * Relax regex that pulls notes from changelog * Minor updates to release process doc --- processes/release_processes/README.MD | 11 ++++++----- publish/publish_workflow.wdl | 2 +- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/processes/release_processes/README.MD b/processes/release_processes/README.MD index 6994fd6f715..a5b0911acb0 100644 --- a/processes/release_processes/README.MD +++ b/processes/release_processes/README.MD @@ -22,7 +22,7 @@ will need to be on the Broad internal network or VPN to open the following links #### Announce your intentions -Post a message in `#ftfy-private` letting people know that a publish is imminent in case there are PRs they want to get +Post a message in `#dsp-batch-private` letting people know that a publish is imminent in case there are PRs they want to get wrapped up and merged to `develop` to go out in the forthcoming version of Cromwell. #### Get a Github token @@ -61,7 +61,7 @@ Ensure you have at least 8GB; 4GB is not sufficient. #### Let people know the publish is underway -Post another message in `#ftfy-private` that the release is underway, asking everyone to hold off merges to `develop` until +Post another message in `#dsp-batch-private` that the release is underway, asking everyone to hold off merges to `develop` until the release is published. #### Run the `publish_workflow.wdl` Workflow @@ -79,11 +79,12 @@ The workflow outputs its status to the console. #### Make sure it all went swimmingly -* Check that the workflow succeeded +* Check that the workflow succeeded. If it failed during Homebrew publishing in the final step, do not fear! The actual release probably still worked. * Check that there's now a new Cromwell release listed [here](https://github.com/broadinstitute/cromwell/releases). 
-* If publishing to Homebrew, check that there's a Homebrew PR for the new Cromwell version [here](https://github.com/Homebrew/homebrew-core/pulls) (and that it passes their CI) +* If publishing to Homebrew, check that there's a Homebrew PR for the new Cromwell version [here](https://github.com/Homebrew/homebrew-core/pulls) (and that it passes their CI). +If the Homebrew publish step of the workflow failed, there will not be one, but typically someone on the Homebrew side takes care of creating one. Check back tomorrow. * Look [in Travis](https://app.travis-ci.com/github/broadinstitute/cromwell/branches) for the release tag build that will publish Docker images for the new version. -* Let `#ftfy-private` know that it's okay to resume merges to `develop`. +* Let `#dsp-batch-private` know that it's okay to resume merges to `develop`. * Announce release in `#dsp-batch`, set expectations about when the new version will be available in Terra. * It will take about an additional hour for the Docker image to build in Travis before its tag appears on the [Cromwell Docker Hub page](https://hub.docker.com/r/broadinstitute/cromwell/tags). * The relevant build is the one named `XX_hotfix` in [this list](https://app.travis-ci.com/github/broadinstitute/cromwell/builds). diff --git a/publish/publish_workflow.wdl b/publish/publish_workflow.wdl index 1ce6cccc05f..73c12526e5d 100644 --- a/publish/publish_workflow.wdl +++ b/publish/publish_workflow.wdl @@ -332,7 +332,7 @@ task draftGithubRelease { echo 'Extract the latest piece of the changelog corresponding to this release' # sed removes the last line - sed -n '/## ~{currentReleaseVersion}/,/## ~{changelogPreviousVersion}/p' CHANGELOG.md \ + sed -n '/## ~{currentReleaseVersion}/,/## [0-9]/p' CHANGELOG.md \ | sed '$d' \ > changelog.txt From d79ac35384272cf9db1803b86b2673f3089500e2 Mon Sep 17 00:00:00 2001 From: Jack Warren Date: Thu, 14 Jul 2022 10:03:25 -0400 Subject: [PATCH 51/58] [DDO-2190] Strip Host header in CromIAM's forwarding (#6803) --- .../main/scala/cromiam/cromwell/CromwellClient.scala | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/CromIAM/src/main/scala/cromiam/cromwell/CromwellClient.scala b/CromIAM/src/main/scala/cromiam/cromwell/CromwellClient.scala index 10a4fc9dd7f..a95b8df89b5 100644 --- a/CromIAM/src/main/scala/cromiam/cromwell/CromwellClient.scala +++ b/CromIAM/src/main/scala/cromiam/cromwell/CromwellClient.scala @@ -62,7 +62,8 @@ class CromwellClient(scheme: String, interface: String, port: Int, log: LoggingA def forwardToCromwell(httpRequest: HttpRequest): FailureResponseOrT[HttpResponse] = { val future = { - val headers = httpRequest.headers.filterNot(_.name == TimeoutAccessHeader) + // See CromwellClient's companion object for info on these header modifications + val headers = httpRequest.headers.filterNot(header => header.name == TimeoutAccessHeader || header.name == HostHeader) val cromwellRequest = httpRequest .copy(uri = httpRequest.uri.withAuthority(interface, port).withScheme(scheme)) .withHeaders(headers) @@ -112,6 +113,13 @@ object CromwellClient { // See: https://github.com/akka/akka-http/issues/64 val TimeoutAccessHeader = "Timeout-Access" + // Header that all clients will add before they send a request. + // If we don't strip this header out and let Akka replace it automatically before forwarding + // requests to Cromwell, any host-based routing in front of Cromwell will fail because the + // header will still contain CromIAM's host, not Cromwell's. 
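  // (Editor's note, an illustrative addition rather than part of this change: once the stale
  // header is stripped, Akka HTTP's client layer derives a fresh `Host` from the URI rewritten
  // in `forwardToCromwell` above, so the forwarded request carries Cromwell's host and port.)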
+ // See: https://broadworkbench.atlassian.net/browse/DDO-2190 + val HostHeader = "Host" + final case class CromwellConnectionFailure(f: Throwable) extends Exception(s"Unable to connect to Cromwell (${f.getMessage})", f) implicit class EnhancedWorkflowLabels(val wl: WorkflowLabels) extends AnyVal { From 06a7aea1335373085b8fa5d79473ce5456775651 Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Thu, 14 Jul 2022 12:54:45 -0400 Subject: [PATCH 52/58] BW-1317 Upgrade googleCloudNioV to latest (#6804) --- project/Dependencies.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index fd0bb021bd9..3aab47d94ce 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -40,9 +40,7 @@ object Dependencies { // latest date via: https://github.com/googleapis/google-api-java-client-services/blob/main/clients/google-api-services-cloudkms/v1.metadata.json private val googleCloudKmsV = "v1-rev20220104-1.32.1" private val googleCloudMonitoringV = "3.2.5" - // BW-808 Pinning googleCloudNioV to this tried-and-true old version and quieting Scala Steward. - // 0.121.2 is the most recent version currently known to work. - private val googleCloudNioV = "0.61.0-alpha" // scala-steward:off + private val googleCloudNioV = "0.124.8" private val googleCloudStorageV = "2.9.2" private val googleGaxGrpcV = "2.12.2" // latest date via: https://mvnrepository.com/artifact/com.google.apis/google-api-services-genomics From 293f233e2cdfb1b7c6e9d59aae1ef8e9e3c808d7 Mon Sep 17 00:00:00 2001 From: Janet Gainer-Dewar Date: Mon, 18 Jul 2022 16:43:34 -0400 Subject: [PATCH 53/58] BT-690 Add release notes for Cromwell 82 (#6806) * Add release notes for Cromwell 82 * Specify upgrade version * Fix library name Co-authored-by: Adam Nichols Co-authored-by: Adam Nichols --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 47c5fe4a587..1307ad6cb53 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,10 @@ # Cromwell Change Log +## 82 Release Notes + + * Restored missing example configuration file + * Upgraded to latest version of the Google Cloud Storage NIO library (0.124.8) + ## 81 Release Notes ### Workflow labels in TES tasks From fd5a6b59dfb228a7c1855ae2a0beb7e71904bbb0 Mon Sep 17 00:00:00 2001 From: Adam Nichols Date: Tue, 19 Jul 2022 11:12:09 -0400 Subject: [PATCH 54/58] BW-1320 Finite retry `User project specified in the request is invalid` (#6807) --- CHANGELOG.md | 3 +++ .../cromwell/engine/io/RetryableRequestSupport.scala | 8 +++++--- .../src/test/scala/cromwell/engine/io/IoActorSpec.scala | 1 + 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1307ad6cb53..0c40f47b651 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,9 @@ * Restored missing example configuration file * Upgraded to latest version of the Google Cloud Storage NIO library (0.124.8) + * Cromwell will now finitely retry the following Google Cloud Storage I/O error. + * Response code `400` bad request, message `User project specified in the request is invalid` + * The default retry count is `5` and may be customized with `system.io.number-of-attempts`. 
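For reference, a minimal sketch of that knob as it appears (commented out) in `cromwell.examples.conf` earlier in this series; the value shown is the documented default, supplied by the editor rather than by this changelog entry:

```hocon
system {
  io {
    # Number of times an I/O operation is attempted before giving up and failing it (default: 5)
    number-of-attempts = 5
  }
}
```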
## 81 Release Notes diff --git a/engine/src/main/scala/cromwell/engine/io/RetryableRequestSupport.scala b/engine/src/main/scala/cromwell/engine/io/RetryableRequestSupport.scala index 03ef9224b7d..d2aa427a567 100644 --- a/engine/src/main/scala/cromwell/engine/io/RetryableRequestSupport.scala +++ b/engine/src/main/scala/cromwell/engine/io/RetryableRequestSupport.scala @@ -11,8 +11,8 @@ import javax.net.ssl.SSLException object RetryableRequestSupport { /** - * Failures that are considered retryable. - * Retrying them should increase the "retry counter" + * Failures that are considered retryable. Retrying them increases the retry counter. + * The default count is `5` and may be customized with `system.io.number-of-attempts`. */ def isRetryable(failure: Throwable): Boolean = failure match { case gcs: StorageException => gcs.isRetryable || @@ -47,7 +47,9 @@ object RetryableRequestSupport { // Error messages not included in the list of built-in GCS retryable errors (com.google.cloud.storage.StorageException) but that we still want to retry private val AdditionalRetryableErrorMessages = List( - "Connection closed prematurely" + "Connection closed prematurely", + // This is a 400 "non-retryable" error that is nevertheless sporadic and succeeds on subsequent re-runs with identical parameters [BW-1320] + "User project specified in the request is invalid" ).map(_.toLowerCase) /** diff --git a/engine/src/test/scala/cromwell/engine/io/IoActorSpec.scala b/engine/src/test/scala/cromwell/engine/io/IoActorSpec.scala index 518788c26b0..e6209ff5958 100644 --- a/engine/src/test/scala/cromwell/engine/io/IoActorSpec.scala +++ b/engine/src/test/scala/cromwell/engine/io/IoActorSpec.scala @@ -253,6 +253,7 @@ class IoActorSpec extends TestKitSuite with AnyFlatSpecLike with Matchers with I new StorageException(504, "message"), new StorageException(408, "message"), new StorageException(429, "message"), + new StorageException(400, "User project specified in the request is invalid"), BatchFailedException(new Exception), ChecksumFailedException("message"), new SocketException(), From bc2e25fd427c555487bf8b3ba9297ff7d96cfdf7 Mon Sep 17 00:00:00 2001 From: Janet Gainer-Dewar Date: Tue, 19 Jul 2022 19:17:49 +0000 Subject: [PATCH 55/58] Update cromwell version from 82 to 83 --- project/Version.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Version.scala b/project/Version.scala index a31e3506cf7..80f74d66d3f 100644 --- a/project/Version.scala +++ b/project/Version.scala @@ -5,7 +5,7 @@ import sbt._ object Version { // Upcoming release, or current if we're on a master / hotfix branch - val cromwellVersion = "82" + val cromwellVersion = "83" /** * Returns true if this project should be considered a snapshot. From 56d3d1d8e77bd8a04af52cd05886e0bba5e82098 Mon Sep 17 00:00:00 2001 From: Janet Gainer-Dewar Date: Wed, 20 Jul 2022 15:42:05 -0400 Subject: [PATCH 56/58] Update release process doc: Terra release notes, homebrew (#6812) --- processes/release_processes/README.MD | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/processes/release_processes/README.MD b/processes/release_processes/README.MD index a5b0911acb0..d16f97fddef 100644 --- a/processes/release_processes/README.MD +++ b/processes/release_processes/README.MD @@ -9,6 +9,9 @@ See our "updating the process" [process](../README.MD)! If you're the lucky "Release Rotation Engineer" this time, you should do the following four things: +1. 
Create a Jira ticket for the release (look at previous release tickets if you're not sure how). In the Suggested +Release Notes field, add "Terra is now running Cromwell " along with any other release notes relevant to Terra +users. 1. Check that all most recent executions of Cromwell's Jenkins tests have succeeded, and that all those executions were against the most recent commit in "develop" branch. There are 2 groups of Jenkins tests which have to be checked (you will need to be on the Broad internal network or VPN to open the following links): @@ -48,7 +51,7 @@ Make or copy the following files into a temporary `release/` directory outside t { "publish_workflow.githubToken": "<>", "publish_workflow.majorRelease": true, - "publish_workflow.publishHomebrew": true, + "publish_workflow.publishHomebrew": false, "publish_workflow.publishDocker": "broadinstitute/cromwell-publish:latest", "publish_workflow.organization": "broadinstitute" } @@ -79,16 +82,15 @@ The workflow outputs its status to the console. #### Make sure it all went swimmingly -* Check that the workflow succeeded. If it failed during Homebrew publishing in the final step, do not fear! The actual release probably still worked. +* Check that the workflow succeeded. * Check that there's now a new Cromwell release listed [here](https://github.com/broadinstitute/cromwell/releases). -* If publishing to Homebrew, check that there's a Homebrew PR for the new Cromwell version [here](https://github.com/Homebrew/homebrew-core/pulls) (and that it passes their CI). -If the Homebrew publish step of the workflow failed, there will not be one, but typically someone on the Homebrew side takes care of creating one. Check back tomorrow. * Look [in Travis](https://app.travis-ci.com/github/broadinstitute/cromwell/branches) for the release tag build that will publish Docker images for the new version. * Let `#dsp-batch-private` know that it's okay to resume merges to `develop`. * Announce release in `#dsp-batch`, set expectations about when the new version will be available in Terra. * It will take about an additional hour for the Docker image to build in Travis before its tag appears on the [Cromwell Docker Hub page](https://hub.docker.com/r/broadinstitute/cromwell/tags). * The relevant build is the one named `XX_hotfix` in [this list](https://app.travis-ci.com/github/broadinstitute/cromwell/builds). * See the `cromwell::build::publish_artifacts()` function for details +* **One business day later,** confirm that [the Homebrew package](https://formulae.brew.sh/formula/cromwell) has the latest version. If it doesn't, start investigation by looking at [Homebrew PR's](https://github.com/Homebrew/homebrew-core/pulls?q=is%3Apr+cromwell). ### How to Release Cromwell into Firecloud / Terra @@ -110,6 +112,10 @@ to let them know that the upgrade is about to happen is a good idea. Deploying to CaaS is detailed in the [Quick CaaS Deployment Guide](https://docs.google.com/document/d/1s0YC-oohJ7o-OGcgnH_-YBtIEKmLIPTRpG36yvWxUpE) +### How to be Sure You're Done + +Don't forget to confirm that [the Homebrew package](https://formulae.brew.sh/formula/cromwell) was updated! + ## Bonus Processes The Swagger client library is not part of our core publish/release process but can be performed from time to time, as required. 
From 95c550e0defdf653ba25f0528648e77b46148cae Mon Sep 17 00:00:00 2001 From: Christian Freitas Date: Tue, 26 Jul 2022 09:45:34 -0400 Subject: [PATCH 57/58] BT-684 Initial Blob Storage Impl (#6810) BT-684 Initial Blob Storage Implementation Merge the initial PathBuilder and PathBuilderFactory for accessing blob storage --- build.sbt | 9 ++ .../filesystems/blob/BlobPathBuilder.scala | 89 +++++++++++++++++++ .../blob/BlobPathBuilderFactory.scala | 22 +++++ .../blob/BlobPathBuilderSpec.scala | 65 ++++++++++++++ project/Dependencies.scala | 6 ++ 5 files changed, 191 insertions(+) create mode 100644 filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala create mode 100644 filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilderFactory.scala create mode 100644 filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderSpec.scala diff --git a/build.sbt b/build.sbt index 9c059f3ea81..3b255d0e0e5 100644 --- a/build.sbt +++ b/build.sbt @@ -91,6 +91,12 @@ lazy val cloudSupport = project .dependsOn(common) .dependsOn(common % "test->test") +lazy val azureBlobFileSystem = (project in file("filesystems/blob")) + .withLibrarySettings("cromwell-azure-blobFileSystem", blobFileSystemDependencies) + .dependsOn(core) + .dependsOn(core % "test->test") + .dependsOn(common % "test->test") + lazy val awsS3FileSystem = (project in file("filesystems/s3")) .withLibrarySettings("cromwell-aws-s3filesystem", s3FileSystemDependencies) .dependsOn(core) @@ -249,10 +255,12 @@ lazy val engine = project .dependsOn(drsFileSystem) .dependsOn(sraFileSystem) .dependsOn(awsS3FileSystem) + .dependsOn(azureBlobFileSystem) .dependsOn(awsS3FileSystem % "test->test") .dependsOn(drsFileSystem % "test->test") .dependsOn(httpFileSystem % "test->test") .dependsOn(ftpFileSystem % "test->test") + .dependsOn(azureBlobFileSystem % "test->test") .dependsOn(`cloud-nio-spi`) .dependsOn(languageFactoryCore) .dependsOn(cwlV1_0LanguageFactory % "test->test") @@ -391,6 +399,7 @@ lazy val root = (project in file(".")) .aggregate(`cromwell-drs-localizer`) .aggregate(awsBackend) .aggregate(awsS3FileSystem) + .aggregate(azureBlobFileSystem) .aggregate(backend) .aggregate(centaur) .aggregate(centaurCwlRunner) diff --git a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala new file mode 100644 index 00000000000..038936ec46e --- /dev/null +++ b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilder.scala @@ -0,0 +1,89 @@ +package cromwell.filesystems.blob + +import com.azure.core.credential.AzureSasCredential +import com.azure.storage.blob.nio.AzureFileSystem +import com.google.common.net.UrlEscapers +import cromwell.core.path.NioPath +import cromwell.core.path.Path +import cromwell.core.path.PathBuilder +import cromwell.filesystems.blob.BlobPathBuilder._ + +import java.net.MalformedURLException +import java.net.URI +import java.nio.file.FileSystems +import scala.jdk.CollectionConverters._ +import scala.language.postfixOps +import scala.util.Failure +import scala.util.Try + +object BlobPathBuilder { + + sealed trait BlobPathValidation + case class ValidBlobPath(path: String) extends BlobPathValidation + case class UnparsableBlobPath(errorMessage: Throwable) extends BlobPathValidation + + def invalidBlobPathMessage(container: String, endpoint: String) = s"Malformed Blob URL for this builder. 
Expecting a URL for a container $container and endpoint $endpoint"
+  def parseURI(string: String) = URI.create(UrlEscapers.urlFragmentEscaper().escape(string))
+  def parseStorageAccount(uri: URI) = uri.getHost().split("\\.").filter(!_.isEmpty()).headOption
+
+  /**
+    * Validates that a path string is a valid BlobPath of the format:
+    * {endpoint}/{containerName}/{pathToFile}
+    *
+    * with an endpoint for a particular storage account typically given by:
+    * https://{storageAccountName}.blob.core.windows.net/
+    *
+    * For example, a path string we might expect to receive looks like:
+    * https://appexternalstorage.blob.core.windows.net/inputs/test/testFile.wdl
+    *
+    * In this example:
+    * storageAccountName -> appexternalstorage
+    * endpoint -> https://{storageAccountName}.blob.core.windows.net/
+    * container -> inputs
+    * pathToFile -> test/testFile.wdl
+    *
+    * If the configured container and storage account do not match, the string is considered unparsable
+    */
+  def validateBlobPath(string: String, container: String, endpoint: String): BlobPathValidation = {
+    Try {
+      val uri = parseURI(string)
+      val storageAccount = parseStorageAccount(parseURI(endpoint))
+      val hasContainer = uri.getPath().split("/").filter(!_.isEmpty()).headOption.contains(container)
+      def hasEndpoint = parseStorageAccount(uri).contains(storageAccount.get)
+      if (hasContainer && !storageAccount.isEmpty && hasEndpoint) {
+        ValidBlobPath(uri.getPath.replaceFirst("/" + container, ""))
+      } else {
+        UnparsableBlobPath(new MalformedURLException(invalidBlobPathMessage(container, endpoint)))
+      }
+    } recover { case t => UnparsableBlobPath(t) } get
+  }
+}
+
+class BlobPathBuilder(credential: AzureSasCredential, container: String, endpoint: String) extends PathBuilder {
+
+  val fileSystemConfig: Map[String, Object] = Map((AzureFileSystem.AZURE_STORAGE_SAS_TOKEN_CREDENTIAL, credential),
+    (AzureFileSystem.AZURE_STORAGE_FILE_STORES, container))
+
+  def build(string: String): Try[BlobPath] = {
+    validateBlobPath(string, container, endpoint) match {
+      case ValidBlobPath(path) =>
+        Try {
+          val fileSystem = FileSystems.newFileSystem(new URI("azb://?endpoint=" + endpoint), fileSystemConfig.asJava)
+          val blobStoragePath = fileSystem.getPath(path)
+          BlobPath(blobStoragePath, endpoint, container)
+        }
+      case UnparsableBlobPath(errorMessage: Throwable) => Failure(errorMessage)
+    }
+  }
+
+  override def name: String = "Azure Blob Storage"
+}
+
+// Add args for container, storage account name
+case class BlobPath private[blob](nioPath: NioPath, endpoint: String, container: String) extends Path {
+  override protected def newPath(nioPath: NioPath): Path = BlobPath(nioPath, endpoint, container)
+
+  override def pathAsString: String = List(endpoint, container, nioPath.toString()).mkString("/")
+
+  override def pathWithoutScheme: String = parseURI(endpoint).getHost + "/" + container + "/" + nioPath.toString()
+}
diff --git a/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilderFactory.scala b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilderFactory.scala
new file mode 100644
index 00000000000..ca5e24fe3d7
--- /dev/null
+++ b/filesystems/blob/src/main/scala/cromwell/filesystems/blob/BlobPathBuilderFactory.scala
@@ -0,0 +1,22 @@
+package cromwell.filesystems.blob
+
+import akka.actor.ActorSystem
+import com.azure.core.credential.AzureSasCredential
+import com.typesafe.config.Config
+import cromwell.core.WorkflowOptions
+import cromwell.core.path.PathBuilderFactory
+import cromwell.filesystems.blob.BlobPathBuilder
+
+import scala.concurrent.ExecutionContext
+import scala.concurrent.Future
+
+final case class BlobPathBuilderFactory(globalConfig: Config, instanceConfig: Config) extends PathBuilderFactory {
+  override def withOptions(options: WorkflowOptions)(implicit as: ActorSystem, ec: ExecutionContext): Future[BlobPathBuilder] = {
+    val sasToken: String = instanceConfig.getString("sasToken")
+    val container: String = instanceConfig.getString("store")
+    val endpoint: String = instanceConfig.getString("endpoint")
+    Future {
+      new BlobPathBuilder(new AzureSasCredential(sasToken), container, endpoint)
+    }
+  }
+}
diff --git a/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderSpec.scala b/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderSpec.scala
new file mode 100644
index 00000000000..454b8f5adaf
--- /dev/null
+++ b/filesystems/blob/src/test/scala/cromwell/filesystems/blob/BlobPathBuilderSpec.scala
@@ -0,0 +1,65 @@
+package cromwell.filesystems.blob
+
+import com.azure.core.credential.AzureSasCredential
+import cromwell.filesystems.blob.BlobPathBuilder
+import org.scalatest.flatspec.AnyFlatSpec
+import org.scalatest.matchers.should.Matchers
+
+import java.nio.file.Files
+
+object BlobPathBuilderSpec {
+  def buildEndpoint(storageAccount: String) = s"https://$storageAccount.blob.core.windows.net"
+}
+
+class BlobPathBuilderSpec extends AnyFlatSpec with Matchers {
+
+  it should "parse a URI into a path" in {
+    val endpoint = BlobPathBuilderSpec.buildEndpoint("storageAccount")
+    val container = "container"
+    val evalPath = "/path/to/file"
+    val testString = endpoint + "/" + container + evalPath
+    BlobPathBuilder.validateBlobPath(testString, container, endpoint) match {
+      case BlobPathBuilder.ValidBlobPath(path) => path should equal(evalPath)
+      case BlobPathBuilder.UnparsableBlobPath(errorMessage) => fail(errorMessage)
+    }
+  }
+
+  it should "fail to parse a URI with a bad storage account into a path" in {
+    val endpoint = BlobPathBuilderSpec.buildEndpoint("storageAccount")
+    val container = "container"
+    val evalPath = "/path/to/file"
+    val testString = BlobPathBuilderSpec.buildEndpoint("badStorageAccount") + container + evalPath
+    BlobPathBuilder.validateBlobPath(testString, container, endpoint) match {
+      case BlobPathBuilder.ValidBlobPath(path) => fail(s"Valid path: $path found when verifying mismatched storage account")
+      case BlobPathBuilder.UnparsableBlobPath(errorMessage) => errorMessage.getMessage() should equal(BlobPathBuilder.invalidBlobPathMessage(container, endpoint))
+    }
+  }
+
+  it should "fail to parse a URI with a bad container into a path" in {
+    val endpoint = BlobPathBuilderSpec.buildEndpoint("storageAccount")
+    val container = "container"
+    val evalPath = "/path/to/file"
+    val testString = endpoint + "badContainer" + evalPath
+    BlobPathBuilder.validateBlobPath(testString, container, endpoint) match {
+      case BlobPathBuilder.ValidBlobPath(path) => fail(s"Valid path: $path found when verifying mismatched container")
+      case BlobPathBuilder.UnparsableBlobPath(errorMessage) => errorMessage.getMessage() should equal(BlobPathBuilder.invalidBlobPathMessage(container, endpoint))
    }
+  }
+
+  ignore should "build a blob path from a test string and read a file" in {
+    val endpoint = BlobPathBuilderSpec.buildEndpoint("coaexternalstorage")
+    val endpointHost = BlobPathBuilder.parseURI(endpoint).getHost
+    val store = "inputs"
+    val evalPath = "/test/inputFile.txt"
+    val sas = "{SAS TOKEN HERE}"
+    val testString
= endpoint + "/" + store + evalPath + val blobPath: BlobPath = new BlobPathBuilder(new AzureSasCredential(sas), store, endpoint) build testString getOrElse fail() + blobPath.container should equal(store) + blobPath.endpoint should equal(endpoint) + blobPath.pathAsString should equal(testString) + blobPath.pathWithoutScheme should equal(endpointHost + "/" + store + evalPath) + val is = Files.newInputStream(blobPath.nioPath) + val fileText = (is.readAllBytes.map(_.toChar)).mkString + fileText should include ("This is my test file!!!! Did it work?") + } +} diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 3aab47d94ce..0988ae8c44f 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -10,6 +10,7 @@ object Dependencies { // We would like to use the BOM to manage Azure SDK versions, but SBT doesn't support it. // https://github.com/Azure/azure-sdk-for-java/tree/main/sdk/boms/azure-sdk-bom // https://github.com/sbt/sbt/issues/4531 + private val azureStorageBlobNioV = "12.0.0-beta.18" private val azureIdentitySdkV = "1.4.2" private val azureKeyVaultSdkV = "4.3.7" private val betterFilesV = "3.9.1" @@ -181,6 +182,9 @@ object Dependencies { ) val azureDependencies: List[ModuleID] = List( + "com.azure" % "azure-storage-blob-nio" % azureStorageBlobNioV + exclude("jakarta.xml.bind", "jakarta.xml.bind-api") + exclude("jakarta.activation", "jakarta.activation-api"), "com.azure" % "azure-identity" % azureIdentitySdkV exclude("jakarta.xml.bind", "jakarta.xml.bind-api") exclude("jakarta.activation", "jakarta.activation-api"), @@ -393,6 +397,8 @@ object Dependencies { List("scalatest", "mysql", "mariadb", "postgresql") .map(name => "com.dimafeng" %% s"testcontainers-scala-$name" % testContainersScalaV % Test) + val blobFileSystemDependencies: List[ModuleID] = azureDependencies + val s3FileSystemDependencies: List[ModuleID] = junitDependencies val gcsFileSystemDependencies: List[ModuleID] = akkaHttpDependencies From 5b3b66406593cc048168de9357baaafbeee5bbb8 Mon Sep 17 00:00:00 2001 From: Saloni Shah Date: Thu, 28 Jul 2022 10:22:05 -0400 Subject: [PATCH 58/58] PROD-707 Embiggen PK of tables related to call caching (#6815) --- .../src/main/resources/changelog.xml | 5 + .../enlarge_call_cache_entry_ids.xml | 195 ++++++++++++++++++ ...arge_call_caching_aggregation_entry_id.xml | 50 +++++ ...enlarge_call_caching_detritus_entry_id.xml | 50 +++++ ...nlarge_call_caching_simpleton_entry_id.xml | 50 +++++ ...ll_caching_hash_entry_id_autoincrement.xml | 33 +++ .../slick/CallCachingSlickDatabase.scala | 20 +- ...CallCachingAggregationEntryComponent.scala | 10 +- .../CallCachingDetritusEntryComponent.scala | 6 +- .../tables/CallCachingEntryComponent.scala | 6 +- .../CallCachingHashEntryComponent.scala | 4 +- .../CallCachingSimpletonEntryComponent.scala | 6 +- .../database/sql/CallCachingSqlDatabase.scala | 10 +- .../tables/CallCachingAggregationEntry.scala | 4 +- .../sql/tables/CallCachingDetritusEntry.scala | 4 +- .../sql/tables/CallCachingEntry.scala | 2 +- .../sql/tables/CallCachingHashEntry.scala | 2 +- .../tables/CallCachingSimpletonEntry.scala | 4 +- .../deletion/DeleteWorkflowFilesActor.scala | 10 +- ...jeaMultipleCallCacheCopyAttemptsSpec.scala | 2 +- .../scala/cromwell/services/CallCaching.scala | 2 +- ...esApiBackendCacheHitCopyingActorSpec.scala | 2 +- 22 files changed, 430 insertions(+), 47 deletions(-) create mode 100644 database/migration/src/main/resources/changesets/enlarge_call_cache_entry_ids.xml create mode 100644 
database/migration/src/main/resources/changesets/enlarge_call_caching_aggregation_entry_id.xml
 create mode 100644 database/migration/src/main/resources/changesets/enlarge_call_caching_detritus_entry_id.xml
 create mode 100644 database/migration/src/main/resources/changesets/enlarge_call_caching_simpleton_entry_id.xml
 create mode 100644 database/migration/src/main/resources/changesets/reset_call_caching_hash_entry_id_autoincrement.xml

diff --git a/database/migration/src/main/resources/changelog.xml b/database/migration/src/main/resources/changelog.xml
index bb57a4d17a7..5d711061c31 100644
--- a/database/migration/src/main/resources/changelog.xml
+++ b/database/migration/src/main/resources/changelog.xml
@@ -82,6 +82,11 @@
+    [five new <include> entries referencing the changesets below; the XML markup was lost in extraction]

diff --git a/database/migration/src/main/resources/changesets/enlarge_call_cache_entry_ids.xml b/database/migration/src/main/resources/changesets/enlarge_call_cache_entry_ids.xml
new file mode 100644
index 00000000000..e9f34fcfb64
--- /dev/null
+++ b/database/migration/src/main/resources/changesets/enlarge_call_cache_entry_ids.xml
@@ -0,0 +1,195 @@
+    [Liquibase changeset XML lost in extraction; only this PostgreSQL sequence precondition and fix-up SQL survives:]
+        SELECT count(*)
+        FROM information_schema.sequences
+        WHERE sequence_name = 'CALL_CACHING_ENTRY_CALL_CACHING_ENTRY_ID_seq'
+        AND data_type = 'bigint';
+
+        ALTER SEQUENCE "CALL_CACHING_ENTRY_CALL_CACHING_ENTRY_ID_seq" as bigint RESTART WITH 20000000000;

diff --git a/database/migration/src/main/resources/changesets/enlarge_call_caching_aggregation_entry_id.xml b/database/migration/src/main/resources/changesets/enlarge_call_caching_aggregation_entry_id.xml
new file mode 100644
index 00000000000..fb0651ce32c
--- /dev/null
+++ b/database/migration/src/main/resources/changesets/enlarge_call_caching_aggregation_entry_id.xml
@@ -0,0 +1,50 @@
+    [Liquibase changeset XML lost in extraction; only this PostgreSQL sequence precondition and fix-up SQL survives:]
+        SELECT count(*)
+        FROM information_schema.sequences
+        WHERE sequence_name = 'CALL_CACHING_AGGREGATION_ENTR_CALL_CACHING_AGGREGATION_ENTR_seq'
+        AND data_type = 'bigint';
+
+        ALTER SEQUENCE "CALL_CACHING_AGGREGATION_ENTR_CALL_CACHING_AGGREGATION_ENTR_seq" as bigint RESTART WITH 20000000000;

diff --git a/database/migration/src/main/resources/changesets/enlarge_call_caching_detritus_entry_id.xml b/database/migration/src/main/resources/changesets/enlarge_call_caching_detritus_entry_id.xml
new file mode 100644
index 00000000000..b0cd8f0122d
--- /dev/null
+++ b/database/migration/src/main/resources/changesets/enlarge_call_caching_detritus_entry_id.xml
@@ -0,0 +1,50 @@
+    [Liquibase changeset XML lost in extraction; only this PostgreSQL sequence precondition and fix-up SQL survives:]
+        SELECT count(*)
+        FROM information_schema.sequences
+        WHERE sequence_name = 'CALL_CACHING_DETRITUS_ENTRY_CALL_CACHING_DETRITUS_ENTRY_ID_seq'
+        AND data_type = 'bigint';
+
+        ALTER SEQUENCE "CALL_CACHING_DETRITUS_ENTRY_CALL_CACHING_DETRITUS_ENTRY_ID_seq" as bigint RESTART WITH 20000000000;

diff --git a/database/migration/src/main/resources/changesets/enlarge_call_caching_simpleton_entry_id.xml b/database/migration/src/main/resources/changesets/enlarge_call_caching_simpleton_entry_id.xml
new file mode 100644
index 00000000000..17e5f12ccf3
--- /dev/null
+++ b/database/migration/src/main/resources/changesets/enlarge_call_caching_simpleton_entry_id.xml
@@ -0,0 +1,50 @@
+    [Liquibase changeset XML lost in extraction; only this PostgreSQL sequence precondition and fix-up SQL survives:]
+        SELECT count(*)
+        FROM information_schema.sequences
+        WHERE sequence_name = 'CALL_CACHING_SIMPLETON_ENTRY_CALL_CACHING_SIMPLETON_ENTRY_I_seq'
+        AND data_type = 'bigint';
+
+        ALTER SEQUENCE "CALL_CACHING_SIMPLETON_ENTRY_CALL_CACHING_SIMPLETON_ENTRY_I_seq" as bigint RESTART WITH 20000000000;

diff --git a/database/migration/src/main/resources/changesets/reset_call_caching_hash_entry_id_autoincrement.xml b/database/migration/src/main/resources/changesets/reset_call_caching_hash_entry_id_autoincrement.xml
new file mode 100644
index 00000000000..5397dbeaad4
--- /dev/null
+++ b/database/migration/src/main/resources/changesets/reset_call_caching_hash_entry_id_autoincrement.xml
@@ -0,0 +1,33 @@
+    [Liquibase changeset XML lost in extraction; only this PostgreSQL sequence precondition and fix-up SQL survives:]
+        SELECT count(*)
+        FROM information_schema.sequences
+        WHERE sequence_name = 'CALL_CACHING_HASH_ENTRY_CALL_CACHING_HASH_ENTRY_ID_seq';
+
+        ALTER SEQUENCE "CALL_CACHING_HASH_ENTRY_CALL_CACHING_HASH_ENTRY_ID_seq" RESTART WITH 20000000000;

diff --git a/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala
index 2a78cc5300b..8e1b98a4a1c 100644
--- a/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala
+++ b/database/sql/src/main/scala/cromwell/database/slick/CallCachingSlickDatabase.scala
@@ -26,7 +26,7 @@ trait CallCachingSlickDatabase extends CallCachingSqlDatabase {
       (List(j.callCachingEntry), List(j.callCachingHashEntries), List(j.callCachingSimpletonEntries), List(j.callCachingDetritusEntries), List(j.callCachingAggregationEntry.toList))
     }
     // Use the supplied `assigner` function to assign parent entry row IDs into the parallel `Seq` of children entities.
-    def assignEntryIdsToChildren[C](ids: Seq[Int], groupingsOfChildren: Seq[Seq[C]], assigner: (Int, C) => C): Seq[C] = {
+    def assignEntryIdsToChildren[C](ids: Seq[Long], groupingsOfChildren: Seq[Seq[C]], assigner: (Long, C) => C): Seq[C] = {
       (ids zip groupingsOfChildren) flatMap { case (id, children) => children.map(assigner(id, _)) }
     }
@@ -36,10 +36,10 @@ trait CallCachingSlickDatabase extends CallCachingSqlDatabase {
     }
     // Functions to assign call cache entry IDs into child hash entry, simpleton, and detritus rows.
- def hashAssigner(id: Int, hash: CallCachingHashEntry) = hash.copy(callCachingEntryId = Option(id)) - def simpletonAssigner(id: Int, simpleton: CallCachingSimpletonEntry) = simpleton.copy(callCachingEntryId = Option(id)) - def detritusAssigner(id: Int, detritus: CallCachingDetritusEntry) = detritus.copy(callCachingEntryId = Option(id)) - def aggregationAssigner(id: Int, aggregation: CallCachingAggregationEntry) = aggregation.copy(callCachingEntryId = Option(id)) + def hashAssigner(id: Long, hash: CallCachingHashEntry) = hash.copy(callCachingEntryId = Option(id)) + def simpletonAssigner(id: Long, simpleton: CallCachingSimpletonEntry) = simpleton.copy(callCachingEntryId = Option(id)) + def detritusAssigner(id: Long, detritus: CallCachingDetritusEntry) = detritus.copy(callCachingEntryId = Option(id)) + def aggregationAssigner(id: Long, aggregation: CallCachingAggregationEntry) = aggregation.copy(callCachingEntryId = Option(id)) val action = for { entryIds <- dataAccess.callCachingEntryIdsAutoInc ++= entries @@ -85,8 +85,8 @@ trait CallCachingSlickDatabase extends CallCachingSqlDatabase { runTransaction(action) } - override def findCacheHitForAggregation(baseAggregationHash: String, inputFilesAggregationHash: Option[String], callCachePathPrefixes: Option[List[String]], excludedIds: Set[Int]) - (implicit ec: ExecutionContext): Future[Option[Int]] = { + override def findCacheHitForAggregation(baseAggregationHash: String, inputFilesAggregationHash: Option[String], callCachePathPrefixes: Option[List[String]], excludedIds: Set[Long]) + (implicit ec: ExecutionContext): Future[Option[Long]] = { val action = callCachePathPrefixes match { case None => @@ -104,7 +104,7 @@ trait CallCachingSlickDatabase extends CallCachingSqlDatabase { runTransaction(action) } - override def queryResultsForCacheId(callCachingEntryId: Int) + override def queryResultsForCacheId(callCachingEntryId: Long) (implicit ec: ExecutionContext): Future[Option[CallCachingJoin]] = { val action = for { callCachingEntryOption <- dataAccess. 
@@ -149,7 +149,7 @@ trait CallCachingSlickDatabase extends CallCachingSqlDatabase { runTransaction(action) } - override def invalidateCall(callCachingEntryId: Int) + override def invalidateCall(callCachingEntryId: Long) (implicit ec: ExecutionContext): Future[Option[CallCachingEntry]] = { val action = for { _ <- dataAccess.allowResultReuseForCallCachingEntryId(callCachingEntryId).update(false) @@ -165,7 +165,7 @@ trait CallCachingSlickDatabase extends CallCachingSqlDatabase { runTransaction(action).void } - override def callCacheEntryIdsForWorkflowId(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Seq[Int]] = { + override def callCacheEntryIdsForWorkflowId(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Seq[Long]] = { val action = dataAccess.callCachingEntryIdsForWorkflowId(workflowExecutionUuid).result runTransaction(action) } diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala index 6fea80dae79..139558aa7b8 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingAggregationEntryComponent.scala @@ -10,13 +10,13 @@ trait CallCachingAggregationEntryComponent { import driver.api._ class CallCachingAggregationEntries(tag: Tag) extends Table[CallCachingAggregationEntry](tag, "CALL_CACHING_AGGREGATION_ENTRY") { - def callCachingAggregationEntryId = column[Int]("CALL_CACHING_AGGREGATION_ENTRY_ID", O.PrimaryKey, O.AutoInc) + def callCachingAggregationEntryId = column[Long]("CALL_CACHING_AGGREGATION_ENTRY_ID", O.PrimaryKey, O.AutoInc) def baseAggregation = column[String]("BASE_AGGREGATION", O.Length(255)) def inputFilesAggregation = column[Option[String]]("INPUT_FILES_AGGREGATION", O.Length(255)) - def callCachingEntryId = column[Int]("CALL_CACHING_ENTRY_ID") + def callCachingEntryId = column[Long]("CALL_CACHING_ENTRY_ID") override def * = (baseAggregation, inputFilesAggregation, callCachingEntryId.?, callCachingAggregationEntryId.?) 
<> (CallCachingAggregationEntry.tupled, CallCachingAggregationEntry.unapply) @@ -34,7 +34,7 @@ trait CallCachingAggregationEntryComponent { callCachingAggregationEntries.map(_.callCachingAggregationEntryId) val callCachingAggregationForCacheEntryId = Compiled( - (callCachingEntryId: Rep[Int]) => for { + (callCachingEntryId: Rep[Long]) => for { callCachingAggregationEntry <- callCachingAggregationEntries if callCachingAggregationEntry.callCachingEntryId === callCachingEntryId } yield callCachingAggregationEntry @@ -69,7 +69,7 @@ trait CallCachingAggregationEntryComponent { (detritusPath.substring(0, prefix3Length) === prefix3)} yield ()).exists ) - def callCachingEntriesForAggregatedHashes(baseAggregation: Rep[String], inputFilesAggregation: Rep[Option[String]], excludedIds: Set[Int]) = { + def callCachingEntriesForAggregatedHashes(baseAggregation: Rep[String], inputFilesAggregation: Rep[Option[String]], excludedIds: Set[Long]) = { (for { callCachingEntry <- callCachingEntries if callCachingEntry.allowResultReuse && !(callCachingEntry.callCachingEntryId inSet excludedIds) @@ -85,7 +85,7 @@ trait CallCachingAggregationEntryComponent { prefix1: Rep[String], prefix1Length: Rep[Int], prefix2: Rep[String], prefix2Length: Rep[Int], prefix3: Rep[String], prefix3Length: Rep[Int], - excludedIds: Set[Int]) = { + excludedIds: Set[Long]) = { (for { callCachingEntry <- callCachingEntries if callCachingEntry.allowResultReuse && !(callCachingEntry.callCachingEntryId inSet excludedIds) diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala index db7c1f3d826..b668adb4940 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala @@ -12,13 +12,13 @@ trait CallCachingDetritusEntryComponent { class CallCachingDetritusEntries(tag: Tag) extends Table[CallCachingDetritusEntry](tag, "CALL_CACHING_DETRITUS_ENTRY") { - def callCachingDetritusEntryId = column[Int]("CALL_CACHING_DETRITUS_ENTRY_ID", O.PrimaryKey, O.AutoInc) + def callCachingDetritusEntryId = column[Long]("CALL_CACHING_DETRITUS_ENTRY_ID", O.PrimaryKey, O.AutoInc) def detritusKey = column[String]("DETRITUS_KEY", O.Length(255)) def detritusValue = column[Option[SerialClob]]("DETRITUS_VALUE") - def callCachingEntryId = column[Int]("CALL_CACHING_ENTRY_ID") + def callCachingEntryId = column[Long]("CALL_CACHING_ENTRY_ID") override def * = (detritusKey, detritusValue, callCachingEntryId.?, callCachingDetritusEntryId.?) 
<> (CallCachingDetritusEntry.tupled, CallCachingDetritusEntry.unapply) @@ -37,7 +37,7 @@ trait CallCachingDetritusEntryComponent { callCachingDetritusEntries returning callCachingDetritusEntries.map(_.callCachingDetritusEntryId) val callCachingDetritusEntriesForCallCachingEntryId = Compiled( - (callCachingEntryId: Rep[Int]) => for { + (callCachingEntryId: Rep[Long]) => for { callCachingDetritusEntry <- callCachingDetritusEntries if callCachingDetritusEntry.callCachingEntryId === callCachingEntryId } yield callCachingDetritusEntry diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingEntryComponent.scala index 5fce20f19e6..db9c5dc1654 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingEntryComponent.scala @@ -9,7 +9,7 @@ trait CallCachingEntryComponent { import driver.api._ class CallCachingEntries(tag: Tag) extends Table[CallCachingEntry](tag, "CALL_CACHING_ENTRY") { - def callCachingEntryId = column[Int]("CALL_CACHING_ENTRY_ID", O.PrimaryKey, O.AutoInc) + def callCachingEntryId = column[Long]("CALL_CACHING_ENTRY_ID", O.PrimaryKey, O.AutoInc) def workflowExecutionUuid = column[String]("WORKFLOW_EXECUTION_UUID", O.Length(255)) @@ -36,14 +36,14 @@ trait CallCachingEntryComponent { val callCachingEntryIdsAutoInc = callCachingEntries returning callCachingEntries.map(_.callCachingEntryId) val callCachingEntriesForId = Compiled( - (callCachingEntryId: Rep[Int]) => for { + (callCachingEntryId: Rep[Long]) => for { callCachingEntry <- callCachingEntries if callCachingEntry.callCachingEntryId === callCachingEntryId } yield callCachingEntry ) val allowResultReuseForCallCachingEntryId = Compiled( - (callCachingEntryId: Rep[Int]) => for { + (callCachingEntryId: Rep[Long]) => for { callCachingEntry <- callCachingEntries if callCachingEntry.callCachingEntryId === callCachingEntryId } yield callCachingEntry.allowResultReuse diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingHashEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingHashEntryComponent.scala index 1a8e3e772f1..b556b3f22de 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingHashEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingHashEntryComponent.scala @@ -15,7 +15,7 @@ trait CallCachingHashEntryComponent { def hashValue = column[String]("HASH_VALUE", O.Length(255)) - def callCachingEntryId = column[Int]("CALL_CACHING_ENTRY_ID") + def callCachingEntryId = column[Long]("CALL_CACHING_ENTRY_ID") override def * = (hashKey, hashValue, callCachingEntryId.?, callCachingHashEntryId.?) 
<> (CallCachingHashEntry.tupled, CallCachingHashEntry.unapply) @@ -36,7 +36,7 @@ trait CallCachingHashEntryComponent { * Find all hashes for a CALL_CACHING_ENTRY_ID */ val callCachingHashEntriesForCallCachingEntryId = Compiled( - (callCachingEntryId: Rep[Int]) => for { + (callCachingEntryId: Rep[Long]) => for { callCachingHashEntry <- callCachingHashEntries if callCachingHashEntry.callCachingEntryId === callCachingEntryId } yield callCachingHashEntry diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala index 38a095a9682..b2e6f02b5b9 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala @@ -12,7 +12,7 @@ trait CallCachingSimpletonEntryComponent { class CallCachingSimpletonEntries(tag: Tag) extends Table[CallCachingSimpletonEntry](tag, "CALL_CACHING_SIMPLETON_ENTRY") { - def callCachingSimpletonEntryId = column[Int]("CALL_CACHING_SIMPLETON_ENTRY_ID", O.PrimaryKey, O.AutoInc) + def callCachingSimpletonEntryId = column[Long]("CALL_CACHING_SIMPLETON_ENTRY_ID", O.PrimaryKey, O.AutoInc) def simpletonKey = column[String]("SIMPLETON_KEY", O.Length(255)) @@ -20,7 +20,7 @@ trait CallCachingSimpletonEntryComponent { def wdlType = column[String]("WDL_TYPE", O.Length(255)) - def callCachingEntryId = column[Int]("CALL_CACHING_ENTRY_ID") + def callCachingEntryId = column[Long]("CALL_CACHING_ENTRY_ID") override def * = (simpletonKey, simpletonValue, wdlType, callCachingEntryId.?, callCachingSimpletonEntryId.?) <> (CallCachingSimpletonEntry.tupled, CallCachingSimpletonEntry.unapply) @@ -41,7 +41,7 @@ trait CallCachingSimpletonEntryComponent { * Find all result simpletons which match a given CALL_CACHING_ENTRY_ID */ val callCachingSimpletonEntriesForCallCachingEntryId = Compiled( - (callCachingEntryId: Rep[Int]) => for { + (callCachingEntryId: Rep[Long]) => for { callCachingSimpletonEntry <- callCachingSimpletonEntries if callCachingSimpletonEntry.callCachingEntryId === callCachingEntryId } yield callCachingSimpletonEntry diff --git a/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala b/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala index 23a744b4cf1..25380dc41b5 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/CallCachingSqlDatabase.scala @@ -11,20 +11,20 @@ trait CallCachingSqlDatabase { def hasMatchingCallCachingEntriesForBaseAggregation(baseAggregationHash: String, callCachePathPrefixes: Option[List[String]]) (implicit ec: ExecutionContext): Future[Boolean] - def findCacheHitForAggregation(baseAggregationHash: String, inputFilesAggregationHash: Option[String], callCachePathPrefixes: Option[List[String]], excludedIds: Set[Int]) - (implicit ec: ExecutionContext): Future[Option[Int]] + def findCacheHitForAggregation(baseAggregationHash: String, inputFilesAggregationHash: Option[String], callCachePathPrefixes: Option[List[String]], excludedIds: Set[Long]) + (implicit ec: ExecutionContext): Future[Option[Long]] - def queryResultsForCacheId(callCachingEntryId: Int) + def queryResultsForCacheId(callCachingEntryId: Long) (implicit ec: ExecutionContext): Future[Option[CallCachingJoin]] def callCacheJoinForCall(workflowExecutionUuid: 
String, callFqn: String, index: Int) (implicit ec: ExecutionContext): Future[Option[CallCachingJoin]] - def invalidateCall(callCachingEntryId: Int) + def invalidateCall(callCachingEntryId: Long) (implicit ec: ExecutionContext): Future[Option[CallCachingEntry]] def invalidateCallCacheEntryIdsForWorkflowId(workflowExecutionUuid: String) (implicit ec: ExecutionContext): Future[Unit] - def callCacheEntryIdsForWorkflowId(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Seq[Int]] + def callCacheEntryIdsForWorkflowId(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Seq[Long]] } diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingAggregationEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingAggregationEntry.scala index 3b5dab28628..088c07bf3c7 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingAggregationEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingAggregationEntry.scala @@ -4,6 +4,6 @@ case class CallCachingAggregationEntry ( baseAggregation: String, inputFilesAggregation: Option[String], - callCachingEntryId: Option[Int] = None, - callCachingAggregationEntryId: Option[Int] = None + callCachingEntryId: Option[Long] = None, + callCachingAggregationEntryId: Option[Long] = None ) diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala index 31e6af183ae..36afadd4c8e 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala @@ -6,6 +6,6 @@ case class CallCachingDetritusEntry ( detritusKey: String, detritusValue: Option[SerialClob], - callCachingEntryId: Option[Int] = None, - callCachingDetritusEntryId: Option[Int] = None + callCachingEntryId: Option[Long] = None, + callCachingDetritusEntryId: Option[Long] = None ) diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingEntry.scala index f06bf96b618..24263b0cb80 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingEntry.scala @@ -8,5 +8,5 @@ case class CallCachingEntry jobAttempt: Option[Int], returnCode: Option[Int], allowResultReuse: Boolean, - callCachingEntryId: Option[Int] = None + callCachingEntryId: Option[Long] = None ) diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingHashEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingHashEntry.scala index 41926ff2057..5f2aff9fb14 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingHashEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingHashEntry.scala @@ -4,6 +4,6 @@ case class CallCachingHashEntry ( hashKey: String, hashValue: String, - callCachingEntryId: Option[Int] = None, + callCachingEntryId: Option[Long] = None, callCachingHashEntryId: Option[Long] = None ) diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala index 626246c7bf2..c4e6628ee81 100644 --- 
a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala @@ -7,6 +7,6 @@ case class CallCachingSimpletonEntry simpletonKey: String, simpletonValue: Option[SerialClob], wdlType: String, - callCachingEntryId: Option[Int] = None, - callCachingSimpletonEntryId: Option[Int] = None + callCachingEntryId: Option[Long] = None, + callCachingSimpletonEntryId: Option[Long] = None ) diff --git a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala index 7a1a4625751..56d6012125d 100644 --- a/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala +++ b/engine/src/main/scala/cromwell/engine/workflow/lifecycle/deletion/DeleteWorkflowFilesActor.scala @@ -206,7 +206,7 @@ class DeleteWorkflowFilesActor(rootWorkflowId: RootWorkflowId, } - private def fetchCallCacheEntries(callCache: CallCache): Future[Set[Int]] = { + private def fetchCallCacheEntries(callCache: CallCache): Future[Set[Long]] = { val callCacheEntryIdsFuture = rootAndSubworkflowIds.map(x => callCache.callCacheEntryIdsForWorkflowId(x.toString)).map { f => f.map { Success(_) }.recover { case t => Failure(t) }} @@ -265,7 +265,7 @@ object DeleteWorkflowFilesActor { object StartWorkflowFilesDeletion extends DeleteWorkflowFilesActorMessage object DeleteFiles extends DeleteWorkflowFilesActorMessage object InvalidateCallCache extends DeleteWorkflowFilesActorMessage - case class RetrievedCallCacheIds(ids: Set[Int]) extends DeleteWorkflowFilesActorMessage + case class RetrievedCallCacheIds(ids: Set[Long]) extends DeleteWorkflowFilesActorMessage case class FailedRetrieveCallCacheIds(throwable: Throwable) extends DeleteWorkflowFilesActorMessage // Actor states @@ -319,18 +319,18 @@ object DeleteWorkflowFilesActor { } } - case class WaitingForInvalidateCCResponsesData(commandsToWaitFor: Set[Int], + case class WaitingForInvalidateCCResponsesData(commandsToWaitFor: Set[Long], deleteErrors: List[Throwable], filesNotFound: List[Path], callCacheInvalidationErrors: List[Throwable] = List.empty) - extends WaitingForResponseFromActorData[Int](commandsToWaitFor) with DeleteWorkflowFilesActorStateData { + extends WaitingForResponseFromActorData[Long](commandsToWaitFor) with DeleteWorkflowFilesActorStateData { override def assertionFailureMsg(expectedSize: Int, requiredSize: Int): String = { s"Found updated call cache entries set size as $expectedSize instead of $requiredSize. The updated set of call cache entries" + s" that DeleteWorkflowFilesActor has to wait for should be 1 less after a call cache entry is invalidated." 
} - override def setCommandsToWaitFor(updatedCommandsToWaitFor: Set[Int]): WaitingForResponseFromActorData[Int] = { + override def setCommandsToWaitFor(updatedCommandsToWaitFor: Set[Long]): WaitingForResponseFromActorData[Long] = { this.copy(commandsToWaitFor = updatedCommandsToWaitFor) } } diff --git a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaMultipleCallCacheCopyAttemptsSpec.scala b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaMultipleCallCacheCopyAttemptsSpec.scala index f643e8b2038..cd13165370a 100644 --- a/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaMultipleCallCacheCopyAttemptsSpec.scala +++ b/server/src/test/scala/cromwell/engine/workflow/lifecycle/execution/ejea/EjeaMultipleCallCacheCopyAttemptsSpec.scala @@ -31,7 +31,7 @@ class EjeaMultipleCallCacheCopyAttemptsSpec // Arbitrary. // When we attempt the nth copy attempt, we'll say that the cache entry ID is 'n' plus this offset. // Just makes sure that we're treating the copy attempt and the hit ID as separate numbers. - def cacheEntryIdFromCopyAttempt(attempt: Int) = CallCachingEntryId(75 + attempt) + def cacheEntryIdFromCopyAttempt(attempt: Int) = CallCachingEntryId(75L + attempt.toLong) def ejhaSendsHitIdToEjeaAndEjeaReacts(copyAttemptNumber: Int) = { val callCachingEntryId = cacheEntryIdFromCopyAttempt(copyAttemptNumber) diff --git a/services/src/main/scala/cromwell/services/CallCaching.scala b/services/src/main/scala/cromwell/services/CallCaching.scala index c85431ca752..6f4ba02ea40 100644 --- a/services/src/main/scala/cromwell/services/CallCaching.scala +++ b/services/src/main/scala/cromwell/services/CallCaching.scala @@ -1,5 +1,5 @@ package cromwell.services object CallCaching { - final case class CallCachingEntryId(id: Int) + final case class CallCachingEntryId(id: Long) } diff --git a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala index b6b9dee163e..1c40fa38590 100644 --- a/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala +++ b/supportedBackends/google/pipelines/common/src/test/scala/cromwell/backend/google/pipelines/common/callcaching/PipelinesApiBackendCacheHitCopyingActorSpec.scala @@ -518,7 +518,7 @@ class PipelinesApiBackendCacheHitCopyingActorSpec extends TestKitSuite actorUnderTest } - private def buildCopyCommand(hitId: Int, bucket: String): CopyOutputsCommand = { + private def buildCopyCommand(hitId: Long, bucket: String): CopyOutputsCommand = { val callRoot = s"gs://$bucket/workflow-id/call-name" val rcFile = callRoot + "/rc"
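To see why this patch widens the call caching IDs end to end, note that the migrations above restart each affected database sequence at 20000000000, which exceeds `Int.MaxValue` (2147483647). Below is a minimal self-contained sketch; the case class mirrors the widened `CallCachingEntryId` from `cromwell.services.CallCaching`, while the demo object is illustrative only.

```scala
// Why the ID columns move from Int to Long: the new sequence start value
// no longer fits in a signed 32-bit integer.
final case class CallCachingEntryId(id: Long)

object IdWidthSketch {
  def main(args: Array[String]): Unit = {
    val sequenceRestart = 20000000000L             // RESTART WITH value used in the migrations
    println(Int.MaxValue)                          // 2147483647
    println(sequenceRestart > Int.MaxValue.toLong) // true: an Int field would overflow
    println(CallCachingEntryId(sequenceRestart))   // CallCachingEntryId(20000000000)
  }
}
```

This is also why the test helper above computes `CallCachingEntryId(75L + attempt.toLong)` rather than mixing `Int` arithmetic into the ID.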