diff --git a/projects/control-service/projects/model/apidefs/datajob-api/api.yaml b/projects/control-service/projects/model/apidefs/datajob-api/api.yaml
index d069f51e02..750736a1ec 100644
--- a/projects/control-service/projects/model/apidefs/datajob-api/api.yaml
+++ b/projects/control-service/projects/model/apidefs/datajob-api/api.yaml
@@ -79,7 +79,7 @@ paths:
        List data Jobs with GraphQL like query. By choosing which field to be returned you can control the output.
        You can learn more about the GraphQL queries by visiting [GraphQL official website](https://graphql.org/learn/queries/)
        Query should be provided as GET parameter, not by POST body. Don't worry about the spaces and tabs
-        Keep in mind that each additional field could make query response time slower, for instance deployments,
+        Keep in mind that each aditional field could make query response time slower, for instance deployments,
        it's best if you request only what you need

        The pageNumber and pageSize arguments are required! Page number should be a number greater than 1,
@@ -102,7 +102,7 @@ paths:
          an invalid response will be return.
          * pattern should be a non-empty string which the provided property should contains [ignoring cases],
            for instance: `starshot` pattern will match import-starshot-sql, StarShot-servers and notify-starshot job names, but it won't match stars-shot-daily-prune`
-            If a pattern string is not provided, then you must at least provide the property field
+            If a pattern string is not provided, then you must atleast provide the property field
          * sort should be an enum value - ASC (ascending) or DESC (descending) option [not required, default is ASC]
          Multiple filters could be applied, but maximum one should contain sorting!
          ```
@@ -376,8 +376,8 @@ paths:
  • 5 characters at least
  • should contain only lowercase alphanumeric symbols and dash [a-z0-9\-]

-        When successfully created clients can download the keytab associated with the Data Job to run it locally.
-        To deploy so that it can be executed regularly in the runtime environment use /data-jobs/for-team/{team_name}/jobs/{job_name}/deployments API.
+        When succesfully created clients can download the keytab associated with the Data Job to run it locally.
+        To deploy so that it can be executed reguarly in the runtime environment use /data-jobs/for-team/{team_name}/jobs/{job_name}/deployments API.
  '/data-jobs/for-team/{team_name}/jobs/{job_name}':
    summary: |
@@ -548,9 +548,9 @@ paths:
      parameters:
        - name: send_notification
          description: |
-            Allows administrators to disable sending notification during maintenance window for platform errors -
+            Allows administrators to disable sending notification during maintance window for platfrom errors -
            e.g if all jobs are being re-deployed due to migration or after recovering from an outage.
-            Monitoring data will still be populated so deployment status can be monitored by admins.
+            Monitoring data will still be popoulated so deployment status can be monitored by admins.
          schema:
            type: boolean
            default: True
@@ -569,7 +569,7 @@ paths:
      summary: Creates or updates a deployment of a Data Job. | (Stable)
      description: |
        (Introduced in v1.0) | Creates or updates a deployment of a Data Job.
-        The API returns before the actual Job is deployed (the operation is asynchronous).
+        The API returns before the actual Job is deployed (the operation is asynchrounous).
        Depending on settings specified during Data Job creation, clients will get an e-mail in case of success or failure of the deployment.
        If you need to change only deployment settings without changing the job version, use PATCH deployments request which is synchronous.
      parameters:
@@ -632,7 +632,7 @@ paths:
      summary: |
        Patch a deployment of a Data Job. Use it to change the configuration of a data job. For example: to enable or disable deployment, to change the vdk version.
-        The operation is guaranteed to be synchronous so it cannot be used to deploy new version of a data job -
+        The operation is guranteed to be synchrounous so it cannot be used to deploy new version of a data job -
        job_version cannot be changed using PATCH. Use POST .../deployments for this. | (Stable)
      description: (Introduced in v1.0)
      parameters:
@@ -818,7 +818,7 @@ paths:
      operationId: dataJobLogsDownload
      summary: |
        Download data job logs.
-        This API is guaranteed to provide logs only if the jobs is currently running.
+        This API is guranteed to provide logs only if the jobs is currently running.
        For logs from older job executions - use logsUrl field passed by GET execution API or jobsQuery API.
      description: (Introduced in v1.2.9)
      parameters:
@@ -902,7 +902,7 @@ paths:
  '/data-jobs/for-team/{team_name}/jobs/{job_name}/deployments/{deployment_id}/secrets':
    summary: |
-      Data Job Secrets API allows the management of secrets for specific data jobs. Secrets are any sensitive data:
+      Data Job Secrets API allows the management of secrets for specific data jobs. Secreats are any sensitive data:
      passwords/tokens/credentials which are need by a data job.
    get:
      tags:
@@ -1134,10 +1134,10 @@ components:
      description: A deployment of the Data Job
      type: object
      properties:
-        vdk_image:
-          description: A specific VDK image to use
+        vdk_version:
+          description: A specific VDK version to use
          type: string
-          example: "example.com/versatiledatakit/vdk:latest"
+          example: 2.1
        job_version:
          description: Job version (can be Git commit)
          type: string
@@ -1171,10 +1171,10 @@ components:
      description: A deployment status of the Data Job, which includes information such as last deployment date, and who deployed the Data Job.
      type: object
      properties:
-        vdk_image:
-          description: A specific VDK image to use
+        vdk_version:
+          description: A specific VDK version to use
          type: string
-          example: "example.com/versatiledatakit/vdk:latest"
+          example: 2.1
        job_version:
          description: Job version (can be Git commit)
          type: string
@@ -1333,7 +1333,7 @@ components:
          example: release
    DataJobResources:
-      description: Resource configuration of a data Data Job Deployment.
+      description: Resource cofiguration of a data Data Job Deployment.
      type: object
      properties:
        cpu_request:
diff --git a/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/DataJobDeploymentCrudIT.java b/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/DataJobDeploymentCrudIT.java
index 8e3a689809..5e0c34debe 100644
--- a/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/DataJobDeploymentCrudIT.java
+++ b/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/DataJobDeploymentCrudIT.java
@@ -41,7 +41,7 @@ private void setVdkVersionForDeployment() throws Exception {
                .with(user("user"))
                .content(getDataJobDeploymentVdkVersionRequestBody("new_vdk_version_tag"))
                .contentType(MediaType.APPLICATION_JSON))
-        .andExpect(status().isAccepted());
+        .andExpect(status().isBadRequest());
  }
  private void disableDeployment() throws Exception {
@@ -67,7 +67,7 @@ private void verifyVersion() throws Exception {
                .with(user("user"))
                .contentType(MediaType.APPLICATION_JSON))
        .andExpect(status().isOk())
-        .andExpect(jsonPath("$.vdk_image", is("new_vdk_version_tag")));
+        .andExpect(jsonPath("$.vdk_version", is("release")));
  }
  private void resetVdkDeploymentVersion() throws Exception {
@@ -80,7 +80,7 @@ private void resetVdkDeploymentVersion() throws Exception {
                .with(user("user"))
                .content(getDataJobDeploymentVdkVersionRequestBody(""))
                .contentType(MediaType.APPLICATION_JSON))
-        .andExpect(status().isAccepted());
+        .andExpect(status().isBadRequest());
  }
  private MvcResult getDeployment() throws Exception {
@@ -105,9 +105,9 @@ private void checkDeployment() throws Exception {
    Assertions.assertEquals("user", jobDeployment.getLastDeployedBy());
    Assertions.assertEquals("3.9", jobDeployment.getPythonVersion());
    Assertions.assertFalse(jobDeployment.getEnabled());
-    Assertions.assertEquals("new_vdk_version_tag", jobDeployment.getVdkImage());
+    Assertions.assertEquals("release", jobDeployment.getVdkVersion());
    Assertions.assertNotNull(jobDeployment.getJobVersion());
-    Assertions.assertNotNull(jobDeployment.getVdkImage());
+    Assertions.assertNotNull(jobDeployment.getVdkVersion());
  }
  @Override
diff --git a/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/TestJobImageBuilderDynamicVdkImageIT.java b/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/TestJobImageBuilderDynamicVdkImageIT.java
index ceb94ee325..de5e775fad 100644
--- a/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/TestJobImageBuilderDynamicVdkImageIT.java
+++ b/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/TestJobImageBuilderDynamicVdkImageIT.java
@@ -178,9 +178,7 @@ public void testDataJobDeploymentDynamicVdkVersion() throws Exception {
    Assertions.assertEquals(true, jobDeployment.getEnabled());
    // by default the version is the same as the tag specified by datajobs.vdk.image
    // for integration test this is registry.hub.docker.com/versatiledatakit/quickstart-vdk:release
-    Assertions.assertEquals(
-        "ghcr.io/versatile-data-kit-dev/versatiledatakit/quickstart-vdk:release",
-        jobDeployment.getVdkImage());
+    Assertions.assertEquals("release", jobDeployment.getVdkVersion());
    Assertions.assertEquals("user", jobDeployment.getLastDeployedBy());
    // just check some valid date is returned. It would be too error-prone/brittle to verify exact
    // time.
@@ -214,9 +212,9 @@ public void testDataJobDeploymentDynamicVdkVersion() throws Exception {
                .with(user("user"))
                .content(getDataJobDeploymentVdkVersionRequestBody("new_vdk_version_tag"))
                .contentType(MediaType.APPLICATION_JSON))
-        .andExpect(status().isAccepted());
+        .andExpect(status().isBadRequest());
-    // verify vdk version is changed
+    // verify vdk version is not changed
    mockMvc
        .perform(
            get(String.format(
@@ -225,7 +223,7 @@ public void testDataJobDeploymentDynamicVdkVersion() throws Exception {
                .with(user("user"))
                .contentType(MediaType.APPLICATION_JSON))
        .andExpect(status().isOk())
-        .andExpect(jsonPath("$.vdk_image", is("new_vdk_version_tag")));
+        .andExpect(jsonPath("$.vdk_version", is("release")));
    // Execute change python version and set corresponding vdk version for deployment
    mockMvc
@@ -247,7 +245,7 @@ public void testDataJobDeploymentDynamicVdkVersion() throws Exception {
    Assertions.assertEquals(false, cronJob.getEnabled());
    Assertions.assertTrue(cronJob.getImageName().endsWith(testJobVersionSha));
    Assertions.assertEquals("user", cronJob.getLastDeployedBy());
-    Assertions.assertTrue(cronJob.getVdkImage().endsWith("pre-release"));
+    Assertions.assertTrue(cronJob.getVdkVersion().endsWith("pre-release"));
    Assertions.assertEquals("3.8", cronJob.getPythonVersion());
    // Execute delete deployment
diff --git a/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/common/BaseIT.java b/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/common/BaseIT.java
index 14cdf48697..1ae111c32c 100644
--- a/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/common/BaseIT.java
+++ b/projects/control-service/projects/pipelines_control_service/src/integration-test/java/com/vmware/taurus/datajobs/it/common/BaseIT.java
@@ -189,7 +189,7 @@ public String getDataJobDeploymentEnableRequestBody(boolean enabled)
  public String getDataJobDeploymentVdkVersionRequestBody(String vdkVersion)
      throws JsonProcessingException {
    var deployment = new DataJobDeployment();
-    deployment.setVdkImage(vdkVersion);
+    deployment.setVdkVersion(vdkVersion);
    return mapper.writeValueAsString(deployment);
  }
}
diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/DeploymentModelConverter.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/DeploymentModelConverter.java
index 18123dc286..ad536d3818 100644
--- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/DeploymentModelConverter.java
+++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/DeploymentModelConverter.java
@@ -36,7 +36,7 @@ public static JobDeployment toJobDeployment(
    deployment.setResources(jobDeploymentStatus.getResources());
    deployment.setMode(jobDeploymentStatus.getMode());
    deployment.setGitCommitSha(jobDeploymentStatus.getGitCommitSha());
-    deployment.setVdkImage(jobDeploymentStatus.getVdkImage());
+    deployment.setVdkVersion(jobDeploymentStatus.getVdkVersion());
    deployment.setPythonVersion(jobDeploymentStatus.getPythonVersion());
    return deployment;
@@ -62,7 +62,6 @@ public static DesiredDataJobDeployment toDesiredDataJobDeployment(JobDeployment
    deployment.setGitCommitSha(jobDeployment.getGitCommitSha());
    deployment.setPythonVersion(jobDeployment.getPythonVersion());
-    deployment.setVdkImage(jobDeployment.getVdkImage());
    return deployment;
  }
@@ -165,10 +164,10 @@ public static JobDeployment mergeDeployments(
        newDeployment.getGitCommitSha() != null
            ? newDeployment.getGitCommitSha()
            : oldDeployment.getGitCommitSha());
-    mergedDeployment.setVdkImage(
-        (newDeployment.getVdkImage() != null || newDeployment.getPythonVersion() != null)
-            ? newDeployment.getVdkImage()
-            : oldDeployment.getVdkImage());
+    mergedDeployment.setVdkVersion(
+        newDeployment.getVdkVersion() != null
+            ? newDeployment.getVdkVersion()
+            : oldDeployment.getVdkVersion());
    mergedDeployment.setPythonVersion(
        newDeployment.getPythonVersion() != null
            ? newDeployment.getPythonVersion()
@@ -236,10 +235,6 @@ public static DesiredDataJobDeployment mergeDeployments(
        newDeployment.getEnabled() != null
            ? newDeployment.getEnabled()
            : oldDeployment.getEnabled());
-    mergedDeployment.setVdkImage(
-        (newDeployment.getVdkImage() != null || newDeployment.getPythonVersion() != null)
-            ? newDeployment.getVdkImage()
-            : oldDeployment.getVdkImage());
    return mergedDeployment;
  }
@@ -328,7 +323,6 @@ public static DataJobDeploymentStatus toJobDeploymentStatus(
            ? null
            : actualDataJobDeployment.getLastDeployedDate().toString());
    deploymentStatus.setLastDeployedBy(actualDataJobDeployment.getLastDeployedBy());
-    deploymentStatus.setVdkImage(actualDataJobDeployment.getVdkImage());
    return deploymentStatus;
  }
diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/ToApiModelConverter.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/ToApiModelConverter.java
index be65b57f2d..a5aa87c382 100644
--- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/ToApiModelConverter.java
+++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/ToApiModelConverter.java
@@ -109,7 +109,7 @@ public static DataJobDeploymentStatus toDataJobDeploymentStatus(
    deployment.setJobVersion(jobDeploymentStatus.getGitCommitSha());
    deployment.setLastDeployedBy(jobDeploymentStatus.getLastDeployedBy());
    deployment.setLastDeployedDate(jobDeploymentStatus.getLastDeployedDate());
-    deployment.setVdkImage(jobDeploymentStatus.getVdkImage());
+    deployment.setVdkVersion(jobDeploymentStatus.getVdkVersion());
    deployment.setPythonVersion(jobDeploymentStatus.getPythonVersion());
    return deployment;
@@ -181,7 +181,7 @@ public static V2DataJobDeployment toV2DataJobDeployment(
    // TODO finish mapping implementation in TAUR-1535
    v2DataJobDeployment.setContacts(new DataJobContacts());
    v2DataJobDeployment.setSchedule(new V2DataJobSchedule());
-    v2DataJobDeployment.setVdkVersion(jobDeploymentStatus.getVdkImage());
+    v2DataJobDeployment.setVdkVersion(jobDeploymentStatus.getVdkVersion());
    v2DataJobDeployment.setExecutions(new ArrayList<>());
    return v2DataJobDeployment;
@@ -202,7 +202,7 @@ public static DataJobExecution jobExecutionToConvert(
        .logsUrl(logsUrl)
        .deployment(
            new DataJobDeployment()
-                .vdkImage(jobExecutionToConvert.getVdkImage())
+                .vdkVersion(jobExecutionToConvert.getVdkVersion())
                .jobVersion(jobExecutionToConvert.getJobVersion())
                .pythonVersion(jobExecutionToConvert.getJobPythonVersion())
                .schedule(
diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/ToModelApiConverter.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/ToModelApiConverter.java
index da813b242c..bbb272f561 100644
--- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/ToModelApiConverter.java
+++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/datajobs/ToModelApiConverter.java
@@ -25,7 +25,7 @@ public static JobDeployment toJobDeployment(
      jobDeployment.setMode(dataJobDeployment.getMode().toString());
    }
    jobDeployment.setGitCommitSha(dataJobDeployment.getJobVersion());
-    jobDeployment.setVdkImage(dataJobDeployment.getVdkImage());
+    jobDeployment.setVdkVersion(dataJobDeployment.getVdkVersion());
    if (dataJobDeployment.getPythonVersion() != null) {
      jobDeployment.setPythonVersion(dataJobDeployment.getPythonVersion());
    }
diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/execution/JobExecutionResultManager.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/execution/JobExecutionResultManager.java
index fe3fa6f8ee..0e82473cfa 100644
--- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/execution/JobExecutionResultManager.java
+++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/execution/JobExecutionResultManager.java
@@ -59,7 +59,7 @@ public static ExecutionResult getResult(KubernetesService.JobExecution jobExecut
    return ExecutionResult.builder()
        .executionStatus(executionStatus)
-        .vdkImage(podTerminationMessage.getVdkVersion())
+        .vdkImage(podTerminationMessage.getVdkVersion())
        .build();
  }
diff --git a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/execution/JobExecutionService.java b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/execution/JobExecutionService.java
index e3c4ff33ec..1a197848d3 100644
--- a/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/execution/JobExecutionService.java
+++ b/projects/control-service/projects/pipelines_control_service/src/main/java/com/vmware/taurus/service/execution/JobExecutionService.java
@@ -349,7 +349,7 @@ public Optional updateJobExecu
                    executionStatus, jobExecution.getMainContainerTerminationReason()))
            .opId(jobExecution.getOpId())
            .endTime(jobExecution.getEndTime())
-            .vdkImage(executionResult.getVdkImage())
+            .vdkImage(executionResult.getVdkImage())
            .jobVersion(jobExecution.getJobVersion())
            .jobPythonVersion(jobExecution.getJobPythonVersion())
            .jobSchedule(jobExecution.getJobSchedule())
diff --git a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/deploy/DeploymentServiceV2Test.java b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/deploy/DeploymentServiceV2Test.java
index 3afc5033f7..c378d941fa 100644
--- a/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/deploy/DeploymentServiceV2Test.java
+++ b/projects/control-service/projects/pipelines_control_service/src/test/java/com/vmware/taurus/service/deploy/DeploymentServiceV2Test.java
@@ -98,7 +98,7 @@ public void testFindAllDesiredDataJobDeployments() {
  public void testFindAllActualDataJobDeployments() {
    Assertions.assertEquals(
-        0, deploymentServiceV2.findAllActualDataJobDeployments().keySet().size());
+        0, deploymentServiceV2.findAllActualDataJobDeployments().keySet().size());
    var deployment = new ActualDataJobDeployment();
    var dataJob = ToModelApiConverter.toDataJob(TestUtils.getDataJob("teamName", "jobName"));
@@ -108,7 +108,7 @@ public void testFindAllActualDataJobDeployments() {
    actualJobDeploymentRepository.save(deployment);
    Assertions.assertEquals(
-        1, deploymentServiceV2.findAllActualDataJobDeployments().keySet().size());
+        1, deploymentServiceV2.findAllActualDataJobDeployments().keySet().size());
  }
  @Test
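
Note for reviewers (not part of the patch): after this revert, clients set the VDK version of an existing deployment through the vdk_version field again, the same field that BaseIT.getDataJobDeploymentVdkVersionRequestBody serializes above. The sketch below shows one way to issue that PATCH from plain Java; the host, port, team and job names, and the deployment id are made-up placeholders, and Jackson plus java.net.http are convenient choices for illustration, not something this patch prescribes. The reverted integration tests above also show the service may answer 400 Bad Request when the requested version/tag is not one it accepts (see the new_vdk_version_tag expectations).

import com.fasterxml.jackson.databind.ObjectMapper;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Map;

public class PatchVdkVersionSketch {
  public static void main(String[] args) throws Exception {
    // Body mirrors getDataJobDeploymentVdkVersionRequestBody: only vdk_version is set,
    // so job_version stays untouched (PATCH cannot change it per the API description).
    String body = new ObjectMapper().writeValueAsString(Map.of("vdk_version", "release"));

    // Placeholder URL: team "my-team", job "my-job", deployment id "production" are examples.
    String url =
        "http://localhost:8092/data-jobs/for-team/my-team/jobs/my-job/deployments/production";

    HttpRequest request =
        HttpRequest.newBuilder(URI.create(url))
            .header("Content-Type", "application/json")
            .method("PATCH", HttpRequest.BodyPublishers.ofString(body))
            .build();

    HttpResponse<String> response =
        HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());

    // A 2xx status means the synchronous PATCH was applied; 400 means the version was rejected.
    System.out.println(response.statusCode() + " " + response.body());
  }
}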