diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 501bd5e..f78496c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -5,8 +5,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- java-version: [ 11, 14 ]
-# removed version 8 temporarily to avoid sbt-updates failing because of the latest version of LogbackVersion
+ java-version: [ 17, 21 ]
fail-fast: false
steps:
- uses: actions/checkout@v2
@@ -21,7 +20,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- java-version: [ 8, 11, 14 ]
+ java-version: [ 17, 21 ]
fail-fast: false
steps:
- uses: actions/checkout@v2
@@ -36,7 +35,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- java-version: [ 8, 11, 14 ]
+ java-version: [ 17, 21 ]
fail-fast: false
steps:
- uses: actions/checkout@v2
@@ -51,7 +50,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- java-version: [ 8, 11, 14 ]
+ java-version: [ 17, 21 ]
fail-fast: false
steps:
- uses: actions/checkout@v2
@@ -66,7 +65,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- java-version: [ 8, 11, 14 ]
+ java-version: [ 17, 21 ]
fail-fast: false
steps:
- uses: actions/checkout@v2
diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml
index c7a1288..b379254 100644
--- a/.github/workflows/examples.yml
+++ b/.github/workflows/examples.yml
@@ -5,7 +5,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- java-version: [ 11, 8, 14 ]
+ java-version: [ 17 ]
fail-fast: false
steps:
- name: Checkout Code
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 02c6773..ec91076 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -1,4 +1,4 @@
-name: Scala Code Linting Check
+name: Scala/Java Code Linting Check
on: [push]
jobs:
scalafmt:
@@ -10,7 +10,7 @@ jobs:
uses: actions/setup-java@v2
with:
distribution: 'adopt'
- java-version: 11
+ java-version: 21
- name: Check scalafmt
run: sbt scalafmtCheckAll
scalastyle:
@@ -22,6 +22,18 @@ jobs:
uses: actions/setup-java@v2
with:
distribution: 'adopt'
- java-version: 11
+ java-version: 21
- name: Check scalastyle
run: sbt "scalastyle; Test/scalastyle"
+ javafmt:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Code
+ uses: actions/checkout@v2
+ - name: Set up JDK
+ uses: actions/setup-java@v2
+ with:
+          distribution: 'temurin'
+ java-version: 21
+ - name: Check javafmt
+ run: sbt javafmtCheckAll
diff --git a/.github/workflows/mdoc.yml b/.github/workflows/mdoc.yml
index b5f3251..88bce5b 100644
--- a/.github/workflows/mdoc.yml
+++ b/.github/workflows/mdoc.yml
@@ -10,6 +10,6 @@ jobs:
uses: actions/setup-java@v2
with:
distribution: 'adopt'
- java-version: 11
+ java-version: 21
- name: Check Documentation
run: sbt "docs/mdoc --check"
diff --git a/.github/workflows/publishLocal.yml b/.github/workflows/publishLocal.yml
new file mode 100644
index 0000000..b095407
--- /dev/null
+++ b/.github/workflows/publishLocal.yml
@@ -0,0 +1,37 @@
+name: Publish Local CI
+on: [push]
+jobs:
+ publish213:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ java-version: [ 21 ]
+ fail-fast: false
+ steps:
+ - name: Checkout Code
+ uses: actions/checkout@v3
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+          distribution: 'temurin'
+ java-version: ${{ matrix.java-version }}
+ cache: 'sbt'
+ - name: Publish Local for 2.13
+ run: sbt "++ 2.13.12 publishLocal"
+ publish3:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+        java-version: [ 21 ]
+ fail-fast: false
+ steps:
+ - name: Checkout Code
+ uses: actions/checkout@v3
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+          distribution: 'temurin'
+ java-version: ${{ matrix.java-version }}
+ cache: 'sbt'
+ - name: Publish Local for 3
+        run: sbt "++ 3.3.1 publishLocal"
diff --git a/.github/workflows/semver.yml b/.github/workflows/semver.yml
index 26aee9f..10f94b6 100644
--- a/.github/workflows/semver.yml
+++ b/.github/workflows/semver.yml
@@ -9,7 +9,7 @@ jobs:
uses: actions/setup-java@v3
with:
distribution: 'adopt'
- java-version: 11
+ java-version: 21
cache: 'sbt'
# - name: Run Semantic Versioning Policy Check for BQ
# run: sbt bq/versionPolicyCheck
diff --git a/.github/workflows/updates.yml b/.github/workflows/updates.yml
index d39748b..59d4b1b 100644
--- a/.github/workflows/updates.yml
+++ b/.github/workflows/updates.yml
@@ -10,6 +10,6 @@ jobs:
uses: actions/setup-java@v2
with:
distribution: 'adopt'
- java-version: 11
+ java-version: 21
- name: Check Dependency Updates
run: sbt dependencyUpdates
diff --git a/README.md b/README.md
index 5e683d5..23067c2 100644
--- a/README.md
+++ b/README.md
@@ -8,24 +8,24 @@
Add the latest release as a dependency to your project
-| Module | Latest Version | Documentation | Scala Versions |
-|----------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Google Cloud Storage | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-gcs_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-gcs) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-gcs_2.12/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-gcs_2.12) | [![gcp4zio-gcs Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-gcs/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-gcs) |
-| Dataproc | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-dp_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-dp) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-dp_2.12/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-dp_2.12) | [![gcp4zio-dp Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-dp/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-dp) |
-| BigQuery | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-bq_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-bq) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-bq_2.12/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-bq_2.12) | [![gcp4zio-bq Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-bq/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-bq) |
-| PubSub | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-pubsub_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-pubsub) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-pubsub_2.12/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-pubsub_2.12) | [![gcp4zio-pubsub Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-pubsub/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-pubsub) |
-| Cloud Monitoring | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-monitoring_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-monitoring) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-monitoring_2.12/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-monitoring_2.12) | [![gcp4zio-monitoring Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-monitoring/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-monitoring) |
+| Module | Latest Version | Documentation | Scala Versions |
+|----------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| Google Cloud Storage | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-gcs_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-gcs) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-gcs_3/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-gcs_3) | [![gcp4zio-gcs Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-gcs/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-gcs) |
+| Dataproc | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-dp_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-dp) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-dp_3/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-dp_3) | [![gcp4zio-dp Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-dp/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-dp) |
+| BigQuery | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-bq_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-bq) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-bq_3/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-bq_3) | [![gcp4zio-bq Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-bq/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-bq) |
+| PubSub | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-pubsub_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-pubsub) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-pubsub_3/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-pubsub_3) | [![gcp4zio-pubsub Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-pubsub/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-pubsub) |
+| Cloud Monitoring | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-monitoring_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-monitoring) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-monitoring_3/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-monitoring_3) | [![gcp4zio-monitoring Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-monitoring/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-monitoring) |
-This project is compiled with scala versions 2.12.18, 2.13.12, 3.3.1
+This project is tested with Scala versions 2.13.12, 3.3.1 and Java versions 17, 21
__SBT__
``` scala mdoc
libraryDependencies ++= List(
- "com.github.tharwaninitin" %% "gcp4zio-gcs" % "1.5.0",
- "com.github.tharwaninitin" %% "gcp4zio-dp" % "1.5.0",
- "com.github.tharwaninitin" %% "gcp4zio-bq" % "1.5.0",
- "com.github.tharwaninitin" %% "gcp4zio-pubsub" % "1.5.0",
- "com.github.tharwaninitin" %% "gcp4zio-monitoring" % "1.5.0"
+ "com.github.tharwaninitin" %% "gcp4zio-gcs" % "1.5.1",
+ "com.github.tharwaninitin" %% "gcp4zio-dp" % "1.5.1",
+ "com.github.tharwaninitin" %% "gcp4zio-bq" % "1.5.1",
+ "com.github.tharwaninitin" %% "gcp4zio-pubsub" % "1.5.1",
+ "com.github.tharwaninitin" %% "gcp4zio-monitoring" % "1.5.1"
)
```
__Maven__
@@ -33,7 +33,7 @@ __Maven__
com.github.tharwaninitin
gcp4zio-gcs_2.12
- 1.5.0
+ 1.5.1
```
# GCP4ZIO API's
@@ -123,14 +123,14 @@ GCS.copyObjectsGCStoGCS(
import gcp4zio.dp._
// Create Dataproc Cluster Properties
-val dpProps = ClusterProps(bucketName = "dpLogBucket")
+val props = new ClusterProps("dataproc-logs")
// Create Dataproc Cluster
-val createTask = DPCluster.createDataproc("dpCluster", dpProps)
+val createTask = DPCluster.createDataproc("cluster1", props)
// Delete Dataproc Cluster
val deleteTask = DPCluster.deleteDataproc("dpCluster")
-(createTask *> deleteTask).provide(DPCluster.live("gcpProject", "gcpRegion", "dpEndpoint"))
+(createTask *> deleteTask).provide(DPCluster.live("gcpProject", "gcpRegion"))
```
### Dataproc Job
diff --git a/build.sbt b/build.sbt
index f68966d..f4d9f36 100644
--- a/build.sbt
+++ b/build.sbt
@@ -8,6 +8,9 @@ lazy val commonSettings = Seq(
scalaVersion := scala212,
crossScalaVersions := allScalaVersions,
dependencyUpdatesFailBuild := true,
+ compileOrder := CompileOrder.JavaThenScala,
+// scalacOptions ++= Seq("-target:11"),
+// scalacOptions ++= Seq("-release", "25"),
dependencyUpdatesFilter -= moduleFilter(organization = "org.scala-lang"),
scalacOptions ++= {
CrossVersion.partialVersion(scalaVersion.value) match {
@@ -35,7 +38,7 @@ lazy val gcp4zio = (project in file("."))
crossScalaVersions := Nil, // crossScalaVersions must be set to Nil on the aggregating project
publish / skip := true
)
- .aggregate(bq, dp, gcs, pubsub, batch, monitoring)
+ .aggregate(bq, dp, gcs, pubsub, batch, monitoring, examples)
lazy val bq = (project in file("modules/bq"))
.settings(commonSettings)
@@ -76,7 +79,7 @@ lazy val docs = project
.settings(
name := "gcp4zio-docs",
publish / skip := true,
- mdocVariables := Map("VERSION" -> version.value, "Scala212" -> scala212, "Scala213" -> scala213, "Scala3" -> scala3),
+ mdocVariables := Map("VERSION" -> version.value, "Scala212" -> scala212, "Scala213" -> scala213, "Scala3" -> scala3, "JavaVersions" -> "17, 21"),
mdocIn := new File("docs/readme.template.md"),
mdocOut := new File("README.md")
)
diff --git a/docs/readme.template.md b/docs/readme.template.md
index 292a905..9780010 100644
--- a/docs/readme.template.md
+++ b/docs/readme.template.md
@@ -8,15 +8,15 @@
Add the latest release as a dependency to your project
-| Module | Latest Version | Documentation | Scala Versions |
-|----------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| Google Cloud Storage | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-gcs_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-gcs) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-gcs_2.12/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-gcs_2.12) | [![gcp4zio-gcs Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-gcs/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-gcs) |
-| Dataproc | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-dp_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-dp) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-dp_2.12/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-dp_2.12) | [![gcp4zio-dp Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-dp/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-dp) |
-| BigQuery | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-bq_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-bq) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-bq_2.12/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-bq_2.12) | [![gcp4zio-bq Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-bq/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-bq) |
-| PubSub | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-pubsub_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-pubsub) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-pubsub_2.12/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-pubsub_2.12) | [![gcp4zio-pubsub Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-pubsub/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-pubsub) |
-| Cloud Monitoring | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-monitoring_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-monitoring) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-monitoring_2.12/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-monitoring_2.12) | [![gcp4zio-monitoring Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-monitoring/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-monitoring) |
+| Module | Latest Version | Documentation | Scala Versions |
+|----------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| Google Cloud Storage | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-gcs_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-gcs) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-gcs_3/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-gcs_3) | [![gcp4zio-gcs Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-gcs/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-gcs) |
+| Dataproc | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-dp_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-dp) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-dp_3/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-dp_3) | [![gcp4zio-dp Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-dp/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-dp) |
+| BigQuery | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-bq_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-bq) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-bq_3/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-bq_3) | [![gcp4zio-bq Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-bq/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-bq) |
+| PubSub | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-pubsub_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-pubsub) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-pubsub_3/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-pubsub_3) | [![gcp4zio-pubsub Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-pubsub/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-pubsub) |
+| Cloud Monitoring | [![Latest Version](https://maven-badges.herokuapp.com/maven-central/com.github.tharwaninitin/gcp4zio-monitoring_2.12/badge.svg)](https://mvnrepository.com/artifact/com.github.tharwaninitin/gcp4zio-monitoring) | [![javadoc](https://javadoc.io/badge2/com.github.tharwaninitin/gcp4zio-monitoring_3/javadoc.svg)](https://javadoc.io/doc/com.github.tharwaninitin/gcp4zio-monitoring_3) | [![gcp4zio-monitoring Scala version support](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-monitoring/latest-by-scala-version.svg)](https://index.scala-lang.org/tharwaninitin/gcp4zio/gcp4zio-monitoring) |
-This project is compiled with scala versions @Scala212@, @Scala213@, @Scala3@
+This project is tested with Scala versions @Scala213@, @Scala3@ and Java versions @JavaVersions@
__SBT__
``` scala mdoc
@@ -123,14 +123,14 @@ GCS.copyObjectsGCStoGCS(
import gcp4zio.dp._
// Create Dataproc Cluster Properties
-val dpProps = ClusterProps(bucketName = "dpLogBucket")
+val props = new ClusterProps("dataproc-logs")
// Create Dataproc Cluster
-val createTask = DPCluster.createDataproc("dpCluster", dpProps)
+val createTask = DPCluster.createDataproc("cluster1", props)
// Delete Dataproc Cluster
val deleteTask = DPCluster.deleteDataproc("dpCluster")
-(createTask *> deleteTask).provide(DPCluster.live("gcpProject", "gcpRegion", "dpEndpoint"))
+(createTask *> deleteTask).provide(DPCluster.live("gcpProject", "gcpRegion"))
```
### Dataproc Job
diff --git a/examples/src/main/scala/DPGCS.scala b/examples/src/main/scala/DPGCS.scala
index 1168b16..e25a826 100644
--- a/examples/src/main/scala/DPGCS.scala
+++ b/examples/src/main/scala/DPGCS.scala
@@ -40,7 +40,7 @@ object DPGCS extends ZIOAppDefault with ApplicationLogger {
private val mainClass = "org.apache.spark.examples.SparkPi"
- private val createCluster = DPCluster.createDataproc(dpCluster, ClusterProps(dpBucket))
+ private val createCluster = DPCluster.createDataproc(dpCluster, new ClusterProps(dpBucket))
private val program1 = DPJob
.executeSparkJob(List("1000"), mainClass, libs, conf)
@@ -56,7 +56,7 @@ object DPGCS extends ZIOAppDefault with ApplicationLogger {
private val dpJobLayer = DPJob.live(dpCluster, gcpProject, gcpRegion, dpEndpoint)
- private val dpClusterLayer = DPCluster.live(gcpProject, gcpRegion, dpEndpoint)
+ private val dpClusterLayer = DPCluster.live(gcpProject, gcpRegion)
val run: Task[Unit] =
(createCluster *> program1 *> program2 *> deleteCluster).provide(dpJobLayer ++ dpClusterLayer ++ GCS.live())
diff --git a/modules/bq/src/main/scala/gcp4zio/bq/BQ.scala b/modules/bq/src/main/scala/gcp4zio/bq/BQ.scala
index 7640780..af6c6b1 100644
--- a/modules/bq/src/main/scala/gcp4zio/bq/BQ.scala
+++ b/modules/bq/src/main/scala/gcp4zio/bq/BQ.scala
@@ -3,6 +3,7 @@ package bq
import com.google.cloud.bigquery._
import zio.{RIO, Task, TaskLayer, ZIO, ZLayer}
+import zio.stream.Stream
trait BQ {
@@ -24,6 +25,17 @@ trait BQ {
*/
def fetchResults[T](query: String)(fn: FieldValueList => T): Task[Iterable[T]]
+ /** This API can be used to run any SQL(SELECT) query on BigQuery to fetch rows
+ * @param query
+ * SQL query(SELECT) to execute
+ * @param fn
+ * function to convert FieldValueList to Scala Type T
+ * @tparam T
+ * Scala Type for output rows
+ * @return
+ */
+ def fetchStreamingResults[T](query: String)(fn: FieldValueList => T): Task[Stream[Throwable, T]]
+
/** Load data into BigQuery from GCS
* @param sourcePath
* Source GCS path from which we need to load data into BigQuery
@@ -113,6 +125,18 @@ object BQ {
def fetchResults[T](query: String)(fn: FieldValueList => T): RIO[BQ, Iterable[T]] =
ZIO.environmentWithZIO(_.get.fetchResults[T](query)(fn))
+ /** This API can be used to run any SQL(SELECT) query on BigQuery to fetch rows
+ * @param query
+ * SQL query(SELECT) to execute
+ * @param fn
+ * function to convert FieldValueList to Scala Type T
+ * @tparam T
+ * Scala Type for output rows
+ * @return
+ */
+ def fetchStreamingResults[T](query: String)(fn: FieldValueList => T): RIO[BQ, Stream[Throwable, T]] =
+ ZIO.environmentWithZIO(_.get.fetchStreamingResults[T](query)(fn))
+
/** Load data into BigQuery from GCS
* @param sourcePath
* Source GCS path from which we need to load data into BigQuery
diff --git a/modules/bq/src/main/scala/gcp4zio/bq/BQImpl.scala b/modules/bq/src/main/scala/gcp4zio/bq/BQImpl.scala
index 3bc80f5..db6786c 100644
--- a/modules/bq/src/main/scala/gcp4zio/bq/BQImpl.scala
+++ b/modules/bq/src/main/scala/gcp4zio/bq/BQImpl.scala
@@ -6,6 +6,7 @@ import gcp4zio.bq.FileType.{CSV, JSON, ORC, PARQUET}
import zio.{Task, ZIO}
import java.util.UUID
import scala.jdk.CollectionConverters._
+import zio.stream.{Stream, ZPipeline, ZStream}
@SuppressWarnings(
Array(
@@ -88,6 +89,36 @@ case class BQImpl(client: BigQuery) extends BQ {
result.iterateAll().asScala.map(fn)
}
+ /** This API can be used to run any SQL(SELECT) query on BigQuery to fetch rows
+ * @param query
+ * SQL query(SELECT) to execute
+ * @param fn
+ * function to convert FieldValueList to Scala Type T
+ * @tparam T
+ * Scala Type for output rows
+ * @return
+ */
+ def fetchStreamingResults[T](query: String)(fn: FieldValueList => T): Task[Stream[Throwable, T]] = ZIO.attempt {
+ val queryConfig: QueryJobConfiguration = QueryJobConfiguration
+ .newBuilder(query)
+ .setUseLegacySql(false)
+ .build()
+
+ val jobId = JobId.of(UUID.randomUUID().toString)
+ val queryJob = client.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build())
+
+ // Wait for the query to complete.
+ val job = queryJob.waitFor()
+
+ val fnTransformer: ZPipeline[Any, Nothing, FieldValueList, T] = ZPipeline.map(fn)
+
+ ZStream
+ .fromJavaStream(
+ job.getQueryResults().streamAll()
+ )
+ .via(fnTransformer)
+ }
+
/** Load data into BigQuery from GCS
* @param sourcePath
* Source GCS path from which we need to load data into BigQuery
diff --git a/modules/bq/src/test/scala/gcp4zio/bq/BQQueryTestSuite.scala b/modules/bq/src/test/scala/gcp4zio/bq/BQQueryTestSuite.scala
index f2fa13c..943cb72 100644
--- a/modules/bq/src/test/scala/gcp4zio/bq/BQQueryTestSuite.scala
+++ b/modules/bq/src/test/scala/gcp4zio/bq/BQQueryTestSuite.scala
@@ -23,6 +23,22 @@ object BQQueryTestSuite {
)
)
assertZIO(task.foldZIO(ex => ZIO.fail(ex.getMessage), _ => ZIO.succeed("ok")))(equalTo("ok"))
+ },
+ test("Execute Streaming BQ Query ") {
+ val task = BQ.executeQuery("")
+ assertZIO(task.foldZIO(ex => ZIO.fail(ex.getMessage), _ => ZIO.succeed("ok")))(equalTo("ok"))
+ },
+ test("Execute StreamingBQ Query and get data") {
+ val task =
+ BQ.fetchStreamingResults("SELECT * FROM dev.ratings")(rs =>
+ Rating(
+ rs.get("userId").getLongValue,
+ rs.get("movieId").getLongValue,
+ rs.get("rating").getDoubleValue,
+ rs.get("timestamp").getLongValue
+ )
+ )
+ assertZIO(task.foldZIO(ex => ZIO.fail(ex.getMessage), _ => ZIO.succeed("ok")))(equalTo("ok"))
}
) @@ TestAspect.sequential
}
diff --git a/modules/dp/src/main/java/gcp4zio/dp/ClusterProps.java b/modules/dp/src/main/java/gcp4zio/dp/ClusterProps.java
new file mode 100644
index 0000000..53e6c3e
--- /dev/null
+++ b/modules/dp/src/main/java/gcp4zio/dp/ClusterProps.java
@@ -0,0 +1,24 @@
+package gcp4zio.dp;
+
+import java.util.Optional;
+
+/**
+ * Represents the immutable properties for a Dataproc cluster.
+ */
+public record ClusterProps(
+ String bucketName,
+ boolean singleNode,
+ String imageVersion,
+    Optional<Integer> idleDeletionDurationSecs,
+ GCEProps gceClusterProps,
+ InstanceProps masterInstanceProps,
+ InstanceProps workerInstanceProps) {
+
+  public ClusterProps(String bucketName, boolean singleNode, Optional<String> subnetUri, Optional<String> serviceAccount) {
+ this(bucketName, singleNode, "2.1-debian11", Optional.of(1800), new GCEProps(false, subnetUri, serviceAccount), new InstanceProps(), new InstanceProps(2));
+ }
+
+ public ClusterProps(String bucketName) {
+ this(bucketName, true, "2.1-debian11", Optional.of(1800), new GCEProps(false), new InstanceProps(), new InstanceProps(2));
+ }
+}
diff --git a/modules/dp/src/main/java/gcp4zio/dp/DPClusterImpl.java b/modules/dp/src/main/java/gcp4zio/dp/DPClusterImpl.java
new file mode 100644
index 0000000..9271e96
--- /dev/null
+++ b/modules/dp/src/main/java/gcp4zio/dp/DPClusterImpl.java
@@ -0,0 +1,157 @@
+package gcp4zio.dp;
+
+import com.google.api.gax.longrunning.OperationFuture;
+import com.google.cloud.dataproc.v1.*;
+import com.google.protobuf.Duration;
+import com.google.protobuf.Empty;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class DPClusterImpl {
+
+ private static final Logger logger = LoggerFactory.getLogger(DPClusterImpl.class);
+
+ private final ClusterControllerClient client;
+ private final String project;
+ private final String region;
+
+ public DPClusterImpl(ClusterControllerClient client, String project, String region) {
+ this.client = client;
+ this.project = project;
+ this.region = region;
+ }
+
+ /**
+ * This function creates Dataproc Software Config - Image Version, Single Node
+ *
+ * @param props ClusterProps
+ * @return SoftwareConfig
+ */
+ private SoftwareConfig createSoftwareConfig(ClusterProps props) {
+ if (props.singleNode()) {
+ return SoftwareConfig.newBuilder()
+ .setImageVersion(props.imageVersion())
+ .putProperties("dataproc:dataproc.allow.zero.workers", "true")
+ .build();
+ } else {
+ return SoftwareConfig.newBuilder().setImageVersion(props.imageVersion()).build();
+ }
+ }
+
+ /**
+ * This function creates GCE Cluster Config - GCE Network, GCE Service Accounts, GCE Scopes
+ *
+ * @param props GCEProps
+ * @return GceClusterConfig
+ */
+ private GceClusterConfig createGCEClusterConfig(GCEProps props) {
+ GceClusterConfig.Builder gceClusterBuilder =
+ GceClusterConfig.newBuilder().setInternalIpOnly(props.internalIpOnly());
+
+ if (props.subnetUri().isPresent()) {
+ gceClusterBuilder.setSubnetworkUri(props.subnetUri().get()).addAllTags(props.networkTags());
+ } else {
+ gceClusterBuilder.addAllTags(props.networkTags());
+ }
+
+ if (props.serviceAccount().isPresent()) {
+ return gceClusterBuilder.setServiceAccount(props.serviceAccount().get()).build();
+ } else {
+ return gceClusterBuilder
+ .addServiceAccountScopes("https://www.googleapis.com/auth/cloud-platform")
+ .build();
+ }
+ }
+
+ /**
+ * This function creates GCE Instance Config - Instance Disk, Machine Type, Number of Nodes
+ *
+ * @param props ClusterProps
+ * @return InstanceGroupConfig
+ */
+ private InstanceGroupConfig createGCEInstanceConfig(InstanceProps props) {
+ DiskConfig diskConfig =
+ DiskConfig.newBuilder()
+ .setBootDiskType(props.bootDiskType())
+ .setBootDiskSizeGb(props.bootDiskSizeGb())
+ .build();
+ return InstanceGroupConfig.newBuilder()
+ .setMachineTypeUri(props.machineType())
+ .setNumInstances(props.numInstance())
+ .setDiskConfig(diskConfig)
+ .build();
+ }
+
+ /**
+ * This function creates Dataproc Cluster Config combining all configs
+ *
+ * @param props ClusterProps
+ * @return ClusterConfig
+ */
+ private ClusterConfig createClusterConfig(ClusterProps props) {
+ if (props.singleNode()) {
+ logger.info("Creating single node cluster creation request");
+ } else {
+ logger.info("Creating multi node cluster creation request");
+ }
+
+ SoftwareConfig softwareConfig = createSoftwareConfig(props);
+ GceClusterConfig gceClusterConfig = createGCEClusterConfig(props.gceClusterProps());
+ InstanceGroupConfig masterConfig = createGCEInstanceConfig(props.masterInstanceProps());
+ EndpointConfig endPointConfig =
+ EndpointConfig.newBuilder().setEnableHttpPortAccess(true).build();
+
+ ClusterConfig.Builder clusterConfigBuilder =
+ ClusterConfig.newBuilder()
+ .setMasterConfig(masterConfig)
+ .setSoftwareConfig(softwareConfig)
+ .setConfigBucket(props.bucketName())
+ .setGceClusterConfig(gceClusterConfig)
+ .setEndpointConfig(endPointConfig);
+
+ if (!props.singleNode()) {
+ InstanceGroupConfig workerConfig = createGCEInstanceConfig(props.workerInstanceProps());
+ clusterConfigBuilder.setWorkerConfig(workerConfig);
+ }
+
+ if (props.idleDeletionDurationSecs().isPresent()) {
+ return clusterConfigBuilder
+ .setLifecycleConfig(
+ LifecycleConfig.newBuilder()
+ .setIdleDeleteTtl(
+ Duration.newBuilder().setSeconds(props.idleDeletionDurationSecs().get())))
+ .build();
+ } else {
+ return clusterConfigBuilder.build();
+ }
+ }
+
+ /**
+ * Submits an async Dataproc cluster creation request built from the given props.
+ *
+ * @param cluster Dataproc cluster name
+ * @param props cluster configuration properties
+ * @return OperationFuture resolving to the created Cluster
+ */
+ public OperationFuture<Cluster, ClusterOperationMetadata> createDataproc(
+ String cluster, ClusterProps props) {
+ ClusterConfig clusterConfig = createClusterConfig(props);
+
+ Cluster clusterConf =
+ Cluster.newBuilder().setClusterName(cluster).setConfig(clusterConfig).build();
+
+ logger.info("Submitting cluster creation request for {}", cluster);
+ return client.createClusterAsync(project, region, clusterConf);
+ }
+
+ /**
+ * Submits an async Dataproc cluster deletion request.
+ *
+ * @param cluster Dataproc cluster name
+ * @return OperationFuture resolving to Empty on successful deletion
+ */
+ public OperationFuture<Empty, ClusterOperationMetadata> deleteDataproc(String cluster) {
+ logger.info("Submitting cluster deletion request for {}", cluster);
+ return client.deleteClusterAsync(project, region, cluster);
+ }
+}
diff --git a/modules/dp/src/main/java/gcp4zio/dp/GCEProps.java b/modules/dp/src/main/java/gcp4zio/dp/GCEProps.java
new file mode 100644
index 0000000..bc7f647
--- /dev/null
+++ b/modules/dp/src/main/java/gcp4zio/dp/GCEProps.java
@@ -0,0 +1,22 @@
+package gcp4zio.dp;
+
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * Represents the immutable GCE (Compute Engine) network/identity settings for a Dataproc cluster.
+ */
+public record GCEProps(
+ boolean internalIpOnly,
+ Optional<String> subnetUri,
+ List<String> networkTags,
+ Optional<String> serviceAccount) {
+
+ public GCEProps(boolean internalIpOnly) {
+ this(internalIpOnly, Optional.empty(), List.of(), Optional.empty());
+ }
+
+ public GCEProps(boolean internalIpOnly, Optional<String> subnetUri, Optional<String> serviceAccount) {
+ this(internalIpOnly, subnetUri, List.of(), serviceAccount);
+ }
+}
diff --git a/modules/dp/src/main/java/gcp4zio/dp/InstanceProps.java b/modules/dp/src/main/java/gcp4zio/dp/InstanceProps.java
new file mode 100644
index 0000000..3dbac02
--- /dev/null
+++ b/modules/dp/src/main/java/gcp4zio/dp/InstanceProps.java
@@ -0,0 +1,19 @@
+package gcp4zio.dp;
+
+/**
+ * Represents the Immutable properties for a cluster instance.
+ */
+public record InstanceProps(
+ String machineType,
+ String bootDiskType,
+ int bootDiskSizeGb,
+ int numInstance) {
+
+ public InstanceProps() {
+ this("n2-standard-4", "pd-standard", 100, 1);
+ }
+
+ public InstanceProps(int numInstance) {
+ this("n2-standard-4", "pd-standard", 100, numInstance);
+ }
+}
\ No newline at end of file
diff --git a/modules/dp/src/main/scala/gcp4zio/dp/DPCluster.scala b/modules/dp/src/main/scala/gcp4zio/dp/DPCluster.scala
index 9301114..d587be4 100644
--- a/modules/dp/src/main/scala/gcp4zio/dp/DPCluster.scala
+++ b/modules/dp/src/main/scala/gcp4zio/dp/DPCluster.scala
@@ -51,10 +51,31 @@ object DPCluster {
* GCP projectID
* @param region
* GCP Region name
- * @param endpoint
- * GCP dataproc API
* @return
*/
- def live(project: String, region: String, endpoint: String): TaskLayer[DPCluster] =
- ZLayer.scoped(DPClusterClient(endpoint).map(dp => DPClusterImpl(dp, project, region)))
+ def live(project: String, region: String): TaskLayer[DPCluster] =
+ ZLayer.scoped {
+ val endpoint = s"$region-dataproc.googleapis.com:443"
+ DPClusterClient(endpoint).map(client =>
+ new DPCluster {
+
+ val dpClusterImpl = new DPClusterImpl(client, project, region)
+
+ override def createDataproc(cluster: String, props: ClusterProps): Task[Cluster] = ZIO
+ .fromFutureJava(dpClusterImpl.createDataproc(cluster, props))
+ .tapBoth(
+ e => ZIO.succeed(logger.error(s"Cluster creation failed with error ${e.getMessage}")),
+ res => ZIO.succeed(logger.info(s"Cluster ${res.getClusterName} created successfully"))
+ )
+
+ override def deleteDataproc(cluster: String): Task[Unit] = ZIO
+ .fromFutureJava(dpClusterImpl.deleteDataproc(cluster))
+ .tapBoth(
+ e => ZIO.succeed(logger.error(s"Cluster deletion failed with error ${e.getMessage}")),
+ _ => ZIO.succeed(logger.info(s"Cluster $cluster deleted successfully"))
+ )
+ .unit
+ }
+ )
+ }
}
diff --git a/modules/dp/src/main/scala/gcp4zio/dp/DPClusterClient.scala b/modules/dp/src/main/scala/gcp4zio/dp/DPClusterClient.scala
index 1735dc1..16e163c 100644
--- a/modules/dp/src/main/scala/gcp4zio/dp/DPClusterClient.scala
+++ b/modules/dp/src/main/scala/gcp4zio/dp/DPClusterClient.scala
@@ -3,22 +3,18 @@ package gcp4zio.dp
import com.google.cloud.dataproc.v1.{ClusterControllerClient, ClusterControllerSettings}
import zio.{RIO, Scope, ZIO}
-// https://github.com/ManeShalaka/gcpCodeSnippet/blob/master/src/main/java/com/gcp/dataproc/DataprocClusterCreate.java
-@SuppressWarnings(Array("org.wartremover.warts.Throw"))
+// https://cloud.google.com/docs/authentication/application-default-credentials
object DPClusterClient {
- /** Returns AutoCloseable ClusterControllerClient object wrapped in ZIO
+ /** Returns AutoCloseable ClusterControllerClient object wrapped in ZIO, credentials are obtained using GCP ADC strategy
* @param endpoint
* Dataproc cluster endpoint
* @return
* RIO[Scope, ClusterControllerClient]
*/
def apply(endpoint: String): RIO[Scope, ClusterControllerClient] = ZIO.fromAutoCloseable(ZIO.attempt {
- sys.env.get("GOOGLE_APPLICATION_CREDENTIALS") match {
- case Some(_) =>
- logger.info("Using credentials from GOOGLE_APPLICATION_CREDENTIALS for Dataproc Cluster Client")
- ClusterControllerClient.create(ClusterControllerSettings.newBuilder().setEndpoint(endpoint).build())
- case None => throw new RuntimeException("Set environment variable GOOGLE_APPLICATION_CREDENTIALS")
- }
+ val settings = ClusterControllerSettings.newBuilder.setEndpoint(endpoint).build()
+ logger.info(s"Credential Provider: ${settings.getCredentialsProvider}")
+ ClusterControllerClient.create(settings)
})
}
diff --git a/modules/dp/src/main/scala/gcp4zio/dp/DPClusterImpl.scala b/modules/dp/src/main/scala/gcp4zio/dp/DPClusterImpl.scala
deleted file mode 100644
index 8f45fff..0000000
--- a/modules/dp/src/main/scala/gcp4zio/dp/DPClusterImpl.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-package gcp4zio
-package dp
-
-import com.google.cloud.dataproc.v1._
-import com.google.protobuf.Duration
-import zio.{Task, ZIO}
-import scala.jdk.CollectionConverters._
-
-case class DPClusterImpl(client: ClusterControllerClient, project: String, region: String) extends DPCluster {
-
- /** This function creates Dataproc Software Config - Image Version, Single Node
- *
- * @param props
- * ClusterProps
- * @return
- */
- private def createSoftwareConfig(props: ClusterProps): SoftwareConfig =
- if (props.singleNode) {
- SoftwareConfig.newBuilder().setImageVersion(props.imageVersion).putProperties("dataproc:dataproc.allow.zero.workers", "true").build()
- } else {
- SoftwareConfig.newBuilder().setImageVersion(props.imageVersion).build()
- }
-
- /** This function creates GCE Cluster Config - GCE Network, GCE Service Accounts, GCE Scopes
- *
- * @param props
- * ClusterProps
- * @return
- */
- private def createGCEClusterConfig(props: ClusterProps): GceClusterConfig = {
- val gceClusterBuilder: GceClusterConfig.Builder = props.subnetUri match {
- case Some(value) =>
- GceClusterConfig
- .newBuilder()
- .setInternalIpOnly(props.internalIpOnly)
- .setSubnetworkUri(value)
- .addAllTags(props.networkTags.asJava)
- case None =>
- GceClusterConfig.newBuilder().setInternalIpOnly(props.internalIpOnly).addAllTags(props.networkTags.asJava)
- }
-
- props.serviceAccount match {
- case Some(value) => gceClusterBuilder.setServiceAccount(value).build()
- case _ => gceClusterBuilder.addServiceAccountScopes("https://www.googleapis.com/auth/cloud-platform").build()
- }
- }
-
- /** This function creates GCE Instance Config - Instance Disk, Machine Type, Number of Nodes
- *
- * @param props
- * ClusterProps
- * @return
- */
- private def createGCEInstanceConfig(props: InstanceProps): InstanceGroupConfig = {
- val diskConfigM = DiskConfig
- .newBuilder()
- .setBootDiskType(props.bootDiskType)
- .setBootDiskSizeGb(props.bootDiskSizeGb)
- .build()
- InstanceGroupConfig
- .newBuilder()
- .setMachineTypeUri(props.machineType)
- .setNumInstances(props.numInstance)
- .setDiskConfig(diskConfigM)
- .build()
- }
-
- /** This function creates Dataproc Cluster Config combining all configs
- *
- * @param props
- * ClusterProps
- * @return
- */
- private def createClusterConfig(props: ClusterProps): ClusterConfig = {
- if (props.singleNode)
- logger.info(s"Creating single node cluster creation request")
- else
- logger.info(s"Creating multi node cluster creation request")
-
- val softwareConfig = createSoftwareConfig(props)
-
- val gceClusterConfig = createGCEClusterConfig(props)
-
- val masterConfig = createGCEInstanceConfig(props.masterInstanceProps)
-
- val endPointConfig = EndpointConfig.newBuilder().setEnableHttpPortAccess(true).build()
-
- val clusterConfigBuilder =
- if (props.singleNode)
- ClusterConfig.newBuilder
- .setMasterConfig(masterConfig)
- .setSoftwareConfig(softwareConfig)
- .setConfigBucket(props.bucketName)
- .setGceClusterConfig(gceClusterConfig)
- .setEndpointConfig(endPointConfig)
- else {
- val workerConfig = createGCEInstanceConfig(props.workerInstanceProps)
- ClusterConfig.newBuilder
- .setMasterConfig(masterConfig)
- .setWorkerConfig(workerConfig)
- .setSoftwareConfig(softwareConfig)
- .setConfigBucket(props.bucketName)
- .setGceClusterConfig(gceClusterConfig)
- .setEndpointConfig(endPointConfig)
- }
-
- props.idleDeletionDurationSecs match {
- case Some(value) =>
- clusterConfigBuilder
- .setLifecycleConfig(LifecycleConfig.newBuilder().setIdleDeleteTtl(Duration.newBuilder().setSeconds(value)))
- .build()
- case _ => clusterConfigBuilder.build()
- }
- }
-
- def createDataproc(clusterName: String, props: ClusterProps): Task[Cluster] = ZIO
- .fromFutureJava {
- val clusterConfig = createClusterConfig(props)
-
- val cluster = Cluster.newBuilder.setClusterName(clusterName).setConfig(clusterConfig).build
-
- val createClusterAsyncRequest = client.createClusterAsync(project, region, cluster)
-
- logger.info(s"Submitting cluster creation request for $clusterName")
- createClusterAsyncRequest
- }
- .tapBoth(
- e => ZIO.succeed(logger.error(s"Cluster creation failed with error ${e.getMessage}")),
- res => ZIO.succeed(logger.info(s"Cluster ${res.getClusterName} created successfully"))
- )
-
- def deleteDataproc(clusterName: String): Task[Unit] = ZIO
- .fromFutureJava {
- logger.info(s"Submitting cluster deletion request for $clusterName")
- client.deleteClusterAsync(project, region, clusterName)
- }
- .tapBoth(
- e => ZIO.succeed(logger.error(s"Cluster deletion failed with error ${e.getMessage}")),
- _ => ZIO.succeed(logger.info(s"Cluster $clusterName deleted successfully"))
- )
- .unit
-}
diff --git a/modules/dp/src/main/scala/gcp4zio/dp/package.scala b/modules/dp/src/main/scala/gcp4zio/dp/package.scala
index 464cdc0..a865cf6 100644
--- a/modules/dp/src/main/scala/gcp4zio/dp/package.scala
+++ b/modules/dp/src/main/scala/gcp4zio/dp/package.scala
@@ -4,24 +4,4 @@ import org.slf4j.{Logger, LoggerFactory}
package object dp {
private[dp] lazy val logger: Logger = LoggerFactory.getLogger(getClass.getName)
-
- case class InstanceProps(
- machineType: String = "n2-standard-4",
- bootDiskType: String = "pd-standard", // pd-standard, pd-balanced, pd-ssd // https://cloud.google.com/compute/docs/disks#disk-types
- bootDiskSizeGb: Int = 100,
- numInstance: Int = 1
- )
-
- case class ClusterProps(
- bucketName: String,
- singleNode: Boolean = false,
- internalIpOnly: Boolean = false,
- subnetUri: Option[String] = None,
- networkTags: List[String] = List.empty,
- serviceAccount: Option[String] = None,
- idleDeletionDurationSecs: Option[Long] = Some(1800L),
- imageVersion: String = "2.1-debian11",
- masterInstanceProps: InstanceProps = InstanceProps(),
- workerInstanceProps: InstanceProps = InstanceProps(numInstance = 2)
- )
}
diff --git a/modules/dp/src/test/scala/RunTests.scala b/modules/dp/src/test/scala/RunTests.scala
index f477e01..8155d9b 100644
--- a/modules/dp/src/test/scala/RunTests.scala
+++ b/modules/dp/src/test/scala/RunTests.scala
@@ -3,7 +3,7 @@ import gcp4zio.dp._
import zio.test._
object RunTests extends ZIOSpecDefault {
- private val env = DPJob.live(dpCluster, gcpProject, gcpRegion, dpEndpoint) ++ DPCluster.live(gcpProject, gcpRegion, dpEndpoint)
+ private val env = DPJob.live(dpCluster, gcpProject, gcpRegion, dpEndpoint) ++ DPCluster.live(gcpProject, gcpRegion)
override def spec: Spec[TestEnvironment, Any] = (suite("DP Apis")(
DPCreateTestSuite.spec,
diff --git a/modules/dp/src/test/scala/gcp4zio/dp/DPCreateTestSuite.scala b/modules/dp/src/test/scala/gcp4zio/dp/DPCreateTestSuite.scala
index d47821d..34fd558 100644
--- a/modules/dp/src/test/scala/gcp4zio/dp/DPCreateTestSuite.scala
+++ b/modules/dp/src/test/scala/gcp4zio/dp/DPCreateTestSuite.scala
@@ -4,17 +4,13 @@ import gcp4zio.Global._
import zio.ZIO
import zio.test.Assertion.equalTo
import zio.test.{assertZIO, test, Spec}
+import scala.jdk.OptionConverters._
object DPCreateTestSuite {
val spec: Spec[DPCluster, Any] =
test("Execute DPCreateStep") {
- val dpProps = ClusterProps(
- bucketName = dpBucket,
- subnetUri = dpSubnetUri,
- networkTags = dpNetworkTags,
- serviceAccount = dpServiceAccount
- )
- val step = DPCluster.createDataproc(dpCluster, dpProps)
+ val dpProps = new ClusterProps(dpBucket, true, dpSubnetUri.toJava, dpServiceAccount.toJava)
+ val step = DPCluster.createDataproc(dpCluster, dpProps)
assertZIO(step.foldZIO(ex => ZIO.fail(ex.getMessage), _ => ZIO.succeed("ok")))(equalTo("ok"))
}
}
diff --git a/project/Versions.scala b/project/Versions.scala
index 7171ca1..5b4f856 100644
--- a/project/Versions.scala
+++ b/project/Versions.scala
@@ -3,18 +3,18 @@ object Versions {
val scala213 = "2.13.12"
val scala3 = "3.3.1"
- val allScalaVersions: List[String] = List(scala212, scala213, scala3)
+ val allScalaVersions: List[String] = List(scala213, scala3)
- val scalaJavaCollectionCompat = "2.11.0"
+ val scalaJavaCollectionCompat = "2.12.0"
- val zioVersion = "2.0.20"
- val zioLogVersion = "2.1.16"
+ val zioVersion = "2.1.1"
+ val zioLogVersion = "2.2.4"
- val logbackVersion = "1.4.14"
- val gcpBqVersion = "2.35.0"
- val gcpDpVersion = "4.29.0"
- val gcpGcsVersion = "2.30.1"
- val gcpPubSubVersion = "1.125.13"
- val cloudMonitorVersion = "3.33.0"
- val batchVersion = "0.32.0"
+ val logbackVersion = "1.5.6"
+ val gcpBqVersion = "2.40.1"
+ val gcpDpVersion = "4.40.0"
+ val gcpGcsVersion = "2.38.0"
+ val gcpPubSubVersion = "1.129.5"
+ val cloudMonitorVersion = "3.44.0"
+ val batchVersion = "0.43.0"
}
diff --git a/project/build.properties b/project/build.properties
index 8b9a0b0..abbbce5 100644
--- a/project/build.properties
+++ b/project/build.properties
@@ -1 +1 @@
-sbt.version=1.8.0
+sbt.version=1.9.8
diff --git a/project/plugins.sbt b/project/plugins.sbt
index e9bd88c..0b5ab64 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1,9 +1,17 @@
-addSbtPlugin("io.crashbox" % "sbt-gpg" % "0.2.1")
-addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3")
-addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.3")
-addSbtPlugin("ch.epfl.scala" % "sbt-version-policy" % "2.0.1")
-addSbtPlugin("org.wartremover" % "sbt-wartremover" % "3.1.5")
-addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0")
-addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
-addSbtPlugin("com.github.sbt" % "sbt-release" % "1.1.0")
-addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.6")
+// Code Quality and Linting Plugins
+addSbtPlugin("org.wartremover" % "sbt-wartremover" % "3.1.5")
+addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0")
+addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
+addSbtPlugin("com.lightbend.sbt" % "sbt-java-formatter" % "0.8.0")
+
+// Type Checked Documentation Plugin
+addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.6")
+
+// Publishing and Release Plugins
+addSbtPlugin("io.crashbox" % "sbt-gpg" % "0.2.1")
+addSbtPlugin("com.github.sbt" % "sbt-release" % "1.1.0")
+addSbtPlugin("ch.epfl.scala" % "sbt-version-policy" % "2.0.1")
+
+// Other Plugins
+addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3")
+addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.3")
diff --git a/version.sbt b/version.sbt
index 8d76cb0..6a252ea 100644
--- a/version.sbt
+++ b/version.sbt
@@ -1 +1 @@
-ThisBuild / version := "1.5.0"
+ThisBuild / version := "1.5.1"