
update option for pyspark (#152)
* update option for pyspark

* update workflow

* update workflow

* update
Nicole00 authored Nov 5, 2024
1 parent 8f465cb commit a623064
Showing 5 changed files with 29 additions and 13 deletions.
11 changes: 6 additions & 5 deletions .github/workflows/pull_request.yml
@@ -17,11 +17,12 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Set up JDK 1.8
-        uses: actions/setup-java@v1
+        uses: actions/setup-java@v4
         with:
-          java-version: 1.8
+          distribution: "temurin"
+          java-version: "8"
 
       - name: Cache the Maven packages to speed up build
         uses: actions/cache@v2
@@ -37,9 +38,9 @@ jobs:
           git clone https://github.com/vesoft-inc/nebula-docker-compose.git
           pushd nebula-docker-compose/
           cp ../../nebula-spark-connector/src/test/resources/docker-compose.yaml .
-          docker-compose up -d
+          docker compose up -d
           sleep 30
-          docker-compose ps
+          docker compose ps
           popd
           popd
9 changes: 5 additions & 4 deletions .github/workflows/release.yml
@@ -13,11 +13,12 @@ jobs:
     runs-on: ubuntu-latest
 
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Set up JDK 1.8
-        uses: actions/setup-java@v1
+        uses: actions/setup-java@v4
         with:
-          java-version: 1.8
+          distribution: "temurin"
+          java-version: "8"
 
       - name: Cache the Maven packages to speed up build
         uses: actions/cache@v2
@@ -33,7 +34,7 @@ jobs:
           git clone https://github.com/vesoft-inc/nebula-docker-compose.git
           pushd nebula-docker-compose/
           cp ../../nebula-spark-connector/src/test/resources/docker-compose.yaml .
-          docker-compose up -d
+          docker compose up -d
           sleep 30
           popd
           popd
9 changes: 5 additions & 4 deletions .github/workflows/snapshot.yml
@@ -15,11 +15,12 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Set up JDK 1.8
-        uses: actions/setup-java@v1
+        uses: actions/setup-java@v4
         with:
-          java-version: 1.8
+          distribution: "temurin"
+          java-version: "8"
 
       - name: Cache the Maven packages to speed up build
         uses: actions/cache@v2
@@ -35,7 +36,7 @@ jobs:
           git clone https://github.com/vesoft-inc/nebula-docker-compose.git
           pushd nebula-docker-compose/
           cp ../../nebula-spark-connector/src/test/resources/docker-compose.yaml .
-          docker-compose up -d
+          docker compose up -d
           sleep 30
           popd
           popd
6 changes: 6 additions & 0 deletions README.md
@@ -217,6 +217,7 @@ Let's try a write example, by default, the `writeMode` is `insert`
 ```python
 df.write.format("com.vesoft.nebula.connector.NebulaDataSource").option(
     "type", "vertex").option(
+    "operateType", "write").option(
     "spaceName", "basketballplayer").option(
     "label", "player").option(
     "vidPolicy", "").option(
@@ -232,6 +233,7 @@ For delete or update write mode, we could(for instance)specify with `writeMode`
 ```python
 df.write.format("com.vesoft.nebula.connector.NebulaDataSource").option(
     "type", "vertex").option(
+    "operateType", "write").option(
     "spaceName", "basketballplayer").option(
     "label", "player").option(
     "vidPolicy", "").option(
@@ -247,6 +249,7 @@ df.write.format("com.vesoft.nebula.connector.NebulaDataSource").option(
 ```python
 df.write.format("com.vesoft.nebula.connector.NebulaDataSource")\
     .mode("overwrite")\
+    .option("operateType", "write")\
     .option("srcPolicy", "")\
     .option("dstPolicy", "")\
     .option("metaAddress", "metad0:9559")\
@@ -266,6 +269,7 @@ df.write.format("com.vesoft.nebula.connector.NebulaDataSource")\
 ```python
 df.write.format("com.vesoft.nebula.connector.NebulaDataSource")\
     .mode("overwrite")\
+    .option("operateType", "write")\
     .option("srcPolicy", "")\
     .option("dstPolicy", "")\
     .option("metaAddress", "metad0:9559")\
@@ -289,6 +293,7 @@ For more options, i.e. delete edge with vertex being deleted, refer to [nebula/c
 ```scala
 /** write config */
+val OPERATE_TYPE: String = "operateType"
 val RATE_LIMIT: String = "rateLimit"
 val VID_POLICY: String = "vidPolicy"
 val SRC_POLICY: String = "srcPolicy"
@@ -330,6 +335,7 @@ spark = SparkSession.builder.config(
 df = spark.read.format(
     "com.vesoft.nebula.connector.NebulaDataSource").option(
     "type", "vertex").option(
+    "operateType", "write").option(
     "spaceName", "basketballplayer").option(
     "label", "player").option(
     "returnCols", "name,age").option(
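For reference, here is a minimal sketch of a complete PySpark vertex write with the `operateType` option from this change in place. Only `operateType` comes from this commit; the jar name, addresses, credentials, and the options not visible in the hunks above (`vertexField`, `batch`, `graphAddress`, `user`, `passwd`, `writeMode`) are illustrative assumptions in the spirit of the existing README examples.

```python
from pyspark.sql import SparkSession

# Assumed jar name and cluster addresses -- placeholders, not part of this commit.
spark = SparkSession.builder \
    .config("spark.jars", "nebula-spark-connector-3.0.0.jar") \
    .appName("nebula-write-sketch") \
    .getOrCreate()

# Toy DataFrame; the first column is used as the vertex id.
df = spark.createDataFrame(
    [("player101", "Tom", 30), ("player102", "Bob", 28)],
    ["_vertexId", "name", "age"])

df.write.format("com.vesoft.nebula.connector.NebulaDataSource").option(
    "type", "vertex").option(
    "operateType", "write").option(          # the option added in this commit
    "spaceName", "basketballplayer").option(
    "label", "player").option(
    "vidPolicy", "").option(
    "vertexField", "_vertexId").option(      # assumed id-column option
    "batch", 1).option(
    "metaAddress", "metad0:9559").option(
    "graphAddress", "graphd1:9669").option(  # assumed graphd address
    "user", "root").option(
    "passwd", "nebula").option(
    "writeMode", "insert").save()            # insert is the default writeMode
```

For `update` or `delete` writes, the hunks above suggest only the `writeMode` value changes while `operateType` stays `"write"`.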
7 changes: 7 additions & 0 deletions README_CN.md
@@ -208,6 +208,7 @@ Nebula Spark Connector 支持 Spark 2.2, 2.4 和 3.x.
 df = spark.read.format(
     "com.vesoft.nebula.connector.NebulaDataSource").option(
     "type", "vertex").option(
+    "operateType", "write").option(
     "spaceName", "basketballplayer").option(
     "label", "player").option(
     "returnCols", "name,age").option(
@@ -243,6 +244,7 @@ rows
 ```python
 df.write.format("com.vesoft.nebula.connector.NebulaDataSource").option(
     "type", "vertex").option(
+    "operateType", "write").option(
     "spaceName", "basketballplayer").option(
     "label", "player").option(
     "vidPolicy", "").option(
@@ -261,6 +263,7 @@ df.write.format("com.vesoft.nebula.connector.NebulaDataSource").option(
 ```python
 df.write.format("com.vesoft.nebula.connector.NebulaDataSource").option(
     "type", "vertex").option(
+    "operateType", "write").option(
     "spaceName", "basketballplayer").option(
     "label", "player").option(
     "vidPolicy", "").option(
@@ -278,6 +281,7 @@ df.write.format("com.vesoft.nebula.connector.NebulaDataSource").option(
 ```python
 df.write.format("com.vesoft.nebula.connector.NebulaDataSource")
     .mode("overwrite")
+    .option("operateType", "write")
     .option("srcPolicy", "")
     .option("dstPolicy", "")
     .option("metaAddress", "metad0:9559")
@@ -299,6 +303,7 @@ df.write.format("com.vesoft.nebula.connector.NebulaDataSource")
 ```python
 df.write.format("com.vesoft.nebula.connector.NebulaDataSource")
     .mode("overwrite")
+    .option("operateType", "write")
     .option("srcPolicy", "")
     .option("dstPolicy", "")
     .option("metaAddress", "metad0:9559")
@@ -323,6 +328,7 @@ df.write.format("com.vesoft.nebula.connector.NebulaDataSource")
 
 ```scala
 /** write config */
+val OPERATE_TYPE: String = "operateType"
 val RATE_LIMIT: String = "rateLimit"
 val VID_POLICY: String = "vidPolicy"
 val SRC_POLICY: String = "srcPolicy"
@@ -364,6 +370,7 @@ spark = SparkSession.builder.config(
 df = spark.read.format(
     "com.vesoft.nebula.connector.NebulaDataSource").option(
     "type", "vertex").option(
+    "operateType", "write").option(
     "spaceName", "basketballplayer").option(
     "label", "player").option(
     "returnCols", "name,age").option(
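The README_CN.md hunks mirror the same addition on the read path. A rough, self-contained read sketch with the option in place is shown below; the jar path, addresses, and `partitionNumber` follow the existing README read example and are assumptions, not something introduced by this commit.

```python
from pyspark.sql import SparkSession

# Placeholder jar and metad address; adjust for a real deployment.
spark = SparkSession.builder \
    .config("spark.jars", "nebula-spark-connector-3.0.0.jar") \
    .appName("nebula-read-sketch") \
    .getOrCreate()

df = spark.read.format(
    "com.vesoft.nebula.connector.NebulaDataSource").option(
    "type", "vertex").option(
    "operateType", "write").option(   # mirrors the value used in the updated examples
    "spaceName", "basketballplayer").option(
    "label", "player").option(
    "returnCols", "name,age").option(
    "metaAddress", "metad0:9559").option(
    "partitionNumber", 1).load()      # assumed partition option from the read docs

df.show(2)
```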
