diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml
index ee6620ec71c..1098b164ab1 100644
--- a/.github/workflows/backend.yml
+++ b/.github/workflows/backend.yml
@@ -111,7 +111,8 @@ jobs:
-D"maven.wagon.httpconnectionManager.ttlSeconds"=120
dependency-license:
- if: github.repository == 'apache/incubator-seatunnel'
+    # This job still has some tasks that need to be done, and it is not a blocker for the release.
+ if: "contains(toJSON(github.event.commits.*.message), '[ci-auto-license]')"
name: Dependency licenses
needs: [ sanity-check ]
runs-on: ubuntu-latest
@@ -144,7 +145,7 @@ jobs:
matrix:
java: [ '8', '11' ]
os: [ 'ubuntu-latest', 'windows-latest' ]
- timeout-minutes: 50
+ timeout-minutes: 90
steps:
- uses: actions/checkout@v2
- name: Set up JDK ${{ matrix.java }}
@@ -167,7 +168,7 @@ jobs:
matrix:
java: [ '8', '11' ]
os: [ 'ubuntu-latest' ]
- timeout-minutes: 50
+ timeout-minutes: 90
steps:
- uses: actions/checkout@v2
- name: Set up JDK ${{ matrix.java }}
diff --git a/docs/en/concept/connector-v2-features.md b/docs/en/concept/connector-v2-features.md
new file mode 100644
index 00000000000..d400722fa27
--- /dev/null
+++ b/docs/en/concept/connector-v2-features.md
@@ -0,0 +1,65 @@
+# Intro To Connector V2 Features
+
+## Differences Between Connector V2 And Connector V1
+
+Since https://github.com/apache/incubator-seatunnel/issues/1608 we added the Connector V2 features.
+Connector V2 is a connector defined based on the SeaTunnel Connector API interface. Unlike Connector V1, Connector V2 supports the following features.
+
+* **Multi Engine Support** SeaTunnel Connector API is an engine independent API. The connectors developed based on this API can run in multiple engines. Currently, Flink and Spark are supported, and we will support other engines in the future.
+* **Multi Engine Version Support** Decoupling the connector from the engine through the translation layer solves the problem that most connectors need to modify the code in order to support a new version of the underlying engine.
+* **Unified Batch And Stream** Connector V2 can perform batch processing or streaming processing. We do not need to develop connectors for batch and stream separately.
+* **Multiplexing JDBC/Log connection.** Connector V2 supports JDBC resource reuse and sharing database log parsing.
+
+## Source Connector Features
+
+Source connectors have some common core features, and each source connector supports them to varying degrees.
+
+### exactly-once
+
+If each piece of data in the data source will only be sent downstream by the source once, we consider this source connector to support exactly-once.
+
+In SeaTunnel, we can save the read **Split** and its **offset** (the position of the read data in the split at that time,
+such as line number, byte size, offset, etc.) as a **StateSnapshot** when a checkpoint is triggered. If the task is restarted, we will get the last **StateSnapshot**
+and then locate the **Split** and **offset** read last time and continue to send data downstream.
+
+For example `File`, `Kafka`.
+
+### schema projection
+
+If the source connector supports selective reading of certain columns, redefining the column order, or defining the data format read through the `schema` params, we think it supports schema projection.
+
+For example, `JDBCSource` can use SQL to define the read columns, and `KafkaSource` can use the `schema` params to define the read schema.
+
+### batch
+
+Batch job mode: the data read is bounded and the job will stop when all the data has been read.
+
+### stream
+
+Streaming job mode: the data read is unbounded and the job never stops.
+
+### parallelism
+
+A parallelism source connector supports configuring `parallelism`; each parallelism will create a task to read the data.
+In the **Parallelism Source Connector**, the source will be split into multiple splits, and then the enumerator will allocate the splits to the SourceReader for processing.
+
+### support user-defined split
+
+Users can configure the split rule.
+
+## Sink Connector Features
+
+Sink connectors have some common core features, and each sink connector supports them to varying degrees.
+
+### exactly-once
+
+When any piece of data flows into a distributed system, if the system processes each piece of data exactly once in the whole processing flow and the processing results are correct, it is considered that the system meets exactly-once consistency.
+
+For a sink connector, the sink connector supports exactly-once if any piece of data is only written into the target once. There are generally two ways to achieve this:
+
+* The target database supports key deduplication. For example `MySQL`, `Kudu`.
+* The target support **XA Transaction**(This transaction can be used across sessions. Even if the program that created the transaction has ended, the newly started program only needs to know the ID of the last transaction to resubmit or roll back the transaction). Then we can use **Two-phase Commit** to ensure **exactly-once**. For example `File`, `MySQL`.
+
+### schema projection
+
+If a sink connector supports writing only the configured fields and their types, or redefining the column order, we think it supports schema projection.
diff --git a/docs/en/connector-v2/sink/Assert.md b/docs/en/connector-v2/sink/Assert.md
index 9e5c49acfa4..5a161212630 100644
--- a/docs/en/connector-v2/sink/Assert.md
+++ b/docs/en/connector-v2/sink/Assert.md
@@ -6,6 +6,11 @@
A flink sink plugin which can assert illegal data by user defined rules
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/sink/Clickhouse.md b/docs/en/connector-v2/sink/Clickhouse.md
index ff1e8998936..02ac2246e45 100644
--- a/docs/en/connector-v2/sink/Clickhouse.md
+++ b/docs/en/connector-v2/sink/Clickhouse.md
@@ -4,7 +4,15 @@
## Description
-Used to write data to Clickhouse. Supports Batch and Streaming mode.
+Used to write data to Clickhouse.
+
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+
+The Clickhouse sink plug-in can achieve exactly-once by implementing idempotent writing, and needs to cooperate with the AggregatingMergeTree engine and other engines that support deduplication.
+
+- [ ] [schema projection](../../concept/connector-v2-features.md)
:::tip
diff --git a/docs/en/connector-v2/sink/ClickhouseFile.md b/docs/en/connector-v2/sink/ClickhouseFile.md
index f1c6e302470..90e196c9227 100644
--- a/docs/en/connector-v2/sink/ClickhouseFile.md
+++ b/docs/en/connector-v2/sink/ClickhouseFile.md
@@ -8,6 +8,11 @@ Generate the clickhouse data file with the clickhouse-local program, and then se
server, also call bulk load. This connector only support clickhouse table which engine is 'Distributed'.And `internal_replication` option
should be `true`. Supports Batch and Streaming mode.
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
:::tip
Write data to Clickhouse can also be done using JDBC
diff --git a/docs/en/connector-v2/sink/Datahub.md b/docs/en/connector-v2/sink/Datahub.md
index 292944cd583..800c2a54bb4 100644
--- a/docs/en/connector-v2/sink/Datahub.md
+++ b/docs/en/connector-v2/sink/Datahub.md
@@ -6,6 +6,11 @@
A sink plugin which use send message to datahub
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/sink/Elasticsearch.md b/docs/en/connector-v2/sink/Elasticsearch.md
index c8fbb551e7e..0d743c79909 100644
--- a/docs/en/connector-v2/sink/Elasticsearch.md
+++ b/docs/en/connector-v2/sink/Elasticsearch.md
@@ -4,6 +4,11 @@
Output data to `Elasticsearch`.
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
:::tip
Engine Supported
diff --git a/docs/en/connector-v2/source/Email.md b/docs/en/connector-v2/sink/Email.md
similarity index 92%
rename from docs/en/connector-v2/source/Email.md
rename to docs/en/connector-v2/sink/Email.md
index fdd13711763..cc74cf495fd 100644
--- a/docs/en/connector-v2/source/Email.md
+++ b/docs/en/connector-v2/sink/Email.md
@@ -8,6 +8,11 @@ Send the data as a file to email.
The tested email version is 1.5.6.
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/sink/Enterprise-WeChat.md b/docs/en/connector-v2/sink/Enterprise-WeChat.md
index 30364821251..28ec03059fa 100644
--- a/docs/en/connector-v2/sink/Enterprise-WeChat.md
+++ b/docs/en/connector-v2/sink/Enterprise-WeChat.md
@@ -13,6 +13,10 @@ A sink plugin which use Enterprise WeChat robot send message
> ```
**Tips: WeChat sink only support `string` webhook and the data from source will be treated as body content in web hook.**
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
## Options
diff --git a/docs/en/connector-v2/sink/Feishu.md b/docs/en/connector-v2/sink/Feishu.md
index 5359cc588a7..311a5d7fe87 100644
--- a/docs/en/connector-v2/sink/Feishu.md
+++ b/docs/en/connector-v2/sink/Feishu.md
@@ -10,6 +10,11 @@ Used to launch feishu web hooks using data.
**Tips: Feishu sink only support `post json` webhook and the data from source will be treated as body content in web hook.**
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/sink/FtpFile.md b/docs/en/connector-v2/sink/FtpFile.md
index 0384671c3d5..009d1bd619f 100644
--- a/docs/en/connector-v2/sink/FtpFile.md
+++ b/docs/en/connector-v2/sink/FtpFile.md
@@ -6,7 +6,12 @@
Output data to Ftp .
+## Key features
+- [x] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
+## Options
| name | type | required | default value |
|----------------------------------|---------|----------|-----------------------------------------------------------|
diff --git a/docs/en/connector-v2/sink/Greenplum.md b/docs/en/connector-v2/sink/Greenplum.md
index 9317e5c625f..91af690d541 100644
--- a/docs/en/connector-v2/sink/Greenplum.md
+++ b/docs/en/connector-v2/sink/Greenplum.md
@@ -6,6 +6,11 @@
Write data to Greenplum using [Jdbc connector](Jdbc.md).
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
:::tip
Not support exactly-once semantics (XA transaction is not yet supported in Greenplum database).
diff --git a/docs/en/connector-v2/sink/HdfsFile.md b/docs/en/connector-v2/sink/HdfsFile.md
index cbc2d616a92..e2e3b7561ff 100644
--- a/docs/en/connector-v2/sink/HdfsFile.md
+++ b/docs/en/connector-v2/sink/HdfsFile.md
@@ -4,26 +4,40 @@
## Description
-Output data to hdfs file. Support bounded and unbounded job.
+Output data to hdfs file
+
+## Key features
+
+- [x] [exactly-once](../../concept/connector-v2-features.md)
+
+By default, we use 2PC commit to ensure `exactly-once`
+
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+- [x] file format
+ - [x] text
+ - [x] csv
+ - [x] parquet
+ - [x] orc
+ - [x] json
## Options
In order to use this connector, You must ensure your spark/flink cluster already integrated hadoop. The tested hadoop version is 2.x.
-| name | type | required | default value |
-| --------------------------------- | ------ | -------- | ------------------------------------------------------------- |
-| path | string | yes | - |
-| file_name_expression | string | no | "${transactionId}" |
-| file_format | string | no | "text" |
-| filename_time_format | string | no | "yyyy.MM.dd" |
-| field_delimiter | string | no | '\001' |
-| row_delimiter | string | no | "\n" |
-| partition_by | array | no | - |
-| partition_dir_expression | string | no | "\${k0}=\${v0}\/\${k1}=\${v1}\/...\/\${kn}=\${vn}\/" |
-| is_partition_field_write_in_file | boolean| no | false |
-| sink_columns | array | no | When this parameter is empty, all fields are sink columns |
-| is_enable_transaction | boolean| no | true |
-| save_mode | string | no | "error" |
+| name | type | required | default value |
+| --------------------------------- | ------ | -------- |---------------------------------------------------------|
+| path | string | yes | - |
+| file_name_expression | string | no | "${transactionId}" |
+| file_format | string | no | "text" |
+| filename_time_format | string | no | "yyyy.MM.dd" |
+| field_delimiter | string | no | '\001' |
+| row_delimiter | string | no | "\n" |
+| partition_by | array | no | - |
+| partition_dir_expression | string | no | "${k0}=${v0}/${k1}=${v1}/.../${kn}=${vn}/" |
+| is_partition_field_write_in_file | boolean| no | false |
+| sink_columns | array | no | When this parameter is empty, all fields are sink columns |
+| is_enable_transaction | boolean| no | true |
+| save_mode | string | no | "error" |
### path [string]
diff --git a/docs/en/connector-v2/sink/Hive.md b/docs/en/connector-v2/sink/Hive.md
index b5ae8edc4ad..56b49ad7ba9 100644
--- a/docs/en/connector-v2/sink/Hive.md
+++ b/docs/en/connector-v2/sink/Hive.md
@@ -8,6 +8,18 @@ Write data to Hive.
In order to use this connector, You must ensure your spark/flink cluster already integrated hive. The tested hive version is 2.3.9.
+## Key features
+
+- [x] [exactly-once](../../concept/connector-v2-features.md)
+
+By default, we use 2PC commit to ensure `exactly-once`
+
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+- [x] file format
+ - [x] text
+ - [x] parquet
+ - [x] orc
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/sink/Http.md b/docs/en/connector-v2/sink/Http.md
index c871b9c1669..8f4ab2572f7 100644
--- a/docs/en/connector-v2/sink/Http.md
+++ b/docs/en/connector-v2/sink/Http.md
@@ -4,12 +4,17 @@
## Description
-Used to launch web hooks using data. Both support streaming and batch mode.
+Used to launch web hooks using data.
> For example, if the data from upstream is [`age: 12, name: tyrantlucifer`], the body content is the following: `{"age": 12, "name": "tyrantlucifer"}`
**Tips: Http sink only support `post json` webhook and the data from source will be treated as body content in web hook.**
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/sink/IoTDB.md b/docs/en/connector-v2/sink/IoTDB.md
index 3ea624fd578..31389c03f74 100644
--- a/docs/en/connector-v2/sink/IoTDB.md
+++ b/docs/en/connector-v2/sink/IoTDB.md
@@ -4,7 +4,16 @@
## Description
-Used to write data to IoTDB. Supports Batch and Streaming mode.
+Used to write data to IoTDB.
+
+## Key features
+
+- [x] [exactly-once](../../concept/connector-v2-features.md)
+
+IoTDB supports the `exactly-once` feature through idempotent writing. If two pieces of data have
+the same `key` and `timestamp`, the new data will overwrite the old one.
+
+- [ ] [schema projection](../../concept/connector-v2-features.md)
:::tip
diff --git a/docs/en/connector-v2/sink/Jdbc.md b/docs/en/connector-v2/sink/Jdbc.md
index e5063c61b3d..f8c883cec76 100644
--- a/docs/en/connector-v2/sink/Jdbc.md
+++ b/docs/en/connector-v2/sink/Jdbc.md
@@ -4,6 +4,15 @@
## Description
Write data through jdbc. Support Batch mode and Streaming mode, support concurrent writing, support exactly-once semantics (using XA transaction guarantee).
+
+## Key features
+
+- [x] [exactly-once](../../concept/connector-v2-features.md)
+
+Use `Xa transactions` to ensure `exactly-once`. So it only supports `exactly-once` for databases which support `Xa transactions`. You can set `is_exactly_once=true` to enable it.
+
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/sink/Kudu.md b/docs/en/connector-v2/sink/Kudu.md
index 9a67831da55..ae08b3afaba 100644
--- a/docs/en/connector-v2/sink/Kudu.md
+++ b/docs/en/connector-v2/sink/Kudu.md
@@ -8,6 +8,11 @@ Write data to Kudu.
The tested kudu version is 1.11.1.
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/sink/LocalFile.md b/docs/en/connector-v2/sink/LocalFile.md
index 773c625afcf..0df942e3704 100644
--- a/docs/en/connector-v2/sink/LocalFile.md
+++ b/docs/en/connector-v2/sink/LocalFile.md
@@ -4,24 +4,38 @@
## Description
-Output data to local file. Support bounded and unbounded job.
+Output data to local file.
+
+## Key features
+
+- [x] [exactly-once](../../concept/connector-v2-features.md)
+
+By default, we use 2PC commit to ensure `exactly-once`
+
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+- [x] file format
+ - [x] text
+ - [x] csv
+ - [x] parquet
+ - [x] orc
+ - [x] json
## Options
-| name | type | required | default value |
-| --------------------------------- | ------ | -------- | ------------------------------------------------------------- |
-| path | string | yes | - |
-| file_name_expression | string | no | "${transactionId}" |
-| file_format | string | no | "text" |
-| filename_time_format | string | no | "yyyy.MM.dd" |
-| field_delimiter | string | no | '\001' |
-| row_delimiter | string | no | "\n" |
-| partition_by | array | no | - |
-| partition_dir_expression | string | no | "\${k0}=\${v0}\/\${k1}=\${v1}\/...\/\${kn}=\${vn}\/" |
-| is_partition_field_write_in_file | boolean| no | false |
-| sink_columns | array | no | When this parameter is empty, all fields are sink columns |
-| is_enable_transaction | boolean| no | true |
-| save_mode | string | no | "error" |
+| name | type | required | default value |
+| --------------------------------- | ------ | -------- | --------------------------------------------------- |
+| path | string | yes | - |
+| file_name_expression | string | no | "${transactionId}" |
+| file_format | string | no | "text" |
+| filename_time_format | string | no | "yyyy.MM.dd" |
+| field_delimiter | string | no | '\001' |
+| row_delimiter | string | no | "\n" |
+| partition_by | array | no | - |
+| partition_dir_expression | string | no | "${k0}=${v0}/${k1}=${v1}/.../${kn}=${vn}/" |
+| is_partition_field_write_in_file | boolean| no | false |
+| sink_columns | array | no | When this parameter is empty, all fields are sink columns |
+| is_enable_transaction | boolean| no | true |
+| save_mode | string | no | "error" |
### path [string]
diff --git a/docs/en/connector-v2/sink/Neo4j.md b/docs/en/connector-v2/sink/Neo4j.md
index 4ab8017fe03..519212b0109 100644
--- a/docs/en/connector-v2/sink/Neo4j.md
+++ b/docs/en/connector-v2/sink/Neo4j.md
@@ -8,6 +8,11 @@ Write data to Neo4j.
`neo4j-java-driver` version 4.4.9
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/sink/OssFile.md b/docs/en/connector-v2/sink/OssFile.md
new file mode 100644
index 00000000000..c5a96aae1ed
--- /dev/null
+++ b/docs/en/connector-v2/sink/OssFile.md
@@ -0,0 +1,217 @@
+# OssFile
+
+> Oss file sink connector
+
+## Description
+
+Output data to oss file system.
+
+> Tips: We made some trade-offs in order to support more file types, so we used the HDFS protocol for internal access to OSS and this connector needs some hadoop dependencies.
+> It only supports hadoop version **2.9.X+**.
+
+## Key features
+
+- [x] [exactly-once](../../concept/connector-v2-features.md)
+
+By default, we use 2PC commit to ensure `exactly-once`
+
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+- [x] file format
+ - [x] text
+ - [x] csv
+ - [x] parquet
+ - [x] orc
+ - [x] json
+
+## Options
+
+| name | type | required | default value |
+|----------------------------------| ------ |---------|-----------------------------|
+| path | string | yes | - |
+| bucket | string | yes | - |
+| access_key | string | yes | - |
+| access_secret | string | yes | - |
+| endpoint | string | yes | - |
+| file_name_expression | string | no | "${transactionId}" |
+| file_format | string | no | "text" |
+| filename_time_format | string | no | "yyyy.MM.dd" |
+| field_delimiter | string | no | '\001' |
+| row_delimiter | string | no | "\n" |
+| partition_by | array | no | - |
+| partition_dir_expression | string | no | "${k0}=${v0}/${k1}=${v1}/.../${kn}=${vn}/" |
+| is_partition_field_write_in_file | boolean| no | false |
+| sink_columns | array | no | When this parameter is empty, all fields are sink columns |
+| is_enable_transaction | boolean| no | true |
+| save_mode | string | no | "error" |
+
+### path [string]
+
+The target dir path is required.
+
+### bucket [string]
+
+The bucket address of oss file system, for example: `oss://tyrantlucifer-image-bed`
+
+### access_key [string]
+
+The access key of oss file system.
+
+### access_secret [string]
+
+The access secret of oss file system.
+
+### endpoint [string]
+
+The endpoint of oss file system.
+
+### file_name_expression [string]
+
+`file_name_expression` describes the file expression which will be created into the `path`. We can add the variable `${now}` or `${uuid}` in the `file_name_expression`, like `test_${uuid}_${now}`,
+`${now}` represents the current time, and its format can be defined by specifying the option `filename_time_format`.
+
+Please note that, If `is_enable_transaction` is `true`, we will auto add `${transactionId}_` in the head of the file.
+
+### file_format [string]
+
+We supported as the following file types:
+
+`text` `csv` `parquet` `orc` `json`
+
+Please note that, The final file name will end with the file_format's suffix, the suffix of the text file is `txt`.
+
+### filename_time_format [string]
+
+When the format in the `file_name_expression` parameter is `xxxx-${now}` , `filename_time_format` can specify the time format of the path, and the default value is `yyyy.MM.dd` . The commonly used time formats are listed as follows:
+
+| Symbol | Description |
+| ------ | ------------------ |
+| y | Year |
+| M | Month |
+| d | Day of month |
+| H | Hour in day (0-23) |
+| m | Minute in hour |
+| s | Second in minute |
+
+See [Java SimpleDateFormat](https://docs.oracle.com/javase/tutorial/i18n/format/simpleDateFormat.html) for detailed time format syntax.
+
+### field_delimiter [string]
+
+The separator between columns in a row of data. Only needed by `text` and `csv` file format.
+
+### row_delimiter [string]
+
+The separator between rows in a file. Only needed by `text` and `csv` file format.
+
+### partition_by [array]
+
+Partition data based on selected fields
+
+### partition_dir_expression [string]
+
+If the `partition_by` is specified, we will generate the corresponding partition directory based on the partition information, and the final file will be placed in the partition directory.
+
+Default `partition_dir_expression` is `${k0}=${v0}/${k1}=${v1}/.../${kn}=${vn}/`. `k0` is the first partition field and `v0` is the value of the first partition field.
+
+### is_partition_field_write_in_file [boolean]
+
+If `is_partition_field_write_in_file` is `true`, the partition field and the value of it will be written into data file.
+
+For example, if you want to write a Hive Data File, Its value should be `false`.
+
+### sink_columns [array]
+
+Which columns need be written to file, default value is all the columns get from `Transform` or `Source`.
+The order of the fields determines the order in which the file is actually written.
+
+### is_enable_transaction [boolean]
+
+If `is_enable_transaction` is true, we will ensure that data will not be lost or duplicated when it is written to the target directory.
+
+Please note that, If `is_enable_transaction` is `true`, we will auto add `${transactionId}_` in the head of the file.
+
+Only support `true` now.
+
+### save_mode [string]
+
+Storage mode, currently supports `overwrite`. This means we will delete the old file when a new file has the same name as it.
+
+If `is_enable_transaction` is `true`, Basically, we won't encounter the same file name. Because we will add the transaction id to file name.
+
+For the specific meaning of each mode, see [save-modes](https://spark.apache.org/docs/latest/sql-programming-guide.html#save-modes)
+
+## Example
+
+For text file format
+
+```hocon
+
+ OssFile {
+ path="/seatunnel/sink"
+ bucket = "oss://tyrantlucifer-image-bed"
+ access_key = "xxxxxxxxxxx"
+ access_secret = "xxxxxxxxxxx"
+ endpoint = "oss-cn-beijing.aliyuncs.com"
+ field_delimiter="\t"
+ row_delimiter="\n"
+ partition_by=["age"]
+ partition_dir_expression="${k0}=${v0}"
+ is_partition_field_write_in_file=true
+ file_name_expression="${transactionId}_${now}"
+ file_format="text"
+ sink_columns=["name","age"]
+ filename_time_format="yyyy.MM.dd"
+ is_enable_transaction=true
+ save_mode="error"
+ }
+
+```
+
+For parquet file format
+
+```hocon
+
+ OssFile {
+ path="/seatunnel/sink"
+ bucket = "oss://tyrantlucifer-image-bed"
+ access_key = "xxxxxxxxxxx"
+ access_secret = "xxxxxxxxxxxxxxxxx"
+ endpoint = "oss-cn-beijing.aliyuncs.com"
+ field_delimiter="\t"
+ row_delimiter="\n"
+ partition_by=["age"]
+ partition_dir_expression="${k0}=${v0}"
+ is_partition_field_write_in_file=true
+ file_name_expression="${transactionId}_${now}"
+ file_format="parquet"
+ sink_columns=["name","age"]
+ filename_time_format="yyyy.MM.dd"
+ is_enable_transaction=true
+ save_mode="error"
+ }
+
+```
+
+For orc file format
+
+```hocon
+
+ OssFile {
+ path="/seatunnel/sink"
+ bucket = "oss://tyrantlucifer-image-bed"
+ access_key = "xxxxxxxxxxx"
+ access_secret = "xxxxxxxxxxx"
+ endpoint = "oss-cn-beijing.aliyuncs.com"
+ field_delimiter="\t"
+ row_delimiter="\n"
+ partition_by=["age"]
+ partition_dir_expression="${k0}=${v0}"
+ is_partition_field_write_in_file=true
+ file_name_expression="${transactionId}_${now}"
+ file_format="orc"
+ sink_columns=["name","age"]
+ filename_time_format="yyyy.MM.dd"
+ is_enable_transaction=true
+ save_mode="error"
+ }
+
+```
diff --git a/docs/en/connector-v2/sink/Phoenix.md b/docs/en/connector-v2/sink/Phoenix.md
index 746c54d319c..f7383daea1f 100644
--- a/docs/en/connector-v2/sink/Phoenix.md
+++ b/docs/en/connector-v2/sink/Phoenix.md
@@ -12,6 +12,11 @@ Two ways of connecting Phoenix with Java JDBC. One is to connect to zookeeper th
> Tips: Not support exactly-once semantics (XA transaction is not yet supported in Phoenix).
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
## Options
### driver [string]
diff --git a/docs/en/connector-v2/sink/Socket.md b/docs/en/connector-v2/sink/Socket.md
index 7339f7b0133..498cfa99d12 100644
--- a/docs/en/connector-v2/sink/Socket.md
+++ b/docs/en/connector-v2/sink/Socket.md
@@ -7,6 +7,10 @@
Used to send data to Socket Server. Both support streaming and batch mode.
> For example, if the data from upstream is [`age: 12, name: jared`], the content send to socket server is the following: `{"name":"jared","age":17}`
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
## Options
diff --git a/docs/en/connector-v2/sink/dingtalk.md b/docs/en/connector-v2/sink/dingtalk.md
index 6fe0e2a43d5..e949ae2bcbc 100644
--- a/docs/en/connector-v2/sink/dingtalk.md
+++ b/docs/en/connector-v2/sink/dingtalk.md
@@ -6,6 +6,11 @@
A sink plugin which use DingTalk robot send message
+## Key features
+
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/source/Clickhouse.md b/docs/en/connector-v2/source/Clickhouse.md
index 7e761c0eefe..e73c621b2ab 100644
--- a/docs/en/connector-v2/source/Clickhouse.md
+++ b/docs/en/connector-v2/source/Clickhouse.md
@@ -4,7 +4,19 @@
## Description
-Used to read data from Clickhouse. Currently, only supports Batch mode.
+Used to read data from Clickhouse.
+
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [ ] [stream](../../concept/connector-v2-features.md)
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+
+Supports query SQL and can achieve the projection effect.
+
+- [ ] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
:::tip
diff --git a/docs/en/connector-v2/source/FakeSource.md b/docs/en/connector-v2/source/FakeSource.md
index 9c4bf4ffd0a..3c66ce679ff 100644
--- a/docs/en/connector-v2/source/FakeSource.md
+++ b/docs/en/connector-v2/source/FakeSource.md
@@ -7,6 +7,15 @@
The FakeSource is a virtual data source, which randomly generates the number of rows according to the data structure of the user-defined schema,
just for testing, such as type conversion and feature testing
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [x] [stream](../../concept/connector-v2-features.md)
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+- [ ] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/source/Greenplum.md b/docs/en/connector-v2/source/Greenplum.md
index cd140549b78..fad156c24c1 100644
--- a/docs/en/connector-v2/source/Greenplum.md
+++ b/docs/en/connector-v2/source/Greenplum.md
@@ -6,6 +6,18 @@
Read Greenplum data through [Jdbc connector](Jdbc.md).
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [ ] [stream](../../concept/connector-v2-features.md)
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+
+Supports query SQL and can achieve the projection effect.
+
+- [x] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
+
:::tip
Optional jdbc drivers:
diff --git a/docs/en/connector-v2/source/HdfsFile.md b/docs/en/connector-v2/source/HdfsFile.md
index 00bbe5fdd88..e6b3fc90c36 100644
--- a/docs/en/connector-v2/source/HdfsFile.md
+++ b/docs/en/connector-v2/source/HdfsFile.md
@@ -6,6 +6,21 @@
Read data from hdfs file system.
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [ ] [stream](../../concept/connector-v2-features.md)
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+- [x] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
+- [x] file format
+ - [x] text
+ - [x] csv
+ - [x] parquet
+ - [x] orc
+ - [x] json
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/source/Http.md b/docs/en/connector-v2/source/Http.md
index 507cf64f1fb..0fbbc43e1e0 100644
--- a/docs/en/connector-v2/source/Http.md
+++ b/docs/en/connector-v2/source/Http.md
@@ -4,7 +4,16 @@
## Description
-Used to read data from Http. Both support streaming and batch mode.
+Used to read data from Http.
+
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [x] [stream](../../concept/connector-v2-features.md)
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+- [ ] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
## Options
diff --git a/docs/en/connector-v2/source/Hudi.md b/docs/en/connector-v2/source/Hudi.md
index 2fb9f060451..7eae78720f3 100644
--- a/docs/en/connector-v2/source/Hudi.md
+++ b/docs/en/connector-v2/source/Hudi.md
@@ -8,6 +8,18 @@ Used to read data from Hudi. Currently, only supports hudi cow table and Snapsho
In order to use this connector, You must ensure your spark/flink cluster already integrated hive. The tested hive version is 2.3.9.
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+
+Currently, only supports hudi cow table and Snapshot Query with Batch Mode
+
+- [ ] [stream](../../concept/connector-v2-features.md)
+- [x] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+- [x] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/source/IoTDB.md b/docs/en/connector-v2/source/IoTDB.md
index cd241e4208d..01a3487a387 100644
--- a/docs/en/connector-v2/source/IoTDB.md
+++ b/docs/en/connector-v2/source/IoTDB.md
@@ -4,7 +4,19 @@
## Description
-Read external data source data through IoTDB. Currently supports Batch mode.
+Read external data source data through IoTDB.
+
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [ ] [stream](../../concept/connector-v2-features.md)
+- [x] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+
+Supports query SQL, which can achieve the effect of projection.
+
+- [x] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
## Options
diff --git a/docs/en/connector-v2/source/Jdbc.md b/docs/en/connector-v2/source/Jdbc.md
index 18c075d2c67..5f1e47ac98e 100644
--- a/docs/en/connector-v2/source/Jdbc.md
+++ b/docs/en/connector-v2/source/Jdbc.md
@@ -4,7 +4,19 @@
## Description
-Read external data source data through JDBC. Currently supports mysql and Postgres databases, and supports Batch mode.
+Read external data source data through JDBC.
+
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [ ] [stream](../../concept/connector-v2-features.md)
+- [x] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+
+Supports query SQL, which can achieve the effect of projection.
+
+- [x] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
## Options
diff --git a/docs/en/connector-v2/source/Kudu.md b/docs/en/connector-v2/source/Kudu.md
index 3cb6cff766a..22ff426236a 100644
--- a/docs/en/connector-v2/source/Kudu.md
+++ b/docs/en/connector-v2/source/Kudu.md
@@ -4,10 +4,19 @@
## Description
-Used to read data from Kudu. Currently, only supports Query with Batch Mode.
+Used to read data from Kudu.
The tested kudu version is 1.11.1.
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [ ] [stream](../../concept/connector-v2-features.md)
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+- [ ] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/source/LocalFile.md b/docs/en/connector-v2/source/LocalFile.md
index e6ac6142f34..1067f2079ea 100644
--- a/docs/en/connector-v2/source/LocalFile.md
+++ b/docs/en/connector-v2/source/LocalFile.md
@@ -6,6 +6,21 @@
Read data from local file system.
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [ ] [stream](../../concept/connector-v2-features.md)
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+- [x] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
+- [x] file format
+ - [x] text
+ - [x] csv
+ - [x] parquet
+ - [x] orc
+ - [x] json
+
## Options
| name | type | required | default value |
diff --git a/docs/en/connector-v2/source/OssFile.md b/docs/en/connector-v2/source/OssFile.md
index e81914f5432..7e296bed0da 100644
--- a/docs/en/connector-v2/source/OssFile.md
+++ b/docs/en/connector-v2/source/OssFile.md
@@ -9,17 +9,33 @@ Read data from aliyun oss file system.
> Tips: We made some trade-offs in order to support more file types, so we used the HDFS protocol for internal access to OSS and this connector need some hadoop dependencies.
> It's only support hadoop version **2.9.X+**.
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [ ] [stream](../../concept/connector-v2-features.md)
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+- [x] file format
+ - [x] text
+ - [x] csv
+ - [x] parquet
+ - [x] orc
+ - [x] json
+
+- [x] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
+
## Options
-| name | type | required | default value |
-|--------------|--------|----------|---------------|
-| path | string | yes | - |
-| type | string | yes | - |
-| bucket | string | yes | - |
-| accessKey | string | yes | - |
-| accessSecret | string | yes | - |
-| endpoint | string | yes | - |
-| schema | config | no | - |
+| name | type | required | default value |
+|---------------|--------|----------|---------------|
+| path | string | yes | - |
+| type | string | yes | - |
+| bucket | string | yes | - |
+| access_key | string | yes | - |
+| access_secret | string | yes | - |
+| endpoint | string | yes | - |
+| schema | config | no | - |
### path [string]
@@ -77,11 +93,11 @@ Now connector will treat the upstream data as the following:
The bucket address of oss file system, for example: `oss://tyrantlucifer-image-bed`
-### accessKey [string]
+### access_key [string]
The access key of oss file system.
-### accessSecret [string]
+### access_secret [string]
The access secret of oss file system.
@@ -100,8 +116,8 @@ The schema of upstream data.
OssFile {
path = "/seatunnel/orc"
bucket = "oss://tyrantlucifer-image-bed"
- accessKey = "xxxxxxxxxxxxxxxxx"
- accessSecret = "xxxxxxxxxxxxxxxxxxxxxx"
+ access_key = "xxxxxxxxxxxxxxxxx"
+ access_secret = "xxxxxxxxxxxxxxxxxxxxxx"
endpoint = "oss-cn-beijing.aliyuncs.com"
type = "orc"
}
@@ -113,8 +129,8 @@ The schema of upstream data.
OssFile {
path = "/seatunnel/json"
bucket = "oss://tyrantlucifer-image-bed"
- accessKey = "xxxxxxxxxxxxxxxxx"
- accessSecret = "xxxxxxxxxxxxxxxxxxxxxx"
+ access_key = "xxxxxxxxxxxxxxxxx"
+ access_secret = "xxxxxxxxxxxxxxxxxxxxxx"
endpoint = "oss-cn-beijing.aliyuncs.com"
type = "json"
schema {
diff --git a/docs/en/connector-v2/source/Phoenix.md b/docs/en/connector-v2/source/Phoenix.md
index 9d68f70ce21..a82196ea397 100644
--- a/docs/en/connector-v2/source/Phoenix.md
+++ b/docs/en/connector-v2/source/Phoenix.md
@@ -10,6 +10,18 @@ Two ways of connecting Phoenix with Java JDBC. One is to connect to zookeeper th
> Tips: By default, the (thin) driver jar is used. If you want to use the (thick) driver or other versions of Phoenix (thin) driver, you need to recompile the jdbc connector module
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [x] [stream](../../concept/connector-v2-features.md)
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+
+Supports query SQL, which can achieve the effect of projection.
+
+- [ ] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
+
## Options
### driver [string]
diff --git a/docs/en/connector-v2/source/Redis.md b/docs/en/connector-v2/source/Redis.md
new file mode 100644
index 00000000000..dfb1b434039
--- /dev/null
+++ b/docs/en/connector-v2/source/Redis.md
@@ -0,0 +1,158 @@
+# Redis
+
+> Redis source connector
+
+## Description
+
+Used to read data from Redis.
+
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [ ] [stream](../../concept/connector-v2-features.md)
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+- [ ] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
+
+## Options
+
+| name | type | required | default value |
+|-----------|--------|----------|---------------|
+| host | string | yes | - |
+| port | int | yes | - |
+| keys | string | yes | - |
+| data_type | string | yes | - |
+| auth | string | No | - |
+| schema | config | No | - |
+| format | string | No | json |
+
+### host [string]
+
+redis host
+
+### port [int]
+
+redis port
+
+### keys [string]
+
+keys pattern
+
+**Tips: The Redis source connector supports fuzzy key matching; the user needs to ensure that the matched keys are all of the same type**
+
+### data_type [string]
+
+Redis data types, supports `key` `hash` `list` `set` `zset`
+
+- key
+> The value of each key will be sent downstream as a single row of data.
+> For example, the value of key is `SeaTunnel test message`, the data received downstream is `SeaTunnel test message` and only one message will be received.
+
+
+- hash
+> The hash key-value pairs will be formatted as json to be sent downstream as a single row of data.
+> For example, the value of hash is `name:tyrantlucifer age:26`, the data received downstream is `{"name":"tyrantlucifer", "age":"26"}` and only one message will be received.
+
+- list
+> Each element in the list will be sent downstream as a single row of data.
+> For example, the value of list is `[tyrantlucifer, CalvinKirs]`, the data received downstream are `tyrantlucifer` and `CalvinKirs` and only two messages will be received.
+
+- set
+> Each element in the set will be sent downstream as a single row of data
+> For example, the value of set is `[tyrantlucifer, CalvinKirs]`, the data received downstream are `tyrantlucifer` and `CalvinKirs` and only two messages will be received.
+
+- zset
+> Each element in the sorted set will be sent downstream as a single row of data
+> For example, the value of sorted set is `[tyrantlucifer, CalvinKirs]`, the data received downstream are `tyrantlucifer` and `CalvinKirs` and only two messages will be received.
+
+### auth [String]
+
+redis authentication password, you need it when you connect to an encrypted cluster
+
+### format [String]
+
+The format of upstream data; currently only `json` and `text` are supported, default `json`.
+
+when you assign format is `json`, you should also assign schema option, for example:
+
+upstream data is the following:
+
+```json
+
+{"code": 200, "data": "get success", "success": true}
+
+```
+
+you should assign schema as the following:
+
+```hocon
+
+schema {
+ fields {
+ code = int
+ data = string
+ success = boolean
+ }
+}
+
+```
+
+connector will generate data as the following:
+
+| code | data | success |
+|------|-------------|---------|
+| 200 | get success | true |
+
+when you assign format is `text`, connector will do nothing for upstream data, for example:
+
+upstream data is the following:
+
+```json
+
+{"code": 200, "data": "get success", "success": true}
+
+```
+
+connector will generate data as the following:
+
+| content |
+|---------|
+| {"code": 200, "data": "get success", "success": true} |
+
+### schema [Config]
+
+#### fields [Config]
+
+the schema fields of upstream data
+
+## Example
+
+simple:
+
+```hocon
+ Redis {
+ host = localhost
+ port = 6379
+ keys = "key_test*"
+ data_type = key
+ format = text
+ }
+```
+
+```hocon
+ Redis {
+ host = localhost
+ port = 6379
+ keys = "key_test*"
+ data_type = key
+ format = json
+ schema {
+ fields {
+ name = string
+ age = int
+ }
+ }
+ }
+```
+
diff --git a/docs/en/connector-v2/source/Socket.md b/docs/en/connector-v2/source/Socket.md
index b9b0a25406e..84a2b487ead 100644
--- a/docs/en/connector-v2/source/Socket.md
+++ b/docs/en/connector-v2/source/Socket.md
@@ -4,7 +4,16 @@
## Description
-Used to read data from Socket. Both support streaming and batch mode.
+Used to read data from Socket.
+
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [x] [stream](../../concept/connector-v2-features.md)
+- [ ] [exactly-once](../../concept/connector-v2-features.md)
+- [ ] [schema projection](../../concept/connector-v2-features.md)
+- [ ] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
## Options
diff --git a/docs/en/connector-v2/source/pulsar.md b/docs/en/connector-v2/source/pulsar.md
index b028dd36109..02c42a26045 100644
--- a/docs/en/connector-v2/source/pulsar.md
+++ b/docs/en/connector-v2/source/pulsar.md
@@ -4,7 +4,16 @@
## Description
-Source connector for Apache Pulsar. It can support both off-line and real-time jobs.
+Source connector for Apache Pulsar.
+
+## Key features
+
+- [x] [batch](../../concept/connector-v2-features.md)
+- [x] [stream](../../concept/connector-v2-features.md)
+- [x] [exactly-once](../../concept/connector-v2-features.md)
+- [x] [schema projection](../../concept/connector-v2-features.md)
+- [x] [parallelism](../../concept/connector-v2-features.md)
+- [ ] [support user-defined split](../../concept/connector-v2-features.md)
## Options
diff --git a/docs/en/connector/flink-sql/ElasticSearch.md b/docs/en/connector/flink-sql/ElasticSearch.md
index b41d2bd43d1..317c638ad06 100644
--- a/docs/en/connector/flink-sql/ElasticSearch.md
+++ b/docs/en/connector/flink-sql/ElasticSearch.md
@@ -9,8 +9,8 @@ With elasticsearch connector, you can use the Flink SQL to write data into Elast
## Usage
Let us have a brief example to show how to use the connector.
-### 1. kafka prepare
-Please refer to the [Eleastic Doc](https://www.elastic.co/guide/index.html) to prepare elastic environment.
+### 1. Elastic prepare
+Please refer to the [Elastic Doc](https://www.elastic.co/guide/index.html) to prepare elastic environment.
### 2. prepare seatunnel configuration
ElasticSearch provide different connectors for different version:
diff --git a/plugin-mapping.properties b/plugin-mapping.properties
index 97885524943..0107a87df3b 100644
--- a/plugin-mapping.properties
+++ b/plugin-mapping.properties
@@ -112,6 +112,7 @@ seatunnel.sink.HdfsFile = connector-file-hadoop
seatunnel.source.LocalFile = connector-file-local
seatunnel.sink.LocalFile = connector-file-local
seatunnel.source.OssFile = connector-file-oss
+seatunnel.sink.OssFile = connector-file-oss
seatunnel.source.Pulsar = connector-pulsar
seatunnel.source.Hudi = connector-hudi
seatunnel.sink.DingTalk = connector-dingtalk
@@ -121,5 +122,6 @@ seatunnel.sink.IoTDB = connector-iotdb
seatunnel.sink.Neo4j = connector-neo4j
seatunnel.sink.FtpFile = connector-file-ftp
seatunnel.sink.Socket = connector-socket
+seatunnel.source.Redis = connector-redis
seatunnel.sink.DataHub = connector-datahub
seatunnel.sink.Sentry = connector-sentry
diff --git a/pom.xml b/pom.xml
index 6982068dec6..0c4695e5240 100644
--- a/pom.xml
+++ b/pom.xml
@@ -109,6 +109,7 @@
+
2.1.3-SNAPSHOT2.1.1UTF-8
@@ -117,23 +118,16 @@
2.11${java.version}${java.version}
- 2.4.0
- 2.4
- 4.1.00.13.1
- 1.13.6
- 0.11.1
- 1.5.6
- 1.8.2
- 2.3.9
+ 1.13.6
+ 2.4.0
+ 2.41.21.9.41.41.71.8.1
- 4.3.01.9.13
- 3.1.61.192.12.7
@@ -153,51 +147,27 @@
2.9.12.8.23.3.1
- 2.0.2
+ 3.10.13.8
- 1.8.0
- 7.0.2
- 5.0.0-HBase-2.0
- 0.36.3.17.5.12.7.5-7.0
- 1.10.0
- 6.8.3
- 0.2
- 1.0.0
- 1.7.0
- 2.2.0
- 2.6.03.4
- 3.6
- 1.11.1
- 1.5.64.43.3.0providedprovided
- 2.9.21.13
- 4.5.13
- 4.4.4
- 4.4.4
- 4.1.43.0.0apache${project.version}1.815.9.0
- 2.4.1
- 0.22.1
- 2.7.0
- 1.29.01.3.3
- 3.2.4
+ 3.3.03.2.0
- 1.2.7
+ 1.3.01.20
- 2.222.17.11.0.00.38.0
@@ -214,7 +184,6 @@
2.6.53.0.0org.apache.seatunnel.shade
- 4.3.01.1.8.32.6.85.3.20
@@ -225,13 +194,9 @@
6.2.2.Final1.14.31.3.2
- 7.5.13.10.0
- 0.13.1
- 5.2.5-HBase-2.x4.2.0
- 4.4.9
- 2.19.0-public
+ 0.10.7
@@ -241,109 +206,6 @@
seatunnel-config-shade${seatunnel.config.shade.version}
-
-
- org.apache.spark
- spark-streaming_${scala.binary.version}
- ${spark.version}
- ${spark.scope}
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
- ${spark.version}
- ${spark.scope}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
- ${spark.version}
- ${spark.scope}
-
-
-
- org.apache.spark
- spark-hive_${scala.binary.version}
- ${spark.version}
- ${spark.scope}
-
-
- net.jpountz.lz4
- lz4
- 1.3.0
-
-
-
- org.apache.kudu
- kudu-client
- ${kudu.version}
-
-
-
- com.sun.mail
- javax.mail
- ${email.version}
-
-
-
- org.apache.flink
- flink-java
- ${flink.version}
- ${flink.scope}
-
-
- org.apache.flink
- flink-table-planner_${scala.binary.version}
- ${flink.version}
- ${flink.scope}
-
-
- org.apache.flink
- flink-table-planner-blink_${scala.binary.version}
- ${flink.version}
- ${flink.scope}
-
-
-
- org.apache.flink
- flink-streaming-java_${scala.binary.version}
- ${flink.version}
- ${flink.scope}
-
-
-
- org.apache.flink
- flink-table-common
- ${flink.version}
- ${flink.scope}
-
-
-
- org.apache.flink
- flink-table-api-java-bridge_${scala.binary.version}
- ${flink.version}
- ${flink.scope}
-
-
- org.apache.flink
- flink-optimizer_${scala.binary.version}
- ${flink.version}
- ${flink.scope}
-
-
- org.apache.flink
- flink-clients_${scala.binary.version}
- ${flink.version}
- ${flink.scope}
-
-
- org.apache.flink
- flink-runtime-web_${scala.binary.version}
- ${flink.version}
- ${flink.scope}
-
@@ -359,48 +221,12 @@
${postgresql.version}
-
- com.norbitltd
- spoiwo_${scala.binary.version}
- ${spoiwo.version}
-
-
commons-codeccommons-codec${codec.version}
-
- com.typesafe.play
- play-mailer_${scala.binary.version}
- ${play-mailer.version}
-
-
-
- org.apache.phoenix
- phoenix-spark
- ${phoenix-spark.version}
-
-
- org.glassfish.web
- javax.servlet.jsp
-
-
-
-
-
- org.mongodb.spark
- mongo-spark-connector_${scala.binary.version}
- ${mongo-spark.version}
-
-
-
- com.101tec
- zkclient
- ${zkclient.version}
-
-
org.apache.flinkflink-shaded-hadoop-2
@@ -413,82 +239,6 @@
-
- org.apache.parquet
- parquet-avro
- ${parquet-avro.version}
-
-
-
- org.elasticsearch
- elasticsearch-spark-20_${scala.binary.version}
- ${elasticsearch-spark.version}
-
-
-
- org.apache.flink
- flink-connector-elasticsearch${elasticsearch}_${scala.binary.version}
- ${flink.version}
-
-
-
- org.apache.flink
- flink-connector-elasticsearch6_${scala.binary.version}
- ${flink.version}
-
-
-
- org.apache.flink
- flink-connector-jdbc_${scala.binary.version}
- ${flink.version}
-
-
-
- org.apache.flink
- flink-connector-kafka_${scala.binary.version}
- ${flink.version}
-
-
-
- ru.yandex.clickhouse
- clickhouse-jdbc
- ${clickhouse-jdbc.version}
-
-
-
- org.apache.hbase.connectors.spark
- hbase-spark
- ${hbase-spark.version}
-
-
- org.glassfish.web
- javax.servlet.jsp
-
-
- junit
- junit
-
-
-
-
-
- org.apache.kudu
- kudu-spark2_${scala.binary.version}
- ${kudu-spark.version}
-
-
-
- com.redislabs
- spark-redis_${scala.binary.version}
- ${spark-redis.version}
-
-
-
- org.apache.spark
- spark-streaming-kafka-0-10_${scala.binary.version}
- ${spark.version}
-
-
org.projectlomboklombok
@@ -506,92 +256,13 @@
commons-collections4${commons-collections4.version}
-
-
- org.apache.flink
- flink-csv
- ${flink.version}
-
-
- org.apache.flink
- flink-orc_${scala.binary.version}
- ${flink.version}
-
-
- org.apache.flink
- flink-parquet_${scala.binary.version}
- ${flink.version}
-
-
- org.apache.flink
- flink-json
- ${flink.version}
-
-
- org.apache.flink
- flink-avro
- ${flink.version}
-
-
-
- org.apache.flink
- flink-statebackend-rocksdb_${scala.binary.version}
- ${flink.version}
-
-
-
- org.apache.hadoop
- hadoop-aliyun
- ${hadoop-aliyun.version}
-
-
-
- org.apache.hudi
- hudi-hadoop-mr-bundle
- ${hudi.version}
-
-
-
- org.apache.hudi
- hudi-spark-bundle_${scala.binary.version}
- ${hudi.version}
-
-
-
- org.apache.httpcomponents
- httpclient
- ${httpclient.version}
-
-
- org.apache.httpcomponents
- httpcore
- ${httpcore.version}
-
-
-
- org.apache.httpcomponents
- httpcore-nio
- ${httpcore-nio.version}
-
-
-
- org.apache.httpcomponents
- httpasyncclient
- ${httpasyncclient.version}
-
-
+
com.beustjcommander${jcommander.version}
-
- org.apache.sshd
- sshd-scp
- ${sshd.version}
-
-
org.junit.jupiterjunit-jupiter-engine
@@ -624,12 +295,6 @@
${jackson.version}
-
- com.fasterxml.jackson.module
- jackson-module-scala_${scala.binary.version}
- ${jackson.version}
-
-
org.testcontainerstestcontainers
@@ -646,57 +311,17 @@
-
-
- com.pingcap.tispark
- tispark-assembly
- ${tispark.version}
- org.apache.logging.log4jlog4j-core${log4j-core.version}
-
- org.apache.druid
- druid-indexing-service
- ${druid.version}
-
-
- org.apache.calcite
- calcite-druid
- ${calcite-druid.version}
- com.typesafeconfig${config.version}
-
- org.neo4j
- neo4j-connector-apache-spark_${scala.binary.version}
- ${neo4j.connector.spark.version}_for_spark_${spark.binary.version}
-
-
-
- org.influxdb
- influxdb-java
- ${influxdb-java.version}
-
-
-
- org.apache.iceberg
- iceberg-core
- ${iceberg.version}
-
-
- org.apache.iceberg
- iceberg-spark-runtime
- ${iceberg.version}
- provided
-
-
org.scala-langscala-library
@@ -708,12 +333,6 @@
guava${guava.version}
-
-
- com.github.jsonzou
- jmockdata
- ${jmockdata.version}
- org.slf4jslf4j-api
@@ -725,172 +344,12 @@
slf4j-log4j12${slf4j.version}
-
-
- org.xerial.snappy
- snappy-java
- ${snappy-java.version}
-
-
-
- org.apache.orc
- orc-core
- ${orc.version}
-
-
- javax.servlet
- servlet-api
-
-
-
- org.apache.logging.log4j
- *
-
-
- com.fasterxml.jackson.core
- *
-
-
- org.apapche.hadoop
- *
-
-
- org.apache.curator
- *
-
-
-
-
- org.codehaus.jackson
- jackson-core-asl
- ${codehaus.jackson.version}
-
-
- org.codehaus.jackson
- jackson-xc
- ${codehaus.jackson.version}
-
-
- org.codehaus.jackson
- jackson-mapper-asl
- ${codehaus.jackson.version}
-
-
- org.codehaus.jackson
- jackson-jaxrs
- ${codehaus.jackson.version}
-
-
- com.sun.jersey
- jersey-json
- ${jersey.version}
-
-
- org.apache.hive
- hive-exec
- ${hive.exec.version}
-
-
- org.pentaho
- pentaho-aggdesigner-algorithm
-
-
- javax.servlet
- servlet-api
-
-
- org.apache.logging.log4j
- log4j-1.2-api
-
-
- org.apache.logging.log4j
- log4j-web
-
-
- com.fasterxml.jackson.core
- *
-
-
- org.apapche.hadoop
- *
-
-
- com.github.joshelser
- dropwizard-metrics-hadoop-metrics2-reporter
-
-
- org.apache.logging.log4j
- *
-
-
- org.apache.zookeeper
- zookeeper
-
-
- org.apache.hadoop
- hadoop-yarn-server-resourcemanager
-
-
- org.apache.hadoop
- hadoop-hdfs
-
-
-
+
commons-loggingcommons-logging${commons.logging.version}
-
- commons-beanutils
- commons-beanutils
- ${commons.beanutils.version}
-
-
- commons-cli
- commons-cli
- ${commons.cli.version}
-
-
- commons-configuration
- commons-configuration
- ${commons.configuration.version}
-
-
- commons-digester
- commons-digester
- ${commons.digester.version}
-
-
- org.apache.curator
- curator-client
- ${curator.version}
-
-
- org.apache.curator
- curator-framework
- ${curator.version}
-
-
- org.apache.curator
- curator-recipes
- ${curator.version}
-
-
- com.sun.jersey
- jersey-core
- ${jersey.version}
-
-
- com.sun.jersey
- jersey-server
- ${jersey.version}
-
-
- javax.servlet.jsp
- jsp-api
- ${javax.servlet.jap.version}
-
@@ -953,24 +412,6 @@
checker-qual${checker.qual.version}
-
-
- org.apache.iotdb
- iotdb-session
- ${iotdb.version}
-
-
- ch.qos.logback
- logback-classic
-
-
-
-
-
- com.aliyun.phoenix
- ali-phoenix-shaded-thin-client
- ${phoenix.version}
- org.awaitility
@@ -980,15 +421,21 @@
- commons-net
- commons-net
- ${commons-net.version}
+ io.jsonwebtoken
+ jjwt-api
+ ${jwt.version}
+
+
+ io.jsonwebtoken
+ jjwt-impl
+ ${jwt.version}
+ runtime
-
- com.aliyun.datahub
- aliyun-sdk-datahub
- ${datahub.version}
+ io.jsonwebtoken
+ jjwt-jackson
+ ${jwt.version}
+ runtime
@@ -1134,7 +581,10 @@
maven-shade-plugin${maven-shade-plugin.version}
- false
+ false
+ true
+
+ true*:*
@@ -1302,37 +752,36 @@
+
+
+ org.codehaus.mojo
+ flatten-maven-plugin
+ ${flatten-maven-plugin.version}
+
+ true
+ resolveCiFriendliesOnly
+
+
+
+ flatten
+ process-resources
+
+ flatten
+
+
+
+ flatten.clean
+ clean
+
+ clean
+
+
+
+
-
-
- org.codehaus.mojo
- flatten-maven-plugin
- ${flatten-maven-plugin.version}
-
-
- resolveCiFriendliesOnly
-
-
-
- flatten
- process-resources
-
- flatten
-
-
-
- flatten.clean
- clean
-
- clean
-
-
-
-
-
org.apache.maven.pluginsmaven-compiler-plugin
@@ -1367,6 +816,12 @@
maven-shade-plugin
+
+
+ org.codehaus.mojo
+ flatten-maven-plugin
+
+
org.apache.maven.pluginsmaven-surefire-plugin
diff --git a/seatunnel-apis/seatunnel-api-flink/pom.xml b/seatunnel-apis/seatunnel-api-flink/pom.xml
index 0668fb2c656..2e0d90a74f2 100644
--- a/seatunnel-apis/seatunnel-api-flink/pom.xml
+++ b/seatunnel-apis/seatunnel-api-flink/pom.xml
@@ -29,6 +29,10 @@
4.0.0seatunnel-api-flink
+
+
+ 1.8.2
+
@@ -46,51 +50,67 @@
org.apache.flinkflink-java
+ ${flink.1.13.6.version}
+ providedorg.apache.flinkflink-table-planner_${scala.binary.version}
+ ${flink.1.13.6.version}
+ providedorg.apache.flinkflink-table-planner-blink_${scala.binary.version}
+ ${flink.1.13.6.version}
+ providedorg.apache.flinkflink-streaming-java_${scala.binary.version}
+ ${flink.1.13.6.version}
+ providedorg.apache.flinkflink-optimizer_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}
+ providedorg.apache.flinkflink-statebackend-rocksdb_${scala.binary.version}
+ ${flink.1.13.6.version}
+ providedorg.apache.flinkflink-csv
+ ${flink.1.13.6.version}org.apache.flinkflink-orc_${scala.binary.version}
+ ${flink.1.13.6.version}org.apache.flinkflink-parquet_${scala.binary.version}
+ ${flink.1.13.6.version}org.apache.flinkflink-json
+ ${flink.1.13.6.version}org.apache.flinkflink-avro
+ ${flink.1.13.6.version}avro
diff --git a/seatunnel-apis/seatunnel-api-spark/pom.xml b/seatunnel-apis/seatunnel-api-spark/pom.xml
index 8f4e7ca0ac7..020635abaab 100644
--- a/seatunnel-apis/seatunnel-api-spark/pom.xml
+++ b/seatunnel-apis/seatunnel-api-spark/pom.xml
@@ -29,6 +29,10 @@
4.0.0seatunnel-api-spark
+
+
+ 1.3.0
+
@@ -45,18 +49,25 @@
org.apache.sparkspark-streaming_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}org.apache.sparkspark-core_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}org.apache.sparkspark-sql_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}net.jpountz.lz4lz4
+ ${lz4.version}
diff --git a/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/stream/SparkStreamingExecution.scala b/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/stream/SparkStreamingExecution.scala
index 3ad85d3c83b..afb62667fdd 100644
--- a/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/stream/SparkStreamingExecution.scala
+++ b/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/stream/SparkStreamingExecution.scala
@@ -48,9 +48,12 @@ class SparkStreamingExecution(sparkEnvironment: SparkEnvironment)
dataset)
}
var ds = dataset
- for (tf <- transforms) {
- ds = SparkEnvironment.transformProcess(sparkEnvironment, tf, ds)
- SparkEnvironment.registerTransformTempView(tf, ds)
+
+ if (ds.take(1).length > 0) {
+ for (tf <- transforms) {
+ ds = SparkEnvironment.transformProcess(sparkEnvironment, tf, ds)
+ SparkEnvironment.registerTransformTempView(tf, ds)
+ }
}
source.beforeOutput()
diff --git a/seatunnel-connectors-v2-dist/pom.xml b/seatunnel-connectors-v2-dist/pom.xml
index a9a5825a271..baf154db151 100644
--- a/seatunnel-connectors-v2-dist/pom.xml
+++ b/seatunnel-connectors-v2-dist/pom.xml
@@ -146,6 +146,11 @@
connector-neo4j${project.version}
+
+ org.apache.seatunnel
+ connector-redis
+ ${project.version}
+ org.apache.seatunnelconnector-datahub
diff --git a/seatunnel-connectors-v2/connector-clickhouse/pom.xml b/seatunnel-connectors-v2/connector-clickhouse/pom.xml
index bcad4bb70e6..aee8ffe4091 100644
--- a/seatunnel-connectors-v2/connector-clickhouse/pom.xml
+++ b/seatunnel-connectors-v2/connector-clickhouse/pom.xml
@@ -27,18 +27,15 @@
4.0.0connector-clickhouse
-
+
+ 0.3.2-patch9
+ 2.7.0
+
-
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
-
-
org.apache.sshdsshd-scp
+ ${sshd.scp.version}
@@ -50,7 +47,7 @@
com.clickhouseclickhouse-http-client
- 0.3.2-patch9
+ ${clickhouse.version}
@@ -62,7 +59,7 @@
com.clickhouseclickhouse-jdbc
- 0.3.2-patch9
+ ${clickhouse.version}
diff --git a/seatunnel-connectors-v2/connector-common/pom.xml b/seatunnel-connectors-v2/connector-common/pom.xml
index a6ef0714170..4ed052e493f 100644
--- a/seatunnel-connectors-v2/connector-common/pom.xml
+++ b/seatunnel-connectors-v2/connector-common/pom.xml
@@ -28,15 +28,4 @@
4.0.0connector-common
-
-
-
-
-
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
-
-
\ No newline at end of file
diff --git a/seatunnel-connectors-v2/connector-datahub/pom.xml b/seatunnel-connectors-v2/connector-datahub/pom.xml
index f928b216c32..bda415cc551 100644
--- a/seatunnel-connectors-v2/connector-datahub/pom.xml
+++ b/seatunnel-connectors-v2/connector-datahub/pom.xml
@@ -26,6 +26,10 @@
4.0.0connector-datahub
+
+
+ 2.19.0-public
+
@@ -37,6 +41,7 @@
com.aliyun.datahubaliyun-sdk-datahub
+ ${datahub.version}
diff --git a/seatunnel-connectors-v2/connector-dingtalk/pom.xml b/seatunnel-connectors-v2/connector-dingtalk/pom.xml
index f15aa9aeeb9..31faf57abbe 100644
--- a/seatunnel-connectors-v2/connector-dingtalk/pom.xml
+++ b/seatunnel-connectors-v2/connector-dingtalk/pom.xml
@@ -24,6 +24,9 @@
${revision}4.0.0
+
+ 2.0.0
+ connector-dingtalk
@@ -37,7 +40,7 @@
com.aliyunalibaba-dingtalk-service-sdk
- 2.0.0
+ ${dingtalk.service.version}
diff --git a/seatunnel-connectors-v2/connector-elasticsearch/pom.xml b/seatunnel-connectors-v2/connector-elasticsearch/pom.xml
index 7a6b6c8dc3b..23a1a9df626 100644
--- a/seatunnel-connectors-v2/connector-elasticsearch/pom.xml
+++ b/seatunnel-connectors-v2/connector-elasticsearch/pom.xml
@@ -28,13 +28,13 @@
4.0.0connector-elasticsearch
+
+
+ 7.5.1
+ 2.12.6
+
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
- org.elasticsearch.clientelasticsearch-rest-client
@@ -43,6 +43,7 @@
com.fasterxml.jackson.corejackson-databind
+ ${jackson.databind.version}
diff --git a/seatunnel-connectors-v2/connector-email/pom.xml b/seatunnel-connectors-v2/connector-email/pom.xml
index 80414be3edb..4e1cb5b1755 100644
--- a/seatunnel-connectors-v2/connector-email/pom.xml
+++ b/seatunnel-connectors-v2/connector-email/pom.xml
@@ -28,6 +28,10 @@
4.0.0connector-email
+
+
+ 1.5.6
+
@@ -38,11 +42,7 @@
com.sun.mailjavax.mail
-
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
+ ${email.version}
diff --git a/seatunnel-connectors-v2/connector-file/connector-file-base/pom.xml b/seatunnel-connectors-v2/connector-file/connector-file-base/pom.xml
index 7927451c994..e2c4f2f6cdc 100644
--- a/seatunnel-connectors-v2/connector-file/connector-file-base/pom.xml
+++ b/seatunnel-connectors-v2/connector-file/connector-file-base/pom.xml
@@ -28,6 +28,26 @@
4.0.0connector-file-base
+
+
+ 3.6
+ 1.5.6
+ 4.4
+ 3.4
+ 2.7.5-7.0
+ 1.10.0
+
+
+
+
+
+ org.apache.flink
+ flink-shaded-hadoop-2
+ ${flink.hadoop.version}
+ provided
+
+
+
@@ -59,16 +79,19 @@
org.apache.parquetparquet-avro
+ ${parquet-avro.version}commons-netcommons-net
+ ${commons-net.version}org.apache.orcorc-core
+ ${orc.version}hadoop-common
@@ -80,17 +103,18 @@
org.apache.commonscommons-collections4
+ ${commons.collecton4.version}org.apache.commonscommons-lang3
+ ${commons.lang3.version}org.apache.flinkflink-shaded-hadoop-2
- provided
diff --git a/seatunnel-connectors-v2/connector-file/connector-file-base/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/sink/BaseFileSink.java b/seatunnel-connectors-v2/connector-file/connector-file-base/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/sink/BaseFileSink.java
index 0faa8d77e9c..e5bc67ffa57 100644
--- a/seatunnel-connectors-v2/connector-file/connector-file-base/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/sink/BaseFileSink.java
+++ b/seatunnel-connectors-v2/connector-file/connector-file-base/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/sink/BaseFileSink.java
@@ -23,7 +23,6 @@
import org.apache.seatunnel.api.serialization.Serializer;
import org.apache.seatunnel.api.sink.SeaTunnelSink;
import org.apache.seatunnel.api.sink.SinkAggregatedCommitter;
-import org.apache.seatunnel.api.sink.SinkCommitter;
import org.apache.seatunnel.api.sink.SinkWriter;
import org.apache.seatunnel.api.table.type.SeaTunnelDataType;
import org.apache.seatunnel.api.table.type.SeaTunnelRow;
@@ -32,7 +31,6 @@
import org.apache.seatunnel.connectors.seatunnel.file.sink.commit.FileAggregatedCommitInfo2;
import org.apache.seatunnel.connectors.seatunnel.file.sink.commit.FileCommitInfo2;
import org.apache.seatunnel.connectors.seatunnel.file.sink.commit.FileSinkAggregatedCommitter2;
-import org.apache.seatunnel.connectors.seatunnel.file.sink.commit.FileSinkCommitter2;
import org.apache.seatunnel.connectors.seatunnel.file.sink.config.TextFileSinkConfig;
import org.apache.seatunnel.connectors.seatunnel.file.sink.state.FileSinkState2;
import org.apache.seatunnel.connectors.seatunnel.file.sink.writer.WriteStrategy;
@@ -77,11 +75,6 @@ public SinkWriter restoreWriter(S
return new BaseFileSinkWriter(writeStrategy, hadoopConf, context, jobId, states);
}
- @Override
- public Optional> createCommitter() throws IOException {
- return Optional.of(new FileSinkCommitter2());
- }
-
@Override
public Optional> createAggregatedCommitter() throws IOException {
return Optional.of(new FileSinkAggregatedCommitter2());
diff --git a/seatunnel-connectors-v2/connector-file/connector-file-hadoop/pom.xml b/seatunnel-connectors-v2/connector-file/connector-file-hadoop/pom.xml
index 788265a7976..330d7c610c8 100644
--- a/seatunnel-connectors-v2/connector-file/connector-file-hadoop/pom.xml
+++ b/seatunnel-connectors-v2/connector-file/connector-file-hadoop/pom.xml
@@ -38,7 +38,6 @@
org.apache.flinkflink-shaded-hadoop-2
- provided
\ No newline at end of file
diff --git a/seatunnel-connectors-v2/connector-file/connector-file-local/pom.xml b/seatunnel-connectors-v2/connector-file/connector-file-local/pom.xml
index 3d71d062d16..58b9d01d31a 100644
--- a/seatunnel-connectors-v2/connector-file/connector-file-local/pom.xml
+++ b/seatunnel-connectors-v2/connector-file/connector-file-local/pom.xml
@@ -38,7 +38,6 @@
org.apache.flinkflink-shaded-hadoop-2
- provided
\ No newline at end of file
diff --git a/seatunnel-connectors-v2/connector-file/connector-file-oss/pom.xml b/seatunnel-connectors-v2/connector-file/connector-file-oss/pom.xml
index f3fb964d142..0a0ae910f50 100644
--- a/seatunnel-connectors-v2/connector-file/connector-file-oss/pom.xml
+++ b/seatunnel-connectors-v2/connector-file/connector-file-oss/pom.xml
@@ -28,6 +28,9 @@
4.0.0connector-file-oss
+
+ 2.9.2
+
@@ -36,16 +39,10 @@
connector-file-base${project.version}
-
-
- org.apache.flink
- flink-shaded-hadoop-2
- provided
-
-
org.apache.hadoophadoop-aliyun
+ ${hadoop-aliyun.version}
diff --git a/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/config/OssConf.java b/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/config/OssConf.java
new file mode 100644
index 00000000000..96fa483f25f
--- /dev/null
+++ b/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/config/OssConf.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.connectors.seatunnel.file.oss.config;
+
+import org.apache.seatunnel.connectors.seatunnel.file.config.HadoopConf;
+
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+
+import org.apache.hadoop.fs.aliyun.oss.Constants;
+
+import java.util.HashMap;
+
+public class OssConf extends HadoopConf {
+ private final String fsHdfsImpl = "org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem";
+
+ @Override
+ public String getFsHdfsImpl() {
+ return fsHdfsImpl;
+ }
+
+ public OssConf(String hdfsNameKey) {
+ super(hdfsNameKey);
+ }
+
+ public static HadoopConf buildWithConfig(Config config) {
+ HadoopConf hadoopConf = new OssConf(config.getString(OssConfig.BUCKET));
+ HashMap ossOptions = new HashMap<>();
+ ossOptions.put(Constants.ACCESS_KEY_ID, config.getString(OssConfig.ACCESS_KEY));
+ ossOptions.put(Constants.ACCESS_KEY_SECRET, config.getString(OssConfig.ACCESS_SECRET));
+ ossOptions.put(Constants.ENDPOINT_KEY, config.getString(OssConfig.ENDPOINT));
+ hadoopConf.setExtraOptions(ossOptions);
+ return hadoopConf;
+ }
+}
diff --git a/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/source/config/OssSourceConfig.java b/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/config/OssConfig.java
similarity index 79%
rename from seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/source/config/OssSourceConfig.java
rename to seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/config/OssConfig.java
index fa672875768..7a928e579ea 100644
--- a/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/source/config/OssSourceConfig.java
+++ b/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/config/OssConfig.java
@@ -15,13 +15,13 @@
* limitations under the License.
*/
-package org.apache.seatunnel.connectors.seatunnel.file.oss.source.config;
+package org.apache.seatunnel.connectors.seatunnel.file.oss.config;
import org.apache.seatunnel.connectors.seatunnel.file.config.BaseSourceConfig;
-public class OssSourceConfig extends BaseSourceConfig {
- public static final String ACCESS_KEY = "accessKey";
- public static final String ACCESS_SECRET = "accessSecret";
+public class OssConfig extends BaseSourceConfig {
+ public static final String ACCESS_KEY = "access_key";
+ public static final String ACCESS_SECRET = "access_secret";
public static final String ENDPOINT = "endpoint";
public static final String BUCKET = "bucket";
}
diff --git a/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/sink/OssFileSink.java b/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/sink/OssFileSink.java
new file mode 100644
index 00000000000..ff2d16004e3
--- /dev/null
+++ b/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/sink/OssFileSink.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.connectors.seatunnel.file.oss.sink;
+
+import org.apache.seatunnel.api.common.PrepareFailException;
+import org.apache.seatunnel.api.sink.SeaTunnelSink;
+import org.apache.seatunnel.common.config.CheckConfigUtil;
+import org.apache.seatunnel.common.config.CheckResult;
+import org.apache.seatunnel.common.constants.PluginType;
+import org.apache.seatunnel.connectors.seatunnel.file.config.FileSystemType;
+import org.apache.seatunnel.connectors.seatunnel.file.oss.config.OssConf;
+import org.apache.seatunnel.connectors.seatunnel.file.oss.config.OssConfig;
+import org.apache.seatunnel.connectors.seatunnel.file.sink.BaseFileSink;
+
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+
+import com.google.auto.service.AutoService;
+
+@AutoService(SeaTunnelSink.class)
+public class OssFileSink extends BaseFileSink {
+ @Override
+ public String getPluginName() {
+ return FileSystemType.OSS.getFileSystemPluginName();
+ }
+
+ @Override
+ public void prepare(Config pluginConfig) throws PrepareFailException {
+ super.prepare(pluginConfig);
+ CheckResult result = CheckConfigUtil.checkAllExists(pluginConfig,
+ OssConfig.FILE_PATH,
+ OssConfig.BUCKET, OssConfig.ACCESS_KEY,
+ OssConfig.ACCESS_SECRET, OssConfig.BUCKET);
+ if (!result.isSuccess()) {
+ throw new PrepareFailException(getPluginName(), PluginType.SINK, result.getMsg());
+ }
+ hadoopConf = OssConf.buildWithConfig(pluginConfig);
+ }
+}
diff --git a/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/source/OssFileSource.java b/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/source/OssFileSource.java
index 1eb588c8ea9..2008dcc1cae 100644
--- a/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/source/OssFileSource.java
+++ b/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/source/OssFileSource.java
@@ -25,18 +25,16 @@
import org.apache.seatunnel.connectors.seatunnel.common.schema.SeaTunnelSchema;
import org.apache.seatunnel.connectors.seatunnel.file.config.FileSystemType;
import org.apache.seatunnel.connectors.seatunnel.file.exception.FilePluginException;
-import org.apache.seatunnel.connectors.seatunnel.file.oss.source.config.OssConf;
-import org.apache.seatunnel.connectors.seatunnel.file.oss.source.config.OssSourceConfig;
+import org.apache.seatunnel.connectors.seatunnel.file.oss.config.OssConf;
+import org.apache.seatunnel.connectors.seatunnel.file.oss.config.OssConfig;
import org.apache.seatunnel.connectors.seatunnel.file.source.BaseFileSource;
import org.apache.seatunnel.connectors.seatunnel.file.source.reader.ReadStrategyFactory;
import org.apache.seatunnel.shade.com.typesafe.config.Config;
import com.google.auto.service.AutoService;
-import org.apache.hadoop.fs.aliyun.oss.Constants;
import java.io.IOException;
-import java.util.HashMap;
@AutoService(SeaTunnelSource.class)
public class OssFileSource extends BaseFileSource {
@@ -48,28 +46,23 @@ public String getPluginName() {
@Override
public void prepare(Config pluginConfig) throws PrepareFailException {
CheckResult result = CheckConfigUtil.checkAllExists(pluginConfig,
- OssSourceConfig.FILE_PATH, OssSourceConfig.FILE_TYPE,
- OssSourceConfig.BUCKET, OssSourceConfig.ACCESS_KEY,
- OssSourceConfig.ACCESS_SECRET, OssSourceConfig.BUCKET);
+ OssConfig.FILE_PATH, OssConfig.FILE_TYPE,
+ OssConfig.BUCKET, OssConfig.ACCESS_KEY,
+ OssConfig.ACCESS_SECRET, OssConfig.BUCKET);
if (!result.isSuccess()) {
throw new PrepareFailException(getPluginName(), PluginType.SOURCE, result.getMsg());
}
- readStrategy = ReadStrategyFactory.of(pluginConfig.getString(OssSourceConfig.FILE_TYPE));
- String path = pluginConfig.getString(OssSourceConfig.FILE_PATH);
- hadoopConf = new OssConf(pluginConfig.getString(OssSourceConfig.BUCKET));
- HashMap ossOptions = new HashMap<>();
- ossOptions.put(Constants.ACCESS_KEY_ID, pluginConfig.getString(OssSourceConfig.ACCESS_KEY));
- ossOptions.put(Constants.ACCESS_KEY_SECRET, pluginConfig.getString(OssSourceConfig.ACCESS_SECRET));
- ossOptions.put(Constants.ENDPOINT_KEY, pluginConfig.getString(OssSourceConfig.ENDPOINT));
- hadoopConf.setExtraOptions(ossOptions);
+ readStrategy = ReadStrategyFactory.of(pluginConfig.getString(OssConfig.FILE_TYPE));
+ String path = pluginConfig.getString(OssConfig.FILE_PATH);
+ hadoopConf = OssConf.buildWithConfig(pluginConfig);
try {
filePaths = readStrategy.getFileNamesByPath(hadoopConf, path);
} catch (IOException e) {
throw new PrepareFailException(getPluginName(), PluginType.SOURCE, "Check file path fail.");
}
// support user-defined schema
- if (pluginConfig.hasPath(OssSourceConfig.SCHEMA)) {
- Config schemaConfig = pluginConfig.getConfig(OssSourceConfig.SCHEMA);
+ if (pluginConfig.hasPath(OssConfig.SCHEMA)) {
+ Config schemaConfig = pluginConfig.getConfig(OssConfig.SCHEMA);
rowType = SeaTunnelSchema
.buildWithConfig(schemaConfig)
.getSeaTunnelRowType();
diff --git a/seatunnel-connectors-v2/connector-hive/pom.xml b/seatunnel-connectors-v2/connector-hive/pom.xml
index d9c0c862801..a00016488e2 100644
--- a/seatunnel-connectors-v2/connector-hive/pom.xml
+++ b/seatunnel-connectors-v2/connector-hive/pom.xml
@@ -28,24 +28,75 @@
4.0.0connector-hive
+
+
+ 2.3.9
+ 2.7.5-7.0
+ 1.5.6
+ 1.10.0
+ 4.4
+ 3.4
+ 3.1.6
+ org.apache.hivehive-exec
+ ${hive.exec.version}provided
-
-
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
+
+
+ org.pentaho
+ pentaho-aggdesigner-algorithm
+
+
+ javax.servlet
+ servlet-api
+
+
+ org.apache.logging.log4j
+ log4j-1.2-api
+
+
+ org.apache.logging.log4j
+ log4j-web
+
+
+ com.fasterxml.jackson.core
+ *
+
+
+ org.apapche.hadoop
+ *
+
+
+ com.github.joshelser
+ dropwizard-metrics-hadoop-metrics2-reporter
+
+
+ org.apache.logging.log4j
+ *
+
+
+ org.apache.zookeeper
+ zookeeper
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-resourcemanager
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+
+ org.apache.flinkflink-shaded-hadoop-2
- ${flink-shaded-hadoop-2.version}
+ ${flink.hadoop.version}provided
@@ -59,12 +110,17 @@
org.apache.commonscommons-lang3
+ ${commons.lang3.version}
-
org.apache.orcorc-core
+ ${orc.version}
+
+ javax.servlet
+ servlet-api
+ org.apache.hadoophadoop-common
@@ -73,17 +129,35 @@
org.apache.hadoophadoop-hdfs
+
+ org.apache.logging.log4j
+ *
+
+
+ com.fasterxml.jackson.core
+ *
+
+
+ org.apapche.hadoop
+ *
+
+
+ org.apache.curator
+ *
+ org.apache.parquetparquet-avro
+ ${parquet-avro.version}org.apache.commonscommons-collections4
+ ${commons.collecton4.version}
diff --git a/seatunnel-connectors-v2/connector-http/connector-http-base/pom.xml b/seatunnel-connectors-v2/connector-http/connector-http-base/pom.xml
index 2a062f59602..510e32de739 100644
--- a/seatunnel-connectors-v2/connector-http/connector-http-base/pom.xml
+++ b/seatunnel-connectors-v2/connector-http/connector-http-base/pom.xml
@@ -28,6 +28,11 @@
4.0.0connector-http-base
+
+
+ 4.5.13
+ 4.4.4
+
@@ -45,11 +50,13 @@
org.apache.httpcomponentshttpclient
+ ${httpclient.version}org.apache.httpcomponentshttpcore
+ ${httpcore.version}
diff --git a/seatunnel-connectors-v2/connector-hudi/pom.xml b/seatunnel-connectors-v2/connector-hudi/pom.xml
index 227d4a62769..7a4a330e6cf 100644
--- a/seatunnel-connectors-v2/connector-hudi/pom.xml
+++ b/seatunnel-connectors-v2/connector-hudi/pom.xml
@@ -28,29 +28,77 @@
4.0.0connector-hudi
+
+ 2.3.9
+ 0.11.1
+ 3.4
+ org.apache.hivehive-exec
+ ${hive.exec.version}provided
-
-
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
+
+
+ org.pentaho
+ pentaho-aggdesigner-algorithm
+
+
+ javax.servlet
+ servlet-api
+
+
+ org.apache.logging.log4j
+ log4j-1.2-api
+
+
+ org.apache.logging.log4j
+ log4j-web
+
+
+ com.fasterxml.jackson.core
+ *
+
+
+ org.apapche.hadoop
+ *
+
+
+ com.github.joshelser
+ dropwizard-metrics-hadoop-metrics2-reporter
+
+
+ org.apache.logging.log4j
+ *
+
+
+ org.apache.zookeeper
+ zookeeper
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-resourcemanager
+
+
+ org.apache.hadoop
+ hadoop-hdfs
+
+ org.apache.hudihudi-hadoop-mr-bundle
+ ${hudi.version}org.apache.commonscommons-lang3
+ ${commons.lang3.version}
diff --git a/seatunnel-connectors-v2/connector-iotdb/pom.xml b/seatunnel-connectors-v2/connector-iotdb/pom.xml
index f9635cb83c1..788d87262f8 100644
--- a/seatunnel-connectors-v2/connector-iotdb/pom.xml
+++ b/seatunnel-connectors-v2/connector-iotdb/pom.xml
@@ -28,13 +28,11 @@
4.0.0connector-iotdb
+
+ 0.13.1
+
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
- org.apache.seatunnelconnector-common
@@ -44,6 +42,13 @@
org.apache.iotdbiotdb-session
+ ${iotdb.version}
+
+
+ ch.qos.logback
+ logback-classic
+
+
diff --git a/seatunnel-connectors-v2/connector-jdbc/pom.xml b/seatunnel-connectors-v2/connector-jdbc/pom.xml
index 3aec10ea4b5..eb4e1b37d4c 100644
--- a/seatunnel-connectors-v2/connector-jdbc/pom.xml
+++ b/seatunnel-connectors-v2/connector-jdbc/pom.xml
@@ -28,29 +28,32 @@
4.0.0connector-jdbc
+
+
+ 5.2.5-HBase-2.x
+ 42.3.3
+ 8.0.16
+
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
-
-
mysqlmysql-connector-java
+ ${mysql.version}providedorg.postgresqlpostgresql
+ ${pg.version}com.aliyun.phoenixali-phoenix-shaded-thin-client
+ ${phoenix.version}
diff --git a/seatunnel-connectors-v2/connector-jdbc/src/main/java/org/apache/seatunnel/connectors/seatunnel/jdbc/internal/executor/SimpleBatchStatementExecutor.java b/seatunnel-connectors-v2/connector-jdbc/src/main/java/org/apache/seatunnel/connectors/seatunnel/jdbc/internal/executor/SimpleBatchStatementExecutor.java
index 1c96d5c05ec..02ec08f3949 100644
--- a/seatunnel-connectors-v2/connector-jdbc/src/main/java/org/apache/seatunnel/connectors/seatunnel/jdbc/internal/executor/SimpleBatchStatementExecutor.java
+++ b/seatunnel-connectors-v2/connector-jdbc/src/main/java/org/apache/seatunnel/connectors/seatunnel/jdbc/internal/executor/SimpleBatchStatementExecutor.java
@@ -68,7 +68,9 @@ public void executeBatch() throws SQLException {
batch.clear();
// cache commit
- st.getConnection().commit();
+ if (!st.getConnection().getAutoCommit()) {
+ st.getConnection().commit();
+ }
st.clearParameters();
st.clearBatch();
@@ -78,6 +80,9 @@ public void executeBatch() throws SQLException {
@Override
public void closeStatements() throws SQLException {
if (st != null) {
+ if (!st.getConnection().getAutoCommit()) {
+ st.getConnection().commit();
+ }
st.close();
st = null;
}
diff --git a/seatunnel-connectors-v2/connector-kafka/pom.xml b/seatunnel-connectors-v2/connector-kafka/pom.xml
index 3297426fb80..75e56237b24 100644
--- a/seatunnel-connectors-v2/connector-kafka/pom.xml
+++ b/seatunnel-connectors-v2/connector-kafka/pom.xml
@@ -28,20 +28,18 @@
4.0.0connector-kafka
+
+
+ 3.2.0
+
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
-
-
org.apache.kafkakafka-clients
- 3.2.0
+ ${kafka.client.version}
diff --git a/seatunnel-connectors-v2/connector-kudu/pom.xml b/seatunnel-connectors-v2/connector-kudu/pom.xml
index 6a3e5238d9e..2971012128c 100644
--- a/seatunnel-connectors-v2/connector-kudu/pom.xml
+++ b/seatunnel-connectors-v2/connector-kudu/pom.xml
@@ -28,13 +28,13 @@
4.0.0connector-kudu
+
+
+ 1.11.1
+ 3.4
+
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
- org.apache.seatunnelseatunnel-common
@@ -43,11 +43,13 @@
org.apache.kudukudu-client
+ ${kudu.version}org.apache.commonscommons-lang3
+ ${commons.lang3.version}org.apache.seatunnel
diff --git a/seatunnel-connectors-v2/connector-neo4j/pom.xml b/seatunnel-connectors-v2/connector-neo4j/pom.xml
index a4f2f481b24..40ac9188f17 100644
--- a/seatunnel-connectors-v2/connector-neo4j/pom.xml
+++ b/seatunnel-connectors-v2/connector-neo4j/pom.xml
@@ -28,13 +28,10 @@
connector-neo4j
-
+
+ 4.4.9
+
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
- org.neo4j.driverneo4j-java-driver
diff --git a/seatunnel-connectors-v2/connector-pulsar/pom.xml b/seatunnel-connectors-v2/connector-pulsar/pom.xml
index 3fd4cfb906c..cb23ca7b452 100644
--- a/seatunnel-connectors-v2/connector-pulsar/pom.xml
+++ b/seatunnel-connectors-v2/connector-pulsar/pom.xml
@@ -31,15 +31,10 @@
2.8.0
+ 3.4
-
- org.apache.seatunnel
- seatunnel-api
- ${project.version}
-
-
org.apache.seatunnelseatunnel-format-json
@@ -52,21 +47,6 @@
${project.version}
-
-
-
- org.testcontainers
- pulsar
- ${testcontainer.version}
- test
-
-
- junit
- junit
-
-
-
-
diff --git a/seatunnel-connectors-v2/connector-redis/pom.xml b/seatunnel-connectors-v2/connector-redis/pom.xml
new file mode 100644
index 00000000000..afffdf127e4
--- /dev/null
+++ b/seatunnel-connectors-v2/connector-redis/pom.xml
@@ -0,0 +1,58 @@
+
+
+
+
+ seatunnel-connectors-v2
+ org.apache.seatunnel
+ ${revision}
+
+ 4.0.0
+
+ connector-redis
+
+
+ 4.2.2
+
+
+
+
+
+ org.apache.seatunnel
+ connector-common
+ ${project.version}
+
+
+
+ org.apache.seatunnel
+ seatunnel-format-json
+ ${project.version}
+
+
+
+ redis.clients
+ jedis
+ ${jedis.version}
+
+
+
+
+
\ No newline at end of file
diff --git a/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/config/RedisConfig.java b/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/config/RedisConfig.java
new file mode 100644
index 00000000000..5c3dca23989
--- /dev/null
+++ b/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/config/RedisConfig.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.connectors.seatunnel.redis.config;
+
+public class RedisConfig {
+ public static final String HOST = "host";
+ public static final String PORT = "port";
+ public static final String AUTH = "auth";
+ public static final String KEY_PATTERN = "keys";
+ public static final String DATA_TYPE = "data_type";
+ public static final String FORMAT = "format";
+}
diff --git a/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/config/RedisDataType.java b/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/config/RedisDataType.java
new file mode 100644
index 00000000000..b48540a353f
--- /dev/null
+++ b/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/config/RedisDataType.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.connectors.seatunnel.redis.config;
+
+import org.apache.seatunnel.common.utils.JsonUtils;
+
+import redis.clients.jedis.Jedis;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public enum RedisDataType {
+ KEY {
+ @Override
+ public List get(Jedis jedis, String key) {
+ return Collections.singletonList(jedis.get(key));
+ }
+ },
+ HASH {
+ @Override
+ public List get(Jedis jedis, String key) {
+ Map kvMap = jedis.hgetAll(key);
+ return Collections.singletonList(JsonUtils.toJsonString(kvMap));
+ }
+ },
+ LIST {
+ @Override
+ public List get(Jedis jedis, String key) {
+ return jedis.lrange(key, 0, -1);
+ }
+ },
+ SET {
+ @Override
+ public List get(Jedis jedis, String key) {
+ Set members = jedis.smembers(key);
+ return new ArrayList<>(members);
+ }
+ },
+ ZSET {
+ @Override
+ public List get(Jedis jedis, String key) {
+ return jedis.zrange(key, 0, -1);
+ }
+ };
+
+ public List get(Jedis jedis, String key) {
+ return Collections.emptyList();
+ }
+}
diff --git a/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/config/RedisParameters.java b/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/config/RedisParameters.java
new file mode 100644
index 00000000000..d8276b8c2ce
--- /dev/null
+++ b/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/config/RedisParameters.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.connectors.seatunnel.redis.config;
+
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+
+import lombok.Data;
+
+import java.io.Serializable;
+
+@Data
+public class RedisParameters implements Serializable {
+ private String host;
+ private int port;
+ private String auth = "";
+ private String keysPattern;
+ private RedisDataType redisDataType;
+
+ public void buildWithConfig(Config config) {
+ // set host
+ this.host = config.getString(RedisConfig.HOST);
+ // set port
+ this.port = config.getInt(RedisConfig.PORT);
+ // set auth
+ if (config.hasPath(RedisConfig.AUTH)) {
+ this.auth = config.getString(RedisConfig.AUTH);
+ }
+ // set keysPattern
+ this.keysPattern = config.getString(RedisConfig.KEY_PATTERN);
+ // set redis data type
+ try {
+ String dataType = config.getString(RedisConfig.DATA_TYPE);
+ this.redisDataType = RedisDataType.valueOf(dataType.toUpperCase());
+ } catch (IllegalArgumentException e) {
+ throw new RuntimeException("Redis source connector only support these data types [key, hash, list, set, zset]", e);
+ }
+ }
+}
diff --git a/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/source/RedisSource.java b/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/source/RedisSource.java
new file mode 100644
index 00000000000..d860e6af59b
--- /dev/null
+++ b/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/source/RedisSource.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.connectors.seatunnel.redis.source;
+
+import org.apache.seatunnel.api.common.PrepareFailException;
+import org.apache.seatunnel.api.common.SeaTunnelContext;
+import org.apache.seatunnel.api.serialization.DeserializationSchema;
+import org.apache.seatunnel.api.source.Boundedness;
+import org.apache.seatunnel.api.source.SeaTunnelSource;
+import org.apache.seatunnel.api.table.type.SeaTunnelDataType;
+import org.apache.seatunnel.api.table.type.SeaTunnelRow;
+import org.apache.seatunnel.api.table.type.SeaTunnelRowType;
+import org.apache.seatunnel.common.config.CheckConfigUtil;
+import org.apache.seatunnel.common.config.CheckResult;
+import org.apache.seatunnel.common.constants.PluginType;
+import org.apache.seatunnel.connectors.seatunnel.common.schema.SeaTunnelSchema;
+import org.apache.seatunnel.connectors.seatunnel.common.source.AbstractSingleSplitReader;
+import org.apache.seatunnel.connectors.seatunnel.common.source.AbstractSingleSplitSource;
+import org.apache.seatunnel.connectors.seatunnel.common.source.SingleSplitReaderContext;
+import org.apache.seatunnel.connectors.seatunnel.redis.config.RedisConfig;
+import org.apache.seatunnel.connectors.seatunnel.redis.config.RedisParameters;
+import org.apache.seatunnel.format.json.JsonDeserializationSchema;
+
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+
+import com.google.auto.service.AutoService;
+
+@AutoService(SeaTunnelSource.class)
+public class RedisSource extends AbstractSingleSplitSource {
+ private final RedisParameters redisParameters = new RedisParameters();
+ private SeaTunnelContext seaTunnelContext;
+ private SeaTunnelRowType seaTunnelRowType;
+ private DeserializationSchema deserializationSchema;
+
+ @Override
+ public String getPluginName() {
+ return "Redis";
+ }
+
+ @Override
+ public void prepare(Config pluginConfig) throws PrepareFailException {
+ CheckResult result = CheckConfigUtil.checkAllExists(pluginConfig, RedisConfig.HOST, RedisConfig.PORT, RedisConfig.KEY_PATTERN, RedisConfig.DATA_TYPE);
+ if (!result.isSuccess()) {
+ throw new PrepareFailException(getPluginName(), PluginType.SOURCE, result.getMsg());
+ }
+ this.redisParameters.buildWithConfig(pluginConfig);
+ if (pluginConfig.hasPath(SeaTunnelSchema.SCHEMA)) {
+ Config schema = pluginConfig.getConfig(SeaTunnelSchema.SCHEMA);
+ this.seaTunnelRowType = SeaTunnelSchema.buildWithConfig(schema).getSeaTunnelRowType();
+ } else {
+ this.seaTunnelRowType = SeaTunnelSchema.buildSimpleTextSchema();
+ }
+ // TODO: use format SPI
+ // default use json format
+ String format;
+ if (pluginConfig.hasPath(RedisConfig.FORMAT)) {
+ format = pluginConfig.getString(RedisConfig.FORMAT);
+ this.deserializationSchema = null;
+ } else {
+ format = "json";
+ this.deserializationSchema = new JsonDeserializationSchema(false, false, seaTunnelRowType);
+ }
+ }
+
+ @Override
+ public Boundedness getBoundedness() {
+ return Boundedness.BOUNDED;
+ }
+
+ @Override
+ public SeaTunnelDataType getProducedType() {
+ return seaTunnelRowType;
+ }
+
+ @Override
+ public void setSeaTunnelContext(SeaTunnelContext seaTunnelContext) {
+ this.seaTunnelContext = seaTunnelContext;
+ }
+
+ @Override
+ public AbstractSingleSplitReader createReader(SingleSplitReaderContext readerContext) throws Exception {
+ return new RedisSourceReader(redisParameters, readerContext, deserializationSchema);
+ }
+}
diff --git a/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/source/RedisSourceReader.java b/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/source/RedisSourceReader.java
new file mode 100644
index 00000000000..5a86570977c
--- /dev/null
+++ b/seatunnel-connectors-v2/connector-redis/src/main/java/org/apache/seatunnel/connectors/seatunnel/redis/source/RedisSourceReader.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.connectors.seatunnel.redis.source;
+
+import org.apache.seatunnel.api.serialization.DeserializationSchema;
+import org.apache.seatunnel.api.source.Collector;
+import org.apache.seatunnel.api.table.type.SeaTunnelRow;
+import org.apache.seatunnel.connectors.seatunnel.common.source.AbstractSingleSplitReader;
+import org.apache.seatunnel.connectors.seatunnel.common.source.SingleSplitReaderContext;
+import org.apache.seatunnel.connectors.seatunnel.redis.config.RedisDataType;
+import org.apache.seatunnel.connectors.seatunnel.redis.config.RedisParameters;
+
+import org.apache.commons.lang3.StringUtils;
+import redis.clients.jedis.Jedis;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+
+// Single-split source reader that scans Redis keys matching a configured
+// pattern, fetches each key's values according to the configured data type,
+// and emits them downstream as rows.
+public class RedisSourceReader extends AbstractSingleSplitReader {
+ private final RedisParameters redisParameters;
+ private final SingleSplitReaderContext context;
+ // May be null; when null, raw string values are emitted as one-field rows.
+ private final DeserializationSchema deserializationSchema;
+ // Created in open(), released in close(); null until open() succeeds.
+ private Jedis jedis;
+
+ public RedisSourceReader(RedisParameters redisParameters, SingleSplitReaderContext context, DeserializationSchema deserializationSchema) {
+ this.redisParameters = redisParameters;
+ this.context = context;
+ this.deserializationSchema = deserializationSchema;
+ }
+
+ @Override
+ public void open() throws Exception {
+ // Connect to Redis; authenticate only when an auth password is set.
+ this.jedis = new Jedis(redisParameters.getHost(), redisParameters.getPort());
+ if (StringUtils.isNotBlank(redisParameters.getAuth())) {
+ this.jedis.auth(redisParameters.getAuth());
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ // Guard against close() being invoked before open() succeeded.
+ if (Objects.nonNull(jedis)) {
+ jedis.close();
+ }
+ }
+
+ @Override
+ public void pollNext(Collector output) throws Exception {
+ // NOTE(review): KEYS blocks the Redis server on large keyspaces; SCAN
+ // would be friendlier -- confirm acceptable for a bounded batch read.
+ Set keys = jedis.keys(redisParameters.getKeysPattern());
+ RedisDataType redisDataType = redisParameters.getRedisDataType();
+ for (String key : keys) {
+ List values = redisDataType.get(jedis, key);
+ for (String value : values) {
+ if (deserializationSchema == null) {
+ // No configured format: emit the raw value as a single-field row.
+ output.collect(new SeaTunnelRow(new Object[]{value}));
+ } else {
+ // NOTE(review): getBytes() uses the platform default charset --
+ // consider StandardCharsets.UTF_8 for deterministic decoding.
+ deserializationSchema.deserialize(value.getBytes(), output);
+ }
+ }
+ }
+ // Bounded source: signal completion after a single full scan.
+ context.signalNoMoreElement();
+ }
+}
diff --git a/seatunnel-connectors-v2/connector-socket/pom.xml b/seatunnel-connectors-v2/connector-socket/pom.xml
index fab1c5f3ec2..d285ae4cda7 100644
--- a/seatunnel-connectors-v2/connector-socket/pom.xml
+++ b/seatunnel-connectors-v2/connector-socket/pom.xml
@@ -39,7 +39,6 @@
org.apache.seatunnelseatunnel-format-json${project.version}
- compile
diff --git a/seatunnel-connectors-v2/pom.xml b/seatunnel-connectors-v2/pom.xml
index 33638178858..e1c5b683d38 100644
--- a/seatunnel-connectors-v2/pom.xml
+++ b/seatunnel-connectors-v2/pom.xml
@@ -50,11 +50,29 @@
connector-elasticsearchconnector-iotdbconnector-neo4j
+ connector-redisconnector-datahubconnector-sentry
+
+
+
+ org.apache.seatunnel
+ seatunnel-api
+ ${project.version}
+ provided
+
+
+
+
+
+ org.apache.seatunnel
+ seatunnel-api
+ ${project.version}
+ provided
+
diff --git a/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-elasticsearch-6/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-elasticsearch-6/pom.xml
index 44174398f32..67ec121f600 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-elasticsearch-6/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-elasticsearch-6/pom.xml
@@ -29,7 +29,7 @@
org.apache.flinkflink-connector-elasticsearch6_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-elasticsearch-7/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-elasticsearch-7/pom.xml
index c472227c513..0a6dba880db 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-elasticsearch-7/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-elasticsearch-7/pom.xml
@@ -29,7 +29,7 @@
org.apache.flinkflink-connector-elasticsearch7_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-jdbc/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-jdbc/pom.xml
index c8c253371f7..7480c0a0145 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-jdbc/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-jdbc/pom.xml
@@ -29,7 +29,7 @@
org.apache.flinkflink-connector-jdbc_2.11
- ${flink.version}
+ ${flink.1.13.6.version}
\ No newline at end of file
diff --git a/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-kafka/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-kafka/pom.xml
index ac518a7519e..3ad2c411dba 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-kafka/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink-sql/flink-sql-connector-kafka/pom.xml
@@ -29,6 +29,7 @@
org.apache.flinkflink-connector-kafka_${scala.binary.version}
+ ${flink.1.13.6.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/pom.xml
index b05f09bffb5..fa26056bc8b 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/pom.xml
@@ -29,6 +29,9 @@
seatunnel-connectors-flinkpom
+
+ provided
+ seatunnel-connector-flink-console
@@ -46,5 +49,58 @@
seatunnel-connector-flink-httpseatunnel-connector-flink-assert
+
+
+
+
+
+ org.apache.seatunnel
+ seatunnel-api-flink
+ ${project.version}
+ provided
+
+
+ org.apache.seatunnel
+ seatunnel-common
+ ${project.version}
+
+
+
+ org.apache.flink
+ flink-java
+ ${flink.1.13.6.version}
+ ${flink.scope}
+
+
+ org.apache.flink
+ flink-table-planner_${scala.binary.version}
+ ${flink.1.13.6.version}
+ ${flink.scope}
+
+
+ org.apache.flink
+ flink-streaming-java_${scala.binary.version}
+ ${flink.1.13.6.version}
+ ${flink.scope}
+
+
+ org.apache.flink
+ flink-table-common
+ ${flink.1.13.6.version}
+ ${flink.scope}
+
+
+
+
+
+
+ org.apache.seatunnel
+ seatunnel-api-flink
+
+
+ org.apache.flink
+ flink-java
+
+
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-assert/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-assert/pom.xml
index 3ccb346d518..85821183264 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-assert/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-assert/pom.xml
@@ -30,17 +30,6 @@
seatunnel-connector-flink-assert
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
-
- org.apache.flink
- flink-java
- org.apache.flinkflink-table-planner_${scala.binary.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-clickhouse/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-clickhouse/pom.xml
index 9c728ca1fa2..ee282654d99 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-clickhouse/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-clickhouse/pom.xml
@@ -28,23 +28,13 @@
4.0.0seatunnel-connector-flink-clickhouse
+
+
+ 0.2
+ 2.7.0
+
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
- org.apache.seatunnel
- seatunnel-common
- ${project.version}
-
-
- org.apache.flink
- flink-java
- org.apache.flinkflink-table-planner_${scala.binary.version}
@@ -57,11 +47,13 @@
ru.yandex.clickhouseclickhouse-jdbc
+ ${clickhouse-jdbc.version}org.apache.sshdsshd-scp
+ ${sshd.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-console/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-console/pom.xml
index 3f34b6196db..207c564000f 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-console/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-console/pom.xml
@@ -30,17 +30,6 @@
seatunnel-connector-flink-console
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
-
- org.apache.flink
- flink-java
- org.apache.flinkflink-table-planner_${scala.binary.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-doris/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-doris/pom.xml
index 4b792b2d915..ea4dbd942cf 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-doris/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-doris/pom.xml
@@ -30,17 +30,6 @@
seatunnel-connector-flink-doris
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
-
- org.apache.flink
- flink-java
- org.apache.flinkflink-table-planner_${scala.binary.version}
@@ -52,7 +41,6 @@
org.apache.flinkflink-table-common
- ${flink.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-druid/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-druid/pom.xml
index 7814f37524f..7a88897b066 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-druid/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-druid/pom.xml
@@ -28,30 +28,22 @@
4.0.0seatunnel-connector-flink-druid
-
+
+
+ 0.22.1
+ 1.29.0
+
+
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
- org.apache.seatunnel
- seatunnel-common
- ${project.version}
-
-
- org.apache.flink
- flink-java
- org.apache.druiddruid-indexing-service
+ ${druid.version}org.apache.calcitecalcite-druid
+ ${calcite-druid.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-elasticsearch6/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-elasticsearch6/pom.xml
index 509cea3fea1..3cf39757d1e 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-elasticsearch6/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-elasticsearch6/pom.xml
@@ -28,18 +28,13 @@
4.0.0seatunnel-connector-flink-elasticsearch6
+
+
+ 6.3.1
+
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
-
-
-
- org.apache.flink
- flink-java
-
+
org.apache.flinkflink-table-planner_${scala.binary.version}
@@ -48,11 +43,12 @@
org.apache.flinkflink-streaming-java_${scala.binary.version}
-
org.apache.flinkflink-connector-elasticsearch6_${scala.binary.version}
+ ${flink.1.13.6.version}
+
org.elasticsearch.clienttransport
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-elasticsearch7/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-elasticsearch7/pom.xml
index 67723599f24..32e62477040 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-elasticsearch7/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-elasticsearch7/pom.xml
@@ -28,18 +28,12 @@
4.0.0seatunnel-connector-flink-elasticsearch7
+
+
+ 7.5.1
+
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
-
-
-
- org.apache.flink
- flink-java
- org.apache.flinkflink-table-planner_${scala.binary.version}
@@ -52,6 +46,7 @@
org.apache.flinkflink-connector-elasticsearch7_${scala.binary.version}
+ ${flink.1.13.6.version}org.elasticsearch.client
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-fake/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-fake/pom.xml
index 6f2550539f6..7a05a547401 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-fake/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-fake/pom.xml
@@ -28,19 +28,12 @@
4.0.0seatunnel-connector-flink-fake
+
+
+ 4.3.0
+
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
-
- org.apache.flink
- flink-java
- org.apache.flinkflink-table-planner_${scala.binary.version}
@@ -52,6 +45,7 @@
com.github.jsonzoujmockdata
+ ${jmockdata.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-file/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-file/pom.xml
index 063f276d6d9..b76030db6d1 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-file/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-file/pom.xml
@@ -29,23 +29,12 @@
seatunnel-connector-flink-file
-
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
- org.apache.seatunnel
- seatunnel-common
- ${project.version}
-
+
+ 2.7.5-7.0
+ 1.10.0
+
-
- org.apache.flink
- flink-java
-
+ org.apache.flinkflink-table-planner_${scala.binary.version}
@@ -58,11 +47,13 @@
org.apache.parquetparquet-avro
+ ${parquet-avro.version}org.apache.flinkflink-shaded-hadoop-2
+ ${flink-shaded-hadoop-2.version}xml-apis
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-http/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-http/pom.xml
index 185553abf98..4bd64d2c822 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-http/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-http/pom.xml
@@ -28,19 +28,13 @@
4.0.0seatunnel-connector-flink-http
+
+
+ 4.4.4
+ 4.5.13
+
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
-
- org.apache.flink
- flink-java
- org.apache.flinkflink-table-planner_${scala.binary.version}
@@ -49,16 +43,17 @@
org.apache.flinkflink-streaming-java_${scala.binary.version}
-
+
org.apache.httpcomponentshttpcore
+ ${httpcore.version}org.apache.httpcomponentshttpclient
+ ${httpclient.version}
-
\ No newline at end of file
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-influxdb/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-influxdb/pom.xml
index da293aae6ec..55de19e1459 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-influxdb/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-influxdb/pom.xml
@@ -28,21 +28,15 @@
4.0.0seatunnel-connector-flink-influxdb
+
+ 2.22
+
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
- org.apache.flink
- flink-java
- org.influxdbinfluxdb-java
+ ${influxdb-java.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-jdbc/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-jdbc/pom.xml
index 985531218d6..91cff962e31 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-jdbc/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-jdbc/pom.xml
@@ -28,41 +28,38 @@
4.0.0seatunnel-connector-flink-jdbc
+
+
+ 42.3.3
+ 8.0.16
+
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
org.apache.flink
- flink-java
+ flink-table-planner_${scala.binary.version}org.apache.flink
- flink-table-planner_${scala.binary.version}
+ flink-streaming-java_${scala.binary.version}org.apache.flink
- flink-streaming-java_${scala.binary.version}
+ flink-connector-jdbc_${scala.binary.version}
+ ${flink.1.13.6.version}
-
+
mysqlmysql-connector-java
+ ${mysql.version}
+ testorg.postgresqlpostgresql
-
-
-
- org.apache.flink
- flink-connector-jdbc_${scala.binary.version}
+ ${pg.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-kafka/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-kafka/pom.xml
index f0022aff9c8..b90e1c8898b 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-kafka/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-kafka/pom.xml
@@ -30,18 +30,6 @@
seatunnel-connector-flink-kafka
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
-
- org.apache.flink
- flink-java
-
-
org.apache.flinkflink-table-planner_${scala.binary.version}
@@ -55,6 +43,7 @@
org.apache.flinkflink-connector-kafka_${scala.binary.version}
+ ${flink.1.13.6.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-socket/pom.xml b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-socket/pom.xml
index 9781a2fe43c..cc637257c59 100644
--- a/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-socket/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-flink/seatunnel-connector-flink-socket/pom.xml
@@ -25,22 +25,9 @@
seatunnel-connectors-flink${revision}
- 4.0.0
-
seatunnel-connector-flink-socket
-
+ 4.0.0
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
-
- org.apache.flink
- flink-java
- org.apache.flinkflink-table-planner_${scala.binary.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/pom.xml
index 553a54dadac..ccdafa83e94 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/pom.xml
@@ -55,5 +55,45 @@
seatunnel-connector-spark-httpseatunnel-connector-spark-webhook
+
+
+
+ org.apache.spark
+ spark-core_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}
+
+
+ org.apache.spark
+ spark-sql_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}
+
+
+ org.apache.spark
+ spark-streaming_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}
+
+
+
+
+
+
+
+ org.apache.seatunnel
+ seatunnel-api-spark
+ provided
+ ${project.version}
+
+
+ org.apache.spark
+ spark-core_${scala.binary.version}
+
+
+ org.apache.spark
+ spark-sql_${scala.binary.version}
+
+
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-clickhouse/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-clickhouse/pom.xml
index f9560d02a23..c74b9ece496 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-clickhouse/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-clickhouse/pom.xml
@@ -28,41 +28,29 @@
4.0.0seatunnel-connector-spark-clickhouse
+
+
+ 0.2
+ 2.7.0
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- provided
- ${project.version}
- ru.yandex.clickhouseclickhouse-jdbc
+ ${clickhouse.jdbc.version}
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
org.apache.sshdsshd-scp
+ ${sshd.scp.version}
-
org.scalatestscalatest_${scala.binary.version}3.2.3test
-
\ No newline at end of file
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-console/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-console/pom.xml
index a54b053574e..7e6a4a8a9b3 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-console/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-console/pom.xml
@@ -29,23 +29,4 @@
seatunnel-connector-spark-console
-
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
-
\ No newline at end of file
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-doris/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-doris/pom.xml
index 4c701f2e877..84588b8ee63 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-doris/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-doris/pom.xml
@@ -28,43 +28,37 @@
4.0.0seatunnel-connector-spark-doris
+
+
+ 4.5.13
+ 4.4.4
+ 4.4.4
+ 4.1.4
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
org.apache.httpcomponentshttpclient
+ ${httpclient.version}org.apache.httpcomponentshttpcore
+ ${httpcore.version}org.apache.httpcomponentshttpcore-nio
+ ${httpnio.version}org.apache.httpcomponentshttpasyncclient
+ ${httpasyncclient.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/pom.xml
index 792a3e8edb2..22fc9ed6c0a 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-elasticsearch/pom.xml
@@ -28,27 +28,16 @@
4.0.0seatunnel-connector-spark-elasticsearch
+
+
+ 6.8.3
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
- org.elasticsearchelasticsearch-spark-20_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
+ ${elasticsearch.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-email/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-email/pom.xml
index 650b8c60507..99e51f8bee0 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-email/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-email/pom.xml
@@ -28,31 +28,21 @@
4.0.0seatunnel-connector-spark-email
+
+ 1.8.0
+ 7.0.2
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
- com.norbitltdspoiwo_${scala.binary.version}
+ ${spoiwo.version}com.typesafe.playplay-mailer_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
+ ${play-mailer.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-fake/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-fake/pom.xml
index 8e1223270ea..5c2483508e8 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-fake/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-fake/pom.xml
@@ -30,23 +30,6 @@
seatunnel-connector-spark-fake
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
org.apache.sparkspark-streaming_${scala.binary.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-feishu/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-feishu/pom.xml
index 039d8da22fe..5ac937dbe6c 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-feishu/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-feishu/pom.xml
@@ -28,28 +28,16 @@
4.0.0seatunnel-connector-spark-feishu
+
+
+ 4.5.13
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
org.apache.httpcomponentshttpclient
+ ${httpclient.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-file/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-file/pom.xml
index 85c3fbd8f28..4c35a3417e3 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-file/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-file/pom.xml
@@ -29,23 +29,4 @@
seatunnel-connector-spark-file
-
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
-
\ No newline at end of file
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hbase/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hbase/pom.xml
index 8fe340c44ca..0ae94a4797b 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hbase/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hbase/pom.xml
@@ -28,38 +28,24 @@
4.0.0seatunnel-connector-spark-hbase
+
+ 1.0.0
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
org.apache.sparkspark-streaming_${scala.binary.version}
-
org.apache.sparkspark-hive_${scala.binary.version}
+ ${spark.2.4.0.version}
-
org.apache.hbase.connectors.sparkhbase-spark
+ ${hbase.spark.version}org.codehaus.janino
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/pom.xml
index 37fda3923d8..e94bf7f00e6 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hive/pom.xml
@@ -29,23 +29,4 @@
seatunnel-connector-spark-hive
-
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
-
\ No newline at end of file
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-http/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-http/pom.xml
index 758a7fca6bb..d9c2b59ac21 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-http/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-http/pom.xml
@@ -25,28 +25,26 @@
seatunnel-connectors-spark${revision}
- 4.0.0seatunnel-connector-spark-http
-
+
+
+ 4.4
+ 4.5.13
+
+ 4.0.0
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
+ org.apache.httpcomponents
+ httpcore
+ ${httpcore.version}
-
- org.apache.spark
- spark-core_${scala.binary.version}
+ org.apache.httpcomponents
+ httpclient
+ ${httpclient.version}
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
+
\ No newline at end of file
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hudi/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hudi/pom.xml
index 6c303806725..018c0680627 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hudi/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-hudi/pom.xml
@@ -28,28 +28,16 @@
4.0.0seatunnel-connector-spark-hudi
+
+ 0.11.1
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
- org.apache.hudihudi-spark-bundle_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
+ ${hudi-spark-bundle.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-iceberg/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-iceberg/pom.xml
index 678f6d1fde3..6b71ff1083c 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-iceberg/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-iceberg/pom.xml
@@ -28,32 +28,21 @@
4.0.0seatunnel-connector-spark-iceberg
+
+ 0.13.1
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
- org.apache.icebergiceberg-core
+ ${iceberg.version}org.apache.icebergiceberg-spark-runtime
+ ${iceberg.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-jdbc/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-jdbc/pom.xml
index 485381fb8d4..cac85b1f7f7 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-jdbc/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-jdbc/pom.xml
@@ -28,37 +28,28 @@
4.0.0seatunnel-connector-spark-jdbc
+
+
+ 42.3.3
+ 8.0.16
+
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
+ org.apache.spark
+ spark-streaming_${scala.binary.version}
+
mysqlmysql-connector-java
+ ${mysql.version}
+ test
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-streaming_${scala.binary.version}
-
-
org.postgresqlpostgresql
+ ${pg.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kafka/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kafka/pom.xml
index fa39bf9ace1..2e24a9eeb19 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kafka/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kafka/pom.xml
@@ -30,15 +30,10 @@
seatunnel-connector-spark-kafka
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
- org.apache.sparkspark-streaming-kafka-0-10_${scala.binary.version}
+ ${spark.2.4.0.version}
@@ -46,16 +41,6 @@
spark-streaming_${scala.binary.version}
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
\ No newline at end of file
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kudu/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kudu/pom.xml
index 750b015f98d..73ecdbb0440 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kudu/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-kudu/pom.xml
@@ -28,25 +28,11 @@
4.0.0seatunnel-connector-spark-kudu
+
+ 1.7.0
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
org.apache.sparkspark-streaming_${scala.binary.version}
@@ -55,6 +41,7 @@
org.apache.kudukudu-spark2_${scala.binary.version}
+ ${kudu.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-mongodb/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-mongodb/pom.xml
index da7c82ad29f..39ab76e2500 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-mongodb/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-mongodb/pom.xml
@@ -24,33 +24,16 @@
4.0.0seatunnel-connector-spark-mongodb
+
+
+ 2.2.0
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
- org.apache.seatunnel
- seatunnel-common
- ${project.version}
-
-
org.mongodb.sparkmongo-spark-connector_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
+ ${mongodb.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-neo4j/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-neo4j/pom.xml
index 42ce83a6e2e..355026b6bd3 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-neo4j/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-neo4j/pom.xml
@@ -28,24 +28,11 @@
4.0.0seatunnel-connector-spark-neo4j
+
+ 4.1.0
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
- org.apache.spark
@@ -56,6 +43,7 @@
org.neo4jneo4j-connector-apache-spark_${scala.binary.version}
+ ${neo4j.version}_for_spark_${spark.binary.2.4.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-phoenix/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-phoenix/pom.xml
index 013bced01cf..59dc253fc69 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-phoenix/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-phoenix/pom.xml
@@ -28,39 +28,33 @@
4.0.0seatunnel-connector-spark-phoenix
+
+
+ 5.0.0-HBase-2.0
+ 0.3
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
org.apache.phoenixphoenix-spark
+ ${phoenix.version}junitjunit
+
+ org.glassfish.web
+ javax.servlet.jsp
+ com.101teczkclient
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
+ ${zkclient.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-redis/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-redis/pom.xml
index b70120dbd62..a08575b92c1 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-redis/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-redis/pom.xml
@@ -28,33 +28,18 @@
4.0.0seatunnel-connector-spark-redis
-
+
+ 2.6.0
+
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
+ org.apache.spark
+ spark-streaming_${scala.binary.version}
-
com.redislabsspark-redis_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-streaming_${scala.binary.version}
+ ${redis.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-socket/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-socket/pom.xml
index 87a3f84c0ee..8e951ed3bd4 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-socket/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-socket/pom.xml
@@ -30,23 +30,6 @@
seatunnel-connector-spark-socket
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
org.apache.sparkspark-streaming_${scala.binary.version}
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-tidb/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-tidb/pom.xml
index 9955b057a6d..c9b8c7d7b8c 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-tidb/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-tidb/pom.xml
@@ -28,25 +28,12 @@
4.0.0seatunnel-connector-spark-tidb
+
+
+ 2.4.1
+
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
org.apache.sparkspark-streaming_${scala.binary.version}
@@ -55,6 +42,7 @@
com.pingcap.tisparktispark-assembly
+ ${tis.version}mysql
diff --git a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-webhook/pom.xml b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-webhook/pom.xml
index 0a7a9246ca2..104b1c6dafb 100644
--- a/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-webhook/pom.xml
+++ b/seatunnel-connectors/seatunnel-connectors-spark/seatunnel-connector-spark-webhook/pom.xml
@@ -30,28 +30,10 @@
seatunnel-connector-spark-webhook
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
org.apache.sparkspark-streaming_${scala.binary.version}
-
\ No newline at end of file
diff --git a/seatunnel-core/seatunnel-core-flink-sql/pom.xml b/seatunnel-core/seatunnel-core-flink-sql/pom.xml
index 89e5236e89d..39c93e33302 100644
--- a/seatunnel-core/seatunnel-core-flink-sql/pom.xml
+++ b/seatunnel-core/seatunnel-core-flink-sql/pom.xml
@@ -39,22 +39,22 @@
org.apache.flinkflink-streaming-java_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}org.apache.flinkflink-table-common
- ${flink.version}
+ ${flink.1.13.6.version}org.apache.flinkflink-table-api-java-bridge_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}org.apache.flinkflink-table-planner_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}
diff --git a/seatunnel-core/seatunnel-core-flink/pom.xml b/seatunnel-core/seatunnel-core-flink/pom.xml
index 47535c2d38d..4be9326ca96 100644
--- a/seatunnel-core/seatunnel-core-flink/pom.xml
+++ b/seatunnel-core/seatunnel-core-flink/pom.xml
@@ -51,14 +51,20 @@
org.apache.flinkflink-java
+ ${flink.1.13.6.version}
+ ${flink.scope}org.apache.flinkflink-table-planner_${scala.binary.version}
+ ${flink.1.13.6.version}
+ ${flink.scope}org.apache.flinkflink-streaming-java_${scala.binary.version}
+ ${flink.1.13.6.version}
+ ${flink.scope}
@@ -130,7 +136,7 @@
--build-argSCALA_VERSION=${scala.binary.version}--build-arg
- FLINK_VERSION=${flink.version}
+ FLINK_VERSION=${flink.1.13.6.version}-t${docker.hub}/${docker.repo}:${docker.tag}-t
@@ -159,7 +165,7 @@
--build-argSCALA_VERSION=${scala.binary.version}--build-arg
- FLINK_VERSION=${flink.version}
+ FLINK_VERSION=${flink.1.13.6.version}--push-t${docker.hub}/${docker.repo}:${docker.tag}
diff --git a/seatunnel-core/seatunnel-core-spark/pom.xml b/seatunnel-core/seatunnel-core-spark/pom.xml
index c8b6037a6a2..fa92953dc76 100644
--- a/seatunnel-core/seatunnel-core-spark/pom.xml
+++ b/seatunnel-core/seatunnel-core-spark/pom.xml
@@ -56,14 +56,20 @@
org.apache.sparkspark-streaming_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}org.apache.sparkspark-core_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}org.apache.sparkspark-sql_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}
@@ -151,7 +157,7 @@
build--no-cache--build-arg
- SPARK_VERSION=${spark.version}
+ SPARK_VERSION=${spark.2.4.0.version}--build-argHADOOP_VERSION=${hadoop.binary.version}-t
@@ -180,7 +186,7 @@
build--no-cache--build-arg
- SPARK_VERSION=${spark.version}
+ SPARK_VERSION=${spark.2.4.0.version}--build-argHADOOP_VERSION=${hadoop.binary.version}--push
diff --git a/seatunnel-core/seatunnel-flink-starter/pom.xml b/seatunnel-core/seatunnel-flink-starter/pom.xml
index ebfbdff6bef..f542fbfe61b 100644
--- a/seatunnel-core/seatunnel-flink-starter/pom.xml
+++ b/seatunnel-core/seatunnel-flink-starter/pom.xml
@@ -58,14 +58,20 @@
org.apache.flinkflink-java
+ ${flink.1.13.6.version}
+ ${flink.scope}org.apache.flinkflink-table-planner_${scala.binary.version}
+ ${flink.1.13.6.version}
+ ${flink.scope}org.apache.flinkflink-streaming-java_${scala.binary.version}
+ ${flink.1.13.6.version}
+ ${flink.scope}
diff --git a/seatunnel-core/seatunnel-spark-starter/pom.xml b/seatunnel-core/seatunnel-spark-starter/pom.xml
index 9fa398bb802..e894a6a157e 100644
--- a/seatunnel-core/seatunnel-spark-starter/pom.xml
+++ b/seatunnel-core/seatunnel-spark-starter/pom.xml
@@ -56,14 +56,20 @@
org.apache.sparkspark-streaming_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}org.apache.sparkspark-core_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}org.apache.sparkspark-sql_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}
diff --git a/seatunnel-dist/release-docs/LICENSE b/seatunnel-dist/release-docs/LICENSE
index d717ec09304..62574819942 100644
--- a/seatunnel-dist/release-docs/LICENSE
+++ b/seatunnel-dist/release-docs/LICENSE
@@ -326,6 +326,9 @@ The text of each license is the standard Apache 2.0 license.
(Apache License 2.0) swagger-annotations (io.swagger:swagger-annotations:1.5.10 - https://github.com/swagger-api/swagger-core/modules/swagger-annotations)
(Apache License 2.0) swagger-models (io.swagger:swagger-models:1.5.10 - https://github.com/swagger-api/swagger-core/modules/swagger-models)
(Apache License 2.0) swagger-models (io.swagger:swagger-models:1.5.10 - https://github.com/swagger-api/swagger-core/modules/swagger-models)
+ (Apache License 2.0) jjwt (io.jsonwebtoken:jjwt-api:0.10.7 - https://github.com/jwtk/jjwt)
+ (Apache License 2.0) jjwt (io.jsonwebtoken:impl-:0.10.7 - https://github.com/jwtk/jjwt)
+ (Apache License 2.0) jjwt (io.jsonwebtoken:jjwt-jackson:0.10.7 - https://github.com/jwtk/jjwt)
(Apache License Version 2) HikariCP (com.zaxxer:HikariCP:4.0.3 - https://github.com/brettwooldridge/HikariCP)
(Apache License) HttpClient (commons-httpclient:commons-httpclient:3.1 - http://jakarta.apache.org/httpcomponents/httpclient-3.x/)
(Apache License) HttpClient (org.apache.httpcomponents:httpclient:4.0.1 - http://hc.apache.org/httpcomponents-client)
@@ -585,6 +588,7 @@ The text of each license is the standard Apache 2.0 license.
(Apache License, Version 2.0) spring-boot (org.springframework.boot:spring-boot:2.6.8 - https://spring.io/projects/spring-boot)
(Apache License, Version 2.0) spring-boot-autoconfigure (org.springframework.boot:spring-boot-autoconfigure:2.6.8 - https://spring.io/projects/spring-boot)
(Apache License, Version 2.0) spring-boot-starter (org.springframework.boot:spring-boot-starter:2.6.8 - https://spring.io/projects/spring-boot)
+ (Apache License, Version 2.0) spring-boot-starter-aop (org.springframework.boot:spring-boot-starter-aop:2.6.8 - https://spring.io/projects/spring-boot)
(Apache License, Version 2.0) spring-boot-starter-jdbc (org.springframework.boot:spring-boot-starter-jdbc:2.6.3 - https://spring.io/projects/spring-boot)
(Apache License, Version 2.0) spring-boot-starter-jetty (org.springframework.boot:spring-boot-starter-jetty:2.6.8 - https://spring.io/projects/spring-boot)
(Apache License, Version 2.0) spring-boot-starter-json (org.springframework.boot:spring-boot-starter-json:2.6.8 - https://spring.io/projects/spring-boot)
@@ -1198,7 +1202,7 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
(Eclipse Public License, Version 1.0) Aether Utilities (org.eclipse.aether:aether-util:0.9.0.M2 - http://www.eclipse.org/aether/aether-util/)
(The Eclipse Public License, Version 1.0) OkHttp Aether Connector (io.tesla.aether:aether-connector-okhttp:0.0.9 - http://tesla.io/aether-connector-okhttp)
(The Eclipse Public License, Version 1.0) tesla-aether (io.tesla.aether:tesla-aether:0.0.5 - http://tesla.io/tesla-aether)
-
+ (Eclipse Public License - v 2.0) aspectjweaver (org.aspectj:aspectjweaver:1.9.7 - https://github.com/eclipse/org.aspectj/)
========================================================================
Public Domain License
diff --git a/seatunnel-dist/release-docs/licenses/LICENSE-aspectjweaver.txt b/seatunnel-dist/release-docs/licenses/LICENSE-aspectjweaver.txt
new file mode 100644
index 00000000000..4d4254f1d92
--- /dev/null
+++ b/seatunnel-dist/release-docs/licenses/LICENSE-aspectjweaver.txt
@@ -0,0 +1,279 @@
+Per: https://www.eclipse.org/org/documents/epl-2.0/EPL-2.0.txt
+
+Eclipse Public License - v 2.0
+
+ THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE
+ PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION
+ OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
+
+1. DEFINITIONS
+
+"Contribution" means:
+
+ a) in the case of the initial Contributor, the initial content
+ Distributed under this Agreement, and
+
+ b) in the case of each subsequent Contributor:
+ i) changes to the Program, and
+ ii) additions to the Program;
+ where such changes and/or additions to the Program originate from
+ and are Distributed by that particular Contributor. A Contribution
+ "originates" from a Contributor if it was added to the Program by
+ such Contributor itself or anyone acting on such Contributor's behalf.
+ Contributions do not include changes or additions to the Program that
+ are not Modified Works.
+
+"Contributor" means any person or entity that Distributes the Program.
+
+"Licensed Patents" mean patent claims licensable by a Contributor which
+are necessarily infringed by the use or sale of its Contribution alone
+or when combined with the Program.
+
+"Program" means the Contributions Distributed in accordance with this
+Agreement.
+
+"Recipient" means anyone who receives the Program under this Agreement
+or any Secondary License (as applicable), including Contributors.
+
+"Derivative Works" shall mean any work, whether in Source Code or other
+form, that is based on (or derived from) the Program and for which the
+editorial revisions, annotations, elaborations, or other modifications
+represent, as a whole, an original work of authorship.
+
+"Modified Works" shall mean any work in Source Code or other form that
+results from an addition to, deletion from, or modification of the
+contents of the Program, including, for purposes of clarity any new file
+in Source Code form that contains any contents of the Program. Modified
+Works shall not include works that contain only declarations,
+interfaces, types, classes, structures, or files of the Program solely
+in each case in order to link to, bind by name, or subclass the Program
+or Modified Works thereof.
+
+"Distribute" means the acts of a) distributing or b) making available
+in any manner that enables the transfer of a copy.
+
+"Source Code" means the form of a Program preferred for making
+modifications, including but not limited to software source code,
+documentation source, and configuration files.
+
+"Secondary License" means either the GNU General Public License,
+Version 2.0, or any later versions of that license, including any
+exceptions or additional permissions as identified by the initial
+Contributor.
+
+2. GRANT OF RIGHTS
+
+ a) Subject to the terms of this Agreement, each Contributor hereby
+ grants Recipient a non-exclusive, worldwide, royalty-free copyright
+ license to reproduce, prepare Derivative Works of, publicly display,
+ publicly perform, Distribute and sublicense the Contribution of such
+ Contributor, if any, and such Derivative Works.
+
+ b) Subject to the terms of this Agreement, each Contributor hereby
+ grants Recipient a non-exclusive, worldwide, royalty-free patent
+ license under Licensed Patents to make, use, sell, offer to sell,
+ import and otherwise transfer the Contribution of such Contributor,
+ if any, in Source Code or other form. This patent license shall
+ apply to the combination of the Contribution and the Program if, at
+ the time the Contribution is added by the Contributor, such addition
+ of the Contribution causes such combination to be covered by the
+ Licensed Patents. The patent license shall not apply to any other
+ combinations which include the Contribution. No hardware per se is
+ licensed hereunder.
+
+ c) Recipient understands that although each Contributor grants the
+ licenses to its Contributions set forth herein, no assurances are
+ provided by any Contributor that the Program does not infringe the
+ patent or other intellectual property rights of any other entity.
+ Each Contributor disclaims any liability to Recipient for claims
+ brought by any other entity based on infringement of intellectual
+ property rights or otherwise. As a condition to exercising the
+ rights and licenses granted hereunder, each Recipient hereby
+ assumes sole responsibility to secure any other intellectual
+ property rights needed, if any. For example, if a third party
+ patent license is required to allow Recipient to Distribute the
+ Program, it is Recipient's responsibility to acquire that license
+ before distributing the Program.
+
+ d) Each Contributor represents that to its knowledge it has
+ sufficient copyright rights in its Contribution, if any, to grant
+ the copyright license set forth in this Agreement.
+
+ e) Notwithstanding the terms of any Secondary License, no
+ Contributor makes additional grants to any Recipient (other than
+ those set forth in this Agreement) as a result of such Recipient's
+ receipt of the Program under the terms of a Secondary License
+ (if permitted under the terms of Section 3).
+
+3. REQUIREMENTS
+
+3.1 If a Contributor Distributes the Program in any form, then:
+
+ a) the Program must also be made available as Source Code, in
+ accordance with section 3.2, and the Contributor must accompany
+ the Program with a statement that the Source Code for the Program
+ is available under this Agreement, and informs Recipients how to
+ obtain it in a reasonable manner on or through a medium customarily
+ used for software exchange; and
+
+ b) the Contributor may Distribute the Program under a license
+ different than this Agreement, provided that such license:
+ i) effectively disclaims on behalf of all other Contributors all
+ warranties and conditions, express and implied, including
+ warranties or conditions of title and non-infringement, and
+ implied warranties or conditions of merchantability and fitness
+ for a particular purpose;
+
+ ii) effectively excludes on behalf of all other Contributors all
+ liability for damages, including direct, indirect, special,
+ incidental and consequential damages, such as lost profits;
+
+ iii) does not attempt to limit or alter the recipients' rights
+ in the Source Code under section 3.2; and
+
+ iv) requires any subsequent distribution of the Program by any
+ party to be under a license that satisfies the requirements
+ of this section 3.
+
+3.2 When the Program is Distributed as Source Code:
+
+ a) it must be made available under this Agreement, or if the
+ Program (i) is combined with other material in a separate file or
+ files made available under a Secondary License, and (ii) the initial
+ Contributor attached to the Source Code the notice described in
+ Exhibit A of this Agreement, then the Program may be made available
+ under the terms of such Secondary Licenses, and
+
+ b) a copy of this Agreement must be included with each copy of
+ the Program.
+
+3.3 Contributors may not remove or alter any copyright, patent,
+trademark, attribution notices, disclaimers of warranty, or limitations
+of liability ("notices") contained within the Program from any copy of
+the Program which they Distribute, provided that Contributors may add
+their own appropriate notices.
+
+4. COMMERCIAL DISTRIBUTION
+
+Commercial distributors of software may accept certain responsibilities
+with respect to end users, business partners and the like. While this
+license is intended to facilitate the commercial use of the Program,
+the Contributor who includes the Program in a commercial product
+offering should do so in a manner which does not create potential
+liability for other Contributors. Therefore, if a Contributor includes
+the Program in a commercial product offering, such Contributor
+("Commercial Contributor") hereby agrees to defend and indemnify every
+other Contributor ("Indemnified Contributor") against any losses,
+damages and costs (collectively "Losses") arising from claims, lawsuits
+and other legal actions brought by a third party against the Indemnified
+Contributor to the extent caused by the acts or omissions of such
+Commercial Contributor in connection with its distribution of the Program
+in a commercial product offering. The obligations in this section do not
+apply to any claims or Losses relating to any actual or alleged
+intellectual property infringement. In order to qualify, an Indemnified
+Contributor must: a) promptly notify the Commercial Contributor in
+writing of such claim, and b) allow the Commercial Contributor to control,
+and cooperate with the Commercial Contributor in, the defense and any
+related settlement negotiations. The Indemnified Contributor may
+participate in any such claim at its own expense.
+
+For example, a Contributor might include the Program in a commercial
+product offering, Product X. That Contributor is then a Commercial
+Contributor. If that Commercial Contributor then makes performance
+claims, or offers warranties related to Product X, those performance
+claims and warranties are such Commercial Contributor's responsibility
+alone. Under this section, the Commercial Contributor would have to
+defend claims against the other Contributors related to those performance
+claims and warranties, and if a court requires any other Contributor to
+pay any damages as a result, the Commercial Contributor must pay
+those damages.
+
+5. NO WARRANTY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT
+PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN "AS IS"
+BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR
+IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF
+TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR
+PURPOSE. Each Recipient is solely responsible for determining the
+appropriateness of using and distributing the Program and assumes all
+risks associated with its exercise of rights under this Agreement,
+including but not limited to the risks and costs of program errors,
+compliance with applicable laws, damage to or loss of data, programs
+or equipment, and unavailability or interruption of operations.
+
+6. DISCLAIMER OF LIABILITY
+
+EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT
+PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS
+SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST
+PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE
+EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+7. GENERAL
+
+If any provision of this Agreement is invalid or unenforceable under
+applicable law, it shall not affect the validity or enforceability of
+the remainder of the terms of this Agreement, and without further
+action by the parties hereto, such provision shall be reformed to the
+minimum extent necessary to make such provision valid and enforceable.
+
+If Recipient institutes patent litigation against any entity
+(including a cross-claim or counterclaim in a lawsuit) alleging that the
+Program itself (excluding combinations of the Program with other software
+or hardware) infringes such Recipient's patent(s), then such Recipient's
+rights granted under Section 2(b) shall terminate as of the date such
+litigation is filed.
+
+All Recipient's rights under this Agreement shall terminate if it
+fails to comply with any of the material terms or conditions of this
+Agreement and does not cure such failure in a reasonable period of
+time after becoming aware of such noncompliance. If all Recipient's
+rights under this Agreement terminate, Recipient agrees to cease use
+and distribution of the Program as soon as reasonably practicable.
+However, Recipient's obligations under this Agreement and any licenses
+granted by Recipient relating to the Program shall continue and survive.
+
+Everyone is permitted to copy and distribute copies of this Agreement,
+but in order to avoid inconsistency the Agreement is copyrighted and
+may only be modified in the following manner. The Agreement Steward
+reserves the right to publish new versions (including revisions) of
+this Agreement from time to time. No one other than the Agreement
+Steward has the right to modify this Agreement. The Eclipse Foundation
+is the initial Agreement Steward. The Eclipse Foundation may assign the
+responsibility to serve as the Agreement Steward to a suitable separate
+entity. Each new version of the Agreement will be given a distinguishing
+version number. The Program (including Contributions) may always be
+Distributed subject to the version of the Agreement under which it was
+received. In addition, after a new version of the Agreement is published,
+Contributor may elect to Distribute the Program (including its
+Contributions) under the new version.
+
+Except as expressly stated in Sections 2(a) and 2(b) above, Recipient
+receives no rights or licenses to the intellectual property of any
+Contributor under this Agreement, whether expressly, by implication,
+estoppel or otherwise. All rights in the Program not expressly granted
+under this Agreement are reserved. Nothing in this Agreement is intended
+to be enforceable by any entity that is not a Contributor or Recipient.
+No third-party beneficiary rights are created under this Agreement.
+
+Exhibit A - Form of Secondary Licenses Notice
+
+"This Source Code may also be made available under the following
+Secondary Licenses when the conditions for such availability set forth
+in the Eclipse Public License, v. 2.0 are satisfied: {name license(s),
+version(s), and exceptions or additional permissions here}."
+
+ Simply including a copy of this Agreement, including this Exhibit A
+ is not sufficient to license the Source Code under Secondary Licenses.
+
+ If it is not possible or desirable to put the notice in a particular
+ file, then You may include the notice in a location (such as a LICENSE
+ file in a relevant directory) where a recipient would be likely to
+ look for such a notice.
+
+ You may add additional accurate notices of copyright ownership.
\ No newline at end of file
diff --git a/seatunnel-e2e/seatunnel-flink-connector-v2-e2e/src/test/java/org/apache/seatunnel/e2e/flink/v2/redis/RedisIT.java b/seatunnel-e2e/seatunnel-flink-connector-v2-e2e/src/test/java/org/apache/seatunnel/e2e/flink/v2/redis/RedisIT.java
new file mode 100644
index 00000000000..271ba232afd
--- /dev/null
+++ b/seatunnel-e2e/seatunnel-flink-connector-v2-e2e/src/test/java/org/apache/seatunnel/e2e/flink/v2/redis/RedisIT.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.e2e.flink.v2.redis;
+
+import static org.testcontainers.shaded.org.awaitility.Awaitility.given;
+
+import org.apache.seatunnel.e2e.flink.FlinkContainer;
+
+import com.google.common.collect.Lists;
+import lombok.extern.slf4j.Slf4j;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.testcontainers.containers.Container;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.output.Slf4jLogConsumer;
+import org.testcontainers.lifecycle.Startables;
+import redis.clients.jedis.Jedis;
+
+import java.io.IOException;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
+
+@Slf4j
+public class RedisIT extends FlinkContainer {
+ private static final String REDIS_IMAGE = "redis:latest";
+ private static final String REDIS_CONTAINER_HOST = "flink_e2e_redis";
+ private static final String REDIS_HOST = "localhost";
+ private static final int REDIS_PORT = 6379;
+ private GenericContainer> redisContainer;
+ private Jedis jedis;
+
+ @BeforeEach
+ public void startRedisContainer() {
+ redisContainer = new GenericContainer<>(REDIS_IMAGE)
+ .withNetwork(NETWORK)
+ .withNetworkAliases(REDIS_CONTAINER_HOST)
+ .withLogConsumer(new Slf4jLogConsumer(log));
+ redisContainer.setPortBindings(Lists.newArrayList(String.format("%s:%s", REDIS_PORT, REDIS_PORT)));
+ Startables.deepStart(Stream.of(redisContainer)).join();
+ log.info("Redis container started");
+ given().ignoreExceptions()
+ .await()
+ .atMost(180, TimeUnit.SECONDS)
+ .untilAsserted(this::initJedis);
+ this.generateTestData();
+ }
+
+ private void initJedis() {
+ jedis = new Jedis(REDIS_HOST, REDIS_PORT);
+ jedis.connect();
+ }
+
+ private void generateTestData() {
+ jedis.set("key_test", "test");
+ jedis.set("key_test1", "test1");
+ jedis.set("key_test2", "test2");
+ jedis.set("key_test3", "test3");
+ jedis.set("key_test4", "test4");
+ }
+
+ @Test
+ public void testRedisSource() throws IOException, InterruptedException {
+ Container.ExecResult execResult = executeSeaTunnelFlinkJob("/redis/redis_source.conf");
+ Assertions.assertEquals(0, execResult.getExitCode());
+ }
+
+ @AfterEach
+ public void close() {
+ super.close();
+ jedis.close();
+ redisContainer.close();
+ }
+}
diff --git a/seatunnel-e2e/seatunnel-flink-connector-v2-e2e/src/test/resources/redis/redis_source.conf b/seatunnel-e2e/seatunnel-flink-connector-v2-e2e/src/test/resources/redis/redis_source.conf
new file mode 100644
index 00000000000..9805b2a1f14
--- /dev/null
+++ b/seatunnel-e2e/seatunnel-flink-connector-v2-e2e/src/test/resources/redis/redis_source.conf
@@ -0,0 +1,65 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+######
+###### This config file is a demonstration of batch processing in seatunnel config
+######
+
+env {
+ # You can set flink configuration here
+ execution.parallelism = 1
+ job.mode = "BATCH"
+ #execution.checkpoint.interval = 10000
+ #execution.checkpoint.data-uri = "hdfs://localhost:9000/checkpoint"
+}
+
+source {
+ # This is an example source plugin **only for testing and demonstrating the feature source plugin**
+ Redis {
+ host = flink_e2e_redis
+ port = 6379
+ keys = "key_test*"
+ data_type = key
+ format = text
+ }
+ # If you would like to get more information about how to configure seatunnel and see full list of source plugins,
+ # please go to https://seatunnel.apache.org/docs/connector-v2/source/Redis
+}
+
+transform {
+
+}
+
+sink {
+
+ Console {
+
+ }
+
+ Assert {
+ rules = [
+ {
+ field_name = content
+ field_type = string
+ field_value = [
+ {
+ rule_type = NOT_NULL
+ }
+ ]
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/seatunnel-e2e/seatunnel-spark-connector-v2-e2e/src/test/java/org/apache/seatunnel/e2e/spark/v2/redis/RedisIT.java b/seatunnel-e2e/seatunnel-spark-connector-v2-e2e/src/test/java/org/apache/seatunnel/e2e/spark/v2/redis/RedisIT.java
new file mode 100644
index 00000000000..a597be1ccf1
--- /dev/null
+++ b/seatunnel-e2e/seatunnel-spark-connector-v2-e2e/src/test/java/org/apache/seatunnel/e2e/spark/v2/redis/RedisIT.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.e2e.spark.v2.redis;
+
+import static org.testcontainers.shaded.org.awaitility.Awaitility.given;
+
+import org.apache.seatunnel.e2e.spark.SparkContainer;
+
+import com.google.common.collect.Lists;
+import lombok.extern.slf4j.Slf4j;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.testcontainers.containers.Container;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.output.Slf4jLogConsumer;
+import org.testcontainers.lifecycle.Startables;
+import redis.clients.jedis.Jedis;
+
+import java.io.IOException;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
+
+@Slf4j
+public class RedisIT extends SparkContainer {
+ private static final String REDIS_IMAGE = "redis:latest";
+ private static final String REDIS_CONTAINER_HOST = "spark_e2e_redis";
+ private static final String REDIS_HOST = "localhost";
+ private static final int REDIS_PORT = 6379;
+ private GenericContainer<?> redisContainer;
+ private Jedis jedis;
+
+ @BeforeEach
+ public void startRedisContainer() {
+ redisContainer = new GenericContainer<>(REDIS_IMAGE)
+ .withNetwork(NETWORK)
+ .withNetworkAliases(REDIS_CONTAINER_HOST)
+ .withLogConsumer(new Slf4jLogConsumer(log));
+ redisContainer.setPortBindings(Lists.newArrayList(String.format("%s:%s", REDIS_PORT, REDIS_PORT)));
+ Startables.deepStart(Stream.of(redisContainer)).join();
+ log.info("Redis container started");
+ given().ignoreExceptions()
+ .await()
+ .atMost(180, TimeUnit.SECONDS)
+ .untilAsserted(this::initJedis);
+ this.generateTestData();
+ }
+
+ private void initJedis() {
+ jedis = new Jedis(REDIS_HOST, REDIS_PORT);
+ jedis.connect();
+ }
+
+ private void generateTestData() {
+ jedis.set("key_test", "test");
+ jedis.set("key_test1", "test1");
+ jedis.set("key_test2", "test2");
+ jedis.set("key_test3", "test3");
+ jedis.set("key_test4", "test4");
+ }
+
+ @Test
+ public void testRedisSource() throws IOException, InterruptedException {
+ Container.ExecResult execResult = executeSeaTunnelSparkJob("/redis/redis_source.conf");
+ Assertions.assertEquals(0, execResult.getExitCode());
+ }
+
+ @AfterEach
+ public void close() {
+ super.close();
+ jedis.close();
+ redisContainer.close();
+ }
+}
diff --git a/seatunnel-e2e/seatunnel-spark-connector-v2-e2e/src/test/resources/redis/redis_source.conf b/seatunnel-e2e/seatunnel-spark-connector-v2-e2e/src/test/resources/redis/redis_source.conf
new file mode 100644
index 00000000000..25ea6e009bf
--- /dev/null
+++ b/seatunnel-e2e/seatunnel-spark-connector-v2-e2e/src/test/resources/redis/redis_source.conf
@@ -0,0 +1,68 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+env {
+ # You can set spark configuration here
+ spark.app.name = "SeaTunnel"
+ spark.executor.instances = 2
+ spark.executor.cores = 1
+ spark.executor.memory = "1g"
+ spark.master = local
+ job.mode = "BATCH"
+}
+
+source {
+ # This is an example source plugin **only for testing and demonstrating the feature source plugin**
+ Redis {
+ host = spark_e2e_redis
+ port = 6379
+ keys = "key_test*"
+ data_type = key
+ format = text
+ }
+
+ # If you would like to get more information about how to configure seatunnel and see full list of source plugins,
+ # please go to https://seatunnel.apache.org/docs/connector-v2/source/Redis
+}
+
+transform {
+
+}
+
+sink {
+
+ Console {
+
+ }
+
+ Assert {
+ rules = [
+ {
+ field_name = content
+ field_type = string
+ field_value = [
+ {
+ rule_type = NOT_NULL
+ }
+ ]
+ }
+ ]
+ }
+
+ # If you would like to get more information about how to configure seatunnel and see full list of sink plugins,
+ # please go to https://seatunnel.apache.org/docs/connector-v2/sink/Console
+}
\ No newline at end of file
diff --git a/seatunnel-examples/seatunnel-flink-connector-v2-example/pom.xml b/seatunnel-examples/seatunnel-flink-connector-v2-example/pom.xml
index d52f99dbdf9..bc5bcd47062 100644
--- a/seatunnel-examples/seatunnel-flink-connector-v2-example/pom.xml
+++ b/seatunnel-examples/seatunnel-flink-connector-v2-example/pom.xml
@@ -76,47 +76,42 @@
connector-dingtalk${project.version}
-
- org.apache.seatunnel
- connector-datahub
- ${project.version}
- org.apache.flinkflink-java
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-table-planner_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-table-planner-blink_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-streaming-java_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-clients_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-runtime-web_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}
diff --git a/seatunnel-examples/seatunnel-flink-examples/pom.xml b/seatunnel-examples/seatunnel-flink-examples/pom.xml
index 24a6264c063..e67cdc720b1 100644
--- a/seatunnel-examples/seatunnel-flink-examples/pom.xml
+++ b/seatunnel-examples/seatunnel-flink-examples/pom.xml
@@ -67,37 +67,37 @@
org.apache.flinkflink-java
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-table-planner_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-table-planner-blink_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-streaming-java_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-clients_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-runtime-web_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}
diff --git a/seatunnel-examples/seatunnel-flink-sql-examples/pom.xml b/seatunnel-examples/seatunnel-flink-sql-examples/pom.xml
index e97fa020ad3..b00162eac3b 100644
--- a/seatunnel-examples/seatunnel-flink-sql-examples/pom.xml
+++ b/seatunnel-examples/seatunnel-flink-sql-examples/pom.xml
@@ -47,47 +47,47 @@
org.apache.flinkflink-java
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-table-planner_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-table-planner-blink_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-streaming-java_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-table-common
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-table-api-java-bridge_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}org.apache.flinkflink-clients_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}${flink.scope}
diff --git a/seatunnel-examples/seatunnel-spark-connector-v2-example/pom.xml b/seatunnel-examples/seatunnel-spark-connector-v2-example/pom.xml
index b0d88aba471..90dbe5c17d1 100644
--- a/seatunnel-examples/seatunnel-spark-connector-v2-example/pom.xml
+++ b/seatunnel-examples/seatunnel-spark-connector-v2-example/pom.xml
@@ -61,29 +61,35 @@
org.apache.sparkspark-streaming_${scala.binary.version}
- ${spark.version}
+ ${spark.2.4.0.version}${spark.scope}org.apache.sparkspark-core_${scala.binary.version}
- ${spark.version}
+ ${spark.2.4.0.version}${spark.scope}org.apache.sparkspark-sql_${scala.binary.version}
- ${spark.version}
+ ${spark.2.4.0.version}${spark.scope}org.apache.sparkspark-hive_${scala.binary.version}
- ${spark.version}
+ ${spark.2.4.0.version}${spark.scope}
+
+
+ org.glassfish.web
+ javax.servlet.jsp
+
+
diff --git a/seatunnel-examples/seatunnel-spark-examples/pom.xml b/seatunnel-examples/seatunnel-spark-examples/pom.xml
index 49c634f0bd7..0254e3e927d 100644
--- a/seatunnel-examples/seatunnel-spark-examples/pom.xml
+++ b/seatunnel-examples/seatunnel-spark-examples/pom.xml
@@ -65,28 +65,28 @@
org.apache.sparkspark-streaming_${scala.binary.version}
- ${spark.version}
+ ${spark.2.4.0.version}${spark.scope}org.apache.sparkspark-core_${scala.binary.version}
- ${spark.version}
+ ${spark.2.4.0.version}${spark.scope}org.apache.sparkspark-sql_${scala.binary.version}
- ${spark.version}
+ ${spark.2.4.0.version}${spark.scope}org.apache.sparkspark-hive_${scala.binary.version}
- ${spark.version}
+ ${spark.2.4.0.version}${spark.scope}
diff --git a/seatunnel-server/seatunnel-app/pom.xml b/seatunnel-server/seatunnel-app/pom.xml
index b9d4c84fd12..8d6c821dcf7 100644
--- a/seatunnel-server/seatunnel-app/pom.xml
+++ b/seatunnel-server/seatunnel-app/pom.xml
@@ -153,6 +153,25 @@
seatunnel-scheduler-dolphinscheduler${project.version}
+
+
+ io.jsonwebtoken
+ jjwt-api
+
+
+ io.jsonwebtoken
+ jjwt-impl
+
+
+ io.jsonwebtoken
+ jjwt-jackson
+
+
+
+ org.springframework.boot
+ spring-boot-starter-aop
+ ${spring-boot.version}
+
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/adapter/SeatunnelWebAdapter.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/adapter/SeatunnelWebAdapter.java
new file mode 100644
index 00000000000..9f1d4ab0ff0
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/adapter/SeatunnelWebAdapter.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.adapter;
+
+import org.apache.seatunnel.app.interceptor.AuthenticationInterceptor;
+import org.apache.seatunnel.app.resolver.UserIdMethodArgumentResolver;
+
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.web.method.support.HandlerMethodArgumentResolver;
+import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
+
+import javax.annotation.Resource;
+
+import java.util.List;
+
+@Configuration
+public class SeatunnelWebAdapter implements WebMvcConfigurer {
+ @Bean
+ public AuthenticationInterceptor authenticationInterceptor() {
+ return new AuthenticationInterceptor();
+ }
+
+ @Resource
+ private UserIdMethodArgumentResolver currentUserMethodArgumentResolver;
+
+ @Override
+ public void addInterceptors(InterceptorRegistry registry) {
+ registry.addInterceptor(authenticationInterceptor()).order(1).addPathPatterns("/**")
+ // exclude swagger api path
+ .excludePathPatterns(
+ "/swagger-resources/**",
+ "/webjars/**",
+ "/v2/**",
+ "/swagger-ui.html**"
+ )
+ // exclude login
+ .excludePathPatterns("/api/v1/user/login**")
+ ;
+ }
+
+ @Override
+ public void addArgumentResolvers(List<HandlerMethodArgumentResolver> argumentResolvers) {
+ argumentResolvers.add(currentUserMethodArgumentResolver);
+ }
+}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LoginAspect.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LoginAspect.java
new file mode 100644
index 00000000000..1bba7f74a05
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LoginAspect.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.aspect;
+
+import static org.apache.seatunnel.server.common.Constants.TOKEN;
+
+import org.apache.seatunnel.app.common.Result;
+import org.apache.seatunnel.app.common.UserTokenStatusEnum;
+import org.apache.seatunnel.app.dal.dao.IUserDao;
+import org.apache.seatunnel.app.domain.dto.user.UserLoginLogDto;
+import org.apache.seatunnel.app.domain.response.user.UserSimpleInfoRes;
+import org.apache.seatunnel.app.security.JwtUtils;
+
+import lombok.extern.slf4j.Slf4j;
+import org.aspectj.lang.JoinPoint;
+import org.aspectj.lang.annotation.AfterReturning;
+import org.aspectj.lang.annotation.Aspect;
+import org.aspectj.lang.annotation.Pointcut;
+import org.springframework.core.annotation.Order;
+import org.springframework.stereotype.Component;
+import org.springframework.web.context.request.RequestContextHolder;
+import org.springframework.web.context.request.ServletRequestAttributes;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletResponse;
+
+@Slf4j
+@Aspect
+@Component
+@Order(2)
+public class LoginAspect {
+ @Resource
+ private JwtUtils jwtUtils;
+
+ @Resource
+ private IUserDao userDaoImpl;
+
+ @Pointcut("execution(public * org.apache.seatunnel.app.controller.UserController.login(..))")
+ public void loginPointCut() {
+
+ }
+
+ @AfterReturning(value = "loginPointCut()", returning = "obj")
+ public void check(JoinPoint pjp, Object obj) {
+ final Result target = (Result) obj;
+ final UserSimpleInfoRes data = target.getData();
+
+ ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
+ final HttpServletResponse response = attributes.getResponse();
+ final String token = jwtUtils.genToken(data.toMap());
+ response.setHeader(TOKEN, token);
+
+ final UserLoginLogDto logDto = UserLoginLogDto.builder()
+ .token(token)
+ .tokenStatus(UserTokenStatusEnum.ENABLE.enable())
+ .userId(data.getId())
+ .build();
+ userDaoImpl.insertLoginLog(logDto);
+ }
+}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LogoutAspect.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LogoutAspect.java
new file mode 100644
index 00000000000..3d481c2f5a3
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LogoutAspect.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.aspect;
+
+import static org.apache.seatunnel.server.common.Constants.USER_ID;
+
+import org.apache.seatunnel.app.dal.dao.IUserDao;
+
+import lombok.extern.slf4j.Slf4j;
+import org.aspectj.lang.JoinPoint;
+import org.aspectj.lang.annotation.Aspect;
+import org.aspectj.lang.annotation.Before;
+import org.aspectj.lang.annotation.Pointcut;
+import org.springframework.core.annotation.Order;
+import org.springframework.stereotype.Component;
+import org.springframework.web.context.request.RequestContextHolder;
+import org.springframework.web.context.request.ServletRequestAttributes;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+
+@Slf4j
+@Aspect
+@Component
+@Order(2)
+public class LogoutAspect {
+
+ @Resource
+ private IUserDao userDaoImpl;
+
+ @Pointcut("execution(public * org.apache.seatunnel.app.controller.UserController.logout(..))")
+ public void logoutPointCut() {
+
+ }
+
+ @Before("logoutPointCut()")
+ public void check(JoinPoint pjp) {
+ ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
+ HttpServletRequest request = attributes.getRequest();
+ final Integer userId = (Integer) request.getAttribute(USER_ID);
+ userDaoImpl.disableToken(userId);
+ }
+}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/UserId.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/UserId.java
new file mode 100644
index 00000000000..628e103ca95
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/UserId.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.aspect;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target({ElementType.PARAMETER})
+@Retention(RetentionPolicy.RUNTIME)
+public @interface UserId {
+}
diff --git a/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/source/config/OssConf.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserTokenStatusEnum.java
similarity index 65%
rename from seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/source/config/OssConf.java
rename to seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserTokenStatusEnum.java
index d197ed17fd9..502a9647a1d 100644
--- a/seatunnel-connectors-v2/connector-file/connector-file-oss/src/main/java/org/apache/seatunnel/connectors/seatunnel/file/oss/source/config/OssConf.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserTokenStatusEnum.java
@@ -15,19 +15,14 @@
* limitations under the License.
*/
-package org.apache.seatunnel.connectors.seatunnel.file.oss.source.config;
+package org.apache.seatunnel.app.common;
-import org.apache.seatunnel.connectors.seatunnel.file.config.HadoopConf;
+public enum UserTokenStatusEnum {
+ ENABLE,
+ DISABLE,
+ ;
-public class OssConf extends HadoopConf {
- private final String fsHdfsImpl = "org.apache.hadoop.fs.aliyun.oss.AliyunOSSFileSystem";
-
- @Override
- public String getFsHdfsImpl() {
- return fsHdfsImpl;
- }
-
- public OssConf(String hdfsNameKey) {
- super(hdfsNameKey);
+ public boolean enable() {
+ return this == ENABLE;
}
}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java
index f8a5718da5a..3eb93a85697 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java
@@ -17,14 +17,8 @@
package org.apache.seatunnel.app.config;
-import com.fasterxml.classmate.TypeResolver;
-import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
-import org.springframework.web.servlet.config.annotation.EnableWebMvc;
-import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
-import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
-import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
@@ -35,11 +29,7 @@
@Configuration
@EnableSwagger2
-@EnableWebMvc
-public class Swagger2 implements WebMvcConfigurer {
- @Autowired
- private TypeResolver typeResolver;
-
+public class Swagger2{
@Bean
public Docket createRestApi() {
@@ -49,30 +39,6 @@ public Docket createRestApi() {
.apis(RequestHandlerSelectors.basePackage("org.apache.seatunnel.app.controller"))
.paths(PathSelectors.any())
.build();
-
- }
-
- @Override
- public void addViewControllers(ViewControllerRegistry registry) {
- registry.addRedirectViewController("/api/v2/api-docs", "/v2/api-docs");
- registry.addRedirectViewController("/api/swagger-resources/configuration/ui", "/swagger-resources/configuration/ui");
- registry.addRedirectViewController("/api/swagger-resources/configuration/security", "/swagger-resources/configuration/security");
- registry.addRedirectViewController("/api/swagger-resources", "/swagger-resources");
-
- registry.addRedirectViewController("/api/null/api-docs",
- "/api-docs").setKeepQueryParams(true);
- registry.addRedirectViewController("/api/null/swagger-resources/configuration/ui",
- "/swagger-resources/configuration/ui");
- registry.addRedirectViewController("/api/null/swagger-resources/configuration/security",
- "/swagger-resources/configuration/security");
- registry.addRedirectViewController("/api/null/swagger-resources", "/swagger-resources");
- }
-
- @Override
- public void addResourceHandlers(ResourceHandlerRegistry registry) {
- registry.addResourceHandler("/api/swagger-ui.html**").addResourceLocations("classpath:/META-INF/resources/swagger-ui.html");
- registry.addResourceHandler("/api/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
- registry.addResourceHandler("/doc.html**").addResourceLocations("classpath:/META-INF/resources/");
}
private ApiInfo apiInfo() {
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java
index d9110ebcae7..68edd6052c1 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java
@@ -33,6 +33,7 @@
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
@@ -69,4 +70,11 @@ Result> listInstance(@RequestBody @NotNull Insta
Result tmpExecute(@RequestBody @NotNull ExecuteReq req) {
return Result.success(iTaskService.tmpExecute(req));
}
+
+ @PostMapping("/kill")
+ @ApiOperation(value = "kill running instance", httpMethod = "POST")
+ Result kill(@RequestParam Long instanceId) {
+ iTaskService.kill(instanceId);
+ return Result.success();
+ }
}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java
index d7cf924d209..feb8b460c19 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java
@@ -21,6 +21,7 @@
import org.apache.seatunnel.app.domain.request.user.AddUserReq;
import org.apache.seatunnel.app.domain.request.user.UpdateUserReq;
import org.apache.seatunnel.app.domain.request.user.UserListReq;
+import org.apache.seatunnel.app.domain.request.user.UserLoginReq;
import org.apache.seatunnel.app.domain.response.PageInfo;
import org.apache.seatunnel.app.domain.response.user.AddUserRes;
import org.apache.seatunnel.app.domain.response.user.UserSimpleInfoRes;
@@ -30,6 +31,7 @@
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.DeleteMapping;
+import org.springframework.web.bind.annotation.PatchMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -95,4 +97,14 @@ public Result disable(@RequestParam @NotNull Integer id) {
iUserService.disable(id);
return Result.success();
}
+
+ @PostMapping("/login")
+ public Result login(@RequestBody UserLoginReq req) {
+ return Result.success(iUserService.login(req));
+ }
+
+ @PatchMapping("/logout")
+ public Result logout() {
+ return Result.success();
+ }
}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IUserDao.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IUserDao.java
index ebf1d19c0bb..db2f1662320 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IUserDao.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/IUserDao.java
@@ -18,8 +18,10 @@
package org.apache.seatunnel.app.dal.dao;
import org.apache.seatunnel.app.dal.entity.User;
+import org.apache.seatunnel.app.dal.entity.UserLoginLog;
import org.apache.seatunnel.app.domain.dto.user.ListUserDto;
import org.apache.seatunnel.app.domain.dto.user.UpdateUserDto;
+import org.apache.seatunnel.app.domain.dto.user.UserLoginLogDto;
import org.apache.seatunnel.server.common.PageData;
public interface IUserDao {
@@ -40,4 +42,12 @@ public interface IUserDao {
User getById(int operatorId);
User getByName(String user);
+
+ User checkPassword(String username, String password);
+
+ long insertLoginLog(UserLoginLogDto dto);
+
+ void disableToken(int userId);
+
+ UserLoginLog getLastLoginLog(Integer userId);
}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/UserDaoImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/UserDaoImpl.java
index 8af3d8e5200..a94ee5eacec 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/UserDaoImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/UserDaoImpl.java
@@ -22,11 +22,15 @@
import static com.google.common.base.Preconditions.checkState;
import org.apache.seatunnel.app.common.UserStatusEnum;
+import org.apache.seatunnel.app.common.UserTokenStatusEnum;
import org.apache.seatunnel.app.dal.dao.IUserDao;
import org.apache.seatunnel.app.dal.entity.User;
+import org.apache.seatunnel.app.dal.entity.UserLoginLog;
+import org.apache.seatunnel.app.dal.mapper.UserLoginLogMapper;
import org.apache.seatunnel.app.dal.mapper.UserMapper;
import org.apache.seatunnel.app.domain.dto.user.ListUserDto;
import org.apache.seatunnel.app.domain.dto.user.UpdateUserDto;
+import org.apache.seatunnel.app.domain.dto.user.UserLoginLogDto;
import org.apache.seatunnel.server.common.PageData;
import org.springframework.stereotype.Repository;
@@ -40,6 +44,8 @@
public class UserDaoImpl implements IUserDao {
@Resource
private UserMapper userMapper;
+ @Resource
+ private UserLoginLogMapper userLoginLogMapper;
@Override
public int add(UpdateUserDto dto) {
@@ -106,4 +112,30 @@ public User getById(int operatorId) {
public User getByName(String user) {
return userMapper.selectByName(user);
}
+
+ @Override
+ public User checkPassword(String username, String password) {
+ return userMapper.selectByNameAndPasswd(username, password);
+ }
+
+ @Override
+ public long insertLoginLog(UserLoginLogDto dto) {
+ final UserLoginLog log = new UserLoginLog();
+ log.setToken(dto.getToken());
+ log.setTokenStatus(dto.getTokenStatus());
+ log.setUserId(dto.getUserId());
+
+ userLoginLogMapper.insert(log);
+ return log.getId();
+ }
+
+ @Override
+ public void disableToken(int userId) {
+ userLoginLogMapper.updateStatus(userId, UserTokenStatusEnum.DISABLE.enable());
+ }
+
+ @Override
+ public UserLoginLog getLastLoginLog(Integer userId) {
+ return userLoginLogMapper.checkLastTokenEnable(userId);
+ }
}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/entity/UserLoginLog.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/entity/UserLoginLog.java
new file mode 100644
index 00000000000..008d46d766e
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/entity/UserLoginLog.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.dal.entity;
+
+import lombok.Data;
+
+import java.util.Date;
+
+@Data
+public class UserLoginLog {
+ private Long id;
+
+ private Integer userId;
+
+ private String token;
+
+ private Boolean tokenStatus;
+
+ private Date createTime;
+
+ private Date updateTime;
+}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserLoginLogMapper.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserLoginLogMapper.java
new file mode 100644
index 00000000000..448011691e5
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserLoginLogMapper.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.dal.mapper;
+
+import org.apache.seatunnel.app.dal.entity.UserLoginLog;
+
+import org.apache.ibatis.annotations.Param;
+
+public interface UserLoginLogMapper {
+ int insert(UserLoginLog userLoginLog);
+
+ int updateStatus(@Param("userId") int userId, @Param("enable") boolean enable);
+
+ UserLoginLog checkLastTokenEnable(@Param("userId") Integer userId);
+}
+
+
+
+
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserMapper.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserMapper.java
index 5d0cbae252e..3bd74f2265b 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserMapper.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserMapper.java
@@ -39,4 +39,6 @@ public interface UserMapper {
User selectByName(@Param("username") String username);
int countBySelective(@Param("user") User user);
+
+ User selectByNameAndPasswd(@Param("username") String username, @Param("password") String password);
}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/dto/user/UserLoginLogDto.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/dto/user/UserLoginLogDto.java
new file mode 100644
index 00000000000..55af565c7e1
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/dto/user/UserLoginLogDto.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.domain.dto.user;
+
+import lombok.Builder;
+import lombok.Data;
+
+@Data
+@Builder
+public class UserLoginLogDto {
+ private Long id;
+
+ private Integer userId;
+
+ private String token;
+
+ private Boolean tokenStatus;
+}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/UserLoginReq.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/UserLoginReq.java
new file mode 100644
index 00000000000..fdf2cd3f77a
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/UserLoginReq.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.domain.request.user;
+
+import lombok.Data;
+
+@Data
+public class UserLoginReq {
+ private String username;
+ private String password;
+}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/user/UserSimpleInfoRes.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/user/UserSimpleInfoRes.java
index 3c18064ed1c..b1c6af20e0c 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/user/UserSimpleInfoRes.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/user/UserSimpleInfoRes.java
@@ -17,8 +17,21 @@
package org.apache.seatunnel.app.domain.response.user;
+import com.google.common.collect.Maps;
import io.swagger.annotations.ApiModel;
+import java.util.HashMap;
+import java.util.Map;
+
@ApiModel(value = "userSimpleInfoRes", description = "user simple information")
-public class UserSimpleInfoRes extends BaseUserInfoRes{
+public class UserSimpleInfoRes extends BaseUserInfoRes {
+
+ public Map toMap() {
+ final HashMap userMap = Maps.newHashMap();
+ userMap.put("id", getId());
+ userMap.put("name", getName());
+ userMap.put("status", getStatus());
+ userMap.put("type", getType());
+ return userMap;
+ }
}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/interceptor/AuthenticationInterceptor.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/interceptor/AuthenticationInterceptor.java
new file mode 100644
index 00000000000..3b432625677
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/interceptor/AuthenticationInterceptor.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.interceptor;
+
+import static org.apache.seatunnel.server.common.Constants.OPTIONS;
+import static org.apache.seatunnel.server.common.Constants.TOKEN;
+import static org.apache.seatunnel.server.common.Constants.USER_ID;
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.TOKEN_ILLEGAL;
+import static io.jsonwebtoken.Claims.EXPIRATION;
+
+import org.apache.seatunnel.app.dal.dao.IUserDao;
+import org.apache.seatunnel.app.dal.entity.UserLoginLog;
+import org.apache.seatunnel.app.security.JwtUtils;
+import org.apache.seatunnel.server.common.SeatunnelException;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.web.servlet.HandlerInterceptor;
+import org.springframework.web.servlet.ModelAndView;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import java.util.Map;
+import java.util.Objects;
+
+@Slf4j
+public class AuthenticationInterceptor implements HandlerInterceptor {
+
+ @Resource
+ private IUserDao userDaoImpl;
+
+ @Resource
+ private JwtUtils jwtUtils;
+
+ @Override
+ @SuppressWarnings("MagicNumber")
+ public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
+ if (request.getMethod().equals(OPTIONS)) {
+ response.setHeader("Access-Control-Allow-Origin", "*");
+ response.setHeader("Access-Control-Allow-Headers", "*");
+ response.setHeader("Access-Control-Allow-Methods", "*");
+ response.setHeader("Access-Control-Allow-Credentials", "true");
+ response.setHeader("Access-Control-Max-Age", "3600");
+ return true;
+ }
+ long currentTimestamp = System.currentTimeMillis();
+ final String token = request.getHeader(TOKEN);
+ if (StringUtils.isBlank(token)) {
+ throw new SeatunnelException(TOKEN_ILLEGAL);
+ }
+ final Map map = jwtUtils.parseToken(token);
+ final Integer userId = (Integer) map.get(USER_ID);
+ if (Objects.isNull(userId)) {
+ throw new SeatunnelException(TOKEN_ILLEGAL);
+ }
+ final UserLoginLog userLoginLog = userDaoImpl.getLastLoginLog(userId);
+ if (Objects.isNull(userLoginLog) || !userLoginLog.getTokenStatus()) {
+ throw new SeatunnelException(TOKEN_ILLEGAL);
+ }
+
+ final Integer expireDate = (Integer) map.get(EXPIRATION);
+ if (Objects.isNull(expireDate) || currentTimestamp - (long) expireDate * 1000 > 0) {
+ throw new SeatunnelException(TOKEN_ILLEGAL);
+ }
+
+ map.forEach(request::setAttribute);
+
+ return true;
+ }
+
+ @Override
+ public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception {
+ // do nothing
+ }
+
+ @Override
+ public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception {
+ // do nothing
+ }
+}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/resolver/UserIdMethodArgumentResolver.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/resolver/UserIdMethodArgumentResolver.java
new file mode 100644
index 00000000000..3ebfca7af6f
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/resolver/UserIdMethodArgumentResolver.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.resolver;
+
+import static org.apache.seatunnel.server.common.Constants.USER_ID;
+
+import org.apache.seatunnel.app.aspect.UserId;
+
+import org.springframework.core.MethodParameter;
+import org.springframework.stereotype.Component;
+import org.springframework.web.bind.support.WebDataBinderFactory;
+import org.springframework.web.context.request.NativeWebRequest;
+import org.springframework.web.context.request.RequestAttributes;
+import org.springframework.web.method.support.HandlerMethodArgumentResolver;
+import org.springframework.web.method.support.ModelAndViewContainer;
+
+@Component
+public class UserIdMethodArgumentResolver implements HandlerMethodArgumentResolver {
+ @Override
+ public boolean supportsParameter(MethodParameter parameter) {
+ return parameter.getParameterType().isAssignableFrom(Integer.class)
+ && parameter.hasParameterAnnotation(UserId.class);
+ }
+
+ @Override
+ public Object resolveArgument(MethodParameter parameter, ModelAndViewContainer mavContainer, NativeWebRequest webRequest, WebDataBinderFactory binderFactory) throws Exception {
+ return (Integer) webRequest.getAttribute(USER_ID, RequestAttributes.SCOPE_REQUEST);
+ }
+
+}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/security/JwtUtils.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/security/JwtUtils.java
new file mode 100644
index 00000000000..9ddc9b40fb7
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/security/JwtUtils.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.app.security;
+
+import io.jsonwebtoken.Claims;
+import io.jsonwebtoken.Jws;
+import io.jsonwebtoken.Jwts;
+import io.jsonwebtoken.SignatureAlgorithm;
+import org.apache.commons.lang3.time.DateUtils;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Date;
+import java.util.Map;
+import java.util.UUID;
+
+@Component
+public class JwtUtils implements InitializingBean {
+ @Value("${jwt.expireTime}")
+ private int expireTime;
+ @Value("${jwt.secretKey}")
+ private String secretKey;
+ @Value("${jwt.algorithm}")
+ private String algorithmString;
+ private SignatureAlgorithm algorithm = null;
+
+ @Override
+ public void afterPropertiesSet() throws Exception {
+ algorithm = SignatureAlgorithm.valueOf(algorithmString);
+ }
+
+ public String genToken(Map data) {
+ final Date currentDate = new Date();
+ final Date expireDate = DateUtils.addSeconds(currentDate, expireTime);
+
+ return Jwts.builder()
+ .signWith(SignatureAlgorithm.HS256, secretKey.getBytes(StandardCharsets.UTF_8))
+ .setId(UUID.randomUUID().toString())
+ .setClaims(data)
+ .setIssuedAt(currentDate)
+ .setExpiration(expireDate)
+ .compact();
+ }
+
+ public Map parseToken(String token) {
+ final Jws claims = Jwts.parser().setSigningKey(secretKey.getBytes(StandardCharsets.UTF_8)).parseClaimsJws(token);
+ return claims.getBody();
+ }
+}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/ITaskService.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/ITaskService.java
index 475da0e512c..78960c72796 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/ITaskService.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/ITaskService.java
@@ -36,4 +36,6 @@ public interface ITaskService {
PageInfo listInstance(InstanceListReq req);
InstanceSimpleInfoRes tmpExecute(ExecuteReq req);
+
+ void kill(Long instanceId);
}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/IUserService.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/IUserService.java
index 54f56f0d9ed..60ba8d430bd 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/IUserService.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/IUserService.java
@@ -20,6 +20,7 @@
import org.apache.seatunnel.app.domain.request.user.AddUserReq;
import org.apache.seatunnel.app.domain.request.user.UpdateUserReq;
import org.apache.seatunnel.app.domain.request.user.UserListReq;
+import org.apache.seatunnel.app.domain.request.user.UserLoginReq;
import org.apache.seatunnel.app.domain.response.PageInfo;
import org.apache.seatunnel.app.domain.response.user.AddUserRes;
import org.apache.seatunnel.app.domain.response.user.UserSimpleInfoRes;
@@ -37,4 +38,6 @@ public interface IUserService {
void enable(int id);
void disable(int id);
+
+ UserSimpleInfoRes login(UserLoginReq req);
}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/TaskServiceImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/TaskServiceImpl.java
index 7b779d81db6..b12d8c2b7a9 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/TaskServiceImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/TaskServiceImpl.java
@@ -224,6 +224,11 @@ public InstanceSimpleInfoRes tmpExecute(ExecuteReq req) {
return this.translate(iJobService.execute(dto));
}
+ @Override
+ public void kill(Long instanceId) {
+ iJobService.kill(instanceId);
+ }
+
private JobSimpleInfoRes translate(JobSimpleInfoDto dto) {
return JobSimpleInfoRes.builder()
.jobId(dto.getJobId())
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/UserServiceImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/UserServiceImpl.java
index db34ce159f6..377866b2e09 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/UserServiceImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/UserServiceImpl.java
@@ -17,6 +17,8 @@
package org.apache.seatunnel.app.service.impl;
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.USERNAME_PASSWORD_NO_MATCHED;
+
import org.apache.seatunnel.app.dal.dao.IUserDao;
import org.apache.seatunnel.app.dal.entity.User;
import org.apache.seatunnel.app.domain.dto.user.ListUserDto;
@@ -24,6 +26,7 @@
import org.apache.seatunnel.app.domain.request.user.AddUserReq;
import org.apache.seatunnel.app.domain.request.user.UpdateUserReq;
import org.apache.seatunnel.app.domain.request.user.UserListReq;
+import org.apache.seatunnel.app.domain.request.user.UserLoginReq;
import org.apache.seatunnel.app.domain.response.PageInfo;
import org.apache.seatunnel.app.domain.response.user.AddUserRes;
import org.apache.seatunnel.app.domain.response.user.UserSimpleInfoRes;
@@ -31,6 +34,7 @@
import org.apache.seatunnel.app.service.IUserService;
import org.apache.seatunnel.app.util.PasswordUtils;
import org.apache.seatunnel.server.common.PageData;
+import org.apache.seatunnel.server.common.SeatunnelException;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
@@ -39,6 +43,7 @@
import javax.annotation.Resource;
import java.util.List;
+import java.util.Objects;
import java.util.stream.Collectors;
@Component
@@ -127,6 +132,19 @@ public void disable(int id) {
userDaoImpl.disable(id);
}
+ @Override
+ public UserSimpleInfoRes login(UserLoginReq req) {
+
+ final String username = req.getUsername();
+ final String password = PasswordUtils.encryptWithSalt(defaultSalt, req.getPassword());
+
+ final User user = userDaoImpl.checkPassword(username, password);
+ if (Objects.isNull(user)) {
+ throw new SeatunnelException(USERNAME_PASSWORD_NO_MATCHED);
+ }
+ return translate(user);
+ }
+
private UserSimpleInfoRes translate(User user) {
final UserSimpleInfoRes info = new UserSimpleInfoRes();
info.setId(user.getId());
diff --git a/seatunnel-server/seatunnel-app/src/main/resources/application.yml b/seatunnel-server/seatunnel-app/src/main/resources/application.yml
index 0c62be51648..1d05098d6c5 100644
--- a/seatunnel-server/seatunnel-app/src/main/resources/application.yml
+++ b/seatunnel-server/seatunnel-app/src/main/resources/application.yml
@@ -22,4 +22,22 @@ spring:
driver-class-name: com.mysql.jdbc.Driver
url: jdbc:mysql://127.0.0.1:3306/seatunnel?useSSL=false&useUnicode=true&characterEncoding=utf-8&allowMultiQueries=true
username: root
- password: 123456
\ No newline at end of file
+ password: 123456
+ mvc:
+ pathmatch:
+ matching-strategy: ant_path_matcher
+ds:
+ script:
+ dir: /dj
+ project:
+ default: test_dj
+ tenant:
+ default: default
+ api:
+ token: 12345678
+ prefix: http://127.0.0.1:12345/dolphinscheduler
+
+jwt:
+ expireTime: 86400
+ secretKey: https://github.com/apache/incubator-seatunnel
+ algorithm: HS256
\ No newline at end of file
diff --git a/seatunnel-server/seatunnel-app/src/main/resources/org/apache/seatunnel/app/dal/mapper/UserLoginLogMapper.xml b/seatunnel-server/seatunnel-app/src/main/resources/org/apache/seatunnel/app/dal/mapper/UserLoginLogMapper.xml
new file mode 100644
index 00000000000..f8f94442111
--- /dev/null
+++ b/seatunnel-server/seatunnel-app/src/main/resources/org/apache/seatunnel/app/dal/mapper/UserLoginLogMapper.xml
@@ -0,0 +1,58 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ id,
+ user_id,
+ token,
+ token_status,
+ create_time,
+ update_time
+
+
+
+ insert into `user_login_log` (user_id, token, `token_status`)
+ VALUES (#{userId,jdbcType=INTEGER}, #{token,jdbcType=VARCHAR}, #{tokenStatus,jdbcType=BOOLEAN})
+
+
+
+ update user_login_log
+ set token_status = #{enable,jdbcType=BOOLEAN}
+ where user_id = #{userId,jdbcType=INTEGER} and token_status != #{enable}
+
+
+
diff --git a/seatunnel-server/seatunnel-app/src/main/resources/org/apache/seatunnel/app/dal/mapper/UserMapper.xml b/seatunnel-server/seatunnel-app/src/main/resources/org/apache/seatunnel/app/dal/mapper/UserMapper.xml
index 070729e84c4..1c22238b8d5 100644
--- a/seatunnel-server/seatunnel-app/src/main/resources/org/apache/seatunnel/app/dal/mapper/UserMapper.xml
+++ b/seatunnel-server/seatunnel-app/src/main/resources/org/apache/seatunnel/app/dal/mapper/UserMapper.xml
@@ -90,4 +90,10 @@
+
diff --git a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/ExecuteTypeEnum.java b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/ExecuteTypeEnum.java
new file mode 100644
index 00000000000..f8b77a877fb
--- /dev/null
+++ b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/ExecuteTypeEnum.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.seatunnel.scheduler.dolphinscheduler;
+
+public enum ExecuteTypeEnum {
+ NONE,
+ REPEAT_RUNNING,
+ RECOVER_SUSPENDED_PROCESS,
+ START_FAILURE_TASK_PROCESS,
+ STOP,
+ PAUSE
+}
diff --git a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/IDolphinschedulerService.java b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/IDolphinschedulerService.java
index 8e0769caee4..8085a69ad9f 100644
--- a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/IDolphinschedulerService.java
+++ b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/IDolphinschedulerService.java
@@ -57,4 +57,6 @@ public interface IDolphinschedulerService {
PageData listTaskInstance(ListProcessInstanceDto dto);
void deleteProcessDefinition(long code);
+
+ void killProcessInstance(long processInstanceId);
}
diff --git a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/constants/DolphinschedulerConstants.java b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/constants/DolphinschedulerConstants.java
index 110a190cedd..d6733a3b9a2 100644
--- a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/constants/DolphinschedulerConstants.java
+++ b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/constants/DolphinschedulerConstants.java
@@ -44,6 +44,7 @@ public class DolphinschedulerConstants {
public static final String SCHEDULE_ONLINE = "/projects/%s/schedules/%s/online";
public static final String SCHEDULE_OFFLINE = "/projects/%s/schedules/%s/offline";
public static final String DELETE_PROCESS_DEFINITION = "/projects/%s/process-definition/%s";
+ public static final String EXECUTE = "/projects/%s/executors/execute";
/**
* request param
@@ -167,6 +168,8 @@ public class DolphinschedulerConstants {
public static final String EXEC_TYPE_DEFAULT = "START_PROCESS";
public static final String EXEC_TYPE_COMPLEMENT = "COMPLEMENT_DATA";
public static final String DEPENDENT_MODE_DEFAULT = "OFF_MODE";
+ public static final String PROCESS_INSTANCE_ID = "processInstanceId";
+ public static final String EXECUTE_TYPE = "executeType";
/**
* response param
diff --git a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/dto/TaskInstanceDto.java b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/dto/TaskInstanceDto.java
index 2e49196f64f..c036df943db 100644
--- a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/dto/TaskInstanceDto.java
+++ b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/dto/TaskInstanceDto.java
@@ -32,7 +32,7 @@ public class TaskInstanceDto {
private boolean firstRun;
private int dryRun;
private String flag;
- private int environmentCode;
+ private long environmentCode;
private String processInstance;
private int pid;
private String taskParams;
diff --git a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/DolphinschedulerServiceImpl.java b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/DolphinschedulerServiceImpl.java
index 746192a9fe7..d4de2383f22 100644
--- a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/DolphinschedulerServiceImpl.java
+++ b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/DolphinschedulerServiceImpl.java
@@ -34,6 +34,8 @@
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.END_TIME;
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.ENVIRONMENT_CODE;
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.ENVIRONMENT_CODE_DEFAULT;
+import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.EXECUTE;
+import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.EXECUTE_TYPE;
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.FAILED_NODE_DEFAULT;
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.FAILURE_STRATEGY;
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.FAILURE_STRATEGY_DEFAULT;
@@ -62,6 +64,7 @@
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.PROCESS_DEFINITION;
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.PROCESS_DEFINITION_CODE;
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.PROCESS_DEFINITION_NAME;
+import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.PROCESS_INSTANCE_ID;
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.PROCESS_INSTANCE_NAME;
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.PROCESS_INSTANCE_PRIORITY;
import static org.apache.seatunnel.scheduler.dolphinscheduler.constants.DolphinschedulerConstants.PROCESS_INSTANCE_PRIORITY_DEFAULT;
@@ -116,6 +119,7 @@
import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_MATCHED_PROJECT;
import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.UNEXPECTED_RETURN_CODE;
+import org.apache.seatunnel.scheduler.dolphinscheduler.ExecuteTypeEnum;
import org.apache.seatunnel.scheduler.dolphinscheduler.IDolphinschedulerService;
import org.apache.seatunnel.scheduler.dolphinscheduler.dto.ConditionResult;
import org.apache.seatunnel.scheduler.dolphinscheduler.dto.ListProcessDefinitionDto;
@@ -187,7 +191,7 @@ public void afterPropertiesSet() throws Exception {
}
@Override
- public ProcessDefinitionDto createOrUpdateProcessDefinition(UpdateProcessDefinitionDto dto) {
+ public ProcessDefinitionDto createOrUpdateProcessDefinition(UpdateProcessDefinitionDto dto) {
// gen task code
final List taskCodes = genTaskCodes(defaultProjectCode, GEN_NUM_DEFAULT);
@@ -442,6 +446,11 @@ public void deleteProcessDefinition(long code) {
checkResult(result, false);
}
+ @Override
+ public void killProcessInstance(long processInstanceId) {
+ execute(processInstanceId, ExecuteTypeEnum.STOP);
+ }
+
private ProjectDto queryProjectCodeByName(String projectName) throws IOException {
final Map result = HttpUtils.builder()
.withUrl(apiPrefix.concat(QUERY_PROJECT_LIST_PAGING))
@@ -459,6 +468,17 @@ private ProjectDto queryProjectCodeByName(String projectName) throws IOException
return projectDto;
}
+ private void execute(long processInstanceId, ExecuteTypeEnum executeType) {
+ final Map result = HttpUtils.builder()
+ .withUrl(apiPrefix.concat(String.format(EXECUTE, defaultProjectCode)))
+ .withMethod(Connection.Method.POST)
+ .withRequestBody(this.objectToString(null))
+ .withData(createParamMap(PROCESS_INSTANCE_ID, processInstanceId, EXECUTE_TYPE, executeType.name()))
+ .withToken(TOKEN, token)
+ .execute(Map.class);
+ checkResult(result, false);
+ }
+
private TaskDefinitionDto buildTaskDefinitionJson(Long taskCode, TaskDescriptionDto taskDescriptionDto) {
final ResourceDto resourceDto = createOrUpdateScriptContent(taskDescriptionDto.getName(), taskDescriptionDto.getContent());
final TaskDefinitionDto taskDefinitionDto = new TaskDefinitionDto();
diff --git a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/InstanceServiceImpl.java b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/InstanceServiceImpl.java
index 1c7f7555dc7..ff295d8ddad 100644
--- a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/InstanceServiceImpl.java
+++ b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/InstanceServiceImpl.java
@@ -51,7 +51,8 @@ public PageData list(InstanceListDto dto) {
final PageData instancePageData = iDolphinschedulerService.listTaskInstance(listDto);
final List data = instancePageData.getData().stream().map(t -> InstanceDto.builder()
- .instanceId(t.getId())
+ // use processInstanceId instead of origin task instance id.
+ .instanceId(t.getProcessInstanceId())
.instanceCode(t.getProcessInstanceId())
.instanceName(t.getProcessInstanceName())
.status(t.getState())
diff --git a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/JobServiceImpl.java b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/JobServiceImpl.java
index be898a437ce..b3d8f525e3d 100644
--- a/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/JobServiceImpl.java
+++ b/seatunnel-server/seatunnel-scheduler/seatunnel-scheduler-dolphinscheduler/src/main/java/org/apache/seatunnel/scheduler/dolphinscheduler/impl/JobServiceImpl.java
@@ -214,6 +214,11 @@ public InstanceDto execute(ExecuteDto dto) {
}
}
+ @Override
+ public void kill(Long instanceId) {
+ iDolphinschedulerService.killProcessInstance(instanceId);
+ }
+
private ProcessDefinitionDto getProcessDefinitionDto(JobDto dto) {
final TaskDescriptionDto taskDescriptionDto = TaskDescriptionDto.builder()
.name(dto.getJobName())
diff --git a/seatunnel-server/seatunnel-server-common/src/main/java/org/apache/seatunnel/server/common/Constants.java b/seatunnel-server/seatunnel-server-common/src/main/java/org/apache/seatunnel/server/common/Constants.java
index 000bb307730..f55aa89d287 100644
--- a/seatunnel-server/seatunnel-server-common/src/main/java/org/apache/seatunnel/server/common/Constants.java
+++ b/seatunnel-server/seatunnel-server-common/src/main/java/org/apache/seatunnel/server/common/Constants.java
@@ -21,4 +21,8 @@ public class Constants {
public static final String BLANK_SPACE = " ";
public static final String COMMA = ",";
public static final String UNDERLINE = "_";
+ public static final String TOKEN = "token";
+ public static final String USER_ID = "id";
+
+ public static final String OPTIONS = "OPTIONS";
}
diff --git a/seatunnel-server/seatunnel-server-common/src/main/java/org/apache/seatunnel/server/common/SeatunnelErrorEnum.java b/seatunnel-server/seatunnel-server-common/src/main/java/org/apache/seatunnel/server/common/SeatunnelErrorEnum.java
index db3fce8e4b4..9e9f494ecc2 100644
--- a/seatunnel-server/seatunnel-server-common/src/main/java/org/apache/seatunnel/server/common/SeatunnelErrorEnum.java
+++ b/seatunnel-server/seatunnel-server-common/src/main/java/org/apache/seatunnel/server/common/SeatunnelErrorEnum.java
@@ -22,9 +22,13 @@ public enum SeatunnelErrorEnum {
SCRIPT_ALREADY_EXIST(10001, "script already exist", "You already have a script with the same name : '%s'"),
NO_SUCH_SCRIPT(10002, "no such script", "No such script. Maybe deleted by others."),
USER_ALREADY_EXISTS(10003, "user already exist", "The same username [%s] is exist."),
- NO_SUCH_USER(10002, "no such user", "No such user. Maybe deleted by others."),
- SCHEDULER_CONFIG_NOT_EXIST(10003, "scheduler config not exist", "This script's scheduler config not exist, please check your config."),
- JSON_TRANSFORM_FAILED(10004, "json transform failed", "Json transform failed, it may be a bug."),
+ NO_SUCH_USER(10004, "no such user", "No such user. Maybe deleted by others."),
+ SCHEDULER_CONFIG_NOT_EXIST(10005, "scheduler config not exist", "This script's scheduler config not exist, please check your config."),
+ JSON_TRANSFORM_FAILED(10006, "json transform failed", "Json transform failed, it may be a bug."),
+
+ USERNAME_PASSWORD_NO_MATCHED(10007, "username and password no matched", "The user name and password do not match, please check your input"),
+
+ TOKEN_ILLEGAL(10008, "token illegal", "The token is expired or invalid, please login again."),
/**
* request dolphinscheduler failed
diff --git a/seatunnel-server/seatunnel-spi/src/main/java/org/apache/seatunnel/spi/scheduler/IJobService.java b/seatunnel-server/seatunnel-spi/src/main/java/org/apache/seatunnel/spi/scheduler/IJobService.java
index e160fc36d98..7a629124074 100644
--- a/seatunnel-server/seatunnel-spi/src/main/java/org/apache/seatunnel/spi/scheduler/IJobService.java
+++ b/seatunnel-server/seatunnel-spi/src/main/java/org/apache/seatunnel/spi/scheduler/IJobService.java
@@ -33,4 +33,6 @@ public interface IJobService {
PageData list(JobListDto dto);
InstanceDto execute(ExecuteDto dto);
+
+ void kill(Long instanceId);
}
diff --git a/seatunnel-transforms/seatunnel-transforms-flink/pom.xml b/seatunnel-transforms/seatunnel-transforms-flink/pom.xml
index bcf2d36d556..92213dcf9ad 100644
--- a/seatunnel-transforms/seatunnel-transforms-flink/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-flink/pom.xml
@@ -37,5 +37,33 @@
seatunnel-transform-flink-splitseatunnel-transform-flink-udf
+
+
+
+ org.apache.seatunnel
+ seatunnel-api-flink
+ ${project.version}
+ provided
+
+
+
+ org.apache.flink
+ flink-java
+ ${flink.1.13.6.version}
+ ${flink.scope}
+
+
+ org.apache.flink
+ flink-table-planner_${scala.binary.version}
+ ${flink.1.13.6.version}
+ ${flink.scope}
+
+
+ org.apache.flink
+ flink-streaming-java_${scala.binary.version}
+ ${flink.1.13.6.version}
+ ${flink.scope}
+
+
diff --git a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-datastream2table/pom.xml b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-datastream2table/pom.xml
index d7895f3762f..243b72ff76e 100644
--- a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-datastream2table/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-datastream2table/pom.xml
@@ -28,27 +28,4 @@
4.0.0seatunnel-transform-flink-datastream2table
-
-
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
-
- org.apache.flink
- flink-java
-
-
- org.apache.flink
- flink-table-planner_${scala.binary.version}
-
-
- org.apache.flink
- flink-streaming-java_${scala.binary.version}
-
-
-
diff --git a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-split/pom.xml b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-split/pom.xml
index 459858cea80..2fa297ec8a2 100644
--- a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-split/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-split/pom.xml
@@ -28,27 +28,4 @@
4.0.0seatunnel-transform-flink-split
-
-
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
-
- org.apache.flink
- flink-java
-
-
- org.apache.flink
- flink-table-planner_${scala.binary.version}
-
-
- org.apache.flink
- flink-streaming-java_${scala.binary.version}
-
-
-
diff --git a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-sql/pom.xml b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-sql/pom.xml
index aae7a647fab..d0e4c25a868 100644
--- a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-sql/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-sql/pom.xml
@@ -28,27 +28,4 @@
4.0.0seatunnel-transform-flink-sql
-
-
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
-
- org.apache.flink
- flink-java
-
-
- org.apache.flink
- flink-table-planner_${scala.binary.version}
-
-
- org.apache.flink
- flink-streaming-java_${scala.binary.version}
-
-
-
diff --git a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-table2datastream/pom.xml b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-table2datastream/pom.xml
index 065caa10576..96126173beb 100644
--- a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-table2datastream/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-table2datastream/pom.xml
@@ -28,27 +28,4 @@
4.0.0seatunnel-transform-flink-table2datastream
-
-
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
- org.apache.flink
- flink-java
-
-
- org.apache.flink
- flink-table-planner_${scala.binary.version}
-
-
- org.apache.flink
- flink-streaming-java_${scala.binary.version}
-
-
-
-
diff --git a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-udf/pom.xml b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-udf/pom.xml
index 612a3270764..e17ecb6c43f 100644
--- a/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-udf/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-flink/seatunnel-transform-flink-udf/pom.xml
@@ -28,26 +28,4 @@
4.0.0seatunnel-transform-flink-udf
-
-
-
- org.apache.seatunnel
- seatunnel-api-flink
- ${project.version}
- provided
-
-
-
- org.apache.flink
- flink-java
-
-
- org.apache.flink
- flink-table-planner_${scala.binary.version}
-
-
- org.apache.flink
- flink-streaming-java_${scala.binary.version}
-
-
\ No newline at end of file
diff --git a/seatunnel-transforms/seatunnel-transforms-spark/pom.xml b/seatunnel-transforms/seatunnel-transforms-spark/pom.xml
index 8367d8bdc2a..daa8cd9c04c 100644
--- a/seatunnel-transforms/seatunnel-transforms-spark/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-spark/pom.xml
@@ -39,5 +39,45 @@
seatunnel-transform-spark-nulltfseatunnel-transform-spark-null-rate
+
+
+
+
+ org.apache.seatunnel
+ seatunnel-api-spark
+ ${project.version}
+ provided
+
+
+
+ org.apache.spark
+ spark-core_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}
+
+
+
+ org.apache.spark
+ spark-sql_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}
+
+
+
+
+
+
+ org.apache.seatunnel
+ seatunnel-api-spark
+
+
+ org.apache.spark
+ spark-core_${scala.binary.version}
+
+
+ org.apache.spark
+ spark-sql_${scala.binary.version}
+
+
diff --git a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-json/pom.xml b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-json/pom.xml
index 0a8cba3140f..04e89ee982d 100644
--- a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-json/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-json/pom.xml
@@ -28,25 +28,4 @@
4.0.0seatunnel-transform-spark-json
-
-
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
-
-
diff --git a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/pom.xml b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/pom.xml
index 95179c970ee..dada4d4d3f3 100644
--- a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-null-rate/pom.xml
@@ -28,23 +28,4 @@
4.0.0seatunnel-transform-spark-null-rate
-
-
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
diff --git a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-nulltf/pom.xml b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-nulltf/pom.xml
index 6d4d7412a55..50fcd27566a 100644
--- a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-nulltf/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-nulltf/pom.xml
@@ -28,24 +28,4 @@
4.0.0seatunnel-transform-spark-nulltf
-
-
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
-
diff --git a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-replace/pom.xml b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-replace/pom.xml
index d5a2fb45419..949225c2a83 100644
--- a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-replace/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-replace/pom.xml
@@ -28,23 +28,4 @@
4.0.0seatunnel-transform-spark-replace
-
-
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
diff --git a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-split/pom.xml b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-split/pom.xml
index d9feed469b9..6ff13f4680b 100644
--- a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-split/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-split/pom.xml
@@ -28,24 +28,4 @@
4.0.0seatunnel-transform-spark-split
-
-
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
-
diff --git a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-sql/pom.xml b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-sql/pom.xml
index d00b8943a76..d7e21833507 100644
--- a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-sql/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-sql/pom.xml
@@ -28,25 +28,4 @@
4.0.0seatunnel-transform-spark-sql
-
-
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
-
-
diff --git a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-uuid/pom.xml b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-uuid/pom.xml
index 82f1b6d28c6..b8c65fe02c0 100644
--- a/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-uuid/pom.xml
+++ b/seatunnel-transforms/seatunnel-transforms-spark/seatunnel-transform-spark-uuid/pom.xml
@@ -28,23 +28,4 @@
4.0.0seatunnel-transform-spark-uuid
-
-
-
- org.apache.seatunnel
- seatunnel-api-spark
- ${project.version}
- provided
-
-
-
- org.apache.spark
- spark-core_${scala.binary.version}
-
-
-
- org.apache.spark
- spark-sql_${scala.binary.version}
-
-
diff --git a/seatunnel-translation/seatunnel-translation-flink/pom.xml b/seatunnel-translation/seatunnel-translation-flink/pom.xml
index 3319df6705a..3530afaf985 100644
--- a/seatunnel-translation/seatunnel-translation-flink/pom.xml
+++ b/seatunnel-translation/seatunnel-translation-flink/pom.xml
@@ -25,10 +25,6 @@
seatunnel-translation-flink
-
- 1.13.6
-
-
org.apache.seatunnel
@@ -39,20 +35,20 @@
org.apache.flinkflink-table-planner_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}providedorg.apache.flinkflink-table-planner-blink_${scala.binary.version}
- ${flink.version}
+ ${flink.1.13.6.version}providedorg.apache.flinkflink-java
- ${flink.version}
+ ${flink.1.13.6.version}provided
diff --git a/seatunnel-translation/seatunnel-translation-flink/src/main/java/org/apache/seatunnel/translation/flink/source/BaseSeaTunnelSourceFunction.java b/seatunnel-translation/seatunnel-translation-flink/src/main/java/org/apache/seatunnel/translation/flink/source/BaseSeaTunnelSourceFunction.java
index e91a73ed4e5..79860a90177 100644
--- a/seatunnel-translation/seatunnel-translation-flink/src/main/java/org/apache/seatunnel/translation/flink/source/BaseSeaTunnelSourceFunction.java
+++ b/seatunnel-translation/seatunnel-translation-flink/src/main/java/org/apache/seatunnel/translation/flink/source/BaseSeaTunnelSourceFunction.java
@@ -102,8 +102,10 @@ public void close() throws Exception {
public void cancel() {
running = false;
try {
- LOG.debug("Cancel the SeaTunnelSourceFunction of Flink.");
- internalSource.close();
+ if (internalSource != null) {
+ LOG.debug("Cancel the SeaTunnelSourceFunction of Flink.");
+ internalSource.close();
+ }
} catch (Exception e) {
throw new RuntimeException(e);
}
diff --git a/seatunnel-translation/seatunnel-translation-spark/pom.xml b/seatunnel-translation/seatunnel-translation-spark/pom.xml
index a6c77788144..f2fe73e2b6e 100644
--- a/seatunnel-translation/seatunnel-translation-spark/pom.xml
+++ b/seatunnel-translation/seatunnel-translation-spark/pom.xml
@@ -29,4 +29,30 @@
seatunnel-translation-spark-2.4seatunnel-translation-spark-common
+
+
+
+
+
+ org.apache.spark
+ spark-streaming_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}
+
+
+
+ org.apache.spark
+ spark-core_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}
+
+
+
+ org.apache.spark
+ spark-sql_${scala.binary.version}
+ ${spark.2.4.0.version}
+ ${spark.scope}
+
+
+
diff --git a/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-2.4/pom.xml b/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-2.4/pom.xml
index 850812a1a4c..6eb4029b503 100644
--- a/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-2.4/pom.xml
+++ b/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-2.4/pom.xml
@@ -25,12 +25,6 @@
seatunnel-translation-spark-2.4
-
- 2.4.0
- 2.11
- provided
-
-
org.apache.seatunnel
@@ -42,22 +36,16 @@
org.apache.sparkspark-streaming_${scala.binary.version}
- ${spark.version}
- ${spark.scope}org.apache.sparkspark-core_${scala.binary.version}
- ${spark.version}
- ${spark.scope}org.apache.sparkspark-sql_${scala.binary.version}
- ${spark.version}
- ${spark.scope}
diff --git a/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-common/pom.xml b/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-common/pom.xml
index c05c33d788d..3388ca15c43 100644
--- a/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-common/pom.xml
+++ b/seatunnel-translation/seatunnel-translation-spark/seatunnel-translation-spark-common/pom.xml
@@ -36,22 +36,16 @@
org.apache.sparkspark-streaming_${scala.binary.version}
- ${spark.version}
- ${spark.scope}org.apache.sparkspark-core_${scala.binary.version}
- ${spark.version}
- ${spark.scope}org.apache.sparkspark-sql_${scala.binary.version}
- ${spark.version}
- ${spark.scope}
\ No newline at end of file
diff --git a/seatunnel-ui/src/layouts/dashboard/header/menu/use-menu.ts b/seatunnel-ui/src/layouts/dashboard/header/menu/use-menu.ts
index 54f6c9c0f73..f3dae99daf2 100644
--- a/seatunnel-ui/src/layouts/dashboard/header/menu/use-menu.ts
+++ b/seatunnel-ui/src/layouts/dashboard/header/menu/use-menu.ts
@@ -30,6 +30,10 @@ export function useMenu() {
{
label: () => h(NEllipsis, null, { default: () => t('menu.jobs') }),
key: 'jobs'
+ },
+ {
+ label: () => h(NEllipsis, null, { default: () => t('menu.tasks') }),
+ key: 'tasks'
}
]
diff --git a/seatunnel-ui/src/layouts/dashboard/index.tsx b/seatunnel-ui/src/layouts/dashboard/index.tsx
index 6f0c4960ca8..77d93afb81e 100644
--- a/seatunnel-ui/src/layouts/dashboard/index.tsx
+++ b/seatunnel-ui/src/layouts/dashboard/index.tsx
@@ -28,7 +28,7 @@ const Dashboard = defineComponent({
-
+
)
diff --git a/seatunnel-ui/src/locales/en_US/index.ts b/seatunnel-ui/src/locales/en_US/index.ts
index 67b034a873b..4c71c783736 100644
--- a/seatunnel-ui/src/locales/en_US/index.ts
+++ b/seatunnel-ui/src/locales/en_US/index.ts
@@ -22,6 +22,7 @@ import user_manage from '@/locales/en_US/user-manage'
import data_pipes from '@/locales/en_US/data-pipes'
import log from '@/locales/en_US/log'
import jobs from '@/locales/en_US/jobs'
+import tasks from '@/locales/en_US/tasks'
export default {
login,
@@ -30,5 +31,6 @@ export default {
user_manage,
data_pipes,
log,
- jobs
+ jobs,
+ tasks
}
diff --git a/seatunnel-ui/src/locales/en_US/jobs.ts b/seatunnel-ui/src/locales/en_US/jobs.ts
index 50a89622e20..07cae2a730d 100644
--- a/seatunnel-ui/src/locales/en_US/jobs.ts
+++ b/seatunnel-ui/src/locales/en_US/jobs.ts
@@ -18,8 +18,7 @@
export default {
jobs: 'Jobs',
search: 'Search',
- search_data_pipes_name: 'Search Data Pipes Name',
- data_pipes_name: 'Data Pipes Name',
+ data_pipe_name: 'Data Pipe Name',
plan: 'Plan',
create_date: 'Create Date',
publish: 'Publish',
diff --git a/seatunnel-ui/src/locales/en_US/menu.ts b/seatunnel-ui/src/locales/en_US/menu.ts
index b52d9b69621..722579a4d28 100644
--- a/seatunnel-ui/src/locales/en_US/menu.ts
+++ b/seatunnel-ui/src/locales/en_US/menu.ts
@@ -21,5 +21,6 @@ export default {
manage: 'Manage',
user_manage: 'User Manage',
help: 'Help',
- logout: 'Logout'
+ logout: 'Logout',
+ tasks: 'Tasks'
}
diff --git a/seatunnel-ui/src/locales/en_US/tasks.ts b/seatunnel-ui/src/locales/en_US/tasks.ts
new file mode 100644
index 00000000000..4031ae00103
--- /dev/null
+++ b/seatunnel-ui/src/locales/en_US/tasks.ts
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export default {
+ tasks: 'Tasks',
+ search: 'Search',
+ tasks_name: 'Tasks Name',
+ success: 'Success',
+ fail: 'Fail',
+ running: 'Running',
+ task_name: 'Task Name',
+ state: 'State',
+ run_frequency: 'Run Frequency',
+ once: 'Once',
+ crontab: 'Crontab',
+ next_run: 'Next Run',
+ last_run: 'Last Run',
+ last_total_bytes: 'Last Total Bytes',
+ last_total_records: 'Last Total Records',
+ rerun: 'Rerun',
+ kill: 'Kill',
+ view_log: 'View Log'
+}
diff --git a/seatunnel-ui/src/router/routes.ts b/seatunnel-ui/src/router/routes.ts
index a6dc561c2b4..1784f7c93fe 100644
--- a/seatunnel-ui/src/router/routes.ts
+++ b/seatunnel-ui/src/router/routes.ts
@@ -18,6 +18,7 @@
import utils from '@/utils'
import dataPipes from '@/router/data-pipes'
import jobs from '@/router/jobs'
+import tasks from '@/router/tasks'
import userManage from '@/router/user-manage'
import type { RouteRecordRaw } from 'vue-router'
import type { Component } from 'vue'
@@ -25,7 +26,7 @@ import type { Component } from 'vue'
const modules = import.meta.glob('/src/views/**/**.tsx')
const components: { [key: string]: Component } = utils.mapping(modules)
-const basePage: RouteRecordRaw[] = [dataPipes, jobs, userManage]
+const basePage: RouteRecordRaw[] = [dataPipes, jobs, tasks, userManage]
const loginPage: RouteRecordRaw[] = [
{
diff --git a/seatunnel-ui/src/router/tasks.ts b/seatunnel-ui/src/router/tasks.ts
new file mode 100644
index 00000000000..8e1dd713f9d
--- /dev/null
+++ b/seatunnel-ui/src/router/tasks.ts
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import utils from '@/utils'
+import type { Component } from 'vue'
+
+const modules = import.meta.glob('/src/views/**/**.tsx')
+const components: { [key: string]: Component } = utils.mapping(modules)
+
+export default {
+ path: '/tasks',
+ name: 'tasks',
+ meta: {
+ title: 'tasks'
+ },
+ redirect: { name: 'tasks-list' },
+ component: () => import('@/layouts/dashboard'),
+ children: [
+ {
+ path: '/tasks/list',
+ name: 'tasks-list',
+ component: components['tasks-list'],
+ meta: {
+ title: 'tasks-list'
+ }
+ }
+ ]
+}
diff --git a/seatunnel-ui/src/views/jobs/list/index.tsx b/seatunnel-ui/src/views/jobs/list/index.tsx
index 354d4e06f56..88a09e7bd1b 100644
--- a/seatunnel-ui/src/views/jobs/list/index.tsx
+++ b/seatunnel-ui/src/views/jobs/list/index.tsx
@@ -48,7 +48,7 @@ const JobsList = defineComponent({
'header-extra': () => (
diff --git a/seatunnel-ui/src/views/jobs/list/use-table.ts b/seatunnel-ui/src/views/jobs/list/use-table.ts
index d844299b9ca..ff6e7817dc1 100644
--- a/seatunnel-ui/src/views/jobs/list/use-table.ts
+++ b/seatunnel-ui/src/views/jobs/list/use-table.ts
@@ -34,8 +34,8 @@ export function useTable() {
const createColumns = (state: any) => {
state.columns = [
{
- title: t('jobs.data_pipes_name'),
- key: 'data_pipes_name'
+ title: t('jobs.data_pipe_name'),
+ key: 'data_pipe_name'
},
{
title: t('jobs.plan'),
diff --git a/seatunnel-ui/src/views/tasks/list/index.tsx b/seatunnel-ui/src/views/tasks/list/index.tsx
new file mode 100644
index 00000000000..b18977bc250
--- /dev/null
+++ b/seatunnel-ui/src/views/tasks/list/index.tsx
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { defineComponent, onMounted, toRefs } from 'vue'
+import {
+ NButton,
+ NCard,
+ NDataTable,
+ NInput,
+ NPagination,
+ NSpace,
+ NSelect
+} from 'naive-ui'
+import { useI18n } from 'vue-i18n'
+import { useTable } from './use-table'
+
+const TasksList = defineComponent({
+ setup() {
+ const { t } = useI18n()
+ const { state, createColumns } = useTable()
+
+ const handleSearch = () => {}
+
+ onMounted(() => {
+ createColumns(state)
+ })
+
+ return { t, handleSearch, ...toRefs(state) }
+ },
+ render() {
+ return (
+
+
+ {{
+ 'header-extra': () => (
+
+
+
+
+ {this.t('tasks.search')}
+
+
+ )
+ }}
+
+
+
+
+
+
+
+
+
+
+ )
+ }
+})
+
+export default TasksList
diff --git a/seatunnel-ui/src/views/tasks/list/use-table.ts b/seatunnel-ui/src/views/tasks/list/use-table.ts
new file mode 100644
index 00000000000..0ea56834d16
--- /dev/null
+++ b/seatunnel-ui/src/views/tasks/list/use-table.ts
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { useI18n } from 'vue-i18n'
+import { h, reactive, ref } from 'vue'
+import { NButton, NSpace, NTag, NIcon } from 'naive-ui'
+import { UploadOutlined, DownloadOutlined } from '@vicons/antd'
+
+export function useTable() {
+ const { t } = useI18n()
+
+ const state = reactive({
+ columns: [],
+ tableData: [{ state: 'success' }, { state: 'fail' }, { state: 'running' }],
+ page: ref(1),
+ pageSize: ref(10),
+ totalPage: ref(1),
+ loading: ref(false)
+ })
+
+ const createColumns = (state: any) => {
+ state.columns = [
+ {
+ title: t('tasks.task_name'),
+ key: 'task_name'
+ },
+ {
+ title: t('tasks.state'),
+ key: 'state',
+ render: (row: any) => {
+ if (row.state === 'success') {
+ return h(NTag, { type: 'success' }, t('tasks.success'))
+ } else if (row.state === 'fail') {
+ return h(NTag, { type: 'error' }, t('tasks.fail'))
+ } else if (row.state === 'running') {
+ return h(NTag, { type: 'info' }, t('tasks.running'))
+ }
+ }
+ },
+ {
+ title: t('tasks.run_frequency'),
+ key: 'run_frequency'
+ },
+ {
+ title: t('tasks.next_run'),
+ key: 'next_run'
+ },
+ {
+ title: t('tasks.last_run'),
+ key: 'last_run'
+ },
+ {
+ title: t('tasks.last_total_bytes'),
+ key: 'last_total_bytes',
+ render: (row: any) =>
+ h(NSpace, {}, [
+ h(
+ NTag,
+ { type: 'success' },
+ { icon: h(NIcon, {}, h(UploadOutlined)), default: 12 + ' (KB)' }
+ ),
+ h(
+ NTag,
+ { type: 'error' },
+ { icon: h(NIcon, {}, h(DownloadOutlined)), default: 16 + ' (KB)' }
+ )
+ ])
+ },
+ {
+ title: t('tasks.last_total_records'),
+ key: 'last_total_records',
+ render: (row: any) =>
+ h(NSpace, {}, [
+ h(
+ NTag,
+ { type: 'success' },
+ { icon: h(NIcon, {}, h(UploadOutlined)), default: 66 }
+ ),
+ h(
+ NTag,
+ { type: 'error' },
+ { icon: h(NIcon, {}, h(DownloadOutlined)), default: 77 }
+ )
+ ])
+ },
+ {
+ title: t('tasks.operation'),
+ key: 'operation',
+ render: (row: any) =>
+ h(NSpace, null, {
+ default: () => [
+ h(NButton, { text: true }, t('tasks.rerun')),
+ h(NButton, { text: true }, t('tasks.kill')),
+ h(NButton, { text: true }, t('tasks.view_log'))
+ ]
+ })
+ }
+ ]
+ }
+
+ return { state, createColumns }
+}
diff --git a/tools/dependencies/known-dependencies.txt b/tools/dependencies/known-dependencies.txt
index dad98d576e9..af1a6847d8e 100755
--- a/tools/dependencies/known-dependencies.txt
+++ b/tools/dependencies/known-dependencies.txt
@@ -35,6 +35,7 @@ asm-all-5.0.2.jar
asm-analysis-7.1.jar
asm-commons-7.1.jar
asm-tree-7.1.jar
+aspectjweaver-1.9.7.jar
async-http-client-2.5.3.jar
async-http-client-netty-utils-2.5.3.jar
audience-annotations-0.11.0.jar
@@ -168,6 +169,7 @@ google-http-client-1.26.0.jar
google-http-client-jackson2-1.26.0.jar
google-oauth-client-1.26.0.jar
gson-2.2.4.jar
+gson-2.8.9.jar
guava-19.0.jar
guice-3.0.jar
guice-4.1.0.jar
@@ -348,7 +350,7 @@ jcodings-1.0.18.jar
jcodings-1.0.43.jar
jcommander-1.81.jar
jdbi-2.63.1.jar
-jedis-3.2.0.jar
+jedis-4.2.2.jar
jersey-client-1.19.jar
jersey-client-1.9.jar
jersey-client-2.22.2.jar
@@ -421,6 +423,7 @@ joni-2.1.27.jar
jopt-simple-5.0.2.jar
jpam-1.1.jar
jsch-0.1.54.jar
+json-20211205.jar
json-path-2.3.0.jar
json-smart-2.3.jar
jsoup-1.14.3.jar
@@ -429,6 +432,9 @@ jsr305-1.3.9.jar
jsr305-2.0.1.jar
jsr311-api-1.1.1.jar
jvm-attach-api-1.5.jar
+jjwt-api-0.10.7.jar
+jjwt-impl-0.10.7.jar
+jjwt-jackson-0.10.7.jar
kafka-clients-2.0.0.jar
kafka-clients-2.4.1.jar
kerb-admin-1.0.1.jar
@@ -655,6 +661,7 @@ spring-beans-5.3.20.jar
spring-boot-2.6.8.jar
spring-boot-autoconfigure-2.6.8.jar
spring-boot-starter-2.6.8.jar
+spring-boot-starter-aop-2.6.8.jar
spring-boot-starter-jdbc-2.6.3.jar
spring-boot-starter-jetty-2.6.8.jar
spring-boot-starter-json-2.6.8.jar