Skip to content

Commit

Permalink
Merge branch '1.10_release' into git_1.10_release
Browse files Browse the repository at this point in the history
# Conflicts:
#	docs/quickstart.md
#	flinkx-core/src/main/java/com/dtstack/flinkx/util/RangeSplitUtil.java
#	flinkx-kafka/flinkx-kafka-writer/src/main/java/com/dtstack/flinkx/kafka/writer/KafkaWriter.java
#	flinkx-kafka10/flinkx-kafka10-writer/src/main/java/com/dtstack/flinkx/kafka10/writer/Kafka10Writer.java
#	flinkx-kafka11/flinkx-kafka11-writer/src/main/java/com/dtstack/flinkx/kafka11/writer/Kafka11Writer.java
#	flinkx-launcher/src/main/java/com/dtstack/flinkx/launcher/perJob/PerJobClusterClientBuilder.java
#	flinkx-launcher/src/main/resources/log4j.properties
#	flinkx-postgresql/flinkx-postgresql-core/src/main/java/com/dtstack/flinkx/postgresql/PostgresqlTypeConverter.java
#	flinkx-rdb/flinkx-rdb-core/src/main/java/com/dtstack/flinkx/rdb/util/DbUtil.java
#	flinkx-rdb/flinkx-rdb-writer/src/main/java/com/dtstack/flinkx/rdb/outputformat/JdbcOutputFormat.java
#	flinkx-test/src/main/java/com/dtstack/flinkx/test/LocalTest.java
#	pom.xml
  • Loading branch information
yanghuaiGit committed Jan 12, 2021
2 parents 7d140cd + b1ad219 commit fe458a9
Show file tree
Hide file tree
Showing 262 changed files with 7,376 additions and 1,978 deletions.
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,5 @@ nohup.out
flinkconf/
hadoopconf/
/default_task_id_output
/syncplugins
/syncplugins
/ci/
8 changes: 5 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -66,12 +66,10 @@ The following databases are currently supported:
| | Hive | | [doc](docs/offline/writer/hivewriter.md) |
| Stream Synchronization | Kafka | [doc](docs/realTime/reader/kafkareader.md) | [doc](docs/realTime/writer/kafkawriter.md) |
| | EMQX | [doc](docs/realTime/reader/emqxreader.md) | [doc](docs/realTime/writer/emqxwriter.md) |
| | RestApi | [doc](docs/realTime/reader/restapireader.md) | [doc](docs/realTime/writer/restapiwriter.md) |
| | RestApi | | [doc](docs/realTime/writer/restapiwriter.md) |
| | MySQL Binlog | [doc](docs/realTime/reader/binlogreader.md) | |
| | MongoDB Oplog | [doc](docs/realTime/reader/mongodboplogreader.md)| |
| | PostgreSQL WAL | [doc](docs/realTime/reader/pgwalreader.md) | |
| | Oracle Logminer| Coming Soon | |
| | SqlServer CDC | Coming Soon | |

# Quick Start

Expand All @@ -89,6 +87,10 @@ Please click [Statistics Metric](docs/statistics.md)

Please click [Kerberos](docs/kerberos.md)

# Questions

Please click [Questions](docs/questions.md)

# How to contribute FlinkX

Please click [Contribution](docs/contribution.md)
Expand Down
8 changes: 5 additions & 3 deletions README_CH.md
Original file line number Diff line number Diff line change
Expand Up @@ -77,12 +77,10 @@ FlinkX目前支持下面这些数据库:
| | Hive | | [doc](docs/offline/writer/hivewriter.md) |
| Stream Synchronization | Kafka | [doc](docs/realTime/reader/kafkareader.md) | [doc](docs/realTime/writer/kafkawriter.md) |
| | EMQX | [doc](docs/realTime/reader/emqxreader.md) | [doc](docs/realTime/writer/emqxwriter.md) |
| | RestApi | [doc](docs/realTime/reader/restapireader.md) | [doc](docs/realTime/writer/restapiwriter.md) |
| | RestApi | | [doc](docs/realTime/writer/restapiwriter.md) |
| | MySQL Binlog | [doc](docs/realTime/reader/binlogreader.md) | |
| | MongoDB Oplog | [doc](docs/realTime/reader/mongodboplogreader.md)| |
| | PostgreSQL WAL | [doc](docs/realTime/reader/pgwalreader.md) | |
| | Oracle Logminer| Coming Soon | |
| | SqlServer CDC | Coming Soon | |

# 快速开始

Expand All @@ -100,6 +98,10 @@ FlinkX目前支持下面这些数据库:

请点击[Kerberos](docs/kerberos.md)

# Questions

请点击[Questions](docs/questions.md)

# 如何贡献FlinkX

请点击[如何贡献FlinkX](docs/contribution.md)
Expand Down
3 changes: 3 additions & 0 deletions bin/install_jars.bat
Original file line number Diff line number Diff line change
Expand Up @@ -6,3 +6,6 @@ call mvn install:install-file -DgroupId=com.esen.jdbc -DartifactId=gbase -Dversi

call mvn install:install-file -DgroupId=dm.jdbc.driver -DartifactId=dm7 -Dversion=18.0.0 -Dpackaging=jar -Dfile=../jars/Dm7JdbcDriver18.jar

call mvn install:install-file -DgroupId=com.kingbase8 -DartifactId=kingbase8 -Dversion=8.2.0 -Dpackaging=jar -Dfile=../jars/kingbase8-8.2.0.jar

call mvn install:install-file -DgroupId=fakepath -DartifactId=vertica-jdbc -Dversion=9.1.1-0 -Dpackaging=jar -Dfile=../jars/vertica-jdbc-9.1.1-0.jar
8 changes: 7 additions & 1 deletion bin/install_jars.sh
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,10 @@ mvn install:install-file -DgroupId=com.github.noraui -DartifactId=ojdbc8 -Dversi
mvn install:install-file -DgroupId=com.esen.jdbc -DartifactId=gbase -Dversion=8.3.81.53 -Dpackaging=jar -Dfile=../jars/gbase-8.3.81.53.jar

## dm driver
mvn install:install-file -DgroupId=dm.jdbc.driver -DartifactId=dm7 -Dversion=18.0.0 -Dpackaging=jar -Dfile=../jars/Dm7JdbcDriver18.jar
mvn install:install-file -DgroupId=dm.jdbc.driver -DartifactId=dm7 -Dversion=18.0.0 -Dpackaging=jar -Dfile=../jars/Dm7JdbcDriver18.jar

## kingbase driver
mvn install:install-file -DgroupId=com.kingbase8 -DartifactId=kingbase8 -Dversion=8.2.0 -Dpackaging=jar -Dfile=../jars/kingbase8-8.2.0.jar

## vertica driver
mvn install:install-file -DgroupId=fakepath -DartifactId=vertica-jdbc -Dversion=9.1.1-0 -Dpackaging=jar -Dfile=../jars/vertica-jdbc-9.1.1-0.jar
36 changes: 36 additions & 0 deletions docs/example/kafka09_stream.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
{
"job": {
"content": [
{
"reader": {
"parameter": {
"topic": "kafka09",
"groupId": "default",
"codec": "text",
"encoding": "UTF-8",
"blankIgnore": false,
"consumerSettings": {
"zookeeper.connect": "localhost:2181/kafka09"
}
},
"name": "kafka09reader"
},
"writer": {
"parameter": {
"print": true
},
"name": "streamwriter"
}
}
],
"setting": {
"restore": {
"isRestore": false,
"isStream": true
},
"speed": {
"channel": 1
}
}
}
}
42 changes: 42 additions & 0 deletions docs/example/kafka10_stream.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
{
"job": {
"content": [
{
"reader": {
"parameter": {
"topic": "kafka10",
"groupId": "default",
"codec": "text",
"encoding": "UTF-8",
"blankIgnore": false,
"consumerSettings": {
"bootstrap.servers": "localhost:9092"
}
},
"name": "kafka10reader"
},
"writer": {
"parameter": {
"print": true
},
"name": "streamwriter"
}
}
],
"setting": {
"restore": {
"isRestore": false,
"isStream": true
},
"speed": {
"channel": 1
}
}
}
}
36 changes: 36 additions & 0 deletions docs/example/kafka11_stream.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
{
"job": {
"content": [
{
"reader": {
"parameter": {
"topic": "kafka11",
"groupId": "default",
"codec": "text",
"encoding": "UTF-8",
"blankIgnore": false,
"consumerSettings": {
"bootstrap.servers": "localhost:9092"
}
},
"name": "kafka11reader"
},
"writer": {
"parameter": {
"print": true
},
"name": "streamwriter"
}
}
],
"setting": {
"restore": {
"isRestore": false,
"isStream": true
},
"speed": {
"channel": 1
}
}
}
}
52 changes: 52 additions & 0 deletions docs/example/kafka_hive.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
{
"job": {
"content": [
{
"reader" : {
"parameter" : {
"topic" : "test",
"mode": "timestamp",
"timestamp": 1609812275000,
"codec": "text",
"consumerSettings" : {
"bootstrap.servers" : "ip1:9092,ip2:9092,ip3:9092"
}
},
"name" : "kafkareader"
},
"writer": {
"parameter" : {
"jdbcUrl" : "jdbc:hive2://ip:10000/test",
"fileType" : "parquet",
"writeMode" : "overwrite",
"compress" : "",
"charsetName" : "UTF-8",
"maxFileSize" : 1073741824,
"tablesColumn" : "{\"message\":[{\"part\":false,\"comment\":\"\",\"type\":\"string\",\"key\":\"message\"}]}",
"partition" : "pt",
"partitionType" : "DAY",
"defaultFS" : "hdfs://ns",
"hadoopConfig": {
"dfs.ha.namenodes.ns": "nn1,nn2",
"dfs.namenode.rpc-address.ns.nn2": "ip1:9000",
"dfs.client.failover.proxy.provider.ns": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider",
"dfs.namenode.rpc-address.ns.nn1": "ip2:9000",
"dfs.nameservices": "ns"
}
},
"name" : "hivewriter"
}
}
],
"setting": {
"restore": {
"isRestore": true,
"isStream": true
},
"speed": {
"readerChannel": 3,
"writerChannel": 1
}
}
}
}
65 changes: 65 additions & 0 deletions docs/example/kafka_mysql.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
{
"job": {
"content": [
{
"reader": {
"parameter": {
"topic": "tudou",
"mode": "timestamp",
"timestamp": 1609812275000,
"offset": "partition:0,offset:0;partition:1,offset:1;partition:2,offset:2",
"codec": "text",
"blankIgnore": false,
"consumerSettings": {
"bootstrap.servers": "ip1:9092,ip2:9092,ip3:9092"
},
"column": ["id","user_id","name"]
},
"name": "kafkareader"
},
"writer": {
"name": "mysqlwriter",
"parameter": {
"username": "root",
"password": "abc123",
"connection": [
{
"jdbcUrl": "jdbc:mysql://localhost:3306/test",
"table": [
"test"
]
}
],
"preSql": ["truncate table test;"],
"postSql": [],
"writeMode": "insert",
"column": [
{
"name": "id",
"type": "BIGINT"
},
{
"name": "user_id",
"type": "BIGINT"
},
{
"name": "name",
"type": "varchar"
}
]
}
}
}
],
"setting": {
"restore": {
"isRestore": true,
"isStream": true
},
"speed": {
"readerChannel": 3,
"writerChannel": 1
}
}
}
}
34 changes: 34 additions & 0 deletions docs/example/kafka_stream.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
{
"job": {
"content": [
{
"reader": {
"parameter": {
"topic": "test",
"mode": "timestamp",
"timestamp": 1609812275000,
"offset": "partition:0,offset:0;partition:1,offset:1;partition:2,offset:2",
"codec": "text",
"blankIgnore": false,
"consumerSettings": {
"bootstrap.servers": "ip1:9092,ip2:9092,ip3:9092"
}
},
"name": "kafkareader"
},
"writer": {
"parameter": {
"print": true
},
"name": "streamwriter"
}
}
],
"setting": {
"speed": {
"readerChannel": 3,
"writerChannel": 1
}
}
}
}
30 changes: 30 additions & 0 deletions docs/example/stream_stream.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
{
"job" : {
"content" : [ {
"reader" : {
"parameter" : {
"column" : [ {
"name": "id",
"type" : "id"
}, {
"name": "string",
"type" : "string"
} ],
"sliceRecordCount" : [ "10"]
},
"name" : "streamreader"
},
"writer" : {
"parameter" : {
"print" : true
},
"name" : "streamwriter"
}
} ],
"setting" : {
"speed" : {
"channel" : 1
}
}
}
}
Loading

0 comments on commit fe458a9

Please sign in to comment.