
Commit 56df6ec

Author: dapeng
2 parents: b5045b5 + 499fd88

Merge branch '1.10_release_4.0.x' into 1.8_release_4.0.x_mergeRelease

# Conflicts:
#	redis5/redis5-side/redis-async-side/src/main/java/com/dtstack/flink/sql/side/redis/RedisAsyncReqRow.java

File tree: 197 files changed (+2926 / -1997 lines)


.gitignore

Lines changed: 1 addition & 1 deletion
@@ -14,4 +14,4 @@ lib/
 .DS_Store
 bin/nohup.out
 .DS_Store
-bin/sideSql.txt
+bin/sideSql.txt

cassandra/cassandra-side/cassandra-all-side/pom.xml

Lines changed: 3 additions & 3 deletions
@@ -70,14 +70,14 @@
             </goals>
             <configuration>
                 <tasks>
-                    <copy todir="${basedir}/../../../plugins/cassandraallside">
+                    <copy todir="${basedir}/../../../sqlplugins/cassandraallside">
                         <fileset dir="target/">
                             <include name="${project.artifactId}-${project.version}.jar" />
                         </fileset>
                     </copy>

-                    <move file="${basedir}/../../../plugins/cassandraallside/${project.artifactId}-${project.version}.jar"
-                          tofile="${basedir}/../../../plugins/cassandraallside/${project.name}-${git.branch}.jar" />
+                    <move file="${basedir}/../../../sqlplugins/cassandraallside/${project.artifactId}-${project.version}.jar"
+                          tofile="${basedir}/../../../sqlplugins/cassandraallside/${project.name}-${git.branch}.jar" />
                 </tasks>
             </configuration>
         </execution>

cassandra/cassandra-side/cassandra-all-side/src/main/java/com/dtstack/flink/sql/side/cassandra/CassandraAllReqRow.java

Lines changed: 21 additions & 33 deletions
@@ -18,39 +18,33 @@
 
 package com.dtstack.flink.sql.side.cassandra;
 
-import org.apache.flink.api.java.typeutils.RowTypeInfo;
-import org.apache.flink.table.runtime.types.CRow;
-import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo;
-import org.apache.flink.types.Row;
-import org.apache.flink.util.Collector;
-
-import com.datastax.driver.core.Cluster;
-import com.datastax.driver.core.ConsistencyLevel;
-import com.datastax.driver.core.HostDistance;
-import com.datastax.driver.core.PoolingOptions;
-import com.datastax.driver.core.QueryOptions;
-import com.datastax.driver.core.ResultSet;
-import com.datastax.driver.core.Session;
-import com.datastax.driver.core.SocketOptions;
+import com.datastax.driver.core.*;
 import com.datastax.driver.core.policies.DowngradingConsistencyRetryPolicy;
 import com.datastax.driver.core.policies.RetryPolicy;
+import com.dtstack.flink.sql.side.AbstractSideTableInfo;
 import com.dtstack.flink.sql.side.BaseAllReqRow;
 import com.dtstack.flink.sql.side.FieldInfo;
 import com.dtstack.flink.sql.side.JoinInfo;
-import com.dtstack.flink.sql.side.AbstractSideTableInfo;
 import com.dtstack.flink.sql.side.cassandra.table.CassandraSideTableInfo;
+import com.dtstack.flink.sql.util.RowDataComplete;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import org.apache.calcite.sql.JoinType;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.table.dataformat.BaseRow;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.net.InetAddress;
 import java.sql.SQLException;
-import java.sql.Timestamp;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.atomic.AtomicReference;
 
 /**
@@ -84,14 +78,7 @@ public Row fillData(Row input, Object sideInput) {
         Row row = new Row(sideInfo.getOutFieldInfoList().size());
         for (Map.Entry<Integer, Integer> entry : sideInfo.getInFieldIndex().entrySet()) {
             Object obj = input.getField(entry.getValue());
-            boolean isTimeIndicatorTypeInfo = TimeIndicatorTypeInfo.class.isAssignableFrom(sideInfo.getRowTypeInfo().getTypeAt(entry.getValue()).getClass());
-
-            //Type information for indicating event or processing time. However, it behaves like a regular SQL timestamp but is serialized as Long.
-            if (obj instanceof Timestamp && isTimeIndicatorTypeInfo) {
-                //undo the timezone shift introduced by the upstream OutputRowtimeProcessFunction call
-                obj = ((Timestamp) obj).getTime() + (long) LOCAL_TZ.getOffset(((Timestamp) obj).getTime());
-            }
-
+            obj = convertTimeIndictorTypeInfo(entry.getValue(), obj);
             row.setField(entry.getKey(), obj);
         }
@@ -129,14 +116,14 @@ protected void reloadCache() {
 
 
     @Override
-    public void flatMap(CRow input, Collector<CRow> out) throws Exception {
+    public void flatMap(Row input, Collector<BaseRow> out) throws Exception {
         List<Object> inputParams = Lists.newArrayList();
         for (Integer conValIndex : sideInfo.getEqualValIndex()) {
-            Object equalObj = input.row().getField(conValIndex);
+            Object equalObj = input.getField(conValIndex);
             if (equalObj == null) {
-                if(sideInfo.getJoinType() == JoinType.LEFT){
-                    Row data = fillData(input.row(), null);
-                    out.collect(new CRow(data, input.change()));
+                if (sideInfo.getJoinType() == JoinType.LEFT) {
+                    Row row = fillData(input, null);
+                    RowDataComplete.collectRow(out, row);
                 }
                 return;
             }
@@ -148,8 +135,8 @@ public void flatMap(CRow input, Collector<CRow> out) throws Exception {
         List<Map<String, Object>> cacheList = cacheRef.get().get(key);
         if (CollectionUtils.isEmpty(cacheList)) {
             if (sideInfo.getJoinType() == JoinType.LEFT) {
-                Row row = fillData(input.row(), null);
-                out.collect(new CRow(row, input.change()));
+                Row row = fillData(input, null);
+                RowDataComplete.collectRow(out, row);
             } else {
                 return;
             }
@@ -158,7 +145,8 @@ public void flatMap(CRow input, Collector<CRow> out) throws Exception {
         }
 
         for (Map<String, Object> one : cacheList) {
-            out.collect(new CRow(fillData(input.row(), one), input.change()));
+            Row row = fillData(input, one);
+            RowDataComplete.collectRow(out, row);
         }
 
     }

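The block removed from fillData (the TimeIndicatorTypeInfo check plus the LOCAL_TZ offset correction) is replaced by a single call to convertTimeIndictorTypeInfo, which the side-join base class is expected to provide. As a rough sketch only, and an assumption rather than the project's actual base-class source, the shared helper presumably centralizes the same logic along these lines:

// Hypothetical sketch: a shared convertTimeIndictorTypeInfo helper that keeps the
// removed Timestamp/TimeIndicatorTypeInfo handling in one place.
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo;

import java.sql.Timestamp;
import java.util.TimeZone;

public abstract class TimeIndicatorConversionSketch {

    private static final TimeZone LOCAL_TZ = TimeZone.getDefault();

    // Type info of the joined input row; assumed to be available on the side info.
    protected RowTypeInfo rowTypeInfo;

    protected Object convertTimeIndictorTypeInfo(int fieldIndex, Object obj) {
        boolean isTimeIndicator = TimeIndicatorTypeInfo.class
                .isAssignableFrom(rowTypeInfo.getTypeAt(fieldIndex).getClass());
        if (obj instanceof Timestamp && isTimeIndicator) {
            // Undo the timezone shift applied upstream by OutputRowtimeProcessFunction,
            // mirroring the logic deleted from fillData in this commit.
            long ts = ((Timestamp) obj).getTime();
            return ts + (long) LOCAL_TZ.getOffset(ts);
        }
        return obj;
    }
}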
cassandra/cassandra-side/cassandra-async-side/pom.xml

Lines changed: 3 additions & 3 deletions
@@ -86,14 +86,14 @@
             </goals>
             <configuration>
                 <tasks>
-                    <copy todir="${basedir}/../../../plugins/cassandraasyncside">
+                    <copy todir="${basedir}/../../../sqlplugins/cassandraasyncside">
                         <fileset dir="target/">
                             <include name="${project.artifactId}-${project.version}.jar" />
                         </fileset>
                     </copy>

-                    <move file="${basedir}/../../../plugins/cassandraasyncside/${project.artifactId}-${project.version}.jar"
-                          tofile="${basedir}/../../../plugins/cassandraasyncside/${project.name}-${git.branch}.jar" />
+                    <move file="${basedir}/../../../sqlplugins/cassandraasyncside/${project.artifactId}-${project.version}.jar"
+                          tofile="${basedir}/../../../sqlplugins/cassandraasyncside/${project.name}-${git.branch}.jar" />
                 </tasks>
             </configuration>
         </execution>

cassandra/cassandra-side/cassandra-async-side/src/main/java/com/dtstack/flink/sql/side/cassandra/CassandraAsyncReqRow.java

Lines changed: 16 additions & 38 deletions
@@ -19,48 +19,34 @@
 
 package com.dtstack.flink.sql.side.cassandra;
 
-import org.apache.flink.api.java.typeutils.RowTypeInfo;
-import org.apache.flink.configuration.Configuration;
-import org.apache.flink.streaming.api.functions.async.ResultFuture;
-import org.apache.flink.table.runtime.types.CRow;
-import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo;
-import org.apache.flink.types.Row;
-
-import com.datastax.driver.core.Cluster;
-import com.datastax.driver.core.ConsistencyLevel;
-import com.datastax.driver.core.HostDistance;
-import com.datastax.driver.core.PoolingOptions;
-import com.datastax.driver.core.QueryOptions;
-import com.datastax.driver.core.ResultSet;
-import com.datastax.driver.core.Session;
-import com.datastax.driver.core.SocketOptions;
+import com.datastax.driver.core.*;
 import com.datastax.driver.core.policies.DowngradingConsistencyRetryPolicy;
 import com.datastax.driver.core.policies.RetryPolicy;
 import com.dtstack.flink.sql.enums.ECacheContentType;
-import com.dtstack.flink.sql.side.BaseAsyncReqRow;
-import com.dtstack.flink.sql.side.CacheMissVal;
-import com.dtstack.flink.sql.side.FieldInfo;
-import com.dtstack.flink.sql.side.JoinInfo;
-import com.dtstack.flink.sql.side.AbstractSideTableInfo;
+import com.dtstack.flink.sql.side.*;
 import com.dtstack.flink.sql.side.cache.CacheObj;
 import com.dtstack.flink.sql.side.cassandra.table.CassandraSideTableInfo;
+import com.dtstack.flink.sql.util.RowDataComplete;
 import com.google.common.base.Function;
 import com.google.common.collect.Lists;
 import com.google.common.util.concurrent.AsyncFunction;
 import com.google.common.util.concurrent.FutureCallback;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
-import io.vertx.core.json.JsonArray;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.api.java.typeutils.RowTypeInfo;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.streaming.api.functions.async.ResultFuture;
+import org.apache.flink.table.dataformat.BaseRow;
+import org.apache.flink.types.Row;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.net.InetAddress;
-import java.sql.Timestamp;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-import java.util.TimeZone;
 
 /**
  * Reason:
@@ -74,8 +60,6 @@ public class CassandraAsyncReqRow extends BaseAsyncReqRow {
 
     private static final Logger LOG = LoggerFactory.getLogger(CassandraAsyncReqRow.class);
 
-    private static final TimeZone LOCAL_TZ = TimeZone.getDefault();
-
     private final static int DEFAULT_VERTX_EVENT_LOOP_POOL_SIZE = 10;
 
     private final static int DEFAULT_VERTX_WORKER_POOL_SIZE = 20;
@@ -165,7 +149,7 @@ private void connCassandraDB(CassandraSideTableInfo tableInfo) {
     }
 
     @Override
-    public void handleAsyncInvoke(Map<String, Object> inputParams, CRow input, ResultFuture<CRow> resultFuture) throws Exception {
+    public void handleAsyncInvoke(Map<String, Object> inputParams, Row input, ResultFuture<BaseRow> resultFuture) throws Exception {
 
         String key = buildCacheKey(inputParams);
         //connect Cassandra
@@ -196,15 +180,15 @@ public void onSuccess(List<com.datastax.driver.core.Row> rows) {
                 cluster.closeAsync();
                 if (rows.size() > 0) {
                     List<com.datastax.driver.core.Row> cacheContent = Lists.newArrayList();
-                    List<CRow> rowList = Lists.newArrayList();
+                    List<Row> rowList = Lists.newArrayList();
                     for (com.datastax.driver.core.Row line : rows) {
-                        Row row = fillData(input.row(), line);
+                        Row row = fillData(input, line);
                         if (openCache()) {
                            cacheContent.add(line);
                        }
-                        rowList.add(new CRow(row, input.change()));
+                        rowList.add(row);
                    }
-                    resultFuture.complete(rowList);
+                    RowDataComplete.completeRow(resultFuture, rowList);
                    if (openCache()) {
                        putCache(key, CacheObj.buildCacheObj(ECacheContentType.MultiLine, cacheContent));
                    }
@@ -213,7 +197,7 @@ public void onSuccess(List<com.datastax.driver.core.Row> rows) {
                    if (openCache()) {
                        putCache(key, CacheMissVal.getMissKeyObj());
                    }
-                    resultFuture.complete(null);
+                    resultFuture.complete(Collections.EMPTY_LIST);
                }
            }
 
@@ -251,13 +235,7 @@ public Row fillData(Row input, Object line) {
         Row row = new Row(sideInfo.getOutFieldInfoList().size());
         for (Map.Entry<Integer, Integer> entry : sideInfo.getInFieldIndex().entrySet()) {
             Object obj = input.getField(entry.getValue());
-            boolean isTimeIndicatorTypeInfo = TimeIndicatorTypeInfo.class.isAssignableFrom(sideInfo.getRowTypeInfo().getTypeAt(entry.getValue()).getClass());
-
-            if (obj instanceof Timestamp && isTimeIndicatorTypeInfo) {
-                //undo the timezone shift introduced by the upstream OutputRowtimeProcessFunction call
-                obj = ((Timestamp) obj).getTime() + (long) LOCAL_TZ.getOffset(((Timestamp) obj).getTime());
-            }
-
+            obj = convertTimeIndictorTypeInfo(entry.getValue(), obj);
             row.setField(entry.getKey(), obj);
         }
 

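The async path now hands plain Row results to RowDataComplete instead of wrapping them in CRow, and the empty-result branch completes with Collections.EMPTY_LIST rather than null. RowDataComplete's source is not part of this diff; a minimal sketch of what such a bridge could look like, assuming it copies each Row into the blink planner's GenericRow before completing the ResultFuture or the flatMap Collector:

// Hypothetical sketch only (not the project's actual RowDataComplete implementation).
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.table.dataformat.BaseRow;
import org.apache.flink.table.dataformat.GenericRow;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;

import java.util.ArrayList;
import java.util.List;

public final class RowDataCompleteSketch {

    private RowDataCompleteSketch() {
    }

    // Naive field-by-field copy; the real utility presumably also maps external types
    // (String, Timestamp, BigDecimal, ...) to their internal BaseRow formats.
    private static BaseRow toBaseRow(Row row) {
        GenericRow baseRow = new GenericRow(row.getArity());
        for (int i = 0; i < row.getArity(); i++) {
            baseRow.setField(i, row.getField(i));
        }
        return baseRow;
    }

    public static void collectRow(Collector<BaseRow> out, Row row) {
        out.collect(toBaseRow(row));
    }

    public static void completeRow(ResultFuture<BaseRow> resultFuture, List<Row> rows) {
        List<BaseRow> converted = new ArrayList<>(rows.size());
        for (Row row : rows) {
            converted.add(toBaseRow(row));
        }
        resultFuture.complete(converted);
    }
}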
cassandra/cassandra-side/cassandra-async-side/src/main/java/com/dtstack/flink/sql/side/cassandra/CassandraAsyncSideInfo.java

Lines changed: 4 additions & 4 deletions
@@ -91,8 +91,8 @@ public void dealOneEqualCon(SqlNode sqlNode, String sideTableName) {
         if (leftTableName.equalsIgnoreCase(sideTableName)) {
             equalFieldList.add(leftField);
             int equalFieldIndex = -1;
-            for (int i = 0; i < rowTypeInfo.getFieldNames().length; i++) {
-                String fieldName = rowTypeInfo.getFieldNames()[i];
+            for (int i = 0; i < getFieldNames().length; i++) {
+                String fieldName = getFieldNames()[i];
                 if (fieldName.equalsIgnoreCase(rightField)) {
                     equalFieldIndex = i;
                 }
@@ -107,8 +107,8 @@ public void dealOneEqualCon(SqlNode sqlNode, String sideTableName) {
 
             equalFieldList.add(rightField);
             int equalFieldIndex = -1;
-            for (int i = 0; i < rowTypeInfo.getFieldNames().length; i++) {
-                String fieldName = rowTypeInfo.getFieldNames()[i];
+            for (int i = 0; i < getFieldNames().length; i++) {
+                String fieldName = getFieldNames()[i];
                 if (fieldName.equalsIgnoreCase(leftField)) {
                     equalFieldIndex = i;
                 }

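Both loops in dealOneEqualCon now resolve the join-equality column's index through getFieldNames() on the side info itself instead of through rowTypeInfo. The lookup amounts to a case-insensitive index scan; an illustrative stand-alone equivalent (hypothetical helper, not part of the commit) is:

// Illustrative only: the case-insensitive index scan the rewritten loops perform.
public final class FieldIndexSketch {

    private FieldIndexSketch() {
    }

    public static int indexOfIgnoreCase(String[] fieldNames, String target) {
        for (int i = 0; i < fieldNames.length; i++) {
            if (fieldNames[i].equalsIgnoreCase(target)) {
                return i;
            }
        }
        return -1; // same "-1 means not found" sentinel as equalFieldIndex in the diff
    }

    public static void main(String[] args) {
        String[] sideFields = {"id", "name", "PROCTIME"};
        System.out.println(indexOfIgnoreCase(sideFields, "proctime")); // prints 2
    }
}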
cassandra/cassandra-sink/pom.xml

Lines changed: 3 additions & 3 deletions
@@ -64,14 +64,14 @@
             </goals>
             <configuration>
                 <tasks>
-                    <copy todir="${basedir}/../../plugins/cassandrasink">
+                    <copy todir="${basedir}/../../sqlplugins/cassandrasink">
                         <fileset dir="target/">
                             <include name="${project.artifactId}-${project.version}.jar" />
                         </fileset>
                     </copy>

-                    <move file="${basedir}/../../plugins/cassandrasink/${project.artifactId}-${project.version}.jar"
-                          tofile="${basedir}/../../plugins/cassandrasink/${project.name}-${git.branch}.jar" />
+                    <move file="${basedir}/../../sqlplugins/cassandrasink/${project.artifactId}-${project.version}.jar"
+                          tofile="${basedir}/../../sqlplugins/cassandrasink/${project.name}-${git.branch}.jar" />
                 </tasks>
             </configuration>
         </execution>

cassandra/cassandra-sink/src/main/java/com/dtstack/flink/sql/sink/cassandra/CassandraSink.java

Lines changed: 8 additions & 1 deletion
@@ -28,6 +28,7 @@
 import org.apache.flink.api.java.typeutils.RowTypeInfo;
 import org.apache.flink.api.java.typeutils.TupleTypeInfo;
 import org.apache.flink.streaming.api.datastream.DataStream;
+import org.apache.flink.streaming.api.datastream.DataStreamSink;
 import org.apache.flink.streaming.api.functions.sink.OutputFormatSinkFunction;
 import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
 import org.apache.flink.table.sinks.RetractStreamTableSink;
@@ -82,6 +83,11 @@ public CassandraSink genStreamSink(AbstractTargetTableInfo targetTableInfo) {
 
     @Override
     public void emitDataStream(DataStream<Tuple2<Boolean, Row>> dataStream) {
+        consumeDataStream(dataStream);
+    }
+
+    @Override
+    public DataStreamSink<Tuple2<Boolean, Row>> consumeDataStream(DataStream<Tuple2<Boolean, Row>> dataStream) {
         CassandraOutputFormat.CassandraFormatBuilder builder = CassandraOutputFormat.buildOutputFormat();
         builder.setAddress(this.address)
                 .setDatabase(this.database)
@@ -100,7 +106,8 @@ public void emitDataStream(DataStream<Tuple2<Boolean, Row>> dataStream) {
 
         CassandraOutputFormat outputFormat = builder.finish();
         RichSinkFunction richSinkFunction = new OutputFormatSinkFunction(outputFormat);
-        dataStream.addSink(richSinkFunction);
+        DataStreamSink dataStreamSink = dataStream.addSink(richSinkFunction);
+        return dataStreamSink;
     }
 
     @Override

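CassandraSink now implements consumeDataStream, the newer table-sink entry point that must return the DataStreamSink, while the deprecated emitDataStream simply delegates to it so both code paths stay in sync. A minimal sketch of the same bridge pattern on a generic RetractStreamTableSink, assuming a Flink version where both methods are still declared on the interface and using a PrintSinkFunction as a stand-in for the Cassandra OutputFormatSinkFunction:

// Minimal sketch of the emitDataStream -> consumeDataStream bridge shown in the diff.
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.functions.sink.PrintSinkFunction;
import org.apache.flink.table.sinks.RetractStreamTableSink;
import org.apache.flink.types.Row;

public abstract class RetractSinkBridgeSketch implements RetractStreamTableSink<Row> {

    @Override
    public void emitDataStream(DataStream<Tuple2<Boolean, Row>> dataStream) {
        // Legacy entry point: forward to the new API so both planners share one code path.
        consumeDataStream(dataStream);
    }

    @Override
    public DataStreamSink<Tuple2<Boolean, Row>> consumeDataStream(DataStream<Tuple2<Boolean, Row>> dataStream) {
        // Returning the DataStreamSink lets the planner keep configuring the attached sink
        // (name, parallelism, resources) after it has been added.
        return dataStream.addSink(new PrintSinkFunction<>());
    }
}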
clickhouse/clickhouse-side/clickhouse-all-side/pom.xml

Lines changed: 3 additions & 3 deletions
@@ -72,14 +72,14 @@
             </goals>
             <configuration>
                 <tasks>
-                    <copy todir="${basedir}/../../../plugins/clickhouseallside">
+                    <copy todir="${basedir}/../../../sqlplugins/clickhouseallside">
                         <fileset dir="target/">
                             <include name="${project.artifactId}-${project.version}.jar"/>
                         </fileset>
                     </copy>

-                    <move file="${basedir}/../../../plugins/clickhouseallside/${project.artifactId}-${project.version}.jar"
-                          tofile="${basedir}/../../../plugins/clickhouseallside/${project.name}-${git.branch}.jar"/>
+                    <move file="${basedir}/../../../sqlplugins/clickhouseallside/${project.artifactId}-${project.version}.jar"
+                          tofile="${basedir}/../../../sqlplugins/clickhouseallside/${project.name}-${git.branch}.jar"/>
                 </tasks>
             </configuration>
         </execution>

clickhouse/clickhouse-side/clickhouse-async-side/pom.xml

Lines changed: 3 additions & 3 deletions
@@ -72,14 +72,14 @@
             </goals>
             <configuration>
                 <tasks>
-                    <copy todir="${basedir}/../../../plugins/clickhouseasyncside">
+                    <copy todir="${basedir}/../../../sqlplugins/clickhouseasyncside">
                         <fileset dir="target/">
                             <include name="${project.artifactId}-${project.version}.jar"/>
                         </fileset>
                     </copy>

-                    <move file="${basedir}/../../../plugins/clickhouseasyncside/${project.artifactId}-${project.version}.jar"
-                          tofile="${basedir}/../../../plugins/clickhouseasyncside/${project.name}-${git.branch}.jar"/>
+                    <move file="${basedir}/../../../sqlplugins/clickhouseasyncside/${project.artifactId}-${project.version}.jar"
+                          tofile="${basedir}/../../../sqlplugins/clickhouseasyncside/${project.name}-${git.branch}.jar"/>
                 </tasks>
             </configuration>
         </execution>
