
Commit b331085

Author: dapeng
Merge branch '1.8_zy_3.10.x' into 1.8_release_3.10.x_mergedTest_new

# Conflicts:
#	hbase/hbase-side/hbase-async-side/src/main/java/com/dtstack/flink/sql/side/hbase/HbaseAsyncReqRow.java
#	hbase/hbase-side/hbase-side-core/src/main/java/com/dtstack/flink/sql/side/hbase/utils/HbaseUtils.java
#	kafka-base/kafka-base-sink/src/main/java/com/dtstack/flink/sql/sink/kafka/AbstractKafkaProducerFactory.java
#	rdb/rdb-side/src/main/java/com/dtstack/flink/sql/side/rdb/async/RdbAsyncReqRow.java

2 parents: 0e21db6 + 8295b7e

13 files changed (+526 −19 lines)

core/src/main/java/com/dtstack/flink/sql/side/BaseSideInfo.java

Lines changed: 2 additions & 3 deletions
@@ -132,13 +132,12 @@ public void dealOneEqualCon(SqlNode sqlNode, String sideTableName){

         String leftTableName = left.getComponent(0).getSimple();
         String leftField = left.getComponent(1).getSimple();
-        Map<String, String> physicalFields = sideTableInfo.getPhysicalFields();

         String rightTableName = right.getComponent(0).getSimple();
         String rightField = right.getComponent(1).getSimple();

         if(leftTableName.equalsIgnoreCase(sideTableName)){
-            equalFieldList.add(physicalFields.get(leftField));
+            equalFieldList.add(leftField);
             int equalFieldIndex = -1;
             for(int i=0; i<rowTypeInfo.getFieldNames().length; i++){
                 String fieldName = rowTypeInfo.getFieldNames()[i];
@@ -154,7 +153,7 @@ public void dealOneEqualCon(SqlNode sqlNode, String sideTableName){

         }else if(rightTableName.equalsIgnoreCase(sideTableName)){

-            equalFieldList.add(physicalFields.get(rightField));
+            equalFieldList.add(rightField);
             int equalFieldIndex = -1;
             for(int i=0; i<rowTypeInfo.getFieldNames().length; i++){
                 String fieldName = rowTypeInfo.getFieldNames()[i];
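
With physicalFields gone, equalFieldList now records the field name as declared in the DDL rather than its physical mapping; the alias-to-physical translation appears to move into the table parsers (see the addPhysicalMappings change in HbaseSideParser below).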

core/src/main/java/com/dtstack/flink/sql/side/SidePredicatesParser.java

Lines changed: 2 additions & 2 deletions
@@ -130,10 +130,10 @@ private void extractPredicateInfo(SqlNode whereNode, List<PredicateInfo> predica

         // skip functions
         if ((((SqlBasicCall) whereNode).getOperands()[0] instanceof SqlIdentifier)
-                && (((SqlBasicCall) whereNode).getOperands()[1].getKind() != SqlKind.OTHER_FUNCTION)) {
+                && (((SqlBasicCall) whereNode).getOperands()[1].getKind() == SqlKind.LITERAL)) {
             fillPredicateInfoToList((SqlBasicCall) whereNode, predicatesInfoList, operatorName, operatorKind, 0, 1);
         } else if ((((SqlBasicCall) whereNode).getOperands()[1] instanceof SqlIdentifier)
-                && (((SqlBasicCall) whereNode).getOperands()[0].getKind() != SqlKind.OTHER_FUNCTION)) {
+                && (((SqlBasicCall) whereNode).getOperands()[0].getKind() == SqlKind.LITERAL)) {
             fillPredicateInfoToList((SqlBasicCall) whereNode, predicatesInfoList, operatorName, operatorKind, 1, 0);
         }
     }
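
In effect, a predicate is now extracted only when one operand is a plain column and the other is a literal: for illustration, "where id = 100" still qualifies, while a column-to-column comparison such as "t1.id = t2.id", whose operand kind is not OTHER_FUNCTION and therefore slipped through the old check, no longer does.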

core/src/main/java/com/dtstack/flink/sql/util/DtStringUtil.java

Lines changed: 29 additions & 0 deletions
@@ -261,14 +261,43 @@ public static String firstUpperCase(String str) {
     }

     public static String getTableFullPath(String schema, String tableName) {
+        String[] tableInfoSplit = StringUtils.split(tableName, ".");
+        // the table name itself carries a schema, e.g. schema.table
+        if(tableInfoSplit.length == 2){
+            schema = tableInfoSplit[0];
+            tableName = tableInfoSplit[1];
+        }
+
+        // strip a leading and a trailing double quote
+        schema = rmStrQuote(schema);
+        tableName = rmStrQuote(tableName);
+
         if (StringUtils.isEmpty(schema)){
             return addQuoteForStr(tableName);
         }
+
         String schemaAndTabName = addQuoteForStr(schema) + "." + addQuoteForStr(tableName);
         return schemaAndTabName;
     }

+    /**
+     * Strip a leading and a trailing double quote.
+     */
+    public static String rmStrQuote(String str){
+        if(StringUtils.isEmpty(str)){
+            return str;
+        }
+
+        if(str.startsWith("\"")){
+            str = str.substring(1);
+        }
+
+        if(str.endsWith("\"")){
+            str = str.substring(0, str.length()-1);
+        }
+
+        return str;
+    }

     public static String addQuoteForStr(String column) {
         return getStartQuote() + column + getEndQuote();
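
A quick sketch of the intended behavior (hypothetical inputs; the expected output assumes getStartQuote()/getEndQuote() return a double quote):

    import com.dtstack.flink.sql.util.DtStringUtil;

    public class TableFullPathExample {
        public static void main(String[] args) {
            // plain schema + table
            System.out.println(DtStringUtil.getTableFullPath("public", "users"));         // "public"."users"
            // a schema embedded in the table name overrides the argument
            System.out.println(DtStringUtil.getTableFullPath("ignored", "dw.orders"));    // "dw"."orders"
            // quoted identifiers are unwrapped before re-quoting
            System.out.println(DtStringUtil.getTableFullPath(null, "\"dw\".\"orders\"")); // "dw"."orders"
            System.out.println(DtStringUtil.rmStrQuote("\"orders\""));                    // orders
        }
    }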

hbase/hbase-side/hbase-async-side/src/main/java/com/dtstack/flink/sql/side/hbase/HbaseAsyncReqRow.java

Lines changed: 3 additions & 1 deletion
@@ -33,6 +33,7 @@
 import com.dtstack.flink.sql.factory.DTThreadFactory;
 import com.google.common.collect.Maps;
 import com.stumbleupon.async.Deferred;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.api.java.typeutils.RowTypeInfo;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.streaming.api.functions.async.ResultFuture;
@@ -83,12 +84,13 @@ public HbaseAsyncReqRow(RowTypeInfo rowTypeInfo, JoinInfo joinInfo, List<FieldIn
         super(new HbaseAsyncSideInfo(rowTypeInfo, joinInfo, outFieldInfoList, sideTableInfo));

         tableName = ((HbaseSideTableInfo)sideTableInfo).getTableName();
-        colNames = ((HbaseSideTableInfo)sideTableInfo).getColumnRealNames();
+        colNames = StringUtils.split(sideInfo.getSideSelectFields(), ",");
     }


     @Override
     public void open(Configuration parameters) throws Exception {
+        super.open(parameters);
         AbstractSideTableInfo sideTableInfo = sideInfo.getSideTableInfo();
         HbaseSideTableInfo hbaseSideTableInfo = (HbaseSideTableInfo) sideTableInfo;
         ExecutorService executorService =new ThreadPoolExecutor(DEFAULT_POOL_SIZE, DEFAULT_POOL_SIZE,
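
Two fixes here: colNames is now derived from the side info's select-field list (a comma-separated string of the fields the join actually references) instead of the table's physical column names, and open() now calls super.open(parameters) first, so initialization in the parent class is no longer skipped.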

hbase/hbase-side/hbase-async-side/src/main/java/com/dtstack/flink/sql/side/hbase/rowkeydealer/RowKeyEqualModeDealer.java

Lines changed: 1 addition & 1 deletion
@@ -94,7 +94,7 @@ public void asyncGetData(String tableName, String rowKeyStr, CRow input, ResultF

                 Row row = fillData(input.row(), sideVal);
                 if(openCache){
-                    sideCache.putCache(rowKeyStr, CacheObj.buildCacheObj(ECacheContentType.SingleLine, row));
+                    sideCache.putCache(rowKeyStr, CacheObj.buildCacheObj(ECacheContentType.SingleLine, sideVal));
                 }
                 resultFuture.complete(Collections.singleton(new CRow(row, input.change())));
             } catch (Exception e) {
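
Caching sideVal (the raw side-table values) instead of the joined row keeps input-specific fields out of the cache: the Row built by fillData embeds fields of the current input record, so caching it would replay those fields on a cache hit for a different input.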

hbase/hbase-side/hbase-side-core/src/main/java/com/dtstack/flink/sql/side/hbase/table/HbaseSideParser.java

Lines changed: 2 additions & 0 deletions
@@ -61,6 +61,7 @@ public HbaseSideParser() {
     public AbstractTableInfo getTableInfo(String tableName, String fieldsInfo, Map<String, Object> props) {
         HbaseSideTableInfo hbaseTableInfo = new HbaseSideTableInfo();
         hbaseTableInfo.setName(tableName);
+        parseCacheProp(hbaseTableInfo, props);
         parseFieldsInfo(fieldsInfo, hbaseTableInfo);
         hbaseTableInfo.setTableName((String) props.get(TABLE_NAME_KEY.toLowerCase()));
         hbaseTableInfo.setParallelism(MathUtil.getIntegerVal(props.get(PARALLELISM_KEY.toLowerCase())));
@@ -100,6 +101,7 @@ private void dealField(Matcher matcher, AbstractTableInfo tableInfo){
         sideTableInfo.addFieldClass(fieldClass);
         sideTableInfo.addFieldType(fieldType);
         sideTableInfo.putAliasNameRef(aliasStr, fieldName);
+        sideTableInfo.addPhysicalMappings(aliasStr, fieldName);
     }
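Two additions: cache-related properties are now parsed for HBase side tables, and dealField also registers each alias as a physical mapping, which presumably lets downstream code resolve the declared field name back to the physical HBase column after the BaseSideInfo change above.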

hbase/hbase-side/hbase-side-core/src/main/java/com/dtstack/flink/sql/side/hbase/utils/HbaseUtils.java

Lines changed: 5 additions & 4 deletions
@@ -66,11 +66,12 @@ public static Object convertByte(byte[] hbaseData, String type){

             case "double":
                 return Bytes.toDouble(hbaseData);
+            case "decimal":
+                return Bytes.toBigDecimal(hbaseData);
             default:
-                break;
-
+                throw new RuntimeException("not support type of " + type);
         }
-
-        throw new RuntimeException("not support type of " + type);
     }
+
 }
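
For illustration, a round trip through the new decimal branch (hypothetical values; Bytes is org.apache.hadoop.hbase.util.Bytes, which provides both toBytes(BigDecimal) and toBigDecimal(byte[])):

    import java.math.BigDecimal;

    import com.dtstack.flink.sql.side.hbase.utils.HbaseUtils;
    import org.apache.hadoop.hbase.util.Bytes;

    public class DecimalConvertExample {
        public static void main(String[] args) {
            byte[] raw = Bytes.toBytes(new BigDecimal("12.34"));
            Object val = HbaseUtils.convertByte(raw, "decimal"); // BigDecimal 12.34
            System.out.println(val);
            // Unknown types still fail fast, now directly from the default branch:
            // HbaseUtils.convertByte(raw, "uuid"); // RuntimeException: not support type of uuid
        }
    }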

kafka-base/kafka-base-sink/src/main/java/com/dtstack/flink/sql/sink/kafka/AbstractKafkaProducerFactory.java

Lines changed: 3 additions & 0 deletions
@@ -26,6 +26,9 @@
 import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.api.common.serialization.SerializationSchema;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.formats.avro.AvroRowSerializationSchema;
+import org.apache.flink.formats.csv.CsvRowSerializationSchema;
+import org.apache.flink.formats.json.DTJsonRowSerializationSchema;
 import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
 import org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner;
 import org.apache.flink.table.runtime.types.CRow;

org/apache/flink/formats/json/DTJsonRowSerializationSchema.java (new file; path inferred from its package declaration)

Lines changed: 225 additions & 0 deletions

@@ -0,0 +1,225 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.formats.json;

import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.typeinfo.BasicArrayTypeInfo;
import org.apache.flink.api.common.typeinfo.PrimitiveArrayTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ContainerNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.flink.types.Row;
import org.apache.flink.util.Preconditions;

import java.math.BigDecimal;
import java.math.BigInteger;
import java.sql.Time;
import java.sql.Timestamp;
import java.text.SimpleDateFormat;
import java.util.Objects;

/**
 * Serialization schema that serializes an object of Flink types into JSON bytes.
 *
 * <p>Serializes the input Flink object into a JSON string and
 * converts it into <code>byte[]</code>.
 *
 * <p>Result <code>byte[]</code> messages can be deserialized using {@link DTJsonRowDeserializationSchema}.
 */
@PublicEvolving
public class DTJsonRowSerializationSchema implements SerializationSchema<Row> {

    private static final long serialVersionUID = -2885556750743978636L;

    /** Type information describing the input type. */
    private final TypeInformation<Row> typeInfo;

    /** Object mapper that is used to create output JSON objects. */
    private final ObjectMapper mapper = new ObjectMapper();

    /** Formatter for RFC 3339-compliant string representation of a time value (with UTC timezone, without milliseconds). */
    private SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm:ss'Z'");

    /** Formatter for RFC 3339-compliant string representation of a time value (with UTC timezone). */
    private SimpleDateFormat timeFormatWithMillis = new SimpleDateFormat("HH:mm:ss.SSS'Z'");

    /** Formatter for RFC 3339-compliant string representation of a timestamp value (with UTC timezone). */
    private SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");

    /** Reusable object node. */
    private transient ObjectNode node;

    /**
     * Creates a JSON serialization schema for the given type information.
     *
     * @param typeInfo The field names of {@link Row} are used to map to JSON properties.
     */
    public DTJsonRowSerializationSchema(TypeInformation<Row> typeInfo) {
        Preconditions.checkNotNull(typeInfo, "Type information");
        this.typeInfo = typeInfo;
    }

    /**
     * Creates a JSON serialization schema for the given JSON schema.
     *
     * @param jsonSchema JSON schema describing the result type
     *
     * @see <a href="http://json-schema.org/">http://json-schema.org/</a>
     */
    public DTJsonRowSerializationSchema(String jsonSchema) {
        this(JsonRowSchemaConverter.convert(jsonSchema));
    }

    @Override
    public byte[] serialize(Row row) {
        if (node == null) {
            node = mapper.createObjectNode();
        }

        try {
            convertRow(node, (RowTypeInfo) typeInfo, row);
            return mapper.writeValueAsBytes(node);
        } catch (Throwable t) {
            throw new RuntimeException("Could not serialize row '" + row + "'. " +
                "Make sure that the schema matches the input.", t);
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final DTJsonRowSerializationSchema that = (DTJsonRowSerializationSchema) o;
        return Objects.equals(typeInfo, that.typeInfo);
    }

    @Override
    public int hashCode() {
        return Objects.hash(typeInfo);
    }

    // --------------------------------------------------------------------------------------------

    private ObjectNode convertRow(ObjectNode reuse, RowTypeInfo info, Row row) {
        if (reuse == null) {
            reuse = mapper.createObjectNode();
        }
        final String[] fieldNames = info.getFieldNames();
        final TypeInformation<?>[] fieldTypes = info.getFieldTypes();

        // validate the row
        if (row.getArity() != fieldNames.length) {
            throw new IllegalStateException(String.format(
                "Number of elements in the row '%s' is different from number of field names: %d", row, fieldNames.length));
        }

        for (int i = 0; i < fieldNames.length; i++) {
            final String name = fieldNames[i];

            final JsonNode fieldConverted = convert(reuse, reuse.get(name), fieldTypes[i], row.getField(i));
            reuse.set(name, fieldConverted);
        }

        return reuse;
    }

    private JsonNode convert(ContainerNode<?> container, JsonNode reuse, TypeInformation<?> info, Object object) {
        if (info.equals(Types.VOID) || object == null) {
            return container.nullNode();
        } else if (info.equals(Types.BOOLEAN)) {
            return container.booleanNode((Boolean) object);
        } else if (info.equals(Types.STRING)) {
            return container.textNode((String) object);
        } else if (info.equals(Types.BIG_DEC)) {
            // convert decimal if necessary
            if (object instanceof BigDecimal) {
                return container.numberNode((BigDecimal) object);
            }
            return container.numberNode(BigDecimal.valueOf(((Number) object).doubleValue()));
        } else if (info.equals(Types.BIG_INT)) {
            // convert integer if necessary
            if (object instanceof BigInteger) {
                return container.numberNode((BigInteger) object);
            }
            return container.numberNode(BigInteger.valueOf(((Number) object).longValue()));
        } else if (info.equals(Types.SQL_DATE)) {
            return container.textNode(object.toString());
        } else if (info.equals(Types.SQL_TIME)) {
            final Time time = (Time) object;
            // strip milliseconds if possible
            if (time.getTime() % 1000 > 0) {
                return container.textNode(timeFormatWithMillis.format(time));
            }
            return container.textNode(timeFormat.format(time));
        } else if (info.equals(Types.SQL_TIMESTAMP)) {
            return container.textNode(timestampFormat.format((Timestamp) object));
        } else if (info instanceof RowTypeInfo) {
            if (reuse != null && reuse instanceof ObjectNode) {
                return convertRow((ObjectNode) reuse, (RowTypeInfo) info, (Row) object);
            } else {
                return convertRow(null, (RowTypeInfo) info, (Row) object);
            }
        } else if (info instanceof ObjectArrayTypeInfo) {
            if (reuse != null && reuse instanceof ArrayNode) {
                return convertObjectArray((ArrayNode) reuse, ((ObjectArrayTypeInfo) info).getComponentInfo(), (Object[]) object);
            } else {
                return convertObjectArray(null, ((ObjectArrayTypeInfo) info).getComponentInfo(), (Object[]) object);
            }
        } else if (info instanceof BasicArrayTypeInfo) {
            if (reuse != null && reuse instanceof ArrayNode) {
                return convertObjectArray((ArrayNode) reuse, ((BasicArrayTypeInfo) info).getComponentInfo(), (Object[]) object);
            } else {
                return convertObjectArray(null, ((BasicArrayTypeInfo) info).getComponentInfo(), (Object[]) object);
            }
        } else if (info instanceof PrimitiveArrayTypeInfo && ((PrimitiveArrayTypeInfo) info).getComponentType().equals(Types.BYTE)) {
            return container.binaryNode((byte[]) object);
        } else {
            // for types that were specified without JSON schema
            // e.g. POJOs
            try {
                return mapper.valueToTree(object);
            } catch (IllegalArgumentException e) {
                throw new IllegalStateException("Unsupported type information '" + info + "' for object: " + object, e);
            }
        }
    }

    private ArrayNode convertObjectArray(ArrayNode reuse, TypeInformation<?> info, Object[] array) {
        if (reuse == null) {
            reuse = mapper.createArrayNode();
        } else {
            reuse.removeAll();
        }

        for (Object object : array) {
            reuse.add(convert(reuse, null, info, object));
        }
        return reuse;
    }
}
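
A minimal usage sketch (not part of the commit; the two-field row type below is made up for illustration):

    import org.apache.flink.api.common.typeinfo.TypeInformation;
    import org.apache.flink.api.common.typeinfo.Types;
    import org.apache.flink.formats.json.DTJsonRowSerializationSchema;
    import org.apache.flink.types.Row;

    public class DTJsonSerializationExample {
        public static void main(String[] args) {
            // Field names of the row type become the JSON property names.
            TypeInformation<Row> rowType = Types.ROW_NAMED(
                    new String[]{"id", "name"}, Types.INT, Types.STRING);
            DTJsonRowSerializationSchema schema = new DTJsonRowSerializationSchema(rowType);

            Row row = new Row(2);
            row.setField(0, 42);
            row.setField(1, "flink");

            // Prints: {"id":42,"name":"flink"}
            System.out.println(new String(schema.serialize(row)));
        }
    }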
