Skip to content

Commit e76a921

Browse files
committed
Merge branch '1.10_release_4.0.x' into hotfix_1.10_4.0.x_29910
# Conflicts: # hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/RowKeyBuilder.java
2 parents 8857622 + 25fadc4 commit e76a921

File tree

69 files changed

+2252
-461
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

69 files changed

+2252
-461
lines changed
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package com.dtstack.flink.sql.constant;
20+
21+
/**
22+
* @program: flinkStreamSQL
23+
* @author: wuren
24+
* @create: 2020/09/15
25+
**/
26+
public class PluginParamConsts {

    /** Kerberos principal parameter key. */
    public static final String PRINCIPAL = "principal";

    /** Kerberos keytab file path parameter key. */
    public static final String KEYTAB = "keytab";

    /** krb5.conf file path parameter key. */
    public static final String KRB5_CONF = "krb5conf";

    private PluginParamConsts() {
        // constants holder, not instantiable
    }
}

core/src/main/java/com/dtstack/flink/sql/format/dtnest/DtNestRowDeserializationSchema.java

Lines changed: 32 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@
4242
import java.util.Iterator;
4343
import java.util.List;
4444
import java.util.Map;
45+
import java.util.regex.Pattern;
4546

4647
/**
4748
* source data parse to json format
@@ -64,6 +65,9 @@ public class DtNestRowDeserializationSchema extends AbstractDeserializationSchem
6465
private final List<AbstractTableInfo.FieldExtraInfo> fieldExtraInfos;
6566
private final String charsetName;
6667

68+
private static final Pattern TIMESTAMP_PATTERN = Pattern.compile("^\\d+$");
69+
private static final Pattern TIME_FORMAT_PATTERN = Pattern.compile("\\w+\\d+:\\d+:\\d+");
70+
6771
public DtNestRowDeserializationSchema(TypeInformation<Row> typeInfo, Map<String, String> rowAndFieldMapping,
6872
List<AbstractTableInfo.FieldExtraInfo> fieldExtraInfos,
6973
String charsetName) {
@@ -146,11 +150,11 @@ private Object convert(JsonNode node, TypeInformation<?> info) {
146150
return Date.valueOf(node.asText());
147151
} else if (info.getTypeClass().equals(Types.SQL_TIME.getTypeClass())) {
148152
// local zone
149-
return Time.valueOf(node.asText());
153+
return convertToTime(node.asText());
150154
} else if (info.getTypeClass().equals(Types.SQL_TIMESTAMP.getTypeClass())) {
151155
// local zone
152-
return Timestamp.valueOf(node.asText());
153-
} else if (info instanceof RowTypeInfo) {
156+
return convertToTimestamp(node.asText());
157+
} else if (info instanceof RowTypeInfo) {
154158
return convertRow(node, (RowTypeInfo) info);
155159
} else if (info instanceof ObjectArrayTypeInfo) {
156160
return convertObjectArray(node, ((ObjectArrayTypeInfo) info).getComponentInfo());
@@ -165,6 +169,29 @@ private Object convert(JsonNode node, TypeInformation<?> info) {
165169
}
166170
}
167171

172+
/**
173+
* 将 2020-09-07 14:49:10.0 和 1598446699685 两种格式都转化为 Timestamp
174+
*/
175+
private Timestamp convertToTimestamp(String timestamp) {
176+
if (TIMESTAMP_PATTERN.matcher(timestamp).find()) {
177+
return new Timestamp(Long.parseLong(timestamp));
178+
}
179+
if (TIME_FORMAT_PATTERN.matcher(timestamp).find()) {
180+
return Timestamp.valueOf(timestamp);
181+
}
182+
throw new IllegalArgumentException("Incorrect time format of timestamp");
183+
}
184+
185+
private Time convertToTime(String timestamp) {
186+
if (TIMESTAMP_PATTERN.matcher(timestamp).find()) {
187+
return new Time(Long.parseLong(timestamp));
188+
}
189+
if (TIME_FORMAT_PATTERN.matcher(timestamp).find()) {
190+
return Time.valueOf(timestamp);
191+
}
192+
throw new IllegalArgumentException("Incorrect time format of time");
193+
}
194+
168195
private Row convertTopRow() {
169196
Row row = new Row(fieldNames.length);
170197
try {
@@ -175,7 +202,7 @@ private Row convertTopRow() {
175202
if (node == null) {
176203
if (fieldExtraInfo != null && fieldExtraInfo.getNotNull()) {
177204
throw new IllegalStateException("Failed to find field with name '"
178-
+ fieldNames[i] + "'.");
205+
+ fieldNames[i] + "'.");
179206
} else {
180207
row.setField(i, null);
181208
}
@@ -216,6 +243,7 @@ private Object convertObjectArray(JsonNode node, TypeInformation<?> elementType)
216243
}
217244
return array;
218245
}
246+
219247
@Override
220248
public TypeInformation<Row> getProducedType() {
221249
return typeInfo;
Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package com.dtstack.flink.sql.krb;
20+
21+
import com.google.common.base.Strings;
22+
23+
/**
24+
* @program: flinkStreamSQL
25+
* @author: wuren
26+
* @create: 2020/09/15
27+
**/
28+
public interface KerberosTable {

    String getPrincipal();

    void setPrincipal(String principal);

    String getKeytab();

    void setKeytab(String keytab);

    String getKrb5conf();

    void setKrb5conf(String krb5conf);

    boolean isEnableKrb();

    void setEnableKrb(boolean enableKrb);

    /**
     * Decides whether Kerberos should be enabled for this table: either all three
     * parameters (principal, keytab, krb5conf) are set, or none of them may be.
     *
     * @throws IllegalStateException if only some of the Kerberos parameters are set
     */
    default void judgeKrbEnable() {
        // Evaluate each getter once; null or empty counts as "not set".
        boolean hasPrincipal = getPrincipal() != null && !getPrincipal().isEmpty();
        boolean hasKeytab = getKeytab() != null && !getKeytab().isEmpty();
        boolean hasKrb5conf = getKrb5conf() != null && !getKrb5conf().isEmpty();

        if (hasPrincipal && hasKeytab && hasKrb5conf) {
            setEnableKrb(true);
        } else if (!hasPrincipal && !hasKeytab && !hasKrb5conf) {
            setEnableKrb(false);
        } else {
            // IllegalStateException (a RuntimeException) instead of raw RuntimeException:
            // partially-configured Kerberos is an invalid configuration state.
            throw new IllegalStateException("Missing kerberos parameter! all kerberos params must be set, or all kerberos params are not set");
        }
    }
}

core/src/main/java/com/dtstack/flink/sql/parser/SqlParser.java

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,8 +28,11 @@
2828
import com.google.common.collect.Lists;
2929
import com.google.common.base.Strings;
3030

31+
import java.util.ArrayList;
3132
import java.util.List;
3233
import java.util.Set;
34+
import java.util.regex.Matcher;
35+
import java.util.regex.Pattern;
3336

3437
/**
3538
* Reason:
@@ -51,6 +54,8 @@ public static void setLocalSqlPluginRoot(String localSqlPluginRoot){
5154
LOCAL_SQL_PLUGIN_ROOT = localSqlPluginRoot;
5255
}
5356

57+
private static final Pattern ADD_FIlE_PATTERN = Pattern.compile("(?i).*add\\s+file\\s+.+");
58+
5459
/**
5560
* flink support sql syntax
5661
* CREATE TABLE sls_stream() with ();
@@ -70,6 +75,7 @@ public static SqlTree parseSql(String sql, String pluginLoadMode) throws Excepti
7075
.replace("\t", " ").trim();
7176

7277
List<String> sqlArr = DtStringUtil.splitIgnoreQuota(sql, SQL_DELIMITER);
78+
sqlArr = removeAddFileStmt(sqlArr);
7379
SqlTree sqlTree = new SqlTree();
7480
AbstractTableInfoParser tableInfoParser = new AbstractTableInfoParser();
7581
for(String childSql : sqlArr){
@@ -150,4 +156,18 @@ public static SqlTree parseSql(String sql, String pluginLoadMode) throws Excepti
150156

151157
return sqlTree;
152158
}
159+
160+
/**
161+
* remove add file with statment etc. add file /etc/krb5.conf;
162+
*/
163+
private static List<String> removeAddFileStmt(List<String> stmts) {
164+
List<String> cleanedStmts = new ArrayList<>();
165+
for (String stmt : stmts) {
166+
Matcher matcher = ADD_FIlE_PATTERN.matcher(stmt);
167+
if(!matcher.matches()) {
168+
cleanedStmts.add(stmt);
169+
}
170+
}
171+
return cleanedStmts;
172+
}
153173
}

core/src/main/java/com/dtstack/flink/sql/side/SidePredicatesParser.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -140,7 +140,7 @@ private void extractPredicateInfo(SqlNode whereNode, List<PredicateInfo> predica
140140
}
141141
}
142142

143-
private void fillPredicateInfoToList(SqlBasicCall whereNode, List<PredicateInfo> predicatesInfoList, String operatorName, SqlKind operatorKind,
143+
private void fillPredicateInfoToList(SqlBasicCall whereNode, List<PredicateInfo> predicatesInfoList, String operatorName, SqlKind operatorKind,
144144
int fieldIndex, int conditionIndex) {
145145
SqlNode sqlNode = whereNode.getOperands()[fieldIndex];
146146
if (sqlNode.getKind() == SqlKind.IDENTIFIER) {

core/src/main/java/com/dtstack/flink/sql/side/SideSqlExec.java

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -409,10 +409,9 @@ private void joinFun(Object pollObj,
409409

410410
RowTypeInfo typeInfo = new RowTypeInfo(targetTable.getSchema().getFieldTypes(), targetTable.getSchema().getFieldNames());
411411

412-
DataStream adaptStream = tableEnv.toRetractStream(targetTable, Row.class)
412+
DataStream adaptStream = tableEnv.toRetractStream(targetTable, typeInfo)
413413
.filter(f -> f.f0)
414-
.map(f -> f.f1)
415-
.returns(Row.class);
414+
.map(f -> f.f1);
416415

417416
//join side table before keyby ===> Reducing the size of each dimension table cache of async
418417
if (sideTableInfo.isPartitionedJoin()) {

core/src/main/java/com/dtstack/flink/sql/table/AbstractTableInfo.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ public abstract class AbstractTableInfo implements Serializable {
4949
private final List<String> fieldList = Lists.newArrayList();
5050

5151
/**key:别名, value: realField */
52-
private Map<String, String> physicalFields = Maps.newHashMap();
52+
private Map<String, String> physicalFields = Maps.newLinkedHashMap();
5353

5454
private final List<String> fieldTypeList = Lists.newArrayList();
5555

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package com.dtstack.flink.sql.util;
20+
21+
import org.apache.flink.util.Preconditions;
22+
23+
import java.io.File;
24+
25+
/**
26+
* @program: flinkStreamSQL
27+
* @author: wuren
28+
* @create: 2020/09/21
29+
**/
30+
public class DtFileUtils {

    private DtFileUtils() {
        // utility class, not instantiable
    }

    /**
     * Asserts that the given path exists on the local file system.
     *
     * @param path path of the file or directory to check
     * @throws IllegalStateException if nothing exists at {@code path}
     */
    public static void checkExists(String path) {
        File file = new File(path);
        if (!file.exists()) {
            // Same exception type Preconditions.checkState would raise,
            // with the grammar of the message fixed ("does not exist").
            throw new IllegalStateException(String.format("%s file does not exist!", path));
        }
    }
}
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package com.dtstack.flink.sql.util;
20+
21+
import org.apache.hadoop.conf.Configuration;
22+
import org.apache.hadoop.security.UserGroupInformation;
23+
import org.slf4j.Logger;
24+
import org.slf4j.LoggerFactory;
25+
26+
import java.io.IOException;
27+
28+
/**
29+
* @program: flinkStreamSQL
30+
* @author: wuren
31+
* @create: 2020/09/14
32+
**/
33+
public class KrbUtils {
34+
35+
private static final Logger LOG = LoggerFactory.getLogger(KrbUtils.class);
36+
37+
public static final String KRB5_CONF_KEY = "java.security.krb5.conf";
38+
public static final String HADOOP_AUTH_KEY = "hadoop.security.authentication";
39+
public static final String KRB_STR = "Kerberos";
40+
// public static final String FALSE_STR = "false";
41+
// public static final String SUBJECT_ONLY_KEY = "javax.security.auth.useSubjectCredsOnly";
42+
43+
public static UserGroupInformation getUgi(String principal, String keytabPath, String krb5confPath) throws IOException {
44+
LOG.info("Kerberos login with principal: {} and keytab: {}", principal, keytabPath);
45+
System.setProperty(KRB5_CONF_KEY, krb5confPath);
46+
// TODO 尚未探索出此选项的意义,以后研究明白方可打开
47+
// System.setProperty(SUBJECT_ONLY_KEY, FALSE_STR);
48+
Configuration configuration = new Configuration();
49+
configuration.set(HADOOP_AUTH_KEY , KRB_STR);
50+
UserGroupInformation.setConfiguration(configuration);
51+
return UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytabPath);
52+
}
53+
54+
}

0 commit comments

Comments
 (0)