Commit 619cb89

Revert "Merge branch 'v1.8.0_dirtydata_manager' into 'v1.8.0_dev' "
This reverts merge request !153
1 parent ae75fae commit 619cb89

File tree

31 files changed: +74 -1265 lines changed


core/pom.xml

Lines changed: 0 additions & 29 deletions
@@ -20,7 +20,6 @@
         <calcite.server.version>1.16.0</calcite.server.version>
         <jackson.version>2.7.9</jackson.version>
         <guava.version>19.0</guava.version>
-        <hadoop.version>2.7.3</hadoop.version>
     </properties>
 
     <dependencies>
@@ -47,12 +46,6 @@
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-streaming-java_2.11</artifactId>
             <version>${flink.version}</version>
-            <exclusions>
-                <exclusion>
-                    <artifactId>flink-hadoop-fs</artifactId>
-                    <groupId>org.apache.flink</groupId>
-                </exclusion>
-            </exclusions>
         </dependency>
 
         <dependency>
@@ -114,12 +107,6 @@
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-yarn_2.11</artifactId>
             <version>${flink.version}</version>
-            <exclusions>
-                <exclusion>
-                    <artifactId>flink-shaded-hadoop2</artifactId>
-                    <groupId>org.apache.flink</groupId>
-                </exclusion>
-            </exclusions>
         </dependency>
 
         <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-statebackend-rocksdb -->
@@ -129,22 +116,6 @@
             <version>${flink.version}</version>
         </dependency>
 
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>${hadoop.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <version>${hadoop.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
-            <version>${hadoop.version}</version>
-        </dependency>
     </dependencies>
 
     <build>
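
The Hadoop artifacts dropped here (hadoop-common, hadoop-hdfs, hadoop-mapreduce-client-core) were presumably pulled in by the reverted dirty-data branch so that dirty records could be written out to HDFS. Below is a minimal, self-contained sketch of that kind of write using hadoop-common's FileSystem client; the class name, fs.defaultFS URI, output path, and record text are illustrative assumptions, not values from this repository.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.nio.charset.StandardCharsets;

public class DirtyHdfsWriteSketch {

    public static void main(String[] args) throws Exception {
        // Illustrative values only; the real DirtyConfig fields are not visible in this diff.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://namenode:9000");

        Path dirtyFile = new Path("/tmp/dirtyData/part-0.txt");

        // The client API below is what the removed hadoop-common/hadoop-hdfs dependencies provide.
        try (FileSystem fs = FileSystem.get(conf);
             FSDataOutputStream out = fs.create(dirtyFile, true)) {
            out.write("bad-record|parse error\n".getBytes(StandardCharsets.UTF_8));
        }
    }
}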

core/src/main/java/com/dtstack/flink/sql/Main.java

Lines changed: 4 additions & 22 deletions
@@ -22,9 +22,7 @@
 
 import com.dtstack.flink.sql.config.CalciteConfig;
 import com.dtstack.flink.sql.classloader.ClassLoaderManager;
-import com.dtstack.flink.sql.config.DirtyConfig;
 import com.dtstack.flink.sql.constrant.ConfigConstrant;
-import com.dtstack.flink.sql.dirty.DirtyDataManager;
 import com.dtstack.flink.sql.enums.ClusterMode;
 import com.dtstack.flink.sql.enums.ECacheType;
 import com.dtstack.flink.sql.enums.EPluginLoadMode;
@@ -50,7 +48,6 @@
 import com.dtstack.flink.sql.util.PluginUtil;
 import org.apache.calcite.sql.SqlInsert;
 import org.apache.calcite.sql.SqlNode;
-import org.apache.commons.collections.MapUtils;
 import org.apache.commons.io.Charsets;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.api.common.ExecutionConfig;
@@ -77,7 +74,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import java.io.File;
-import java.io.IOException;
 import java.lang.reflect.Field;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
@@ -86,7 +82,6 @@
 import java.net.URLDecoder;
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
 import java.util.Properties;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
@@ -119,7 +114,6 @@ public static void main(String[] args) throws Exception {
         String pluginLoadMode = options.getPluginLoadMode();
         String deployMode = options.getMode();
         String confProp = options.getConfProp();
-        String dirtyProp = options.getDirtyProp();
 
         sql = URLDecoder.decode(sql, Charsets.UTF_8.name());
         SqlParser.setLocalSqlPluginRoot(localSqlPluginPath);
@@ -129,18 +123,13 @@ public static void main(String[] args) throws Exception {
             addJarListStr = URLDecoder.decode(addJarListStr, Charsets.UTF_8.name());
             addJarFileList = objMapper.readValue(addJarListStr, List.class);
         }
+
         confProp = URLDecoder.decode(confProp, Charsets.UTF_8.toString());
         Properties confProperties = PluginUtil.jsonStrToObject(confProp, Properties.class);
-
-        dirtyProp = URLDecoder.decode(dirtyProp, Charsets.UTF_8.toString());
-        // set DirtyDataManager dirtyconfig
-        DirtyConfig dirtyConfig = getDirtyDataManagerDirtyConfig(dirtyProp);
-
         StreamExecutionEnvironment env = getStreamExeEnv(confProperties, deployMode);
         StreamTableEnvironment tableEnv = StreamTableEnvironment.getTableEnvironment(env);
         StreamQueryConfig queryConfig = getStreamTableEnvTTL(confProperties, tableEnv);
 
-
         List<URL> jarURList = Lists.newArrayList();
         SqlTree sqlTree = SqlParser.parseSql(sql);
 
@@ -156,7 +145,7 @@ public static void main(String[] args) throws Exception {
         //register udf
         registerUDF(sqlTree, jarURList, tableEnv);
         //register table schema
-        registerTable(sqlTree, env, tableEnv, localSqlPluginPath, remoteSqlPluginPath, pluginLoadMode, sideTableMap, registerTableCache, dirtyConfig);
+        registerTable(sqlTree, env, tableEnv, localSqlPluginPath, remoteSqlPluginPath, pluginLoadMode, sideTableMap, registerTableCache);
 
         sqlTranslation(localSqlPluginPath, tableEnv,sqlTree,sideTableMap,registerTableCache, queryConfig);
 
@@ -167,11 +156,6 @@ public static void main(String[] args) throws Exception {
         env.execute(name);
     }
 
-    private static DirtyConfig getDirtyDataManagerDirtyConfig(String dirtyProp) throws IOException {
-        Map dirtyCofig = PluginUtil.jsonStrToObject(dirtyProp, Map.class);
-        return dirtyCofig.size() == 0 ? null : new DirtyConfig(dirtyCofig);
-    }
-
     private static void sqlTranslation(String localSqlPluginPath, StreamTableEnvironment tableEnv,SqlTree sqlTree,Map<String, SideTableInfo> sideTableMap,Map<String, Table> registerTableCache, StreamQueryConfig queryConfig) throws Exception {
         SideSqlExec sideSqlExec = new SideSqlExec();
         sideSqlExec.setLocalSqlPluginPath(localSqlPluginPath);
@@ -251,16 +235,14 @@ private static void registerUDF(SqlTree sqlTree, List<URL> jarURList, StreamTabl
 
 
     private static void registerTable(SqlTree sqlTree, StreamExecutionEnvironment env, StreamTableEnvironment tableEnv, String localSqlPluginPath,
-                                      String remoteSqlPluginPath, String pluginLoadMode, Map<String, SideTableInfo> sideTableMap,
-                                      Map<String, Table> registerTableCache, DirtyConfig dirtyConfig) throws Exception {
+                                      String remoteSqlPluginPath, String pluginLoadMode, Map<String, SideTableInfo> sideTableMap, Map<String, Table> registerTableCache) throws Exception {
         Set<URL> classPathSet = Sets.newHashSet();
         WaterMarkerAssigner waterMarkerAssigner = new WaterMarkerAssigner();
         for (TableInfo tableInfo : sqlTree.getTableInfoMap().values()) {
 
             if (tableInfo instanceof SourceTableInfo) {
 
                 SourceTableInfo sourceTableInfo = (SourceTableInfo) tableInfo;
-                sourceTableInfo.setDirtyConfig(dirtyConfig);
                 Table table = StreamSourceFactory.getStreamSource(sourceTableInfo, env, tableEnv, localSqlPluginPath);
                 tableEnv.registerTable(sourceTableInfo.getAdaptName(), table);
                 //Note --- parameter conversion function can not be used inside a function of the type of polymerization
@@ -290,7 +272,7 @@ private static void registerTable(SqlTree sqlTree, StreamExecutionEnvironment en
                 registerTableCache.put(tableInfo.getName(), regTable);
                 classPathSet.add(buildSourceAndSinkPathByLoadMode(tableInfo.getType(), SourceTableInfo.SOURCE_SUFFIX, localSqlPluginPath, remoteSqlPluginPath, pluginLoadMode));
             } else if (tableInfo instanceof TargetTableInfo) {
-                tableInfo.setDirtyConfig(dirtyConfig);
+
                 TableSink tableSink = StreamSinkFactory.getTableSink((TargetTableInfo) tableInfo, localSqlPluginPath);
                 TypeInformation[] flinkTypes = FlinkUtil.transformTypes(tableInfo.getFieldClasses());
                 tableEnv.registerTableSink(tableInfo.getName(), tableInfo.getFields(), flinkTypes, tableSink);
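
The deleted getDirtyDataManagerDirtyConfig helper follows a simple pattern: URL-decode a JSON option string, parse it into a Map, and treat an empty map as "no dirty-data handling configured". Below is a self-contained sketch of that pattern; DirtyPropsExample and parseDirtyProp are hypothetical stand-ins for the reverted DirtyConfig class and helper, and the encoded sample string is illustrative only.

import java.io.IOException;
import java.net.URLDecoder;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;

public class DirtyPropsExample {

    private final Map<String, Object> props;

    public DirtyPropsExample(Map<String, Object> props) {
        this.props = props;
    }

    public Map<String, Object> getProps() {
        return props;
    }

    // Mirrors the deleted helper: decode, parse, and return null when the JSON object is empty.
    static DirtyPropsExample parseDirtyProp(String dirtyProp) throws IOException {
        String decoded = URLDecoder.decode(dirtyProp, "UTF-8");
        @SuppressWarnings("unchecked")
        Map<String, Object> map = new ObjectMapper().readValue(decoded, Map.class);
        return map.isEmpty() ? null : new DirtyPropsExample(map);
    }

    public static void main(String[] args) throws IOException {
        // URL-encoded form of {"path":"/tmp/dirty"} (illustrative only).
        DirtyPropsExample cfg = parseDirtyProp("%7B%22path%22%3A%22%2Ftmp%2Fdirty%22%7D");
        System.out.println(cfg == null ? "no dirty config" : cfg.getProps());
    }
}

Returning null rather than an empty object mirrors the deleted helper's behavior when no dirty properties are passed on the command line.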

core/src/main/java/com/dtstack/flink/sql/authenticate/KerberosUtil.java

Lines changed: 0 additions & 213 deletions
This file was deleted.
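
The 213 deleted lines of KerberosUtil are not shown in this view. As a rough illustration only, and not a reproduction of the deleted class, a keytab login with hadoop-common's UserGroupInformation (provided by the hadoop-common dependency removed above) typically looks like the sketch below; the principal and keytab path are hypothetical.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

import java.io.IOException;

public class KerberosLoginSketch {

    public static void main(String[] args) throws IOException {
        // Hypothetical principal and keytab path, for illustration only.
        String principal = "flinksql/host@EXAMPLE.COM";
        String keytabPath = "/etc/security/keytabs/flinksql.keytab";

        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");

        // Standard hadoop-common keytab login.
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab(principal, keytabPath);

        System.out.println("Logged in as " + UserGroupInformation.getLoginUser().getUserName());
    }
}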
