
Commit 8182901

[fix] use LinkedHashMap instead of Map, LinkedList instead of Set
1 parent 633e83a commit 8182901

5 files changed: +31 -29 lines changed

core/src/main/java/com/dtstack/flink/sql/table/AbstractTableInfo.java

Lines changed: 1 addition & 1 deletion
@@ -49,7 +49,7 @@ public abstract class AbstractTableInfo implements Serializable {
     private final List<String> fieldList = Lists.newArrayList();
 
     /** key: alias, value: realField */
-    private Map<String, String> physicalFields = Maps.newHashMap();
+    private Map<String, String> physicalFields = Maps.newLinkedHashMap();
 
     private final List<String> fieldTypeList = Lists.newArrayList();
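The point of switching physicalFields from Maps.newHashMap() to Maps.newLinkedHashMap() is iteration order: HashMap makes no guarantee about the order of its entries, while LinkedHashMap iterates in insertion order, so downstream code can walk the alias-to-field mapping in the same order the fields were declared. A minimal sketch of the difference (the field names are invented for illustration):

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class IterationOrderDemo {
    public static void main(String[] args) {
        // HashMap: iteration order follows hash buckets, not insertion order.
        Map<String, String> hashed = new HashMap<>();
        // LinkedHashMap: iteration order is exactly the insertion order.
        Map<String, String> linked = new LinkedHashMap<>();
        for (String field : new String[]{"cf:name", "cf:age", "cf:city"}) {
            hashed.put(field, field.split(":")[1]);
            linked.put(field, field.split(":")[1]);
        }
        System.out.println(hashed.keySet()); // order is unspecified
        System.out.println(linked.keySet()); // [cf:name, cf:age, cf:city]
    }
}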

hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/HbaseOutputFormat.java

Lines changed: 22 additions & 20 deletions
@@ -28,7 +28,11 @@
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.types.Row;
 import org.apache.flink.util.Preconditions;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.AuthUtil;
+import org.apache.hadoop.hbase.ChoreService;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.ScheduledChore;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Delete;
@@ -42,9 +46,10 @@
 import java.io.File;
 import java.io.IOException;
 import java.security.PrivilegedAction;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 /**
  * @author: jingzhen@dtstack.com
@@ -61,7 +66,7 @@ public class HbaseOutputFormat extends AbstractDtRichOutputFormat<Tuple2> {
     private String[] columnNames;
     private String updateMode;
     private String[] columnTypes;
-    private Map<String, String> columnNameFamily;
+    private LinkedHashMap<String, String> columnNameFamily;
 
     private boolean kerberosAuthEnable;
     private String regionserverKeytabFile;
@@ -129,21 +134,18 @@ private void openKerberosConn() throws Exception {
 
         UserGroupInformation userGroupInformation = HbaseConfigUtils.loginAndReturnUGI(conf, clientPrincipal, clientKeytabFile);
         org.apache.hadoop.conf.Configuration finalConf = conf;
-        conn = userGroupInformation.doAs(new PrivilegedAction<Connection>() {
-            @Override
-            public Connection run() {
-                try {
-                    ScheduledChore authChore = AuthUtil.getAuthChore(finalConf);
-                    if (authChore != null) {
-                        choreService = new ChoreService("hbaseKerberosSink");
-                        choreService.scheduleChore(authChore);
-                    }
-
-                    return ConnectionFactory.createConnection(finalConf);
-                } catch (IOException e) {
-                    LOG.error("Get connection fail with config:{}", finalConf);
-                    throw new RuntimeException(e);
+        conn = userGroupInformation.doAs((PrivilegedAction<Connection>) () -> {
+            try {
+                ScheduledChore authChore = AuthUtil.getAuthChore(finalConf);
+                if (authChore != null) {
+                    choreService = new ChoreService("hbaseKerberosSink");
+                    choreService.scheduleChore(authChore);
                 }
+
+                return ConnectionFactory.createConnection(finalConf);
+            } catch (IOException e) {
+                LOG.error("Get connection fail with config:{}", finalConf);
+                throw new RuntimeException(e);
             }
         });
     }
@@ -304,7 +306,7 @@ public HbaseOutputFormatBuilder setColumnTypes(String[] columnTypes) {
             return this;
         }
 
-        public HbaseOutputFormatBuilder setColumnNameFamily(Map<String, String> columnNameFamily) {
+        public HbaseOutputFormatBuilder setColumnNameFamily(LinkedHashMap<String, String> columnNameFamily) {
             format.columnNameFamily = columnNameFamily;
             return this;
         }
@@ -355,8 +357,8 @@ public HbaseOutputFormat finish() {
             String[] qualifiers = new String[format.columnNames.length];
 
             if (format.columnNameFamily != null) {
-                Set<String> keySet = format.columnNameFamily.keySet();
-                String[] columns = keySet.toArray(new String[keySet.size()]);
+                List<String> keyList = new LinkedList<>(format.columnNameFamily.keySet());
+                String[] columns = keyList.toArray(new String[0]);
                 for (int i = 0; i < columns.length; ++i) {
                     String col = columns[i];
                     String[] part = col.split(":");
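Two idioms in this file are worth spelling out. First, UserGroupInformation.doAs is overloaded for both PrivilegedAction<T> and PrivilegedExceptionAction<T>, and since both are functional interfaces a bare lambda would be ambiguous; the explicit (PrivilegedAction<Connection>) cast selects the overload. A minimal illustration of the same pattern (the action body here is invented):

import java.security.PrivilegedAction;
import org.apache.hadoop.security.UserGroupInformation;

public class DoAsLambdaDemo {
    public static void main(String[] args) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        // ugi.doAs(() -> ...) alone would not compile: the compiler cannot
        // choose between the PrivilegedAction and PrivilegedExceptionAction
        // overloads, so the lambda must be cast to one of them.
        String name = ugi.doAs((PrivilegedAction<String>) () -> ugi.getUserName());
        System.out.println(name);
    }
}

Second, in finish(), keyList.toArray(new String[0]) replaces keySet.toArray(new String[keySet.size()]). Passing a zero-length array is the generally recommended form: the collection allocates a correctly sized result itself, and on modern JVMs this is at least as fast as presizing.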

hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/HbaseSink.java

Lines changed: 2 additions & 2 deletions
@@ -35,7 +35,7 @@
 import org.apache.flink.table.sinks.TableSink;
 import org.apache.flink.types.Row;
 
-import java.util.Map;
+import java.util.LinkedHashMap;
 
 /**
  * Date: 2018/09/14
@@ -45,7 +45,7 @@
 public class HbaseSink implements RetractStreamTableSink<Row>, IStreamSinkGener<HbaseSink> {
 
     protected String[] fieldNames;
-    protected Map<String, String> columnNameFamily;
+    protected LinkedHashMap<String, String> columnNameFamily;
     TypeInformation<?>[] fieldTypes;
     protected String zookeeperQuorum;
     protected String port;

hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/table/HbaseSinkParser.java

Lines changed: 2 additions & 3 deletions
@@ -97,7 +97,6 @@ public AbstractTableInfo getTableInfo(String tableName, String fieldsInfo, Map<S
 
     public void parseFieldsInfo(String fieldsInfo, HbaseTableInfo tableInfo){
         List<String> fieldRows = DtStringUtil.splitIgnoreQuota(fieldsInfo, ',');
-        Map<String, String> columnFamilies = new LinkedHashMap<>();
         for(String fieldRow : fieldRows){
             fieldRow = fieldRow.trim();
 
@@ -128,8 +127,8 @@ public void parseFieldsInfo(String fieldsInfo, HbaseTableInfo tableInfo){
         tableInfo.finish();
     }
 
-    private Map<String, String> parseColumnFamily(Map<String, String> physicalFieldMap){
-        Map<String, String> columnFamiles = Maps.newLinkedHashMap();
+    private LinkedHashMap<String, String> parseColumnFamily(Map<String, String> physicalFieldMap){
+        LinkedHashMap<String, String> columnFamiles = Maps.newLinkedHashMap();
         physicalFieldMap.values().forEach(x -> {
             String[] columnFamily = StringUtils.split(x.trim(), ":");
             columnFamiles.put(x, columnFamily[1]);
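parseColumnFamily works in concert with the AbstractTableInfo change above: physicalFieldMap is now backed by a LinkedHashMap, so values() iterates in field-declaration order, and the LinkedHashMap this method returns preserves that order for the sink. A standalone sketch of the same split logic (the wrapper class and sample fields are invented, and it mirrors the parser rather than reusing it, using String.split in place of StringUtils.split):

import java.util.LinkedHashMap;
import java.util.Map;

public class ColumnFamilyParseDemo {
    // Map each "family:qualifier" physical field to its qualifier,
    // preserving the order in which the fields were declared.
    static LinkedHashMap<String, String> parseColumnFamily(Map<String, String> physicalFieldMap) {
        LinkedHashMap<String, String> columnFamilies = new LinkedHashMap<>();
        physicalFieldMap.values().forEach(x -> {
            String[] columnFamily = x.trim().split(":");
            columnFamilies.put(x, columnFamily[1]);
        });
        return columnFamilies;
    }

    public static void main(String[] args) {
        Map<String, String> physical = new LinkedHashMap<>();
        physical.put("name", "cf:name");
        physical.put("age", "cf:age");
        System.out.println(parseColumnFamily(physical));
        // {cf:name=name, cf:age=age} -- declaration order preserved
    }
}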

hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/table/HbaseTableInfo.java

Lines changed: 4 additions & 3 deletions
@@ -25,6 +25,7 @@
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 
+import java.util.LinkedHashMap;
 import java.util.Map;
 
 /**
@@ -44,7 +45,7 @@ public class HbaseTableInfo extends AbstractTargetTableInfo {
 
     private String rowkey;
 
-    private Map<String, String> columnNameFamily;
+    private LinkedHashMap<String, String> columnNameFamily;
 
     private String[] columnNames;
 
@@ -116,11 +117,11 @@ public void setRowkey(String rowkey) {
         this.rowkey = rowkey;
     }
 
-    public Map<String, String> getColumnNameFamily() {
+    public LinkedHashMap<String, String> getColumnNameFamily() {
         return columnNameFamily;
     }
 
-    public void setColumnNameFamily(Map<String, String> columnNameFamily) {
+    public void setColumnNameFamily(LinkedHashMap<String, String> columnNameFamily) {
         this.columnNameFamily = columnNameFamily;
     }
 
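Narrowing the field, getter, and setter from Map to LinkedHashMap turns the ordering requirement into a compile-time guarantee rather than a convention: an unordered map can no longer be passed in by accident. A minimal sketch of the effect (class and method names are illustrative):

import java.util.LinkedHashMap;

public class SignatureNarrowingDemo {
    private LinkedHashMap<String, String> columnNameFamily;

    public void setColumnNameFamily(LinkedHashMap<String, String> columnNameFamily) {
        this.columnNameFamily = columnNameFamily;
    }

    public static void main(String[] args) {
        SignatureNarrowingDemo demo = new SignatureNarrowingDemo();
        demo.setColumnNameFamily(new LinkedHashMap<>()); // compiles
        // demo.setColumnNameFamily(new java.util.HashMap<>()); // rejected:
        // a HashMap with unspecified iteration order no longer type-checks.
    }
}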
