
Commit c50bd17

Merge branch '1.8_test_3.10.x' of ssh://git.dtstack.cn:10022/dt-insight-engine/flinkStreamSQL into test_1.8_3.1.0x_elasticsearch6-sink
2 parents: dd35dda + 69f0d75

4 files changed: 7 additions (+7), 19 deletions (-19)

cassandra/cassandra-sink/src/main/java/com/dtstack/flink/sql/sink/cassandra/CassandraOutputFormat.java

Lines changed: 1 addition & 1 deletion
@@ -219,7 +219,7 @@ private void insertWrite(Row row) {
             }
         } catch (Exception e) {
             if(outDirtyRecords.getCount() % DIRTY_PRINT_FREQUENCY == 0){
-                LOG.error("record insert failed ..", row.toString().substring(0, 100));
+                LOG.error("record insert failed, total dirty num:{}, current record:{}", outDirtyRecords.getCount(), row.toString());
                 LOG.error("", e);
             }
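In the old call the format string had no {} placeholder, so the row argument was never actually printed, and row.toString().substring(0, 100) would throw StringIndexOutOfBoundsException for rows whose string form is shorter than 100 characters; the new message fixes both and adds the running dirty count. The same throttled dirty-record pattern appears in the Kudu hunks further down. A minimal sketch of the pattern, assuming a plain long counter and an illustrative DIRTY_PRINT_FREQUENCY of 1000 (the project's actual counter type and constant are not shown in this diff):

// Minimal sketch of the throttled dirty-record logging pattern, not the project's
// actual sink class. DIRTY_PRINT_FREQUENCY = 1000 and the plain long counter are
// assumptions for illustration only.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DirtyRecordLogging {
    private static final Logger LOG = LoggerFactory.getLogger(DirtyRecordLogging.class);
    private static final long DIRTY_PRINT_FREQUENCY = 1000; // assumed value
    private long outDirtyRecords = 0;

    public void onInsertFailure(Object row, Exception e) {
        // Report the running dirty total and the full record, but only once per
        // DIRTY_PRINT_FREQUENCY failures so a burst of bad rows does not flood the log.
        if (outDirtyRecords % DIRTY_PRINT_FREQUENCY == 0) {
            LOG.error("record insert failed, total dirty num:{}, current record:{}",
                    outDirtyRecords, row);
            LOG.error("", e);
        }
        outDirtyRecords++;
    }
}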

core/pom.xml

Lines changed: 3 additions & 1 deletion
@@ -20,6 +20,7 @@
         <calcite.server.version>1.16.0</calcite.server.version>
         <jackson.version>2.7.9</jackson.version>
         <guava.version>19.0</guava.version>
+        <logback.version>1.1.7</logback.version>
     </properties>
 
     <dependencies>
@@ -121,10 +122,11 @@
             <artifactId>junit</artifactId>
             <version>4.12</version>
         </dependency>
+
         <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
-           <version>1.1.7</version>
+           <version>${logback.version}</version>
         </dependency>
 
     </dependencies>

core/src/main/java/com/dtstack/flink/sql/Main.java

Lines changed: 0 additions & 8 deletions
@@ -27,8 +27,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import ch.qos.logback.classic.Level;
-import ch.qos.logback.classic.LoggerContext;
 
 /**
  * Date: 2018/6/26
@@ -45,10 +43,4 @@ public static void main(String[] args) throws Exception {
         env.execute(paramsInfo.getName());
         LOG.info("program {} execution success", paramsInfo.getName());
     }
-    private static void setLogLevel(String level){
-        LoggerContext loggerContext= (LoggerContext) LoggerFactory.getILoggerFactory();
-        // set the global log level
-        ch.qos.logback.classic.Logger logger = loggerContext.getLogger("root");
-        logger.setLevel(Level.toLevel(level, Level.INFO));
-    }
 }
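For reference, the deleted setLogLevel helper reached behind the slf4j facade to logback's LoggerContext and overrode the root logger level; with it (and the two logback imports) gone, Main no longer touches logback APIs directly, presumably leaving the level to the logback configuration file (an assumption, the commit does not say). A self-contained sketch of what the removed code did:

// Self-contained version of the helper removed in this commit; it casts the slf4j
// ILoggerFactory to logback's LoggerContext and overrides the root logger level.
// Requires logback-classic on the classpath (which core/pom.xml above still declares).
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.LoggerContext;
import org.slf4j.LoggerFactory;

public class LogLevelUtil {
    public static void setLogLevel(String level) {
        // Works only when logback-classic is the bound slf4j implementation;
        // otherwise the cast below fails with a ClassCastException.
        LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
        // Set the global (root) log level; fall back to INFO if the string is invalid.
        ch.qos.logback.classic.Logger root = loggerContext.getLogger("root");
        root.setLevel(Level.toLevel(level, Level.INFO));
    }
}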

kudu/kudu-sink/src/main/java/com/dtstack/flink/sql/sink/kudu/KuduOutputFormat.java

Lines changed: 3 additions & 9 deletions
@@ -23,13 +23,7 @@
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.types.Row;
-import org.apache.kudu.client.AsyncKuduClient;
-import org.apache.kudu.client.AsyncKuduSession;
-import org.apache.kudu.client.KuduClient;
-import org.apache.kudu.client.KuduException;
-import org.apache.kudu.client.KuduTable;
-import org.apache.kudu.client.Operation;
-import org.apache.kudu.client.PartialRow;
+import org.apache.kudu.client.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -123,7 +117,7 @@ public void writeRecord(Tuple2 record) throws IOException {
         Row row = tupleTrans.getField(1);
         if (row.getArity() != fieldNames.length) {
             if(outDirtyRecords.getCount() % DIRTY_PRINT_FREQUENCY == 0) {
-                LOG.error("record insert failed ..{}", row.toString());
+                LOG.error("record insert failed:{}", row.toString());
                 LOG.error("cause by row.getArity() != fieldNames.length");
             }
             outDirtyRecords.inc();
@@ -142,7 +136,7 @@ public void writeRecord(Tuple2 record) throws IOException {
             outRecords.inc();
         } catch (KuduException e) {
             if(outDirtyRecords.getCount() % DIRTY_PRINT_FREQUENCY == 0){
-                LOG.error("record insert failed ..{}", row.toString().substring(0, 100));
+                LOG.error("record insert failed, total dirty record:{} current row:{}", outDirtyRecords.getCount(), row.toString());
                 LOG.error("", e);
             }
             outDirtyRecords.inc();
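Collapsing the seven org.apache.kudu.client imports into a wildcard is cosmetic; the substantive changes are the two reworded dirty-record messages. The first writeRecord hunk guards rows whose arity does not match the declared field list. A minimal sketch of that guard, assuming a Tuple2<Boolean, Row> record layout, a String[] fieldNames schema, and a plain long counter (none of which this diff shows directly; this is not the project's actual KuduOutputFormat):

// Sketch of the arity guard in a writeRecord-style method. The Tuple2<Boolean, Row>
// layout, fieldNames array, frequency value, and counter are assumptions drawn from
// the hunk context, not the project's real implementation.
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.types.Row;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ArityGuardSketch {
    private static final Logger LOG = LoggerFactory.getLogger(ArityGuardSketch.class);
    private static final long DIRTY_PRINT_FREQUENCY = 1000; // assumed value
    private final String[] fieldNames;
    private long outDirtyRecords = 0;

    public ArityGuardSketch(String[] fieldNames) {
        this.fieldNames = fieldNames;
    }

    public void writeRecord(Tuple2<Boolean, Row> record) {
        Row row = record.getField(1);
        // A row whose arity differs from the declared field list cannot be mapped to
        // the Kudu schema: count it as dirty, log it at most once per frequency window,
        // and skip the write instead of failing the job.
        if (row.getArity() != fieldNames.length) {
            if (outDirtyRecords % DIRTY_PRINT_FREQUENCY == 0) {
                LOG.error("record insert failed:{}", row);
                LOG.error("cause by row.getArity() != fieldNames.length");
            }
            outDirtyRecords++;
            return;
        }
        // ... translate the row into a Kudu operation and apply it via the session ...
    }
}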
