Skip to content

Commit f753d25

Browse files
authored
HIVE-28135: Remove HiveIOExceptionHandlerUtil (#5163)
1 parent d89f7c5 commit f753d25

File tree

11 files changed

+63
-428
lines changed

11 files changed

+63
-428
lines changed

common/src/java/org/apache/hadoop/hive/conf/HiveConf.java

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3736,10 +3736,6 @@ public static enum ConfVars {
37363736
HIVE_REWORK_MAPREDWORK("hive.rework.mapredwork", false,
37373737
"should rework the mapred work or not.\n" +
37383738
"This is first introduced by SymlinkTextInputFormat to replace symlink files with real paths at compile time."),
3739-
HIVE_IO_EXCEPTION_HANDLERS("hive.io.exception.handlers", "",
3740-
"A list of io exception handler class names. This is used\n" +
3741-
"to construct a list exception handlers to handle exceptions thrown\n" +
3742-
"by record readers"),
37433739

37443740
// logging configuration
37453741
HIVE_LOG4J_FILE("hive.log4j.file", "",

ql/src/java/org/apache/hadoop/hive/ql/io/BucketizedHiveRecordReader.java

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020

2121
import java.io.IOException;
2222

23-
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
2423
import org.apache.hadoop.io.Writable;
2524
import org.apache.hadoop.io.WritableComparable;
2625
import org.apache.hadoop.mapred.FileSplit;
@@ -118,12 +117,7 @@ protected boolean initNextRecordReader() throws IOException {
118117
}
119118

120119
// get a record reader for the idx-th chunk
121-
try {
122-
recordReader = inputFormat.getRecordReader(split.getSplit(idx), jobConf,
123-
reporter);
124-
} catch (Exception e) {
125-
recordReader = HiveIOExceptionHandlerUtil.handleRecordReaderCreationException(e, jobConf);
126-
}
120+
recordReader = inputFormat.getRecordReader(split.getSplit(idx), jobConf, reporter);
127121

128122
// if we're performing a binary search, we need to restart it
129123
if (isSorted) {

ql/src/java/org/apache/hadoop/hive/ql/io/HiveContextAwareRecordReader.java

Lines changed: 48 additions & 54 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,6 @@
2929
import org.slf4j.LoggerFactory;
3030
import org.apache.hadoop.fs.FileSystem;
3131
import org.apache.hadoop.fs.Path;
32-
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
3332
import org.apache.hadoop.hive.ql.exec.Utilities;
3433
import org.apache.hadoop.hive.ql.exec.FooterBuffer;
3534
import org.apache.hadoop.hive.ql.io.IOContext.Comparison;
@@ -320,65 +319,60 @@ public boolean doNext(K key, V value) throws IOException {
320319
}
321320
}
322321

323-
try {
324-
325-
/**
326-
* When start reading new file, check header, footer rows.
327-
* If file contains header, skip header lines before reading the records.
328-
* If file contains footer, used a FooterBuffer to remove footer lines
329-
* at the end of the table file.
330-
**/
331-
if (this.ioCxtRef.getCurrentBlockStart() == 0) {
332-
333-
// Check if the table file has header to skip.
334-
footerBuffer = null;
335-
Path filePath = this.ioCxtRef.getInputPath();
336-
PartitionDesc part = null;
337-
try {
338-
if (pathToPartitionInfo == null) {
339-
pathToPartitionInfo = Utilities
340-
.getMapWork(jobConf).getPathToPartitionInfo();
341-
}
342-
part = HiveFileFormatUtils
343-
.getFromPathRecursively(pathToPartitionInfo,
344-
filePath, IOPrepareCache.get().getPartitionDescMap());
345-
} catch (AssertionError ae) {
346-
LOG.info("Cannot get partition description from " + this.ioCxtRef.getInputPath()
347-
+ "because " + ae.getMessage());
348-
part = null;
349-
} catch (Exception e) {
350-
LOG.info("Cannot get partition description from " + this.ioCxtRef.getInputPath()
351-
+ "because " + e.getMessage());
352-
part = null;
353-
}
354-
TableDesc table = (part == null) ? null : part.getTableDesc();
355-
// In TextFormat, skipping is already taken care of as part of SkippingTextInputFormat.
356-
// This code will be also called from LLAP when pipeline is non-vectorized and cannot create wrapper.
357-
if (table != null && !TextInputFormat.class.isAssignableFrom(part.getInputFileFormatClass())) {
358-
headerCount = Utilities.getHeaderCount(table);
359-
footerCount = Utilities.getFooterCount(table, jobConf);
322+
/**
323+
* When start reading new file, check header, footer rows.
324+
* If file contains header, skip header lines before reading the records.
325+
* If file contains footer, used a FooterBuffer to remove footer lines
326+
* at the end of the table file.
327+
**/
328+
if (this.ioCxtRef.getCurrentBlockStart() == 0) {
329+
330+
// Check if the table file has header to skip.
331+
footerBuffer = null;
332+
Path filePath = this.ioCxtRef.getInputPath();
333+
PartitionDesc part = null;
334+
try {
335+
if (pathToPartitionInfo == null) {
336+
pathToPartitionInfo = Utilities
337+
.getMapWork(jobConf).getPathToPartitionInfo();
360338
}
339+
part = HiveFileFormatUtils
340+
.getFromPathRecursively(pathToPartitionInfo,
341+
filePath, IOPrepareCache.get().getPartitionDescMap());
342+
} catch (AssertionError ae) {
343+
LOG.info("Cannot get partition description from " + this.ioCxtRef.getInputPath()
344+
+ "because " + ae.getMessage());
345+
part = null;
346+
} catch (Exception e) {
347+
LOG.info("Cannot get partition description from " + this.ioCxtRef.getInputPath()
348+
+ "because " + e.getMessage());
349+
part = null;
350+
}
351+
TableDesc table = (part == null) ? null : part.getTableDesc();
352+
// In TextFormat, skipping is already taken care of as part of SkippingTextInputFormat.
353+
// This code will be also called from LLAP when pipeline is non-vectorized and cannot create wrapper.
354+
if (table != null && !TextInputFormat.class.isAssignableFrom(part.getInputFileFormatClass())) {
355+
headerCount = Utilities.getHeaderCount(table);
356+
footerCount = Utilities.getFooterCount(table, jobConf);
357+
}
361358

362-
// If input contains header, skip header.
363-
if (!Utilities.skipHeader(recordReader, headerCount, key, value)) {
359+
// If input contains header, skip header.
360+
if (!Utilities.skipHeader(recordReader, headerCount, key, value)) {
361+
return false;
362+
}
363+
if (footerCount > 0) {
364+
footerBuffer = new FooterBuffer();
365+
if (!footerBuffer.initializeBuffer(jobConf, recordReader, footerCount, key, value)) {
364366
return false;
365367
}
366-
if (footerCount > 0) {
367-
footerBuffer = new FooterBuffer();
368-
if (!footerBuffer.initializeBuffer(jobConf, recordReader, footerCount, key, value)) {
369-
return false;
370-
}
371-
}
372368
}
373-
if (footerBuffer == null) {
369+
}
370+
if (footerBuffer == null) {
374371

375-
// Table files don't have footer rows.
376-
return recordReader.next(key, value);
377-
} else {
378-
return footerBuffer.updateBuffer(jobConf, recordReader, key, value);
379-
}
380-
} catch (Exception e) {
381-
return HiveIOExceptionHandlerUtil.handleRecordReaderNextException(e, jobConf);
372+
// Table files don't have footer rows.
373+
return recordReader.next(key, value);
374+
} else {
375+
return footerBuffer.updateBuffer(jobConf, recordReader, key, value);
382376
}
383377
}
384378

ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,6 @@
3434
import org.apache.hadoop.hive.common.type.TimestampTZUtil;
3535
import org.apache.hadoop.hive.conf.HiveConf;
3636
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
37-
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
3837
import org.apache.hadoop.hive.llap.io.api.LlapIo;
3938
import org.apache.hadoop.hive.llap.io.api.LlapProxy;
4039
import org.apache.hadoop.hive.ql.exec.LimitOperator;
@@ -475,8 +474,7 @@ public RecordReader getRecordReader(InputSplit split, JobConf job,
475474
LOG.info("Ignoring exception while getting record reader as limit is reached", rootCause);
476475
innerReader = new NullRowsRecordReader(job, split);
477476
} else {
478-
innerReader = HiveIOExceptionHandlerUtil
479-
.handleRecordReaderCreationException(e, job);
477+
throw new IOException("Exception caught while creating the inner reader", e);
480478
}
481479
}
482480
HiveRecordReader<K,V> rr = new HiveRecordReader(innerReader, job);

ql/src/java/org/apache/hadoop/hive/ql/io/SymlinkTextInputFormat.java

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,6 @@
3131
import org.apache.hadoop.fs.FileSystem;
3232
import org.apache.hadoop.fs.Path;
3333
import org.apache.hadoop.hive.common.FileUtils;
34-
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
3534
import org.apache.hadoop.io.LongWritable;
3635
import org.apache.hadoop.io.Text;
3736
import org.apache.hadoop.mapred.FileInputFormat;
@@ -102,14 +101,7 @@ public RecordReader<LongWritable, Text> getRecordReader(
102101
// The target data is in TextInputFormat.
103102
TextInputFormat inputFormat = new TextInputFormat();
104103
inputFormat.configure(job);
105-
RecordReader innerReader = null;
106-
try {
107-
innerReader = inputFormat.getRecordReader(targetSplit, job,
108-
reporter);
109-
} catch (Exception e) {
110-
innerReader = HiveIOExceptionHandlerUtil
111-
.handleRecordReaderCreationException(e, job);
112-
}
104+
RecordReader innerReader = inputFormat.getRecordReader(targetSplit, job, reporter);
113105
HiveRecordReader rr = new HiveRecordReader(innerReader, job);
114106
rr.initIOContext((FileSplit)targetSplit, job, TextInputFormat.class, innerReader);
115107
return rr;

ql/src/test/results/clientnegative/serde_regex2.q.out

Lines changed: 8 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -61,21 +61,17 @@ PREHOOK: type: QUERY
6161
PREHOOK: Input: default@serde_regex
6262
#### A masked pattern was here ####
6363
Status: Failed
64-
Vertex failed, vertexName=Map 1, vertexId=vertex_#ID#, diagnostics=[Task failed, taskId=task_#ID#, diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task ( failure ) : attempt_#ID#:java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
64+
Vertex failed, vertexName=Map 1, vertexId=vertex_#ID#, diagnostics=[Task failed, taskId=task_#ID#, diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task ( failure ) : attempt_#ID#:java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
6565
#### A masked pattern was here ####
66-
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
67-
#### A masked pattern was here ####
68-
Caused by: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
66+
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
6967
#### A masked pattern was here ####
7068
Caused by: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
7169
#### A masked pattern was here ####
7270
Caused by: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
7371
#### A masked pattern was here ####
74-
], TaskAttempt 1 failed, info=[Error: Error while running task ( failure ) : attempt_#ID#:java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
75-
#### A masked pattern was here ####
76-
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
72+
], TaskAttempt 1 failed, info=[Error: Error while running task ( failure ) : attempt_#ID#:java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
7773
#### A masked pattern was here ####
78-
Caused by: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
74+
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
7975
#### A masked pattern was here ####
8076
Caused by: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
8177
#### A masked pattern was here ####
@@ -84,21 +80,17 @@ Caused by: org.apache.hadoop.hive.serde2.SerDeException: Number of matching grou
8480
]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1 killedTasks:0, Vertex vertex_#ID# [Map 1] killed/failed due to:OWN_TASK_FAILURE]
8581
[Masked Vertex killed due to OTHER_VERTEX_FAILURE]
8682
DAG did not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:1
87-
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.tez.TezTask. Vertex failed, vertexName=Map 1, vertexId=vertex_#ID#, diagnostics=[Task failed, taskId=task_#ID#, diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task ( failure ) : attempt_#ID#:java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
83+
FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.tez.TezTask. Vertex failed, vertexName=Map 1, vertexId=vertex_#ID#, diagnostics=[Task failed, taskId=task_#ID#, diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task ( failure ) : attempt_#ID#:java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
8884
#### A masked pattern was here ####
89-
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
90-
#### A masked pattern was here ####
91-
Caused by: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
85+
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
9286
#### A masked pattern was here ####
9387
Caused by: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
9488
#### A masked pattern was here ####
9589
Caused by: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
9690
#### A masked pattern was here ####
97-
], TaskAttempt 1 failed, info=[Error: Error while running task ( failure ) : attempt_#ID#:java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
98-
#### A masked pattern was here ####
99-
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
91+
], TaskAttempt 1 failed, info=[Error: Error while running task ( failure ) : attempt_#ID#:java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
10092
#### A masked pattern was here ####
101-
Caused by: java.io.IOException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
93+
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
10294
#### A masked pattern was here ####
10395
Caused by: java.io.IOException: org.apache.hadoop.hive.serde2.SerDeException: Number of matching groups doesn't match the number of columns
10496
#### A masked pattern was here ####

shims/common/src/main/java/org/apache/hadoop/hive/io/HiveIOExceptionHandler.java

Lines changed: 0 additions & 52 deletions
This file was deleted.

0 commit comments

Comments (0)