Commit e2bba92

Handle hardcoded __HIVE_DEFAULT_PARTITION__ in common/FileUtils

1 parent 5def79f commit e2bba92

26 files changed (+120, -83 lines)

common/src/java/org/apache/hadoop/hive/common/FileUtils.java

Lines changed: 18 additions & 15 deletions

@@ -64,6 +64,7 @@
 import org.apache.hadoop.fs.PathIsDirectoryException;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.shims.Utils;
@@ -161,9 +162,9 @@ private FileUtils() {
     // prevent instantiation
   }
 
-
-  public static String makePartName(List<String> partCols, List<String> vals) {
-    return makePartName(partCols, vals, null);
+  public static String makePartName(List<String> partCols, List<String> vals, Map<String, String> tableParams,
+      Configuration conf) {
+    return makePartName(partCols, vals, null, tableParams, conf);
   }
 
   /**
@@ -175,15 +176,15 @@ public static String makePartName(List<String> partCols, List<String> vals) {
    * @return An escaped, valid partition name.
    */
   public static String makePartName(List<String> partCols, List<String> vals,
-      String defaultStr) {
+      String defaultStr, Map<String, String> tableParams, Configuration conf) {
     StringBuilder name = new StringBuilder();
     for (int i = 0; i < partCols.size(); i++) {
       if (i > 0) {
         name.append(Path.SEPARATOR);
       }
-      name.append(escapePathName((partCols.get(i)).toLowerCase(), defaultStr));
+      name.append(escapePathName((partCols.get(i)).toLowerCase(), defaultStr, tableParams, conf));
       name.append('=');
-      name.append(escapePathName(vals.get(i), defaultStr));
+      name.append(escapePathName(vals.get(i), defaultStr, tableParams, conf));
     }
     return name.toString();
   }
@@ -196,9 +197,9 @@ public static String makePartName(List<String> partCols, List<String> vals,
    * @return
    */
   public static String makeDefaultListBucketingDirName(List<String> skewedCols,
-      String name) {
+      String name, Map<String, String> tableParams, Configuration conf) {
     String lbDirName;
-    String defaultDir = FileUtils.escapePathName(name);
+    String defaultDir = FileUtils.escapePathName(name, tableParams, conf);
     StringBuilder defaultDirPath = new StringBuilder();
     for (int i = 0; i < skewedCols.size(); i++) {
       if (i > 0) {
@@ -216,15 +217,16 @@ public static String makeDefaultListBucketingDirName(List<String> skewedCols,
    * @param vals The skewed values
    * @return An escaped, valid list bucketing directory name.
    */
-  public static String makeListBucketingDirName(List<String> lbCols, List<String> vals) {
+  public static String makeListBucketingDirName(List<String> lbCols, List<String> vals, Map<String, String> tableParams,
+      Configuration conf) {
     StringBuilder name = new StringBuilder();
     for (int i = 0; i < lbCols.size(); i++) {
       if (i > 0) {
         name.append(Path.SEPARATOR);
       }
-      name.append(escapePathName((lbCols.get(i)).toLowerCase()));
+      name.append(escapePathName((lbCols.get(i)).toLowerCase(), tableParams, conf));
       name.append('=');
-      name.append(escapePathName(vals.get(i)));
+      name.append(escapePathName(vals.get(i), tableParams, conf));
     }
     return name.toString();
   }
@@ -276,8 +278,8 @@ static boolean needsEscaping(char c) {
     return c < charToEscape.size() && charToEscape.get(c);
   }
 
-  public static String escapePathName(String path) {
-    return escapePathName(path, null);
+  public static String escapePathName(String path, Map<String, String> tableParams, Configuration conf) {
+    return escapePathName(path, null, tableParams, conf);
   }
 
   /**
@@ -287,15 +289,16 @@ public static String escapePathName(String path) {
    *          The default name for the path, if the given path is empty or null.
    * @return An escaped path name.
    */
-  public static String escapePathName(String path, String defaultPath) {
+  public static String escapePathName(String path, String defaultPath, Map<String, String> tableParams,
+      Configuration conf) {
 
     // __HIVE_DEFAULT_NULL__ is the system default value for null and empty string.
     // TODO: we should allow user to specify default partition or HDFS file location.
     if (path == null || path.length() == 0) {
       if (defaultPath == null) {
         //previously, when path is empty or null and no default path is specified,
         // __HIVE_DEFAULT_PARTITION__ was the return value for escapePathName
-        return "__HIVE_DEFAULT_PARTITION__";
+        return MetaStoreUtils.getDefaultPartitionName(tableParams, conf);
       } else {
         return defaultPath;
       }
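
The heart of the change is the last hunk: instead of hardcoding __HIVE_DEFAULT_PARTITION__, escapePathName now asks MetaStoreUtils.getDefaultPartitionName(tableParams, conf) for the name, so a table-level parameter can override the configured default. A minimal sketch of the resolution order, assuming a hypothetical table property key (the real lookup lives in MetaStoreUtils, which this diff does not show):

import java.util.Map;
import org.apache.hadoop.conf.Configuration;

public final class DefaultPartitionNameSketch {
  // "default.partition.name" as a table property is an assumption for illustration.
  private static final String TABLE_PARAM_KEY = "default.partition.name";
  // hive.exec.default.partition.name is Hive's standard config for this value.
  private static final String CONF_KEY = "hive.exec.default.partition.name";
  private static final String FALLBACK = "__HIVE_DEFAULT_PARTITION__"; // the old hardcoded literal

  public static String getDefaultPartitionName(Map<String, String> tableParams, Configuration conf) {
    if (tableParams != null && tableParams.get(TABLE_PARAM_KEY) != null) {
      return tableParams.get(TABLE_PARAM_KEY); // a per-table override wins
    }
    if (conf != null) {
      return conf.get(CONF_KEY, FALLBACK); // else the session/cluster default
    }
    return FALLBACK; // with neither available, behave as before the patch
  }
}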

common/src/test/org/apache/hadoop/hive/common/TestFileUtils.java

Lines changed: 1 addition & 1 deletion

@@ -309,7 +309,7 @@ public void testPathEscapeChars() {
     StringBuilder sb = new StringBuilder();
     FileUtils.charToEscape.stream().forEach(integer -> sb.append((char) integer));
     String path = sb.toString();
-    assertEquals(path, FileUtils.unescapePathName(FileUtils.escapePathName(path)));
+    assertEquals(path, FileUtils.unescapePathName(FileUtils.escapePathName(path, null, new HiveConf())));
   }
 
   @Test
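
Call sites with no table in scope now pass null for tableParams plus a live Configuration, as the updated assertion shows. A hedged usage sketch of the same round-trip contract:

// Usage sketch of the new three-argument escapePathName; with null tableParams
// the default partition name is resolved from the HiveConf alone.
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.conf.HiveConf;

public class EscapeRoundTripSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    String original = "2024/01=value"; // '/' and '=' are among the escaped characters
    String escaped = FileUtils.escapePathName(original, null, conf);
    // escaping must round-trip through unescapePathName for any input
    if (!original.equals(FileUtils.unescapePathName(escaped))) {
      throw new AssertionError("round trip failed");
    }
  }
}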

hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java

Lines changed: 8 additions & 5 deletions

@@ -396,7 +396,8 @@ private Partition constructPartition(
         HdfsUtils.setFullFileStatus(conf, status, status.getFileStatus().getGroup(), fs,
             partPath, false);
       }
-      partPath = constructPartialPartPath(partPath, partKey.getName().toLowerCase(), partKVs);
+      partPath = constructPartialPartPath(partPath, partKey.getName().toLowerCase(), partKVs, table.getParameters(),
+          conf);
     }
   }
 
@@ -428,7 +429,8 @@ private String getFinalDynamicPartitionDestination(Table table, Map<String, Stri
     // file:///tmp/hcat_junit_warehouse/employee/_DYN0.7770480401313761/emp_country=IN/emp_state=KA ->
     // file:///tmp/hcat_junit_warehouse/employee/emp_country=IN/emp_state=KA
     for (FieldSchema partKey : table.getPartitionKeys()) {
-      partPath = constructPartialPartPath(partPath, partKey.getName().toLowerCase(), partKVs);
+      partPath = constructPartialPartPath(partPath, partKey.getName().toLowerCase(), partKVs, table.getParameters(),
+          conf);
     }
 
     return partPath.toString();
@@ -455,11 +457,12 @@ private Map<String, String> getStorerParameterMap(StorerInfo storer) {
     return params;
   }
 
-  private Path constructPartialPartPath(Path partialPath, String partKey, Map<String, String> partKVs) {
+  private Path constructPartialPartPath(Path partialPath, String partKey, Map<String, String> partKVs,
+      Map<String, String> tableParams, Configuration conf) {
 
-    StringBuilder sb = new StringBuilder(FileUtils.escapePathName(partKey));
+    StringBuilder sb = new StringBuilder(FileUtils.escapePathName(partKey, tableParams, conf));
     sb.append("=");
-    sb.append(FileUtils.escapePathName(partKVs.get(partKey)));
+    sb.append(FileUtils.escapePathName(partKVs.get(partKey), tableParams, conf));
     return new Path(partialPath, sb.toString());
   }
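
constructPartialPartPath appends one escaped key=value segment per partition column, which is how the _DYN staging path collapses to emp_country=IN/emp_state=KA in the comment above. A self-contained sketch of that composition against the public FileUtils API (helper and class names here are illustrative, not from the diff):

// Sketch: compose partition key/value pairs into a relative partition path,
// threading tableParams/conf through so null or empty values map to the
// table's default partition name rather than a hardcoded literal.
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;

public class PartPathSketch {
  static Path appendSegments(Path base, Map<String, String> partKVs,
      Map<String, String> tableParams, Configuration conf) {
    for (Map.Entry<String, String> e : partKVs.entrySet()) {
      String segment = FileUtils.escapePathName(e.getKey().toLowerCase(), tableParams, conf)
          + "=" + FileUtils.escapePathName(e.getValue(), tableParams, conf);
      base = new Path(base, segment); // one directory level per partition column
    }
    return base;
  }

  public static void main(String[] args) {
    Map<String, String> kvs = new LinkedHashMap<>(); // preserves column order
    kvs.put("emp_country", "IN");
    kvs.put("emp_state", "KA");
    Path p = appendSegments(new Path("file:///tmp/warehouse/employee"), kvs, null, new Configuration());
    System.out.println(p); // file:/tmp/warehouse/employee/emp_country=IN/emp_state=KA
  }
}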

hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java

Lines changed: 3 additions & 1 deletion

@@ -184,7 +184,9 @@ public void configureOutputJobProperties(TableDesc tableDesc,
         cols.add(name);
         values.add(value);
       }
-      outputLocation = FileUtils.makePartName(cols, values);
+      outputLocation = FileUtils.makePartName(cols, values, (jobInfo.getTableInfo() != null &&
+          jobInfo.getTableInfo().getTable() != null) ? jobInfo.getTableInfo().getTable().getParameters() : null,
+          conf);
     }
 
     if (outputLocation!= null && !outputLocation.isEmpty()){
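
The inline guard here (table info or table may be null for foster-storage jobs) is the pattern most call sites in this commit repeat. A tiny hypothetical helper showing the same null-safe extraction, were one to factor it out:

// Hypothetical helper, not part of the commit: return the table's parameter
// map when both levels are present, else null so FileUtils falls back to the
// configuration-driven default partition name.
import java.util.Map;
import org.apache.hive.hcatalog.mapreduce.OutputJobInfo;

final class TableParamsSketch {
  static Map<String, String> tableParamsOrNull(OutputJobInfo jobInfo) {
    return (jobInfo.getTableInfo() != null && jobInfo.getTableInfo().getTable() != null)
        ? jobInfo.getTableInfo().getTable().getParameters()
        : null;
  }
}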

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/lock/show/ShowLocksOperation.java

Lines changed: 3 additions & 1 deletion

@@ -44,6 +44,7 @@
 import org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
@@ -175,7 +176,8 @@ private ShowLocksResponse getLocksForNewFormat(HiveLockManager lockMgr) throws H
       keyList.add(partKey);
       valList.add(partVal);
     }
-    String partName = FileUtils.makePartName(keyList, valList);
+    String partName = FileUtils.makePartName(keyList, valList, Hive.get(context.getConf()).getTable(
+        request.getDbname(), request.getTablename()).getParameters(), context.getConf());
     request.setPartname(partName);
   }
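
makePartName consumes parallel key and value lists, so the lock request's partition spec is unzipped first; the new arguments mean the operation also fetches the table through the Hive client to read its parameters. A hedged sketch of the conversion (method name and shape are illustrative):

// Sketch: partition spec map -> parallel lists -> partition name, resolving
// table parameters via the Hive metastore client as the hunk above does.
// Note the extra table lookup this adds per SHOW LOCKS request.
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

final class PartNameSketch {
  static String partName(Map<String, String> partSpec, String db, String table, HiveConf conf)
      throws HiveException {
    List<String> keys = new ArrayList<>(partSpec.keySet());   // e.g. [ds, hr]
    List<String> vals = new ArrayList<>(partSpec.values());   // e.g. [2008-04-08, 11]
    Map<String, String> tableParams = Hive.get(conf).getTable(db, table).getParameters();
    return FileUtils.makePartName(keys, vals, tableParams, conf); // ds=2008-04-08/hr=11
  }
}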

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/truncate/TruncateTableDesc.java

Lines changed: 4 additions & 0 deletions

@@ -76,6 +76,10 @@ public String getTableName() {
     return tableName.getNotEmptyDbTable();
   }
 
+  public TableName getTableNameObject() {
+    return tableName;
+  }
+
   @Override
   public String getFullTableName() {
     return tableName.getNotEmptyDbTable();

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/truncate/TruncateTableOperation.java

Lines changed: 3 additions & 1 deletion

@@ -70,7 +70,9 @@ public int execute() throws HiveException {
       // or the existing table is newer than our update.
       LOG.debug("DDLTask: Truncate Table/Partition is skipped as table {} / partition {} is newer than update",
           tableName, (partSpec == null) ?
-          "null" : FileUtils.makePartName(new ArrayList<>(partSpec.keySet()), new ArrayList<>(partSpec.values())));
+          "null" : FileUtils.makePartName(new ArrayList<>(partSpec.keySet()), new ArrayList<>(partSpec.values()),
+          context.getDb().getTable(desc.getTableNameObject().getDb(), desc.getTableNameObject().getTable()).
+          getParameters(), context.getConf()));
       return 0;
     }

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/partition/rename/AlterTableRenamePartitionOperation.java

Lines changed: 4 additions & 3 deletions

@@ -49,11 +49,13 @@ public int execute() throws HiveException {
     Map<String, String> oldPartSpec = desc.getOldPartSpec();
     ReplicationSpec replicationSpec = desc.getReplicationSpec();
 
+    Table tbl = context.getDb().getTable(tableName);
     if (!AlterTableUtils.allowOperationInReplicationScope(context.getDb(), tableName, oldPartSpec, replicationSpec)) {
       // no rename, the table is missing either due to drop/rename which follows the current rename.
       // or the existing table is newer than our update.
       LOG.debug("DDLTask: Rename Partition is skipped as table {} / partition {} is newer than update", tableName,
-          FileUtils.makePartName(new ArrayList<>(oldPartSpec.keySet()), new ArrayList<>(oldPartSpec.values())));
+          FileUtils.makePartName(new ArrayList<>(oldPartSpec.keySet()), new ArrayList<>(oldPartSpec.values()),
+              tbl.getParameters(), context.getConf()));
       return 0;
     }
 
@@ -62,11 +64,10 @@ public int execute() throws HiveException {
       throw new HiveException("Rename Partition: Not allowed as bootstrap dump in progress");
     }
 
-    Table tbl = context.getDb().getTable(tableName);
     Partition oldPart = context.getDb().getPartition(tbl, oldPartSpec, false);
     if (oldPart == null) {
       String partName = FileUtils.makePartName(new ArrayList<String>(oldPartSpec.keySet()),
-          new ArrayList<String>(oldPartSpec.values()));
+          new ArrayList<String>(oldPartSpec.values()), tbl.getParameters(), context.getConf());
       throw new HiveException("Rename partition: source partition [" + partName + "] does not exist.");
     }

ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java

Lines changed: 5 additions & 3 deletions

@@ -1371,7 +1371,8 @@ protected String generateListBucketingDirName(Object row) {
       /* The row matches skewed column names. */
       if (allSkewedVals.contains(skewedValsCandidate)) {
         /* matches skewed values. */
-        lbDirName = FileUtils.makeListBucketingDirName(skewedCols, skewedValsCandidate);
+        lbDirName = FileUtils.makeListBucketingDirName(skewedCols, skewedValsCandidate, conf.getTable() != null ?
+            conf.getTable().getParameters() : null, hconf);
         locationMap.put(skewedValsCandidate, lbDirName);
       } else {
         lbDirName = createDefaultLbDir(skewedCols, locationMap);
@@ -1386,7 +1387,7 @@ private String createDefaultLbDir(List<String> skewedCols,
       Map<List<String>, String> locationMap) {
     String lbDirName;
     lbDirName = FileUtils.makeDefaultListBucketingDirName(skewedCols,
-        lbCtx.getDefaultDirName());
+        lbCtx.getDefaultDirName(), conf.getTable() != null ? conf.getTable().getParameters() : null, hconf);
     List<String> defaultKey = Lists.newArrayList(lbCtx.getDefaultKey());
     if (!locationMap.containsKey(defaultKey)) {
       locationMap.put(defaultKey, lbDirName);
@@ -1474,7 +1475,8 @@ protected FSPaths getDynOutPaths(List<String> row, String lbDir) throws HiveExce
   // return the relative path corresponding to the row.
   // e.g., ds=2008-04-08/hr=11
   private String getDynPartDirectory(List<String> row, List<String> dpColNames) {
-    return FileUtils.makePartName(dpColNames, row);
+    return FileUtils.makePartName(dpColNames, row, conf.getTable() != null ?
+        conf.getTable().getParameters() : null, hconf);
   }
 
   @Override
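
getDynPartDirectory maps one row's dynamic-partition values to the relative directory noted in the comment (ds=2008-04-08/hr=11); a null or empty value in the row is exactly where the configurable default partition name now takes effect. A hedged worked example:

// Worked example for the makePartName contract used above. With null
// tableParams, a null value falls back to the name MetaStoreUtils resolves
// from the HiveConf (__HIVE_DEFAULT_PARTITION__ unless reconfigured).
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.conf.HiveConf;

public class DynPartDirSketch {
  public static void main(String[] args) {
    List<String> cols = Arrays.asList("ds", "hr");
    HiveConf conf = new HiveConf();
    // Fully specified row: ds=2008-04-08/hr=11
    System.out.println(FileUtils.makePartName(cols, Arrays.asList("2008-04-08", "11"), null, conf));
    // Null hour: ds=2008-04-08/hr=<default partition name>
    System.out.println(FileUtils.makePartName(cols, Arrays.asList("2008-04-08", null), null, conf));
  }
}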

ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java

Lines changed: 1 addition & 1 deletion

@@ -295,7 +295,7 @@ protected void flushToFile() throws IOException, HiveException {
     }
     // get current input file name
     String bigBucketFileName = getExecContext().getCurrentBigBucketFile();
-    String fileName = getExecContext().getLocalWork().getBucketFileName(bigBucketFileName);
+    String fileName = getExecContext().getLocalWork().getBucketFileName(bigBucketFileName, hconf);
     // get the tmp URI path; it will be a hdfs path if not local mode
     // TODO [MM gap?]: this doesn't work, however this is MR only.
     // The path for writer and reader mismatch:
