Skip to content

Commit 38d2917

Browse files
Handle hardcoded __HIVE_DEFAULT_PARTITION__ in metastore-common/FileUtils
1 parent e2bba92 commit 38d2917

File tree

69 files changed

+404
-273
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

69 files changed

+404
-273
lines changed

hcatalog/server-extensions/src/main/java/org/apache/hive/hcatalog/listener/DbNotificationListener.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -410,7 +410,8 @@ public PartitionFiles next() {
410410
fileIterator = Collections.emptyIterator();
411411
}
412412
PartitionFiles partitionFiles =
413-
new PartitionFiles(Warehouse.makePartName(t.getPartitionKeys(), p.getValues()), fileIterator);
413+
new PartitionFiles(Warehouse.makePartName(t.getPartitionKeys(), p.getValues(), t.getParameters(), conf),
414+
fileIterator);
414415
return partitionFiles;
415416
} catch (MetaException e) {
416417
throw new RuntimeException(e);

hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,8 @@ Partition toHivePartition() throws HCatException {
138138
if (sd.getLocation() == null) {
139139
LOG.warn("Partition location is not set! Attempting to construct default partition location.");
140140
try {
141-
String partName = Warehouse.makePartName(HCatSchemaUtils.getFieldSchemas(hcatTable.getPartCols()), values);
141+
String partName = Warehouse.makePartName(HCatSchemaUtils.getFieldSchemas(hcatTable.getPartCols()), values,
142+
hcatTable.getTblProps(), hcatTable.getConf());
142143
sd.setLocation(new Path(hcatTable.getSd().getLocation(), partName).toString());
143144
}
144145
catch(MetaException exception) {

hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,12 @@ public static String fixPath(String path) {
156156
}
157157

158158
public static String makePartLocation(HCatTable table, Map<String, String> partitionSpec) throws MetaException {
159-
return (new Path(table.getSd().getLocation(), Warehouse.makePartPath(partitionSpec))).toUri().toString();
159+
try {
160+
return (new Path(table.getSd().getLocation(), Warehouse.makePartPath(partitionSpec,
161+
table.toHiveTable().getParameters(), getConf()))).toUri().toString();
162+
} catch (HCatException e) {
163+
throw new RuntimeException(e);
164+
}
160165
}
161166

162167
@Test

iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,6 @@
7878
import org.apache.hadoop.hive.ql.ddl.table.create.CreateTableDesc;
7979
import org.apache.hadoop.hive.ql.ddl.table.create.like.CreateTableLikeDesc;
8080
import org.apache.hadoop.hive.ql.ddl.table.misc.properties.AlterTableSetPropertiesDesc;
81-
import org.apache.hadoop.hive.ql.ddl.table.partition.PartitionUtils;
8281
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
8382
import org.apache.hadoop.hive.ql.exec.Utilities;
8483
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
@@ -877,8 +876,7 @@ public DynamicPartitionCtx createDPContext(
877876
Table table = IcebergTableUtil.getTable(conf, tableDesc.getProperties());
878877

879878
DynamicPartitionCtx dpCtx = new DynamicPartitionCtx(Maps.newLinkedHashMap(),
880-
PartitionUtils.getDefaultPartitionName(hmsTable.getParameters(), hiveConf),
881-
hiveConf.getIntVar(ConfVars.DYNAMIC_PARTITION_MAX_PARTS_PER_NODE));
879+
hiveConf.getIntVar(ConfVars.DYNAMIC_PARTITION_MAX_PARTS_PER_NODE), hmsTable.getParameters(), hiveConf);
882880

883881
if (table.spec().isPartitioned() &&
884882
hiveConf.getIntVar(ConfVars.HIVE_OPT_SORT_DYNAMIC_PARTITION_THRESHOLD) >= 0) {
@@ -2010,7 +2008,7 @@ public Partition getPartition(org.apache.hadoop.hive.ql.metadata.Table table,
20102008
Map<String, String> partitionSpec, RewritePolicy policy) throws SemanticException {
20112009
validatePartSpec(table, partitionSpec, policy);
20122010
try {
2013-
String partName = Warehouse.makePartName(partitionSpec, false);
2011+
String partName = Warehouse.makePartName(partitionSpec, false, table.getParameters(), conf);
20142012
return new DummyPartition(table, partName, partitionSpec);
20152013
} catch (MetaException e) {
20162014
throw new SemanticException("Unable to construct name for dummy partition due to: ", e);

iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveTableUtil.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -124,7 +124,8 @@ public static void importFiles(String sourceLocation,
124124
Partition partition = partitionIterator.next();
125125
Callable<Void> task = () -> {
126126
Path partitionPath = new Path(partition.getSd().getLocation());
127-
String partitionName = Warehouse.makePartName(partitionKeys, partition.getValues());
127+
String partitionName = Warehouse.makePartName(partitionKeys, partition.getValues(),
128+
icebergTable.properties(), conf);
128129
Map<String, String> partitionSpec = Warehouse.makeSpecFromName(partitionName);
129130
RemoteIterator<LocatedFileStatus> iterator = getFilesIterator(partitionPath, conf);
130131
List<DataFile> dataFiles =

itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -197,15 +197,15 @@ public void testRecyclePartTable() throws Exception {
197197
Partition part3 = createPartition(dbName, tblName, columns, values, serdeInfo);
198198
client.add_partition(part3);
199199

200-
Path part1Path = new Path(warehouse.getDefaultPartitionPath(db, tbl, ImmutableMap.of("dt", "20160101")), "part");
200+
Path part1Path = new Path(warehouse.getDefaultPartitionPath(db, tbl, ImmutableMap.of("dt", "20160101"), hiveConf), "part");
201201
createFile(part1Path, "p1");
202202
String path1Chksum = ReplChangeManager.checksumFor(part1Path, fs);
203203

204-
Path part2Path = new Path(warehouse.getDefaultPartitionPath(db, tbl, ImmutableMap.of("dt", "20160102")), "part");
204+
Path part2Path = new Path(warehouse.getDefaultPartitionPath(db, tbl, ImmutableMap.of("dt", "20160102"), hiveConf), "part");
205205
createFile(part2Path, "p2");
206206
String path2Chksum = ReplChangeManager.checksumFor(part2Path, fs);
207207

208-
Path part3Path = new Path(warehouse.getDefaultPartitionPath(db, tbl, ImmutableMap.of("dt", "20160103")), "part");
208+
Path part3Path = new Path(warehouse.getDefaultPartitionPath(db, tbl, ImmutableMap.of("dt", "20160103"), hiveConf), "part");
209209
createFile(part3Path, "p3");
210210
String path3Chksum = ReplChangeManager.checksumFor(part3Path, fs);
211211

itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcidTables.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4320,7 +4320,7 @@ public void testAcidTablesBootstrapWithMajorCompaction() throws Throwable {
43204320
List<Partition> partList = primary.getAllPartitions(primaryDbName, tableNamepart);
43214321
for (Partition part : partList) {
43224322
Table tbl = primary.getTable(primaryDbName, tableNamepart);
4323-
String partName = Warehouse.makePartName(tbl.getPartitionKeys(), part.getValues());
4323+
String partName = Warehouse.makePartName(tbl.getPartitionKeys(), part.getValues(), tbl.getParameters(), conf);
43244324
runCompaction(primaryDbName, tableNamepart, partName, CompactionType.MAJOR);
43254325
}
43264326

@@ -4377,7 +4377,7 @@ public void testAcidTablesBootstrapWithMinorCompaction() throws Throwable {
43774377
List<Partition> partList = primary.getAllPartitions(primaryDbName, tableNamepart);
43784378
for (Partition part : partList) {
43794379
Table tbl = primary.getTable(primaryDbName, tableNamepart);
4380-
String partName = Warehouse.makePartName(tbl.getPartitionKeys(), part.getValues());
4380+
String partName = Warehouse.makePartName(tbl.getPartitionKeys(), part.getValues(), tbl.getParameters(), conf);
43814381
runCompaction(primaryDbName, tableNamepart, partName, CompactionType.MINOR);
43824382
}
43834383
List<String> withClause = Arrays.asList(

itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestStatsReplicationScenarios.java

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -191,14 +191,15 @@ private void verifyReplicatedStatsForPartitionsOfTable(String tableName)
191191
return;
192192
}
193193

194-
List<FieldSchema> partKeys = primary.getTable(primaryDbName, tableName).getPartitionKeys();
194+
Table table = primary.getTable(primaryDbName, tableName);
195+
List<FieldSchema> partKeys = table.getPartitionKeys();
195196
for (Partition pPart : pParts) {
196197
Partition rPart = replica.getPartition(replicatedDbName, tableName,
197198
pPart.getValues());
198199

199200
Map<String, String> rParams = collectStatsParams(rPart.getParameters());
200201
Map<String, String> pParams = collectStatsParams(pPart.getParameters());
201-
String partName = Warehouse.makePartName(partKeys, pPart.getValues());
202+
String partName = Warehouse.makePartName(partKeys, pPart.getValues(), table.getParameters(), conf);
202203
Assert.assertEquals("Mismatch in stats parameters for partition " + partName + " of table " + tableName,
203204
pParams, rParams);
204205

ql/src/java/org/apache/hadoop/hive/ql/ddl/process/show/compactions/ShowCompactionsOperation.java

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,12 @@ private ShowCompactRequest getShowCompactioRequest(ShowCompactionsDesc desc) thr
9797
request.setState(compactionStateStr2Enum(desc.getCompactionStatus()).getSqlConst());
9898
}
9999
if (isNotEmpty(desc.getPartSpec())) {
100-
request.setPartName(AcidUtils.getPartitionName(desc.getPartSpec()));
100+
try {
101+
request.setPartName(AcidUtils.getPartitionName(desc.getPartSpec(), context.getDb().getTable(
102+
request.getDbName(), request.getTbName()).getParameters(), context.getConf()));
103+
} catch (HiveException e) {
104+
throw new RuntimeException(e);
105+
}
101106
}
102107
if(desc.getCompactionId()>0){
103108
request.setId(desc.getCompactionId());

ql/src/java/org/apache/hadoop/hive/ql/ddl/table/misc/columnstats/AlterTableUpdateColumnStatistictAnalyzer.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ protected void analyzeCommand(TableName tableName, Map<String, String> partition
5151
String columnName = getUnescapedName((ASTNode) command.getChild(0));
5252
Map<String, String> properties = getProps((ASTNode) (command.getChild(1)).getChild(0));
5353

54-
String partitionName = AcidUtils.getPartitionName(partitionSpec);
54+
String partitionName = AcidUtils.getPartitionName(partitionSpec, table.getParameters(), conf);
5555
String columnType = getColumnType(table, columnName);
5656

5757
ColumnStatsUpdateWork work = new ColumnStatsUpdateWork(partitionName, properties, table.getDbName(),

0 commit comments

Comments (0)