Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -374,7 +374,7 @@ private List<String> getTableData(String table, String database) throws Exceptio
Hive hive = Hive.get(conf);
org.apache.hadoop.hive.ql.metadata.Table tbl = hive.getTable(database, table);
FetchWork work;
if (!tbl.getPartCols().isEmpty()) {
if (!tbl.getPartCols(true).isEmpty()) {
List<Partition> partitions = hive.getPartitions(tbl);
List<PartitionDesc> partDesc = new ArrayList<PartitionDesc>();
List<Path> partLocs = new ArrayList<Path>();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ public static boolean isSchemaEvolutionEnabled(Table table, Configuration conf)
}

public static boolean isFullPartitionSpec(Table table, Map<String, String> partitionSpec) {
for (FieldSchema partitionCol : table.getPartCols()) {
for (FieldSchema partitionCol : table.getPartCols(true)) {
if (partitionSpec.get(partitionCol.getName()) == null) {
return false;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,9 +66,7 @@ private List<FieldSchema> getColumnsByPattern() throws HiveException {

/**
 * Resolves the target table named in the descriptor and returns its complete
 * column list — data columns plus partition columns — as supplied by
 * {@code Table#getAllCols()}.
 *
 * Fix: the span previously contained both the old implementation (separate
 * addAll of getCols() and getPartCols()) and its replacement, leaving two
 * conflicting declarations of {@code allColumns}; only the consolidated
 * getAllCols()-based form is kept.
 *
 * @return a mutable copy of all columns of the table
 * @throws HiveException if the table cannot be resolved from the metastore
 */
private List<FieldSchema> getCols() throws HiveException {
  Table table = context.getDb().getTable(desc.getTableName());
  // Defensive copy so callers may mutate the result without touching
  // the Table object's internal list.
  return new ArrayList<>(table.getAllCols());
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ private Table createViewLikeTable(Table oldTable) throws HiveException {
setUserSpecifiedLocation(table);

table.setFields(oldTable.getCols());
table.setPartCols(oldTable.getPartCols());
table.setPartCols(oldTable.getPartCols(true));

if (desc.getDefaultSerdeProps() != null) {
for (Map.Entry<String, String> e : desc.getDefaultSerdeProps().entrySet()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ private void getColumnsNoColumnPath(Table table, Partition partition, List<Field
cols.addAll(partition == null || table.getTableType() == TableType.VIRTUAL_VIEW ?
table.getCols() : partition.getCols());
if (!desc.isFormatted()) {
cols.addAll(table.getPartCols());
cols.addAll(table.getPartCols(false));
}

// Fetch partition statistics only for describe extended or formatted.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -174,9 +174,7 @@ private void addPartitionData(DataOutputStream out, HiveConf conf, String column
List<FieldSchema> partitionColumns = null;
// TODO (HIVE-29413): Refactor to a generic getPartCols() implementation
if (table.isPartitioned()) {
partitionColumns = table.hasNonNativePartitionSupport() ?
table.getStorageHandler().getPartitionKeys(table) :
table.getPartCols();
partitionColumns = table.getPartCols(true);
}
if (CollectionUtils.isNotEmpty(partitionColumns) &&
conf.getBoolVar(ConfVars.HIVE_DISPLAY_PARTITION_COLUMNS_SEPARATELY)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ private Map<String, Object> makeOneTableStatus(Table table, Hive db, HiveConf co

builder.put("partitioned", table.isPartitioned());
if (table.isPartitioned()) {
builder.put("partitionColumns", JsonDescTableFormatter.createColumnsInfo(table.getPartCols(),
builder.put("partitionColumns", JsonDescTableFormatter.createColumnsInfo(table.getPartCols(true),
Collections.emptyList()));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ private void writeStorageInfo(DataOutputStream out, Partition partition, Table t
private void writeColumnsInfo(DataOutputStream out, Table table) throws IOException, UnsupportedEncodingException {
String columns = MetaStoreUtils.getDDLFromFieldSchema("columns", table.getCols());
String partitionColumns = table.isPartitioned() ?
MetaStoreUtils.getDDLFromFieldSchema("partition_columns", table.getPartCols()) : "";
MetaStoreUtils.getDDLFromFieldSchema("partition_columns", table.getPartCols(true)) : "";

out.write(Utilities.newLineCode);
out.write(("columns:" + columns).getBytes(StandardCharsets.UTF_8));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ public static List<Partition> getPartitionsWithSpecs(Hive db, Table table, GetPa
}

/**
 * Returns the table's partition column NAMES joined with "/",
 * e.g. {@code "year/month/day"} — suitable for logging/error messages.
 *
 * Fixes two defects in the previous span:
 * 1. Duplicate conflicting declarations of {@code partCols} (diff residue:
 *    both the old {@code getPartCols()} and new {@code getPartCols(true)}
 *    lines were present).
 * 2. {@code String.join("/", partCols.toString())} joined a SINGLE element,
 *    so it simply returned the list's toString() form
 *    (e.g. {@code "[FieldSchema(...)]"}) with no "/" separators and with
 *    full FieldSchema dumps instead of names.
 */
private static String tablePartitionColNames(Table table) {
  List<String> names = new ArrayList<>();
  for (FieldSchema partCol : table.getPartCols(true)) {
    names.add(partCol.getName());
  }
  // Join the element names themselves, not the list's toString().
  return String.join("/", names);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ protected void analyzeCommand(TableName tableName, Map<String, String> partition
if (AcidUtils.isTransactionalTable(sourceTable) || AcidUtils.isTransactionalTable(destTable)) {
throw new SemanticException(ErrorMsg.EXCHANGE_PARTITION_NOT_ALLOWED_WITH_TRANSACTIONAL_TABLES.getMsg());
}
List<String> sourceProjectFilters = MetaStoreUtils.getPvals(sourceTable.getPartCols(), partitionSpecs);
List<String> sourceProjectFilters = MetaStoreUtils.getPvals(sourceTable.getPartCols(true), partitionSpecs);

// check if source partition exists
GetPartitionsFilterSpec sourcePartitionsFilterSpec = new GetPartitionsFilterSpec();
Expand All @@ -106,7 +106,7 @@ protected void analyzeCommand(TableName tableName, Map<String, String> partition
throw new SemanticException(ErrorMsg.PARTITION_VALUE_NOT_CONTINUOUS.getMsg(partitionSpecs.toString()));
}

List<String> destProjectFilters = MetaStoreUtils.getPvals(destTable.getPartCols(), partitionSpecs);
List<String> destProjectFilters = MetaStoreUtils.getPvals(destTable.getPartCols(true), partitionSpecs);

// check if dest partition exists
GetPartitionsFilterSpec getDestPartitionsFilterSpec = new GetPartitionsFilterSpec();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ ExprNodeDesc getShowPartitionsFilter(Table table, ASTNode command) throws Semant
if (astChild.getType() == HiveParser.TOK_WHERE) {
RowResolver rwsch = new RowResolver();
Map<String, String> colTypes = new HashMap<String, String>();
for (FieldSchema fs : table.getPartCols()) {
for (FieldSchema fs : table.getPartCols(true)) {
rwsch.put(table.getTableName(), fs.getName(), new ColumnInfo(fs.getName(),
TypeInfoFactory.stringTypeInfo, null, true));
colTypes.put(fs.getName().toLowerCase(), fs.getType());
Expand Down Expand Up @@ -202,8 +202,8 @@ private String getShowPartitionsOrder(Table table, ASTNode command) throws Seman
if (astChild.getType() == HiveParser.TOK_ORDERBY) {
Map<String, Integer> poses = new HashMap<String, Integer>();
RowResolver rwsch = new RowResolver();
for (int i = 0; i < table.getPartCols().size(); i++) {
FieldSchema fs = table.getPartCols().get(i);
for (int i = 0; i < table.getPartCols(true).size(); i++) {
FieldSchema fs = table.getPartCols(true).get(i);
rwsch.put(table.getTableName(), fs.getName(), new ColumnInfo(fs.getName(),
TypeInfoFactory.getPrimitiveTypeInfo(fs.getType()), null, true));
poses.put(fs.getName().toLowerCase(), i);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ private Path getOriginalDir(Table table, PartSpecInfo partitionSpecInfo, List<Pa
// in full partition specification case we allow custom locations to keep backward compatibility
if (partitions.isEmpty()) {
throw new HiveException("No partition matches the specification");
} else if (partitionSpecInfo.values.size() != table.getPartCols().size()) {
} else if (partitionSpecInfo.values.size() != table.getPartCols(true).size()) {
// for partial specifications we need partitions to follow the scheme
for (Partition partition : partitions) {
if (AlterTableArchiveUtils.partitionInCustomLocation(table, partition)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ static void setOriginalLocation(Partition partition, String loc) {
static boolean partitionInCustomLocation(Table table, Partition partition) throws HiveException {
String subdir = null;
try {
subdir = Warehouse.makePartName(table.getPartCols(), partition.getValues());
subdir = Warehouse.makePartName(table.getPartCols(true), partition.getValues());
} catch (MetaException e) {
throw new HiveException("Unable to get partition's directory", e);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ private Path getOriginalDir(Table table, PartSpecInfo partitionSpecInfo, List<Pa
// to keep backward compatibility
if (partitions.isEmpty()) {
throw new HiveException("No partition matches the specification");
} else if (partitionSpecInfo.values.size() != table.getPartCols().size()) {
} else if (partitionSpecInfo.values.size() != table.getPartCols(true).size()) {
// for partial specifications we need partitions to follow the scheme
for (Partition partition : partitions) {
if (AlterTableArchiveUtils.partitionInCustomLocation(table, partition)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ public static void validateTablesUsed(SemanticAnalyzer analyzer) throws Semantic

protected void validateReplaceWithPartitions(String viewName, Table oldView, List<FieldSchema> partitionColumns)
throws SemanticException {
if (oldView.getPartCols().isEmpty() || oldView.getPartCols().equals(partitionColumns)) {
if (oldView.getPartCols(true).isEmpty() || oldView.getPartCols(true).equals(partitionColumns)) {
return;
}

Expand Down
6 changes: 3 additions & 3 deletions ql/src/java/org/apache/hadoop/hive/ql/exec/ArchiveUtils.java
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ static public PartSpecInfo create(Table tbl, Map<String, String> partSpec)
// ARCHIVE PARTITION(hr='13') won't
List<FieldSchema> prefixFields = new ArrayList<FieldSchema>();
List<String> prefixValues = new ArrayList<String>();
List<FieldSchema> partCols = tbl.getPartCols();
List<FieldSchema> partCols = tbl.getPartCols(true);
Iterator<String> itrPsKeys = partSpec.keySet().iterator();
for (FieldSchema fs : partCols) {
if (!itrPsKeys.hasNext()) {
Expand Down Expand Up @@ -222,7 +222,7 @@ public static int getArchivingLevel(Partition p) throws HiveException {
* @throws HiveException
*/
public static String getPartialName(Partition p, int level) throws HiveException {
List<FieldSchema> fields = p.getTable().getPartCols().subList(0, level);
List<FieldSchema> fields = p.getTable().getPartCols(true).subList(0, level);
List<String> values = p.getValues().subList(0, level);
try {
return Warehouse.makePartName(fields, values);
Expand Down Expand Up @@ -273,7 +273,7 @@ public static String conflictingArchiveNameOrNull(Hive db, Table tbl,

Map<String, String> spec = new HashMap<String, String>(partSpec);
List<String> reversedKeys = new ArrayList<String>();
for (FieldSchema fs : tbl.getPartCols()) {
for (FieldSchema fs : tbl.getPartCols(true)) {
if (spec.containsKey(fs.getName())) {
reversedKeys.add(fs.getName());
}
Expand Down
4 changes: 2 additions & 2 deletions ql/src/java/org/apache/hadoop/hive/ql/exec/DDLPlanUtils.java
Original file line number Diff line number Diff line change
Expand Up @@ -294,7 +294,7 @@ public String getPartitionActualName(Partition pt) {
*/
private Map<String, PrimitiveObjectInspector.PrimitiveCategory> getPartitionColumnToPrimitiveCategory(Partition pt) {
Map<String, PrimitiveObjectInspector.PrimitiveCategory> resultMap = new HashMap<>();
for (FieldSchema schema: pt.getTable().getPartCols()) {
for (FieldSchema schema: pt.getTable().getPartCols(true)) {
resultMap.put(
schema.getName(),
((PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(schema.getType())).getPrimitiveCategory()
Expand Down Expand Up @@ -976,7 +976,7 @@ private String getComment(Table table) {
}

private String getPartitionsForView(Table table) {
List<FieldSchema> partitionKeys = table.getPartCols();
List<FieldSchema> partitionKeys = table.getPartCols(true);
if (partitionKeys.isEmpty()) {
return "";
}
Expand Down
2 changes: 1 addition & 1 deletion ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
Original file line number Diff line number Diff line change
Expand Up @@ -633,7 +633,7 @@ public void logMessage(LoadTableDesc tbd) {

private DataContainer handleStaticParts(Hive db, Table table, LoadTableDesc tbd,
TaskInformation ti) throws HiveException, IOException, InvalidOperationException {
List<String> partVals = MetaStoreUtils.getPvals(table.getPartCols(), tbd.getPartitionSpec());
List<String> partVals = MetaStoreUtils.getPvals(table.getPartCols(true), tbd.getPartitionSpec());
db.validatePartitionNameCharacters(partVals);
if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
Utilities.FILE_OP_LOGGER.trace("loadPartition called from " + tbd.getSourcePath()
Expand Down
2 changes: 1 addition & 1 deletion ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
Original file line number Diff line number Diff line change
Expand Up @@ -4315,7 +4315,7 @@ public static void setPartitionColumnNames(Configuration conf, TableScanOperator
if (metadata == null) {
return;
}
List<FieldSchema> partCols = metadata.getPartCols();
List<FieldSchema> partCols = metadata.getPartCols(true);
if (partCols != null && !partCols.isEmpty()) {
conf.set(serdeConstants.LIST_PARTITION_COLUMNS, MetaStoreUtils.getColumnNamesFromFieldSchema(partCols));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -548,7 +548,7 @@ public static Task<?> createViewTask(MetaData metaData, String dbNameToLoadIn, H
}

CreateViewDesc desc = new CreateViewDesc(dbDotView, table.getCols(), null, table.getParameters(),
table.getPartColNames(), false, false, viewOriginalText, viewExpandedText, table.getPartCols());
table.getPartColNames(), false, false, viewOriginalText, viewExpandedText, table.getPartCols(true));

desc.setReplicationSpec(metaData.getReplicationSpec());
desc.setOwnerName(table.getOwner());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,9 +91,7 @@ public List<String> getValues() {
values = new ArrayList<>();

// TODO (HIVE-29413): Refactor to a generic getPartCols() implementation
for (FieldSchema fs : table.hasNonNativePartitionSupport()
? table.getStorageHandler().getPartitionKeys(table)
: table.getPartCols()) {
for (FieldSchema fs : table.getPartCols(true)) {
String val = partSpec.get(fs.getName());
values.add(val);
}
Expand Down
16 changes: 8 additions & 8 deletions ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
Original file line number Diff line number Diff line change
Expand Up @@ -867,7 +867,7 @@ public void createTable(String tableName, List<String> columns, List<String> par
FieldSchema part = new FieldSchema();
part.setName(partCol);
part.setType(STRING_TYPE_NAME); // default partition key
tbl.getPartCols().add(part);
tbl.getPartCols(true).add(part);
}
}
tbl.setSerializationLib(LazySimpleSerDe.class.getName());
Expand Down Expand Up @@ -1244,16 +1244,16 @@ public void renamePartition(Table tbl, Map<String, String> oldPartSpec, Partitio
throws HiveException {
try {
Map<String, String> newPartSpec = newPart.getSpec();
if (oldPartSpec.keySet().size() != tbl.getPartCols().size()
|| newPartSpec.keySet().size() != tbl.getPartCols().size()) {
if (oldPartSpec.keySet().size() != tbl.getPartCols(true).size()
|| newPartSpec.keySet().size() != tbl.getPartCols(true).size()) {
throw new HiveException("Unable to rename partition to the same name: number of partition cols don't match. ");
}
if (!oldPartSpec.keySet().equals(newPartSpec.keySet())){
throw new HiveException("Unable to rename partition to the same name: old and new partition cols don't match. ");
}
List<String> pvals = new ArrayList<String>();

for (FieldSchema field : tbl.getPartCols()) {
for (FieldSchema field : tbl.getPartCols(true)) {
String val = oldPartSpec.get(field.getName());
if (val == null || val.length() == 0) {
throw new HiveException("get partition: Value for key "
Expand Down Expand Up @@ -3830,7 +3830,7 @@ public Partition getPartition(Table tbl, Map<String, String> partSpec,
boolean forceCreate, String partPath, boolean inheritTableSpecs) throws HiveException {
tbl.validatePartColumnNames(partSpec, true);
List<String> pvals = new ArrayList<String>();
for (FieldSchema field : tbl.getPartCols()) {
for (FieldSchema field : tbl.getPartCols(true)) {
String val = partSpec.get(field.getName());
// enable dynamic partitioning
if ((val == null && !HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMIC_PARTITIONING))
Expand Down Expand Up @@ -4219,7 +4219,7 @@ public List<String> getPartitionNames(Table tbl, Map<String, String> partSpec, s
if (tbl.hasNonNativePartitionSupport()) {
return tbl.getStorageHandler().getPartitionNames(tbl, partSpec);
}
List<String> pvals = MetaStoreUtils.getPvals(tbl.getPartCols(), partSpec);
List<String> pvals = MetaStoreUtils.getPvals(tbl.getPartCols(true), partSpec);
return getPartitionNamesByPartitionVals(tbl, pvals, max);
}

Expand Down Expand Up @@ -4461,7 +4461,7 @@ private List<Partition> getPartitionsWithAuth(Table tbl, Map<String, String> par
throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tbl.getTableName());
}

List<String> partialPvals = MetaStoreUtils.getPvals(tbl.getPartCols(), partialPartSpec);
List<String> partialPvals = MetaStoreUtils.getPvals(tbl.getPartCols(true), partialPartSpec);

List<org.apache.hadoop.hive.metastore.api.Partition> partitions = null;
try {
Expand Down Expand Up @@ -4770,7 +4770,7 @@ static List<Partition> convertFromPartSpec(Iterator<PartitionSpec> iterator, Tab
|| partitionWithoutSD.getRelativePath().isEmpty()) {
if (tbl.getDataLocation() != null) {
Path partPath = new Path(tbl.getDataLocation(),
Warehouse.makePartName(tbl.getPartCols(),
Warehouse.makePartName(tbl.getPartCols(true),
partitionWithoutSD.getValues()));
partitionLocation = partPath.toString();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -409,7 +409,7 @@ private static RelNode createMaterializedViewScan(HiveConf conf, Table viewTable

// 1.2 Add column info corresponding to partition columns
ArrayList<ColumnInfo> partitionColumns = new ArrayList<ColumnInfo>();
for (FieldSchema part_col : viewTable.getPartCols()) {
for (FieldSchema part_col : viewTable.getPartCols(true)) {
colName = part_col.getName();
colInfo = new ColumnInfo(colName,
TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), null, true);
Expand Down
10 changes: 5 additions & 5 deletions ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ public Partition(Table tbl, Map<String, String> partSpec, Path location) throws
public static org.apache.hadoop.hive.metastore.api.Partition createMetaPartitionObject(
Table tbl, Map<String, String> partSpec, Path location) throws HiveException {
List<String> pvals = new ArrayList<String>();
for (FieldSchema field : tbl.getPartCols()) {
for (FieldSchema field : tbl.getPartCols(true)) {
String val = partSpec.get(field.getName());
if (val == null || val.isEmpty()) {
throw new HiveException("partition spec is invalid; field "
Expand Down Expand Up @@ -173,7 +173,7 @@ protected void initialize(Table table,
// set default if location is not set and this is a physical
// table partition (not a view partition)
if (table.getDataLocation() != null) {
Path partPath = new Path(table.getDataLocation(), Warehouse.makePartName(table.getPartCols(), tPartition.getValues()));
Path partPath = new Path(table.getDataLocation(), Warehouse.makePartName(table.getPartCols(true), tPartition.getValues()));
tPartition.getSd().setLocation(partPath.toString());
}
}
Expand All @@ -200,7 +200,7 @@ protected void initialize(Table table,

public String getName() {
try {
return Warehouse.makePartName(table.getPartCols(), tPartition.getValues());
return Warehouse.makePartName(table.getPartCols(true), tPartition.getValues());
} catch (MetaException e) {
throw new RuntimeException(e);
}
Expand Down Expand Up @@ -543,7 +543,7 @@ public void setLocation(String location) {
public void setValues(Map<String, String> partSpec)
throws HiveException {
List<String> pvals = new ArrayList<String>();
for (FieldSchema field : table.getPartCols()) {
for (FieldSchema field : table.getPartCols(true)) {
String val = partSpec.get(field.getName());
if (val == null) {
throw new HiveException(
Expand Down Expand Up @@ -601,7 +601,7 @@ public Map<List<String>, String> getSkewedColValueLocationMaps() {

public void checkValidity() throws HiveException {
if (!tPartition.getSd().equals(table.getSd())) {
Table.validateColumns(getCols(), table.getPartCols(), DDLUtils.isIcebergTable(table));
Table.validateColumns(getCols(), table.getPartCols(true), DDLUtils.isIcebergTable(table));
}
}

Expand Down
Loading