Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,8 @@ class CHIteratorApi extends IteratorApi with Logging with LogLevelUtil {
partitionSchema: StructType,
fileFormat: ReadFileFormat,
metadataColumnNames: Seq[String],
properties: Map[String, String]): SplitInfo = {
properties: Map[String, String],
dataSchema: StructType): SplitInfo = {
partition match {
case p: GlutenMergeTreePartition =>
ExtensionTableBuilder
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,8 @@ class VeloxIteratorApi extends IteratorApi with Logging {
partitionSchema: StructType,
fileFormat: ReadFileFormat,
metadataColumnNames: Seq[String],
properties: Map[String, String]): SplitInfo = {
properties: Map[String, String],
dataSchema: StructType): SplitInfo = {
partition match {
case f: FilePartition =>
val (
Expand All @@ -69,7 +70,7 @@ class VeloxIteratorApi extends IteratorApi with Logging {
constructSplitInfo(partitionSchema, f.files, metadataColumnNames)
val preferredLocations =
SoftAffinity.getFilePartitionLocations(f)
LocalFilesBuilder.makeLocalFiles(
val localFile = LocalFilesBuilder.makeLocalFiles(
f.index,
paths,
starts,
Expand All @@ -82,6 +83,8 @@ class VeloxIteratorApi extends IteratorApi with Logging {
preferredLocations.toList.asJava,
mapAsJavaMap(properties)
)
localFile.setFileSchema(dataSchema)
localFile
case _ =>
throw new UnsupportedOperationException(s"Unsupported input partition.")
}
Expand Down Expand Up @@ -168,26 +171,28 @@ class VeloxIteratorApi extends IteratorApi with Logging {
SparkShimLoader.getSparkShims.generateMetadataColumns(file, metadataColumnNames)
metadataColumns.add(metadataColumn)
val partitionColumn = new JHashMap[String, String]()
for (i <- 0 until file.partitionValues.numFields) {
val partitionColumnValue = if (file.partitionValues.isNullAt(i)) {
ExternalCatalogUtils.DEFAULT_PARTITION_NAME
} else {
val pn = file.partitionValues.get(i, schema.fields(i).dataType)
schema.fields(i).dataType match {
case _: BinaryType =>
new String(pn.asInstanceOf[Array[Byte]], StandardCharsets.UTF_8)
case _: DateType =>
DateFormatter.apply().format(pn.asInstanceOf[Integer])
case _: DecimalType =>
pn.asInstanceOf[Decimal].toJavaBigInteger.toString
case _: TimestampType =>
TimestampFormatter
.getFractionFormatter(ZoneOffset.UTC)
.format(pn.asInstanceOf[java.lang.Long])
case _ => pn.toString
if (file.partitionValues != null) {
for (i <- 0 until file.partitionValues.numFields) {
val partitionColumnValue = if (file.partitionValues.isNullAt(i)) {
ExternalCatalogUtils.DEFAULT_PARTITION_NAME
} else {
val pn = file.partitionValues.get(i, schema.fields(i).dataType)
schema.fields(i).dataType match {
case _: BinaryType =>
new String(pn.asInstanceOf[Array[Byte]], StandardCharsets.UTF_8)
case _: DateType =>
DateFormatter.apply().format(pn.asInstanceOf[Integer])
case _: DecimalType =>
pn.asInstanceOf[Decimal].toJavaBigInteger.toString
case _: TimestampType =>
TimestampFormatter
.getFractionFormatter(ZoneOffset.UTC)
.format(pn.asInstanceOf[java.lang.Long])
case _ => pn.toString
}
}
partitionColumn.put(schema.names(i), partitionColumnValue)
}
partitionColumn.put(schema.names(i), partitionColumnValue)
}
partitionColumns.add(partitionColumn)
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gluten.execution

import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

class VeloxOrcSchemaEvolutionSuite extends VeloxWholeStageTransformerSuite {
  override protected val resourcePath: String = "/tpch-data-parquet"
  override protected val fileFormat: String = "parquet"

  import testImplicits._

  test("read ORC with column names all starting with '_col'") {
    withTempPath {
      dir =>
        // Write an ORC file whose physical column names are the Hive-style
        // placeholders `_col0`, `_col1`, `_col2`.
        Seq((1, 2, 3), (4, 5, 6), (7, 8, 9))
          .toDF("_col0", "_col1", "_col2")
          .write
          .format("orc")
          .save(s"file://${dir.getCanonicalPath}")

        withTempView("test") {
          // Read the same file back under a user-supplied schema with
          // different column names (a, b, c) and register it as a view.
          val readSchema = StructType(
            Array(
              StructField("a", IntegerType, nullable = true),
              StructField("b", IntegerType, nullable = true),
              StructField("c", IntegerType, nullable = true)
            ))
          spark.read
            .format("orc")
            .schema(readSchema)
            .load(s"file://${dir.getCanonicalPath}")
            .createOrReplaceTempView("test")

          // The renamed columns must still resolve to the underlying
          // `_colN` data; expect the original three rows back.
          runQueryAndCompare("select a, b, c from test") {
            df =>
              checkAnswer(
                df,
                Row(1, 2, 3) ::
                  Row(4, 5, 6) ::
                  Row(7, 8, 9) ::
                  Nil)
          }
        }
    }
  }
}
5 changes: 5 additions & 0 deletions cpp/velox/compute/VeloxBackend.cc
Original file line number Diff line number Diff line change
Expand Up @@ -271,6 +271,11 @@ void VeloxBackend::initConnector() {
connectorConfMap[velox::connector::hive::HiveConfig::kFilePreloadThreshold] =
backendConf_->get<std::string>(kFilePreloadThreshold, "1048576"); // 1M

// Map table schema to file schema using name
connectorConfMap[velox::connector::hive::HiveConfig::kParquetUseColumnNames] = "true";
connectorConfMap[velox::connector::hive::HiveConfig::kOrcUseColumnNames] = "true";
connectorConfMap[velox::connector::hive::HiveConfig::kOrcUseNestedColumnNames] = "true";

// read as UTC
connectorConfMap[velox::connector::hive::HiveConfig::kReadTimestampPartitionValueAsLocalTime] = "false";

Expand Down
14 changes: 14 additions & 0 deletions cpp/velox/compute/VeloxPlanConverter.cc
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,10 @@ std::shared_ptr<SplitInfo> parseScanSplitInfo(
splitInfo->partitionColumns.reserve(fileList.size());
splitInfo->properties.reserve(fileList.size());
splitInfo->metadataColumns.reserve(fileList.size());

std::vector<std::string> colNames;
std::vector<TypePtr> veloxTypes;

for (const auto& file : fileList) {
// Expect all Partitions share the same index.
splitInfo->partitionIndex = file.partition_index();
Expand All @@ -71,6 +75,16 @@ std::shared_ptr<SplitInfo> parseScanSplitInfo(
splitInfo->starts.emplace_back(file.start());
splitInfo->lengths.emplace_back(file.length());

if (colNames.empty() && file.has_schema()) {
const auto& tableSchema = file.schema();
colNames.reserve(tableSchema.names().size());
for (const auto& name : tableSchema.names()) {
colNames.emplace_back(name);
}
veloxTypes = SubstraitParser::parseNamedStruct(tableSchema);
}
splitInfo->fileSchema = ROW(std::move(colNames), std::move(veloxTypes));

facebook::velox::FileProperties fileProps;
if (file.has_properties()) {
fileProps.fileSize = file.properties().filesize();
Expand Down
2 changes: 2 additions & 0 deletions cpp/velox/compute/WholeStageResultIterator.cc
Original file line number Diff line number Diff line change
Expand Up @@ -598,6 +598,8 @@ std::shared_ptr<velox::config::ConfigBase> WholeStageResultIterator::createConne
std::to_string(veloxCfg_->get<int32_t>(kMaxPartitions, 10000));
configs[velox::connector::hive::HiveConfig::kIgnoreMissingFilesSession] =
std::to_string(veloxCfg_->get<bool>(kIgnoreMissingFiles, false));
configs[velox::connector::hive::HiveConfig::kParquetUseColumnNamesSession] = "true";
configs[velox::connector::hive::HiveConfig::kOrcUseColumnNamesSession] = "true";
return std::make_shared<velox::config::ConfigBase>(std::move(configs));
}

Expand Down
14 changes: 12 additions & 2 deletions cpp/velox/substrait/SubstraitToVeloxPlan.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1284,15 +1284,25 @@ core::PlanNodePtr SubstraitToVeloxPlanConverter::toVeloxPlan(const ::substrait::
std::shared_ptr<connector::hive::HiveTableHandle> tableHandle;
if (!readRel.has_filter()) {
tableHandle = std::make_shared<connector::hive::HiveTableHandle>(
kHiveConnectorId, "hive_table", filterPushdownEnabled, common::SubfieldFilters{}, nullptr);
kHiveConnectorId,
"hive_table",
filterPushdownEnabled,
common::SubfieldFilters{},
nullptr,
splitInfo->fileSchema);
} else {
common::SubfieldFilters subfieldFilters;
auto names = colNameList;
auto types = veloxTypeList;
auto remainingFilter = exprConverter_->toVeloxExpr(readRel.filter(), ROW(std::move(names), std::move(types)));

tableHandle = std::make_shared<connector::hive::HiveTableHandle>(
kHiveConnectorId, "hive_table", filterPushdownEnabled, std::move(subfieldFilters), remainingFilter);
kHiveConnectorId,
"hive_table",
filterPushdownEnabled,
std::move(subfieldFilters),
remainingFilter,
splitInfo->fileSchema);
}

// Get assignments and out names.
Expand Down
3 changes: 3 additions & 0 deletions cpp/velox/substrait/SubstraitToVeloxPlan.h
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,9 @@ struct SplitInfo {
/// The file sizes and modification times of the files to be scanned.
std::vector<std::optional<facebook::velox::FileProperties>> properties;

/// The file schema
RowTypePtr fileSchema;

/// Make SplitInfo polymorphic
virtual ~SplitInfo() = default;
};
Expand Down
4 changes: 2 additions & 2 deletions cpp/velox/tests/Substrait2VeloxPlanConversionTest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,7 @@ TEST_F(Substrait2VeloxPlanConversionTest, ifthenTest) {
// Convert to Velox PlanNode.
auto planNode = planConverter_->toVeloxPlan(substraitPlan, std::vector<::substrait::ReadRel_LocalFiles>{split});
ASSERT_EQ(
"-- Project[1][expressions: ] -> \n -- TableScan[0][table: hive_table, remaining filter: (and(and(and(and(isnotnull(\"hd_vehicle_count\"),or(equalto(\"hd_buy_potential\",\">10000\"),equalto(\"hd_buy_potential\",\"unknown\"))),greaterthan(\"hd_vehicle_count\",0)),if(greaterthan(\"hd_vehicle_count\",0),greaterthan(divide(cast \"hd_dep_count\" as DOUBLE,cast \"hd_vehicle_count\" as DOUBLE),1.2))),isnotnull(\"hd_demo_sk\")))] -> n0_0:BIGINT, n0_1:VARCHAR, n0_2:BIGINT, n0_3:BIGINT\n",
"-- Project[1][expressions: ] -> \n -- TableScan[0][table: hive_table, remaining filter: (and(and(and(and(isnotnull(\"hd_vehicle_count\"),or(equalto(\"hd_buy_potential\",\">10000\"),equalto(\"hd_buy_potential\",\"unknown\"))),greaterthan(\"hd_vehicle_count\",0)),if(greaterthan(\"hd_vehicle_count\",0),greaterthan(divide(cast \"hd_dep_count\" as DOUBLE,cast \"hd_vehicle_count\" as DOUBLE),1.2))),isnotnull(\"hd_demo_sk\"))), data columns: ROW<>] -> n0_0:BIGINT, n0_1:VARCHAR, n0_2:BIGINT, n0_3:BIGINT\n",
planNode->toString(true, true));
}

Expand All @@ -273,7 +273,7 @@ TEST_F(Substrait2VeloxPlanConversionTest, filterUpper) {
// Convert to Velox PlanNode.
auto planNode = planConverter_->toVeloxPlan(substraitPlan, std::vector<::substrait::ReadRel_LocalFiles>{split});
ASSERT_EQ(
"-- Project[1][expressions: ] -> \n -- TableScan[0][table: hive_table, remaining filter: (and(isnotnull(\"key\"),lessthan(\"key\",3)))] -> n0_0:INTEGER\n",
"-- Project[1][expressions: ] -> \n -- TableScan[0][table: hive_table, remaining filter: (and(isnotnull(\"key\"),lessthan(\"key\",3))), data columns: ROW<>] -> n0_0:INTEGER\n",
planNode->toString(true, true));
}

Expand Down
4 changes: 2 additions & 2 deletions ep/build-velox/src/get_velox.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@

set -exu

VELOX_REPO=https://github.com/oap-project/velox.git
VELOX_BRANCH=2025_03_02
VELOX_REPO=https://github.com/ccat3z/velox.git
VELOX_BRANCH=feat/orc-positional-oap
VELOX_HOME=""

OS=`uname -s`
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,15 +151,17 @@ public ReadRel.LocalFiles toProtobuf() {
if (index != null) {
fileBuilder.setPartitionIndex(index);
}
Map<String, String> partitionColumn = partitionColumns.get(i);
if (!partitionColumn.isEmpty()) {
partitionColumn.forEach(
(key, value) -> {
ReadRel.LocalFiles.FileOrFiles.partitionColumn.Builder pcBuilder =
ReadRel.LocalFiles.FileOrFiles.partitionColumn.newBuilder();
pcBuilder.setKey(key).setValue(value);
fileBuilder.addPartitionColumns(pcBuilder.build());
});
if (partitionColumns != null && partitionColumns.size() == paths.size()) {
Map<String, String> partitionColumn = partitionColumns.get(i);
if (!partitionColumn.isEmpty()) {
partitionColumn.forEach(
(key, value) -> {
ReadRel.LocalFiles.FileOrFiles.partitionColumn.Builder pcBuilder =
ReadRel.LocalFiles.FileOrFiles.partitionColumn.newBuilder();
pcBuilder.setKey(key).setValue(value);
fileBuilder.addPartitionColumns(pcBuilder.build());
});
}
}
fileBuilder.setLength(lengths.get(i));
fileBuilder.setStart(starts.get(i));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,8 @@ trait IteratorApi {
partitionSchema: StructType,
fileFormat: ReadFileFormat,
metadataColumnNames: Seq[String],
properties: Map[String, String]): SplitInfo
properties: Map[String, String],
dataSchema: StructType): SplitInfo

def genSplitInfoForPartitions(
partitionIndex: Int,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,9 @@ trait BasicScanExecTransformer extends LeafTransformSupport with BaseDataSource
_,
getPartitionSchema,
fileFormat,
getMetadataColumns().map(_.name),
getProperties))
getMetadataColumns.map(_.name),
getProperties,
getDataSchema))
}

val serializableHadoopConf: SerializableConfiguration = new SerializableConfiguration(
Expand All @@ -94,6 +95,14 @@ trait BasicScanExecTransformer extends LeafTransformSupport with BaseDataSource
case _ =>
}

val dataSchemaValidateResult = BackendsApiManager.getValidatorApiInstance
.doSchemaValidate(this.getDataSchema)
.map(ValidationResult.failed(_))
.getOrElse(ValidationResult.succeeded)
if (!dataSchemaValidateResult.ok()) {
return dataSchemaValidateResult
}

val validationResult = BackendsApiManager.getSettings
.validateScanExec(
fileFormat,
Expand Down
Loading