From 81e0b004d58b9989d71d4c680b8cf33e06e96bd7 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 14:57:43 +0800 Subject: [PATCH 01/59] Initial Spark 410 shim setup --- pom.xml | 43 ++++++++++++++++++++++++++++++++++++++++++- scala2.13/pom.xml | 43 ++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 84 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 0a0711efdd9..c689559b10f 100644 --- a/pom.xml +++ b/pom.xml @@ -1,6 +1,6 @@ source-javadoc @@ -966,6 +1006,7 @@ 3.5.7 4.0.0 4.0.1 + 4.1.0 3.12.4 4.9.2 diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index 920a9f10a2a..fa3963d3e52 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -1,6 +1,6 @@ source-javadoc @@ -966,6 +1006,7 @@ 3.5.7 4.0.0 4.0.1 + 4.1.0 3.12.4 4.9.2 From bf8f71ec4f2bf88bde19f0c04f6f80414ff19d4b Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 14:59:05 +0800 Subject: [PATCH 02/59] Add Spark 410 base shim files --- .../spark/rapids/shims/SparkShims.scala | 49 +++++++ .../spark410/SparkShimServiceProvider.scala | 36 +++++ .../rapids/shims/datetimeExpressions.scala | 126 ++++++++++++++++++ 3 files changed, 211 insertions(+) create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/SparkShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimServiceProvider.scala create mode 100644 sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/SparkShims.scala new file mode 100644 index 00000000000..81f72bcf35e --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/SparkShims.scala @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids._ +import com.nvidia.spark.rapids.{HashExprChecks, Murmur3HashExprMeta, XxHash64ExprMeta} + +import org.apache.spark.sql.catalyst.expressions.{CollationAwareMurmur3Hash, CollationAwareXxHash64, Expression} +import org.apache.spark.sql.rapids.{GpuMurmur3Hash, GpuXxHash64} + +/** + * SparkShimImpl for Spark 4.1.0 + * Extends Spark400PlusCommonShims with 4.1.0 specific overrides. 
+ */ +object SparkShimImpl extends Spark400PlusCommonShims with RebaseShims { + override def getExprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = { + val shimExprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = Seq( + GpuOverrides.expr[CollationAwareMurmur3Hash]( + "Collation-aware murmur3 hash operator", + HashExprChecks.murmur3ProjectChecks, + Murmur3HashExprMeta + ), + GpuOverrides.expr[CollationAwareXxHash64]( + "Collation-aware xxhash64 operator", + HashExprChecks.xxhash64ProjectChecks, + XxHash64ExprMeta + ) + ).map(r => (r.getClassFor.asSubclass(classOf[Expression]), r)).toMap + // Include TimeAddShims for TimestampAddInterval support in 4.1.0 + super.getExprs ++ shimExprs ++ TimeAddShims.exprs + } +} diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimServiceProvider.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimServiceProvider.scala new file mode 100644 index 00000000000..fa41297f65c --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimServiceProvider.scala @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2025-2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims.spark410 + +import com.nvidia.spark.rapids.SparkShimVersion + +object SparkShimServiceProvider { + val VERSION = SparkShimVersion(4, 1, 0) + val VERSIONNAMES = Seq(s"$VERSION") +} + +class SparkShimServiceProvider extends com.nvidia.spark.rapids.SparkShimServiceProvider { + + override def getShimVersion: SparkShimVersion = SparkShimServiceProvider.VERSION + + override def matchesVersion(version: String): Boolean = { + SparkShimServiceProvider.VERSIONNAMES.contains(version) + } +} diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala new file mode 100644 index 00000000000..d6a42db838b --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala @@ -0,0 +1,126 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.shims + +import java.util.concurrent.TimeUnit + +import ai.rapids.cudf.{BinaryOp, BinaryOperable, ColumnVector, ColumnView, DType, Scalar} +import com.nvidia.spark.rapids.{GpuColumnVector, GpuExpression, GpuScalar} +import com.nvidia.spark.rapids.Arm.{withResource, withResourceIfAllowed} +import com.nvidia.spark.rapids.RapidsPluginImplicits._ +import com.nvidia.spark.rapids.shims.ShimBinaryExpression + +import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, Expression, TimeZoneAwareExpression} +import org.apache.spark.sql.types._ +import org.apache.spark.sql.vectorized.ColumnarBatch +import org.apache.spark.unsafe.types.CalendarInterval + +/** + * GPU implementation for TimestampAddInterval (Spark 4.1+). + * This is the renamed version of TimeAdd from earlier Spark versions. + */ +case class GpuTimestampAddInterval(start: Expression, + interval: Expression, + timeZoneId: Option[String] = None) + extends ShimBinaryExpression + with GpuExpression + with TimeZoneAwareExpression + with ExpectsInputTypes + with Serializable { + + override def left: Expression = start + override def right: Expression = interval + + override def toString: String = s"$left + $right" + override def sql: String = s"${left.sql} + ${right.sql}" + + override lazy val resolved: Boolean = childrenResolved && checkInputDataTypes().isSuccess + + val microSecondsInOneDay: Long = TimeUnit.DAYS.toMicros(1) + + override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = { + copy(timeZoneId = Option(timeZoneId)) + } + + override def inputTypes: Seq[AbstractDataType] = + Seq(AnyTimestampType, TypeCollection(CalendarIntervalType, DayTimeIntervalType)) + + override def dataType: DataType = start.dataType + + override def columnarEval(batch: ColumnarBatch): GpuColumnVector = { + withResourceIfAllowed(left.columnarEval(batch)) { lhs => + 
withResourceIfAllowed(right.columnarEvalAny(batch)) { rhs => + // lhs is start, rhs is interval + (lhs, rhs) match { + case (l, intervalS: GpuScalar) => + // get long type interval + val interval = intervalS.dataType match { + case CalendarIntervalType => + // Scalar does not support 'CalendarInterval' now, so use + // the Scala value instead. + // Skip the null check because it will be detected by the following calls. + val calendarI = intervalS.getValue.asInstanceOf[CalendarInterval] + if (calendarI.months != 0) { + throw new UnsupportedOperationException("Months aren't supported at the moment") + } + calendarI.days * microSecondsInOneDay + calendarI.microseconds + case _: DayTimeIntervalType => + intervalS.getValue.asInstanceOf[Long] + case _ => + throw new UnsupportedOperationException( + "GpuTimestampAddInterval unsupported data type: " + intervalS.dataType) + } + + // add interval + if (interval != 0) { + withResource(Scalar.durationFromLong(DType.DURATION_MICROSECONDS, interval)) { d => + GpuColumnVector.from(timestampAddDuration(l.getBase, d), dataType) + } + } else { + l.incRefCount() + } + case (l, r: GpuColumnVector) => + (l.dataType(), r.dataType) match { + case (_: TimestampType, _: DayTimeIntervalType) => + // DayTimeIntervalType is stored as long + // bitCastTo is similar to reinterpret_cast, it's fast, the time can be ignored. 
withResource(r.getBase.bitCastTo(DType.DURATION_MICROSECONDS)) { duration => + GpuColumnVector.from(timestampAddDuration(l.getBase, duration), dataType) + } + case _ => + throw new UnsupportedOperationException( + "GpuTimestampAddInterval takes column and interval as an argument only") + } + case _ => + throw new UnsupportedOperationException( + "GpuTimestampAddInterval takes column and interval as an argument only, the types " + + s"passed are, left: ${lhs.getClass} right: ${rhs.getClass}") + } + } + } + + private def timestampAddDuration(cv: ColumnView, duration: BinaryOperable): ColumnVector = { + // Do not use cv.add(duration), because it invokes BinaryOperable.implicitConversion, + // and currently BinaryOperable.implicitConversion returns Long + // Directly specify the return type is TIMESTAMP_MICROSECONDS + cv.binaryOp(BinaryOp.ADD, duration, DType.TIMESTAMP_MICROSECONDS) + } +} From a39bfeb1afb46ca7d7356b864ba59caaa8cb698a Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 14:59:26 +0800 Subject: [PATCH 03/59] Add 410 markers to existing shims and fix API changes for Spark 410 --- .../sql/tests/datagen/DataGenExprShims.scala | 3 +- .../sql/rapids/shims/TrampolineUtilShim.scala | 3 +- .../spark/rapids/HashedPriorityQueue.java | 8 +-- .../nvidia/spark/rapids/GpuOverrides.scala | 41 +++---------- .../com/nvidia/spark/rapids/RapidsMeta.scala | 9 +-- .../spark/sql/rapids/GpuDataSourceBase.scala | 9 ++- .../expressions/GpuRandomExpressions.scala | 12 +++- .../sql/rapids/collectionOperations.scala | 11 ++-- .../execution/GpuBroadcastExchangeExec.scala | 4 +- .../python/GpuWindowInPandasExecBase.scala | 7 ++- .../nvidia/spark/rapids/shims/AQEUtils.scala | 3 +- .../rapids/shims/AggregationTagging.scala | 3 +- .../rapids/shims/CudfUnsafeRowBase.scala | 3 +- .../spark/rapids/shims/DeltaLakeUtils.scala | 3 +- .../rapids/shims/FileSourceScanExecMeta.scala | 3 +- .../rapids/shims/GpuBatchScanExecBase.scala | 3 +- 
.../shims/GpuFileFormatDataWriterShim.scala | 3 +- .../shims/GpuOrcDataReader320Plus.scala | 3 +- .../rapids/shims/GpuOrcDataReaderBase.scala | 3 +- .../spark/rapids/shims/GpuParquetCrypto.scala | 3 +- .../rapids/shims/GpuWindowInPandasExec.scala | 3 +- .../nvidia/spark/rapids/shims/HashUtils.scala | 3 +- .../shims/HybridFileSourceScanExecMeta.scala | 3 +- .../shims/OffsetWindowFunctionMeta.scala | 3 +- .../spark/rapids/shims/OrcCastingShims.scala | 3 +- .../shims/OrcShims320untilAllBase.scala | 3 +- .../rapids/shims/RapidsCsvScanMeta.scala | 3 +- .../spark/rapids/shims/RebaseShims.scala | 12 ++-- .../rapids/shims/ShimAQEShuffleReadExec.scala | 3 +- .../rapids/shims/ShimBaseSubqueryExec.scala | 3 +- .../shims/ShimBroadcastExchangeLike.scala | 3 +- .../spark/rapids/shims/ShimLeafExecNode.scala | 3 +- .../rapids/shims/ShimPredicateHelper.scala | 3 +- .../rapids/shims/Spark320PlusNonDBShims.scala | 9 ++- .../rapids/shims/Spark320PlusShims.scala | 61 +++---------------- .../rapids/shims/StaticPartitionShims.scala | 3 +- .../nvidia/spark/rapids/shims/TreeNode.scala | 3 +- .../spark/rapids/shims/XxHash64Shims.scala | 3 +- .../spark/rapids/shims/YearParseUtil.scala | 3 +- .../rapids/shims/extractValueShims.scala | 3 +- .../spark/rapids/shims/gpuWindows.scala | 3 +- .../spark/rapids/v1FallbackWriters.scala | 3 +- .../shims/GpuShuffleBlockResolver.scala | 3 +- .../rapids/shims/GpuShuffleExchangeExec.scala | 3 +- .../rapids/shims/ShuffledBatchRDDUtil.scala | 3 +- .../shims/storage/ShimDiskBlockManager.scala | 3 +- .../shims/GpuRowBasedHiveGenericUDFShim.scala | 3 +- .../sql/rapids/RapidsShuffleWriter.scala | 3 +- .../shims/GpuGroupedPythonRunnerFactory.scala | 3 +- .../spark/sql/rapids/shims/AvroUtils.scala | 3 +- .../rapids/shims/RapidsQueryErrorUtils.scala | 3 +- .../shims/RapidsShuffleThreadedWriter.scala | 3 +- .../sql/rapids/shims/Spark32XShimsUtils.scala | 3 +- .../rapids/shims/datetimeExpressions.scala | 2 +- .../storage/RapidsPushBasedFetchHelper.scala | 3 +- 
.../RapidsShuffleBlockFetcherIterator.scala | 3 +- .../rapids/shims/Spark321PlusShims.scala | 3 +- .../spark/sql/rapids/shims/GpuAscii.scala | 3 +- .../nvidia/spark/rapids/GpuBloomFilter.scala | 3 +- .../rapids/GpuBloomFilterMightContain.scala | 3 +- .../spark/rapids/GpuInSubqueryExec.scala | 3 +- .../nvidia/spark/rapids/shims/AnsiUtil.scala | 3 +- .../spark/rapids/shims/BloomFilterShims.scala | 3 +- .../rapids/shims/BucketingUtilsShim.scala | 3 +- .../rapids/shims/CharVarcharUtilsShims.scala | 3 +- .../rapids/shims/DayTimeIntervalShims.scala | 32 +--------- .../spark/rapids/shims/DistributionUtil.scala | 3 +- .../rapids/shims/FilteredPartitions.scala | 3 +- .../spark/rapids/shims/GpuDataSourceRDD.scala | 3 +- .../rapids/shims/GpuHashPartitioning.scala | 3 +- .../rapids/shims/GpuIntervalUtilsBase.scala | 3 +- .../rapids/shims/GpuRangePartitioning.scala | 3 +- .../spark/rapids/shims/GpuTypeShims.scala | 3 +- .../spark/rapids/shims/InSubqueryShims.scala | 3 +- .../spark/rapids/shims/OrcReadingShims.scala | 3 +- .../nvidia/spark/rapids/shims/OrcShims.scala | 3 +- .../shims/RapidsFileSourceMetaUtils.scala | 3 +- .../rapids/shims/RapidsOrcScanMeta.scala | 3 +- .../rapids/shims/RapidsParquetScanMeta.scala | 3 +- .../spark/rapids/shims/RoundingShims.scala | 3 +- .../spark/rapids/shims/ScanExecShims.scala | 3 +- .../rapids/shims/Spark330PlusNonDBShims.scala | 3 +- .../rapids/shims/Spark330PlusShims.scala | 3 +- .../shims/parquet/ParquetFieldIdShims.scala | 3 +- .../parquet/ParquetSchemaClipShims.scala | 3 +- .../RapidsVectorizedColumnReader.scala | 3 +- .../parquet/ShimCurrentBatchIterator.scala | 3 +- .../aggregate/GpuBloomFilterAggregate.scala | 3 +- .../shims/RapidsErrorUtilsFor330plus.scala | 3 +- .../shims/RapidsShuffleThreadedReader.scala | 3 +- .../rapids/shims/intervalExpressions.scala | 3 +- .../spark/rapids/shims/AnsiCastShim.scala | 3 +- .../rapids/shims/CastingConfigShim.scala | 3 +- .../shims/ColumnDefaultValuesShims.scala | 3 +- 
.../shims/DecimalArithmeticOverrides.scala | 3 +- .../spark/rapids/shims/GetMapValueMeta.scala | 3 +- .../spark/rapids/shims/GpuCastShims.scala | 3 +- .../ShimFilePartitionReaderFactory.scala | 3 +- .../spark/rapids/shims/TryModeShim.scala | 3 +- .../spark/rapids/shims/TypeUtilsShims.scala | 3 +- .../parquet/ParquetStringPredShims.scala | 3 +- .../rapids/DataSourceStrategyUtils.scala | 3 +- .../GpuCheckOverflowInTableInsert.scala | 3 +- .../rapids/aggregate/aggregateFunctions.scala | 3 +- .../apache/spark/sql/rapids/arithmetic.scala | 3 +- .../rapids/shims/Spark331PlusNonDBShims.scala | 3 +- ...aSourceTableAsSelectCommandMetaShims.scala | 3 +- .../rapids/shims/GpuInsertIntoHiveTable.scala | 3 +- .../spark/rapids/shims/GpuKnownNullable.scala | 3 +- ...dCreateHiveTableAsSelectCommandShims.scala | 3 +- .../execution/datasources/GpuWriteFiles.scala | 3 +- .../sql/hive/rapids/shims/HiveFileUtil.scala | 3 +- .../rapids/shims/HiveProviderCmdShims.scala | 3 +- .../sql/rapids/GpuFileFormatWriter.scala | 3 +- .../shims/GpuCastToNumberErrorShim.scala | 3 +- ...eDataSourceTableAsSelectCommandShims.scala | 3 +- .../sql/rapids/shims/GpuDataSource.scala | 3 +- .../sql/rapids/shims/SchemaUtilsShims.scala | 3 +- .../shims/SparkDateTimeExceptionShims.scala | 3 +- .../shims/SparkUpgradeExceptionShims.scala | 3 +- .../spark/rapids/shims/GetSequenceSize.scala | 3 +- .../spark/rapids/shims/CastCheckShims.scala | 3 +- .../nvidia/spark/rapids/shims/ConvShim.scala | 3 +- .../spark/rapids/shims/GlobalLimitShims.scala | 3 +- .../rapids/shims/GpuBroadcastJoinMeta.scala | 3 +- .../rapids/shims/OrcProtoWriterShim.scala | 3 +- .../shims/PartitionedFileUtilsShimBase.scala | 3 +- .../rapids/shims/ShuffleOriginUtil.scala | 3 +- .../rapids/shims/Spark340PlusNonDBShims.scala | 3 +- .../shims/TagScanForRuntimeFiltering.scala | 3 +- .../ParquetLegacyNanoAsLongShims.scala | 3 +- .../ParquetTimestampAnnotationShims.scala | 3 +- .../parquet/ParquetTimestampNTZShims.scala | 3 +- 
.../shuffle/RapidsShuffleIterator.scala | 3 +- .../spark/sql/catalyst/csv/GpuCsvUtils.scala | 3 +- .../sql/catalyst/json/GpuJsonUtils.scala | 3 +- .../apache/spark/sql/errors/ConvUtils.scala | 3 +- .../sql/rapids/RapidsCachingReader.scala | 3 +- .../execution/GpuBroadcastHashJoinExec.scala | 3 +- .../GpuBroadcastNestedLoopJoinExec.scala | 3 +- .../rapids/execution/ShimTrampolineUtil.scala | 3 +- .../rapids/shims/GpuJsonToStructsShim.scala | 3 +- .../shims/RapidsErrorUtils340PlusBase.scala | 3 +- .../shims/GpuAggregateInPandasExecMeta.scala | 2 +- .../rapids/shims/GpuToPrettyString.scala | 3 +- .../shims/GpuWindowGroupLimitExec.scala | 3 +- .../spark/rapids/shims/PlanShimsImpl.scala | 3 +- .../spark/rapids/shims/PythonUDFShim.scala | 3 +- .../execution/rapids/shims/SplitFiles.scala | 3 +- .../hive/rapids/shims/CreateFunctions.scala | 3 +- .../hive/rapids/shims/FileSinkDescShim.scala | 3 +- .../rapids/shims/HiveInspectorsShim.scala | 3 +- .../python/shims/GpuArrowPythonOutput.scala | 3 +- .../python/shims/GpuArrowPythonRunner.scala | 3 +- .../shims/GpuCoGroupedArrowPythonRunner.scala | 3 +- .../rapids/shims/DecimalMultiply128.scala | 3 +- .../nvidia/spark/rapids/GpuDeltaWrite.scala | 3 +- .../spark/rapids/GpuMergeRowsExecMeta.scala | 3 +- .../rapids/shims/BatchScanExecMetaBase.scala | 3 +- .../rapids/shims/ExternalSourceShim.scala | 3 +- .../spark/rapids/shims/GpuIntervalUtils.scala | 3 +- .../shims/KeyGroupedPartitioningShim.scala | 3 +- .../shims/LegacyBehaviorPolicyShim.scala | 3 +- .../rapids/shims/NullOutputStreamShim.scala | 3 +- .../rapids/shims/Spark350PlusNonDBShims.scala | 3 +- .../shims/v2WriteCommandMetasShim.scala | 3 +- .../catalyst/GpuProjectingColumnarBatch.scala | 3 +- .../parquet/rapids/shims/ParquetCVShims.scala | 2 +- .../shims/ShimVectorizedColumnReader.scala | 3 +- .../datasources/v2/GpuMergeRowsExec.scala | 3 +- .../v2/WriteToDataSourceV2Exec.scala | 3 +- .../sql/rapids/execution/GpuShuffleMeta.scala | 3 +- 
.../sql/rapids/shims/ArrowUtilsShim.scala | 3 +- .../sql/rapids/shims/DataTypeUtilsShim.scala | 3 +- .../rapids/shims/GpuMapInPandasExecMeta.scala | 3 +- .../rapids/shims/SchemaMetadataShims.scala | 3 +- .../spark/rapids/RapidsShuffleManager.scala | 3 +- .../rapids/shims/BatchScanExecMeta.scala | 3 +- .../rapids/shims/CastTimeToIntShim.scala | 3 +- .../spark/rapids/shims/GpuBatchScanExec.scala | 8 ++- .../spark/rapids/shims/RaiseErrorShim.scala | 3 +- .../shims/ShuffleManagerShimUtils.scala | 3 +- .../python/shims/PythonArgumentsUtils.scala | 3 +- .../ArrayInvalidArgumentErrorUtils.scala | 3 +- ...equenceSizeExceededLimitErrorBuilder.scala | 3 +- .../apache/spark/sql/rapids/shims/misc.scala | 3 +- .../shims/InMemoryTableScanExecLikeShim.scala | 3 +- .../rapids/shims/InMemoryTableScanUtils.scala | 3 +- .../spark/rapids/shims/CudfUnsafeRow.scala | 12 +++- .../rapids/shims/DateTimeUtilsShims.scala | 3 +- .../rapids/shims/GetJsonObjectShim.scala | 3 +- .../spark/rapids/shims/GpuOrcDataReader.scala | 3 +- .../spark/rapids/shims/LogicalPlanShims.scala | 3 +- .../rapids/shims/MapInArrowExecShims.scala | 3 +- .../rapids/shims/NullIntolerantShim.scala | 3 +- .../rapids/shims/OperatorsUtilShims.scala | 3 +- .../shims/PartitionedFileUtilsShim.scala | 3 +- .../shims/Spark400PlusCommonShims.scala | 3 +- .../GpuAtomicCreateTableAsSelectExec.scala | 3 +- .../GpuAtomicReplaceTableAsSelectExec.scala | 3 +- .../rapids/shims/FilePartitionShims.scala | 3 +- .../hive/rapids/shims/CommandUtilsShim.scala | 3 +- .../apache/spark/sql/nvidia/DFUDFShims.scala | 3 +- .../execution/GpuSubqueryBroadcastMeta.scala | 3 +- .../python/shims/GpuBasePythonRunner.scala | 3 +- .../python/shims/WritePythonUDFUtils.scala | 3 +- .../rapids/shims/GpuMapInArrowExecMeta.scala | 3 +- .../sql/rapids/shims/InvokeExprMeta.scala | 3 +- .../sql/rapids/shims/RapidsErrorUtils.scala | 3 +- .../sql/rapids/shims/SparkSessionUtils.scala | 3 +- .../rapids/shims/TrampolineConnectShims.scala | 3 +- 
.../shims/spark401/SparkShimsSuite.scala | 2 +- 212 files changed, 478 insertions(+), 347 deletions(-) diff --git a/datagen/src/main/spark400/scala/org/apache/spark/sql/tests/datagen/DataGenExprShims.scala b/datagen/src/main/spark400/scala/org/apache/spark/sql/tests/datagen/DataGenExprShims.scala index fe9c6f468ec..2eeac0d839b 100644 --- a/datagen/src/main/spark400/scala/org/apache/spark/sql/tests/datagen/DataGenExprShims.scala +++ b/datagen/src/main/spark400/scala/org/apache/spark/sql/tests/datagen/DataGenExprShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.tests.datagen diff --git a/integration_tests/src/test/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineUtilShim.scala b/integration_tests/src/test/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineUtilShim.scala index cf3556baec9..31670d66696 100644 --- a/integration_tests/src/test/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineUtilShim.scala +++ b/integration_tests/src/test/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineUtilShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/java/com/nvidia/spark/rapids/HashedPriorityQueue.java b/sql-plugin/src/main/java/com/nvidia/spark/rapids/HashedPriorityQueue.java index e96c7472119..cbb47629c33 100644 --- a/sql-plugin/src/main/java/com/nvidia/spark/rapids/HashedPriorityQueue.java +++ b/sql-plugin/src/main/java/com/nvidia/spark/rapids/HashedPriorityQueue.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2025, NVIDIA CORPORATION. + * Copyright (c) 2020-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -137,7 +137,7 @@ public boolean remove(Object o) { if (location == null) { return false; } - int heapIndex = location.getValue(); + int heapIndex = location.intValue(); fillHoleWithLast(heapIndex); return true; } @@ -260,7 +260,7 @@ private void ensureCapacityToInsert() { */ private boolean siftUp(T obj, MutableInt location) { boolean sifted = false; - int heapIndex = location.getValue(); + int heapIndex = location.intValue(); while (heapIndex > 0) { int parentIndex = getParentIndex(heapIndex); T parent = heap[parentIndex]; @@ -287,7 +287,7 @@ private boolean siftUp(T obj, MutableInt location) { */ private boolean siftDown(T obj, MutableInt location) { boolean sifted = false; - int heapIndex = location.getValue(); + int heapIndex = location.intValue(); final int parentIndexEnd = getParentIndex(size + 1); while (heapIndex < parentIndexEnd) { final int leftChildIndex = 2 * heapIndex + 1; diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuOverrides.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuOverrides.scala index 437c49608fe..baa41da977a 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuOverrides.scala +++ 
b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuOverrides.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019-2025, NVIDIA CORPORATION. + * Copyright (c) 2019-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -70,7 +70,7 @@ import org.apache.spark.sql.rapids.catalyst.expressions.GpuRand import org.apache.spark.sql.rapids.execution._ import org.apache.spark.sql.rapids.execution.python._ import org.apache.spark.sql.rapids.execution.python.GpuFlatMapGroupsInPandasExecMeta -import org.apache.spark.sql.rapids.shims.{GpuAscii, GpuMapInPandasExecMeta, GpuTimeAdd} +import org.apache.spark.sql.rapids.shims.{GpuAscii, GpuMapInPandasExecMeta} import org.apache.spark.sql.rapids.zorder.ZOrderRules import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String} @@ -1728,26 +1728,7 @@ object GpuOverrides extends Logging { GpuDateDiff(lhs, rhs) } }), - expr[TimeAdd]( - "Adds interval to timestamp", - ExprChecks.binaryProject(TypeSig.TIMESTAMP, TypeSig.TIMESTAMP, - ("start", TypeSig.TIMESTAMP, TypeSig.TIMESTAMP), - ("interval", TypeSig.lit(TypeEnum.CALENDAR) - .withPsNote(TypeEnum.CALENDAR, "month intervals are not supported"), - TypeSig.CALENDAR)), - (timeAdd, conf, p, r) => new BinaryExprMeta[TimeAdd](timeAdd, conf, p, r) { - override def tagExprForGpu(): Unit = { - GpuOverrides.extractLit(timeAdd.interval).foreach { lit => - val intvl = lit.value.asInstanceOf[CalendarInterval] - if (intvl.months != 0) { - willNotWorkOnGpu("interval months isn't supported") - } - } - } - - override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression = - GpuTimeAdd(lhs, rhs) - }), + // TimeAdd moved to TimeAddShims to handle API differences across Spark versions expr[DateAddInterval]( "Adds interval to date", ExprChecks.binaryProject(TypeSig.DATE, TypeSig.DATE, @@ -2726,7 +2707,8 @@ object GpuOverrides extends Logging { 
TypeSig.ARRAY.nested(TypeSig.all)), (in, conf, p, r) => new UnaryExprMeta[MapFromEntries](in, conf, p, r) { override def tagExprForGpu(): Unit = { - SQLConf.get.getConf(SQLConf.MAP_KEY_DEDUP_POLICY).toUpperCase match { + // Spark 4.1+ returns an enum value instead of String, so use toString first + SQLConf.get.getConf(SQLConf.MAP_KEY_DEDUP_POLICY).toString.toUpperCase match { case "EXCEPTION" | "LAST_WIN" => // Good we can support this case other => willNotWorkOnGpu(s"$other is not supported for config setting" + @@ -3122,7 +3104,8 @@ object GpuOverrides extends Logging { TypeSig.all - TypeSig.MAP.nested()))), (in, conf, p, r) => new ExprMeta[TransformKeys](in, conf, p, r) { override def tagExprForGpu(): Unit = { - SQLConf.get.getConf(SQLConf.MAP_KEY_DEDUP_POLICY).toUpperCase match { + // Spark 4.1+ returns an enum value instead of String, so use toString first + SQLConf.get.getConf(SQLConf.MAP_KEY_DEDUP_POLICY).toString.toUpperCase match { case "EXCEPTION"| "LAST_WIN" => // Good we can support this case other => willNotWorkOnGpu(s"$other is not supported for config setting" + @@ -4631,12 +4614,8 @@ object GpuOverrides extends Logging { (s, conf, p, r) => new GpuSubqueryBroadcastMeta(s, conf, p, r) ), SparkShimImpl.aqeShuffleReaderExec, - exec[AggregateInPandasExec]( - "The backend for an Aggregation Pandas UDF, this accelerates the data transfer between" + - " the Java process and the Python process. It also supports scheduling GPU resources" + - " for the Python process when enabled.", - ExecChecks(TypeSig.commonCudfTypes, TypeSig.all), - (aggPy, conf, p, r) => new GpuAggregateInPandasExecMeta(aggPy, conf, p, r)), + // AggregateInPandasExec renamed to ArrowAggregatePythonExec in Spark 4.1.0 + AggregateInPandasExecShims.execRule.orNull, exec[ArrowEvalPythonExec]( "The backend of the Scalar Pandas UDFs. Accelerates the data transfer between the" + " Java process and the Python process. 
It also supports scheduling GPU resources" + @@ -4693,7 +4672,7 @@ object GpuOverrides extends Logging { neverReplaceExec[DropNamespaceExec]("Namespace metadata operation"), neverReplaceExec[SetCatalogAndNamespaceExec]("Namespace metadata operation"), SparkShimImpl.neverReplaceShowCurrentNamespaceCommand, - neverReplaceExec[ShowNamespacesExec]("Namespace metadata operation"), + ShowNamespacesExecShims.neverReplaceExec.orNull, neverReplaceExec[AlterTableExec]("Table metadata operation"), neverReplaceExec[CreateTableExec]("Table metadata operation"), neverReplaceExec[DeleteFromTableExec]("Table metadata operation"), diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsMeta.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsMeta.scala index 7508b9be83b..4390c1874b4 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsMeta.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019-2025, NVIDIA CORPORATION. + * Copyright (c) 2019-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -35,7 +35,7 @@ import org.apache.spark.sql.execution.aggregate.BaseAggregateExec import org.apache.spark.sql.execution.command.{DataWritingCommand, RunnableCommand} import org.apache.spark.sql.execution.exchange.ShuffleExchangeExec import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, BroadcastNestedLoopJoinExec} -import org.apache.spark.sql.execution.python.AggregateInPandasExec +import com.nvidia.spark.rapids.shims.AggregateInPandasExecShims import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.rapids.TimeZoneDB import org.apache.spark.sql.rapids.aggregate.{CpuToGpuAggregateBufferConverter, GpuToCpuAggregateBufferConverter} @@ -986,8 +986,9 @@ object ExpressionContext { parent.get.wrapped match { case agg: SparkPlan if SparkShimImpl.isWindowFunctionExec(agg) => WindowAggExprContext - case agg: AggregateInPandasExec => - if (agg.groupingExpressions.isEmpty) { + // AggregateInPandasExec renamed to ArrowAggregatePythonExec in Spark 4.1.0 + case agg: SparkPlan if AggregateInPandasExecShims.isAggregateInPandasExec(agg) => + if (AggregateInPandasExecShims.getGroupingExpressions(agg).isEmpty) { ReductionAggExprContext } else { GroupByAggExprContext diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuDataSourceBase.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuDataSourceBase.scala index 99bbcb199d2..00eab544e1a 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuDataSourceBase.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuDataSourceBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2024, NVIDIA CORPORATION. + * Copyright (c) 2020-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,10 +41,9 @@ import org.apache.spark.sql.execution.datasources.json.JsonFileFormat import org.apache.spark.sql.execution.datasources.orc.OrcFileFormat import org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2 import org.apache.spark.sql.execution.datasources.v2.orc.OrcDataSourceV2 -import org.apache.spark.sql.execution.streaming._ import org.apache.spark.sql.execution.streaming.sources.{RateStreamProvider, TextSocketSourceProvider} import org.apache.spark.sql.internal.SQLConf -import org.apache.spark.sql.rapids.shims.{RapidsErrorUtils, SchemaUtilsShims} +import org.apache.spark.sql.rapids.shims.{FileStreamSinkShims, RapidsErrorUtils, SchemaUtilsShims} import org.apache.spark.sql.sources._ import org.apache.spark.sql.types.{DataType, StructType} import org.apache.spark.util.{HadoopFSUtils, ThreadUtils, Utils} @@ -213,12 +212,12 @@ abstract class GpuDataSourceBase( // We are reading from the results of a streaming query. Load files from the metadata log // instead of listing them using HDFS APIs. 
case (format: FileFormat, _) - if FileStreamSink.hasMetadata( + if FileStreamSinkShims.hasMetadata( caseInsensitiveOptions.get("path").toSeq ++ paths, newHadoopConfiguration(), sparkSession.sessionState.conf) => val basePath = new Path((caseInsensitiveOptions.get("path").toSeq ++ paths).head) - val fileCatalog = new MetadataLogFileIndex(sparkSession, basePath, + val fileCatalog = FileStreamSinkShims.newMetadataLogFileIndex(sparkSession, basePath, caseInsensitiveOptions, userSpecifiedSchema) val dataSchema = userSpecifiedSchema.orElse { // Remove "path" option so that it is not added to the paths returned by diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/catalyst/expressions/GpuRandomExpressions.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/catalyst/expressions/GpuRandomExpressions.scala index b0091c5daa5..75f1d756379 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/catalyst/expressions/GpuRandomExpressions.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/catalyst/expressions/GpuRandomExpressions.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2025, NVIDIA CORPORATION. + * Copyright (c) 2020-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -76,6 +76,16 @@ case class GpuRand(child: Expression, doContextCheck: Boolean) extends ShimUnary override def withNewSeed(seed: Long): GpuRand = GpuRand(GpuLiteral(seed, LongType), doContextCheck) + // Added in Spark 4.1.0 + def withShiftedSeed(shift: Long): Expression = { + val newSeed = child match { + case GpuLiteral(s, IntegerType) => s.asInstanceOf[Int].toLong + shift + case GpuLiteral(s, LongType) => s.asInstanceOf[Long] + shift + case _ => shift + } + withNewSeed(newSeed) + } + def seedExpression: Expression = child override lazy val deterministic: Boolean = false diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/collectionOperations.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/collectionOperations.scala index 133885adb2d..cafffffa2a3 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/collectionOperations.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/collectionOperations.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -782,8 +782,9 @@ case class GpuMapFromEntries(child: Expression) extends GpuUnaryExpression with // Check for null keys GpuMapUtils.assertNoNullKeys(inputBase) - // Handle duplicate keys based on the policy - mapKeyDedupPolicy.toUpperCase match { + // Handle duplicate keys based on the policy. + // Spark 4.1+ returns an enum value instead of String, so use toString first. 
+ mapKeyDedupPolicy.toString.toUpperCase match { case "EXCEPTION" => // Check if there are any duplicate keys withResource(inputBase.dropListDuplicatesWithKeysValues()) { deduped => @@ -1621,7 +1622,7 @@ case class GpuMapFromArrays(left: Expression, right: Expression) extends GpuBina withResource(sanitizedLhsBase) { sanitizedLhsBase => withResource(sanitizedRhsBase) { sanitizedRhsBase => - if(mapKeyDedupPolicy == "EXCEPTION") { + if(mapKeyDedupPolicy.toString == "EXCEPTION") { val containsDuplicates = rowContainsDuplicates(sanitizedLhsBase) require(!containsDuplicates, "[DUPLICATED_MAP_KEY] Duplicate map key was found") @@ -1641,7 +1642,7 @@ case class GpuMapFromArrays(left: Expression, right: Expression) extends GpuBina val mapCol = constructMapColumn(sanitizedLhsBase, sanitizedRhsBase) val result = withResource(mapCol) { mapCol => - mapKeyDedupPolicy match { + mapKeyDedupPolicy.toString match { case "LAST_WIN" if rowContainsDuplicates(sanitizedLhsBase) => mapCol.dropListDuplicatesWithKeysValues case _ => diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala index e2af7408af1..b3275d2c9c2 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019-2025, NVIDIA CORPORATION. + * Copyright (c) 2019-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -46,7 +46,7 @@ import org.apache.spark.sql.catalyst.plans.logical.Statistics import org.apache.spark.sql.catalyst.plans.physical.{BroadcastMode, BroadcastPartitioning, Partitioning} import org.apache.spark.sql.execution.{SparkPlan, SQLExecution} import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, Exchange} -import org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.MAX_BROADCAST_TABLE_BYTES +import com.nvidia.spark.rapids.shims.BroadcastExchangeShims.MAX_BROADCAST_TABLE_BYTES import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, BroadcastNestedLoopJoinExec} import org.apache.spark.sql.execution.metric.SQLMetrics import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf} diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala index 3d4932fc644..ed025d31d12 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2025, NVIDIA CORPORATION. + * Copyright (c) 2020-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -37,16 +37,17 @@ import org.apache.spark.sql.catalyst.plans.physical.{AllTuples, ClusteredDistrib import org.apache.spark.sql.execution.python._ import org.apache.spark.sql.rapids.aggregate.GpuAggregateExpression import org.apache.spark.sql.rapids.execution.python.shims.{GpuArrowPythonRunner, PythonArgumentUtils} +import org.apache.spark.sql.rapids.execution.python.shims.WindowInPandasExecTypeShim.WindowInPandasExecType import org.apache.spark.sql.rapids.shims.{ArrowUtilsShim, DataTypeUtilsShim} import org.apache.spark.sql.types.{IntegerType, StructField, StructType} import org.apache.spark.sql.vectorized.ColumnarBatch abstract class GpuWindowInPandasExecMetaBase( - winPandas: WindowInPandasExec, + winPandas: WindowInPandasExecType, conf: RapidsConf, parent: Option[RapidsMeta[_, _, _]], rule: DataFromReplacementRule) - extends SparkPlanMeta[WindowInPandasExec](winPandas, conf, parent, rule) { + extends SparkPlanMeta[WindowInPandasExecType](winPandas, conf, parent, rule) { override def replaceMessage: String = "partially run on GPU" override def noReplacementPossibleMessage(reasons: String): String = diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala index 2367387aab6..a32f58b90ed 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala index c0fb3c8ab9c..d9529c81289 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala index 8403274a6c7..28cc0207d0d 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2025, NVIDIA CORPORATION. + * Copyright (c) 2020-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -46,6 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala index 73fa4f8173d..981b2b8b210 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/FileSourceScanExecMeta.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/FileSourceScanExecMeta.scala index 8efc30f2272..59a5a594def 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/FileSourceScanExecMeta.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/FileSourceScanExecMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExecBase.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExecBase.scala index 0dd7416a75d..2d112a03d2d 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExecBase.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExecBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala index ad0a4fdd20f..11a57daf18b 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -44,6 +44,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader320Plus.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader320Plus.scala index 3611d4ae33f..25e14b2d7be 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader320Plus.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader320Plus.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReaderBase.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReaderBase.scala index e42ced441ff..61cc691efda 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReaderBase.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReaderBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -46,6 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala index 34ab6dbb72d..17ab82c5296 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala index 13f09211231..25422d6cf43 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HashUtils.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HashUtils.scala index 2e3be05b402..fffffa29ea0 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HashUtils.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HashUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -46,6 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HybridFileSourceScanExecMeta.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HybridFileSourceScanExecMeta.scala index 799579ea957..5f74f150587 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HybridFileSourceScanExecMeta.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HybridFileSourceScanExecMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala index 7edcb625ed8..4f4d0d2bd21 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala index cf4d92fab2f..4d57a3ef8d9 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -44,6 +44,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcShims320untilAllBase.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcShims320untilAllBase.scala index 0a525342d49..f026cd91173 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcShims320untilAllBase.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcShims320untilAllBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -44,6 +44,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RapidsCsvScanMeta.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RapidsCsvScanMeta.scala index b3375f76d8b..3b616189fc4 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RapidsCsvScanMeta.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RapidsCsvScanMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RebaseShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RebaseShims.scala index 4481c460a0b..2cc0483dd53 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RebaseShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RebaseShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -46,6 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims @@ -61,14 +62,15 @@ trait RebaseShims { SQLConf.AVRO_REBASE_MODE_IN_READ.key final def avroRebaseWriteKey: String = SQLConf.AVRO_REBASE_MODE_IN_WRITE.key + // Spark 4.1+ returns an enum value instead of String, so use toString final def parquetRebaseRead(conf: SQLConf): String = - conf.getConf(SQLConf.PARQUET_REBASE_MODE_IN_READ) + conf.getConf(SQLConf.PARQUET_REBASE_MODE_IN_READ).toString final def parquetRebaseWrite(conf: SQLConf): String = - conf.getConf(SQLConf.PARQUET_REBASE_MODE_IN_WRITE) + conf.getConf(SQLConf.PARQUET_REBASE_MODE_IN_WRITE).toString def int96ParquetRebaseRead(conf: SQLConf): String = - conf.getConf(SQLConf.PARQUET_INT96_REBASE_MODE_IN_READ) + conf.getConf(SQLConf.PARQUET_INT96_REBASE_MODE_IN_READ).toString def int96ParquetRebaseWrite(conf: SQLConf): String = - conf.getConf(SQLConf.PARQUET_INT96_REBASE_MODE_IN_WRITE) + conf.getConf(SQLConf.PARQUET_INT96_REBASE_MODE_IN_WRITE).toString def int96ParquetRebaseReadKey: String = SQLConf.PARQUET_INT96_REBASE_MODE_IN_READ.key def 
int96ParquetRebaseWriteKey: String = diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala index ffa1f6a09c1..7ffebc5c5ae 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala index 5179828c462..f2bd28c402b 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala index d86793329b0..5f9d691cb63 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala index 64641bd3e4a..54dc8cd4194 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala index 10d0cb51325..725bc6baedc 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusNonDBShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusNonDBShims.scala index 88ae02bb3f1..67da94119cb 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusNonDBShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusNonDBShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -42,6 +42,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims @@ -49,18 +50,16 @@ import com.nvidia.spark.rapids.{BucketJoinTwoSidesPrefetch, FoldLocalAggregate, import org.apache.hadoop.fs.FileStatus import org.apache.spark.sql.catalyst.InternalRow -import org.apache.spark.sql.catalyst.expressions.NamedExpression import org.apache.spark.sql.catalyst.plans.physical.BroadcastMode import org.apache.spark.sql.execution.SparkPlan import org.apache.spark.sql.execution.adaptive.{BroadcastQueryStageExec, ShuffleQueryStageExec} import org.apache.spark.sql.execution.datasources.PartitioningAwareFileIndex import org.apache.spark.sql.execution.exchange.ReusedExchangeExec -import org.apache.spark.sql.execution.python.WindowInPandasExec /** * Shim methods that can be compiled with every supported 3.2.0+ except Databricks versions */ -trait Spark320PlusNonDBShims extends SparkShims { +trait Spark320PlusNonDBShims extends SparkShims with WindowInPandasShims { override final def broadcastModeTransform(mode: BroadcastMode, rows: Array[InternalRow]): Any = mode.transform(rows) @@ -74,7 +73,7 @@ trait Spark320PlusNonDBShims extends SparkShims { fileIndex.allFiles() } - def getWindowExpressions(winPy: WindowInPandasExec): Seq[NamedExpression] = winPy.windowExpression + // getWindowExpressions is now provided by WindowInPandasShims /** * Case class ShuffleQueryStageExec holds an additional field shuffleOrigin diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala index 3d57055f30a..ef327b7f5e7 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. 
+ * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims @@ -86,9 +87,8 @@ import org.apache.spark.unsafe.types.CalendarInterval /** * Shim base class that can be compiled with every supported 3.2.0+ */ -trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { - - def getWindowExpressions(winPy: WindowInPandasExec): Seq[NamedExpression] +trait Spark320PlusShims extends SparkShims with RebaseShims with WindowInPandasShims + with Logging { override final def aqeShuffleReaderExec: ExecRule[_ <: SparkPlan] = exec[AQEShuffleReadExec]( "A wrapper of shuffle query stage", @@ -203,30 +203,9 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { // ANSI support for ABS was added in 3.2.0 SPARK-33275 override def convertToGpu(child: Expression): GpuExpression = GpuAbs(child, ansiEnabled) - }), - GpuOverrides.expr[TimeAdd]( - "Adds interval to timestamp", - ExprChecks.binaryProject(TypeSig.TIMESTAMP, TypeSig.TIMESTAMP, - ("start", TypeSig.TIMESTAMP, TypeSig.TIMESTAMP), - ("interval", TypeSig.lit(TypeEnum.DAYTIME) + TypeSig.lit(TypeEnum.CALENDAR), - TypeSig.DAYTIME + TypeSig.CALENDAR)), - (timeAdd, conf, p, r) => new BinaryExprMeta[TimeAdd](timeAdd, conf, p, r) { - override def tagExprForGpu(): Unit = { - GpuOverrides.extractLit(timeAdd.interval).foreach { lit => - lit.dataType match { - case CalendarIntervalType => - val intvl = lit.value.asInstanceOf[CalendarInterval] - if (intvl.months != 0) { - willNotWorkOnGpu("interval months isn't supported") - } - case _: DayTimeIntervalType => // Supported - } - } - } - - override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression = - GpuTimeAdd(lhs, rhs) - }), + }) + // TimeAdd moved to 
TimeAddShims to handle version differences + ).map(r => (r.getClassFor.asSubclass(classOf[Expression]), r)).toMap ++ TimeAddShims.exprs ++ Seq( GpuOverrides.expr[SpecifiedWindowFrame]( "Specification of the width of the group (or \"frame\") of input rows " + "around which a window function is evaluated", @@ -285,30 +264,10 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with Logging { TypeSig.STRUCT + TypeSig.MAP + TypeSig.ARRAY + TypeSig.BINARY + GpuTypeShims.additionalCommonOperatorSupportedTypes).nested(), TypeSig.all), - (p, conf, parent, r) => new OverwriteByExpressionExecV1Meta(p, conf, parent, r)), - exec[WindowInPandasExec]( - "The backend for Window Aggregation Pandas UDF, Accelerates the data transfer between" + - " the Java process and the Python process. It also supports scheduling GPU resources" + - " for the Python process when enabled. For now it only supports row based window frame.", - ExecChecks( - (TypeSig.commonCudfTypes + TypeSig.ARRAY).nested(TypeSig.commonCudfTypes), - TypeSig.all), - (winPy, conf, p, r) => new GpuWindowInPandasExecMetaBase(winPy, conf, p, r) { - override val windowExpressions: Seq[BaseExprMeta[NamedExpression]] = - getWindowExpressions(winPy).map(GpuOverrides.wrapExpr(_, this.conf, Some(this))) - - override def convertToGpu(): GpuExec = { - GpuWindowInPandasExec( - windowExpressions.map(_.convertToGpu()), - partitionSpec.map(_.convertToGpu()), - // leave ordering expression on the CPU, it's not used for GPU computation - winPy.orderSpec, - childPlans.head.convertIfNeeded() - )(winPy.partitionSpec) - } - }).disabledByDefault("it only supports row based frame for now") + (p, conf, parent, r) => new OverwriteByExpressionExecV1Meta(p, conf, parent, r)) + // WindowInPandasExec moved to WindowInPandasExecShims to handle version differences ).map(r => (r.getClassFor.asSubclass(classOf[SparkPlan]), r)).toMap - maps ++ ScanExecShims.execs + maps ++ ScanExecShims.execs ++ WindowInPandasExecShims.execs } override def 
getScans: Map[Class[_ <: Scan], ScanRule[_ <: Scan]] = Seq( diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/StaticPartitionShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/StaticPartitionShims.scala index 519b90bc7d2..572e87d59bf 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/StaticPartitionShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/StaticPartitionShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -45,6 +45,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TreeNode.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TreeNode.scala index 18f398d02f1..15a6c0b773d 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TreeNode.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TreeNode.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/XxHash64Shims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/XxHash64Shims.scala index f4e10c0875f..9594e46c6a9 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/XxHash64Shims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/XxHash64Shims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala index 372427b1dc5..b1949da0346 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -46,6 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala index 600f6fbf468..5ee2d1bfdcd 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala index a8e1f16a4f9..626f9acebdc 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/v1FallbackWriters.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/v1FallbackWriters.scala index 2e5fdc6f6ed..6dee91a0150 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/v1FallbackWriters.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/v1FallbackWriters.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala index a2a3520311a..47668945435 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala index e07fc7a27dc..e198a3d8824 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2025, NVIDIA CORPORATION. + * Copyright (c) 2020-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala index d95eb886928..81fc7fef03d 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala index 7cadc661c77..250a3ea7e23 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.rapids.shims.storage diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala index eebdc3911f1..1d9811514e6 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -42,6 +42,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/RapidsShuffleWriter.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/RapidsShuffleWriter.scala index a5a9303bfae..f5a76967aeb 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/RapidsShuffleWriter.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/RapidsShuffleWriter.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala index 41933f341a3..20ba333a7be 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -43,6 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala index 124dcb7e501..882122a17ac 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala index f92f9093c8c..746b6fe65cc 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala index 31d5ca13834..82e243b8380 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/Spark32XShimsUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/Spark32XShimsUtils.scala index e59a0030da8..86ea5c2e7ad 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/Spark32XShimsUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/Spark32XShimsUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala index 25b3b8e15ae..fab83fd26fd 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsPushBasedFetchHelper.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsPushBasedFetchHelper.scala index ede4a3051a9..6e2bae374c6 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsPushBasedFetchHelper.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsPushBasedFetchHelper.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.storage diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala index 1cebee2447e..9a3a2d6adc8 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.storage diff --git a/sql-plugin/src/main/spark321/scala/com/nvidia/spark/rapids/shims/Spark321PlusShims.scala b/sql-plugin/src/main/spark321/scala/com/nvidia/spark/rapids/shims/Spark321PlusShims.scala index 35a1c154c8e..d4b881b157a 100644 --- a/sql-plugin/src/main/spark321/scala/com/nvidia/spark/rapids/shims/Spark321PlusShims.scala +++ b/sql-plugin/src/main/spark321/scala/com/nvidia/spark/rapids/shims/Spark321PlusShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -46,6 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark323/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala b/sql-plugin/src/main/spark323/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala index 6cc452945eb..7935e788efa 100644 --- a/sql-plugin/src/main/spark323/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala +++ b/sql-plugin/src/main/spark323/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilter.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilter.scala index 10cb487251c..bf630527569 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilter.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilter.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilterMightContain.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilterMightContain.scala index 16c84383b81..81a26c5d4b4 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilterMightContain.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilterMightContain.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuInSubqueryExec.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuInSubqueryExec.scala index bf3a2983f27..e7da3e77af2 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuInSubqueryExec.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuInSubqueryExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -37,6 +37,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala index d1d4d0bbe77..a95d625605f 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala index 7249502cd1c..9a3b367d4ab 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala index 5d3bee64538..c94a365b013 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala index 75d8d846ed7..429c3984afa 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala index bd75e81122b..22a7441be0c 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims @@ -49,37 +50,10 @@ import com.nvidia.spark.rapids._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.rapids._ -import org.apache.spark.sql.rapids.shims.{GpuDivideDTInterval, GpuMultiplyDTInterval, GpuTimeAdd} -import org.apache.spark.sql.types.{CalendarIntervalType, DayTimeIntervalType} -import org.apache.spark.unsafe.types.CalendarInterval +import org.apache.spark.sql.rapids.shims.{GpuDivideDTInterval, GpuMultiplyDTInterval} object DayTimeIntervalShims { def exprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = Seq( - GpuOverrides.expr[TimeAdd]( - "Adds interval to timestamp", - ExprChecks.binaryProject(TypeSig.TIMESTAMP, TypeSig.TIMESTAMP, - ("start", TypeSig.TIMESTAMP, TypeSig.TIMESTAMP), - // interval support DAYTIME column or CALENDAR literal - ("interval", TypeSig.DAYTIME + TypeSig.lit(TypeEnum.CALENDAR) - .withPsNote(TypeEnum.CALENDAR, "month intervals are not supported"), - TypeSig.DAYTIME 
+ TypeSig.CALENDAR)), - (timeAdd, conf, p, r) => new BinaryExprMeta[TimeAdd](timeAdd, conf, p, r) { - override def tagExprForGpu(): Unit = { - GpuOverrides.extractLit(timeAdd.interval).foreach { lit => - lit.dataType match { - case CalendarIntervalType => - val intvl = lit.value.asInstanceOf[CalendarInterval] - if (intvl.months != 0) { - willNotWorkOnGpu("interval months isn't supported") - } - case _: DayTimeIntervalType => // Supported - } - } - } - - override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression = - GpuTimeAdd(lhs, rhs) - }), GpuOverrides.expr[Abs]( "Absolute value", ExprChecks.unaryProjectAndAstInputMatchesOutput( diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala index 497ad02d1e9..db55affc2eb 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -37,6 +37,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/FilteredPartitions.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/FilteredPartitions.scala index 0e7b293c539..d87b551762c 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/FilteredPartitions.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/FilteredPartitions.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. 
+ * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala index 19dde5b220d..8f05c92a97e 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala index 3e8ebdb0bc6..dc870ad7fab 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtilsBase.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtilsBase.scala index a1b8b8e4204..3e75e6bc684 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtilsBase.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtilsBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -40,6 +40,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala index 626e7287e71..45dac2ff61e 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala index cb846a23f34..34ff51c54d5 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -40,6 +40,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala index a62af4f806f..d39228e4d63 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -37,6 +37,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala index 062f984048c..62d76b8412b 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcShims.scala index 309fe8aac6a..4cb2948f6ad 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -38,6 +38,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala index 0c93d904e1d..5ddc99136ad 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala index e260d8484a5..9e43541d186 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala index 0ab316433f1..50f08afe247 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RoundingShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RoundingShims.scala index 9b0968e69dc..b5ea21d4b24 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RoundingShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RoundingShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/ScanExecShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/ScanExecShims.scala index cdbd2d3aea6..364cf622a6c 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/ScanExecShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/ScanExecShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusNonDBShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusNonDBShims.scala index be61f7a0343..d6f249685fd 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusNonDBShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusNonDBShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -37,6 +37,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusShims.scala index 42a6ae8fe3d..d936a6bd796 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -37,6 +37,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetFieldIdShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetFieldIdShims.scala index 4a79eafc71a..e4e21bb6e3b 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetFieldIdShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetFieldIdShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -40,6 +40,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetSchemaClipShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetSchemaClipShims.scala index bfefd119088..b11ec25b85a 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetSchemaClipShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetSchemaClipShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/RapidsVectorizedColumnReader.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/RapidsVectorizedColumnReader.scala index 0cc6558c457..d2e8226eb5a 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/RapidsVectorizedColumnReader.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/RapidsVectorizedColumnReader.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -40,6 +40,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.parquet diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala index 6e008272def..e69656f0dc0 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.parquet diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/aggregate/GpuBloomFilterAggregate.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/aggregate/GpuBloomFilterAggregate.scala index 83247250ff5..bb226092afe 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/aggregate/GpuBloomFilterAggregate.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/aggregate/GpuBloomFilterAggregate.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.aggregate diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtilsFor330plus.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtilsFor330plus.scala index e2fc2b0b64f..1bc42508e0d 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtilsFor330plus.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtilsFor330plus.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala index e939445b8a1..988bbeb51b0 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/intervalExpressions.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/intervalExpressions.scala index e6dbb0ce08b..8e7d461aee1 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/intervalExpressions.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/intervalExpressions.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala index 8e980952669..0e9e317549a 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala index f70e569504e..9efb4ee67b9 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala index c347913143a..a025f2026d3 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala index 204046b9773..afe3e82324e 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala index 4f1e8785eb0..5a6cf3c026a 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -34,6 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala index 18dd2616494..988658d320b 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala index cdc7d1e99e8..daa25d7dded 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -34,6 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala index 54a447f8a95..6eb41cc2e8e 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala index 4fbd25bea4b..b135bfeda1e 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -34,6 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/parquet/ParquetStringPredShims.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/parquet/ParquetStringPredShims.scala index 68bc543ae72..f90f06bb3e0 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/parquet/ParquetStringPredShims.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/parquet/ParquetStringPredShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/execution/datasources/rapids/DataSourceStrategyUtils.scala b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/execution/datasources/rapids/DataSourceStrategyUtils.scala index 92d1de38f8e..024b32029bb 100644 --- a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/execution/datasources/rapids/DataSourceStrategyUtils.scala +++ b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/execution/datasources/rapids/DataSourceStrategyUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -34,6 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.rapids diff --git a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/GpuCheckOverflowInTableInsert.scala b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/GpuCheckOverflowInTableInsert.scala index 8093b4aa761..c215fe7b296 100644 --- a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/GpuCheckOverflowInTableInsert.scala +++ b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/GpuCheckOverflowInTableInsert.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -39,6 +39,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala index 6f1fccc3711..e5a45b80f34 100644 --- a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala +++ b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -34,6 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.aggregate diff --git a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/arithmetic.scala b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/arithmetic.scala index 04abb049611..ef280f41a64 100644 --- a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/arithmetic.scala +++ b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/arithmetic.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark331/scala/com/nvidia/spark/rapids/shims/Spark331PlusNonDBShims.scala b/sql-plugin/src/main/spark331/scala/com/nvidia/spark/rapids/shims/Spark331PlusNonDBShims.scala index 10868507d0a..50874f08c87 100644 --- a/sql-plugin/src/main/spark331/scala/com/nvidia/spark/rapids/shims/Spark331PlusNonDBShims.scala +++ b/sql-plugin/src/main/spark331/scala/com/nvidia/spark/rapids/shims/Spark331PlusNonDBShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -35,6 +35,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala index 4f9bd8dbca6..bda176311db 100644 --- a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala +++ b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuInsertIntoHiveTable.scala b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuInsertIntoHiveTable.scala index b51b03efd8b..38c62e1b2cd 100644 --- a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuInsertIntoHiveTable.scala +++ b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuInsertIntoHiveTable.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuKnownNullable.scala b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuKnownNullable.scala index 7d30a30b3bb..7b1be5fde11 100644 --- a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuKnownNullable.scala +++ b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuKnownNullable.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala index 1e180b4713a..b500f164292 100644 --- a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala +++ b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/execution/datasources/GpuWriteFiles.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/execution/datasources/GpuWriteFiles.scala index aceb6d73a97..6315f4d4d98 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/execution/datasources/GpuWriteFiles.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/execution/datasources/GpuWriteFiles.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveFileUtil.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveFileUtil.scala index 7426adce4bf..ce2928d00d0 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveFileUtil.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveFileUtil.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala index 3f9017b262e..7aad54b061f 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala index 57dc8b2e4bd..68eb854231a 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCastToNumberErrorShim.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCastToNumberErrorShim.scala index 83926fabd18..7bc5c4b4914 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCastToNumberErrorShim.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCastToNumberErrorShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala index f9a0dce8fa0..99f4c428af8 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala index 712b70f0632..662e5e845be 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala index 4709b8ebc1f..892dd306443 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkDateTimeExceptionShims.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkDateTimeExceptionShims.scala index ddd86676322..b5651a1fdad 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkDateTimeExceptionShims.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkDateTimeExceptionShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala index 7098a00c78c..475f383d031 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark334/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala b/sql-plugin/src/main/spark334/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala index 8174b4fb0ca..f431fbadc32 100644 --- a/sql-plugin/src/main/spark334/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala +++ b/sql-plugin/src/main/spark334/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -28,6 +28,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala index bafb9244068..86b72246c9e 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ConvShim.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ConvShim.scala index 0caaaa9abc2..f36b11762c7 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ConvShim.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ConvShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala index 7aa3b28455c..292e681e0e7 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala index 0bfe87f6b9a..b177984ca91 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -29,6 +29,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala index d92bd6954c4..68261f636f1 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala @@ -1,6 +1,6 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShimBase.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShimBase.scala index 0877fbe6d04..55650f62e0e 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShimBase.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShimBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,6 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala index df59948cef8..4e673296dbe 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -30,6 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/Spark340PlusNonDBShims.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/Spark340PlusNonDBShims.scala index 6e28a071a00..b7e8137454a 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/Spark340PlusNonDBShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/Spark340PlusNonDBShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,6 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/TagScanForRuntimeFiltering.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/TagScanForRuntimeFiltering.scala index 01423525f12..05a816e34f5 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/TagScanForRuntimeFiltering.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/TagScanForRuntimeFiltering.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetLegacyNanoAsLongShims.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetLegacyNanoAsLongShims.scala index 36339e280dd..e5d54d0d848 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetLegacyNanoAsLongShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetLegacyNanoAsLongShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampAnnotationShims.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampAnnotationShims.scala index 47d6134ee9c..cbdd1fade9f 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampAnnotationShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampAnnotationShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampNTZShims.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampNTZShims.scala index 981d5c4186a..8fc96095899 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampNTZShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampNTZShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala index ed0f3bc91fc..daf66422bef 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shuffle diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala index 80c115b67b5..8fcb9a3f973 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.catalyst.csv diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala index 30b8f95aedc..4bf54eb08cb 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.catalyst.json diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/errors/ConvUtils.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/errors/ConvUtils.scala index 95669e6f479..c7778902a7a 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/errors/ConvUtils.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/errors/ConvUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.errors diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala index 6e0407cc47e..9738e655324 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala index 864e71079aa..d8fb5c1c2e9 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,6 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala index 3f3bc3145be..c2a9ef5fc66 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -30,6 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala index e63634502bd..2a20f8e6977 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2025, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,6 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala index 5331ab77211..f520d5b34ea 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -31,6 +31,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils340PlusBase.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils340PlusBase.scala index 620126eccca..a080c25de09 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils340PlusBase.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils340PlusBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,6 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuAggregateInPandasExecMeta.scala b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuAggregateInPandasExecMeta.scala index ee58b8b52f3..3dc6c83a145 100644 --- a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuAggregateInPandasExecMeta.scala +++ b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuAggregateInPandasExecMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuToPrettyString.scala b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuToPrettyString.scala index e7008b35ad6..19ca30b8505 100644 --- a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuToPrettyString.scala +++ b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuToPrettyString.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuWindowGroupLimitExec.scala b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuWindowGroupLimitExec.scala index 485cf509771..9685275367d 100644 --- a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuWindowGroupLimitExec.scala +++ b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuWindowGroupLimitExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -27,6 +27,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala index 4c8ddbe46a0..25818de0b0c 100644 --- a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala +++ b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,6 +19,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala index 6702d5f5116..c8601afd63f 100644 --- a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala +++ b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -27,6 +27,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/execution/rapids/shims/SplitFiles.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/execution/rapids/shims/SplitFiles.scala index d09a5e2d823..dfb9371985e 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/execution/rapids/shims/SplitFiles.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/execution/rapids/shims/SplitFiles.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -28,6 +28,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/CreateFunctions.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/CreateFunctions.scala index caa14472108..72fe3f5604f 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/CreateFunctions.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/CreateFunctions.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -27,6 +27,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala index 9620fbe34ff..938e2d29723 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -28,6 +28,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala index 34dd340f6d8..fc48b548ef5 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -28,6 +28,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala index 1980b66e67d..12c8bef827f 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,6 +19,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala index 43d246a645c..47306245636 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,6 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala index 9dc780c7a6e..4b7d9bf2b5a 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,6 +19,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark342/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala b/sql-plugin/src/main/spark342/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala index 490970a8c6c..d9f1762d94a 100644 --- a/sql-plugin/src/main/spark342/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala +++ b/sql-plugin/src/main/spark342/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -28,6 +28,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuDeltaWrite.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuDeltaWrite.scala index 9f628d2b1d5..2cbf276dbbd 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuDeltaWrite.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuDeltaWrite.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuMergeRowsExecMeta.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuMergeRowsExecMeta.scala index 85ed5867023..5458deaefa2 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuMergeRowsExecMeta.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuMergeRowsExecMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -25,6 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/BatchScanExecMetaBase.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/BatchScanExecMetaBase.scala index 245590b06b3..7449c283428 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/BatchScanExecMetaBase.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/BatchScanExecMetaBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,6 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/ExternalSourceShim.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/ExternalSourceShim.scala index 4c4748f9e80..aeeb65195ad 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/ExternalSourceShim.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/ExternalSourceShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -26,6 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala index 04cb195b7a5..104e49bcaf7 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -24,6 +24,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/KeyGroupedPartitioningShim.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/KeyGroupedPartitioningShim.scala index 6c22a682013..60979e1a939 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/KeyGroupedPartitioningShim.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/KeyGroupedPartitioningShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -24,6 +24,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala index d55876ab576..c07b687c10a 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,6 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala index 676abbd465e..7c4ab68164f 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -26,6 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/Spark350PlusNonDBShims.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/Spark350PlusNonDBShims.scala index 15d32f01b9d..6f1e64fd3b1 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/Spark350PlusNonDBShims.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/Spark350PlusNonDBShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/v2WriteCommandMetasShim.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/v2WriteCommandMetasShim.scala index 1082a479644..db5bef6934f 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/v2WriteCommandMetasShim.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/v2WriteCommandMetasShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -26,6 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/catalyst/GpuProjectingColumnarBatch.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/catalyst/GpuProjectingColumnarBatch.scala index a0ae19da92e..7ecb157a968 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/catalyst/GpuProjectingColumnarBatch.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/catalyst/GpuProjectingColumnarBatch.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.catalyst diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala index ac417120cf8..b6ad8d3c384 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala index eaba9329b95..7183644fbe8 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.parquet.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/GpuMergeRowsExec.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/GpuMergeRowsExec.scala index becdbc62e7a..2063a5f4793 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/GpuMergeRowsExec.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/GpuMergeRowsExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -25,6 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.v2 diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala index fba1b86f7d4..ba4ed142a5e 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,6 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.v2 diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala index d0313746715..895b2c6aebb 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -25,6 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala index 232163aabf6..f135c1a2a06 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,6 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala index 1be18ab0f94..18a845f39d6 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -26,6 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala index 074c500ade8..548376119d9 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/SchemaMetadataShims.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/SchemaMetadataShims.scala index e251902c16d..93c76bd1d4b 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/SchemaMetadataShims.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/SchemaMetadataShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -25,6 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/RapidsShuffleManager.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/RapidsShuffleManager.scala index dbd57737bf0..554332eb7d7 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/RapidsShuffleManager.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/RapidsShuffleManager.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.$_spark.version.classifier_ diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala index dcb5874c50b..d993133b4e6 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,6 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/CastTimeToIntShim.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/CastTimeToIntShim.scala index f3626bb12a1..b349ee2bc7a 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/CastTimeToIntShim.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/CastTimeToIntShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala index 9f55a400b73..c7bf3ffb640 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,11 +18,13 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims import com.google.common.base.Objects import com.nvidia.spark.rapids.GpuScan +import com.nvidia.spark.rapids.shims.StoragePartitionJoinShims import org.apache.spark.SparkException import org.apache.spark.rdd.RDD @@ -34,7 +36,7 @@ import org.apache.spark.sql.catalyst.util.{truncatedString, InternalRowComparabl import org.apache.spark.sql.connector.catalog.Table import org.apache.spark.sql.connector.read._ import org.apache.spark.sql.execution.datasources.rapids.DataSourceStrategyUtils -import org.apache.spark.sql.execution.datasources.v2.{DataSourceRDD, StoragePartitionJoinParams} +import org.apache.spark.sql.execution.datasources.v2.DataSourceRDD case class GpuBatchScanExec( output: Seq[AttributeReference], @@ -42,7 +44,7 @@ case class GpuBatchScanExec( runtimeFilters: Seq[Expression] = Seq.empty, ordering: Option[Seq[SortOrder]] = None, @transient table: Table, - spjParams: StoragePartitionJoinParams = StoragePartitionJoinParams() + spjParams: StoragePartitionJoinShims.SpjParams = StoragePartitionJoinShims.default() ) extends GpuBatchScanExecBase(scan, runtimeFilters) { @transient override lazy val batch: Batch = if (scan == null) null else scan.toBatch diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala index 9559066967d..bf94e2cd571 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/ShuffleManagerShimUtils.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/ShuffleManagerShimUtils.scala index b1b73d97d5c..c205b3c13e6 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/ShuffleManagerShimUtils.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/ShuffleManagerShimUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/execution/python/shims/PythonArgumentsUtils.scala b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/execution/python/shims/PythonArgumentsUtils.scala index c710bafdedb..ee377048107 100644 --- a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/execution/python/shims/PythonArgumentsUtils.scala +++ b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/execution/python/shims/PythonArgumentsUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,6 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/ArrayInvalidArgumentErrorUtils.scala b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/ArrayInvalidArgumentErrorUtils.scala index e18ab985ed8..3742812cc80 100644 --- a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/ArrayInvalidArgumentErrorUtils.scala +++ b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/ArrayInvalidArgumentErrorUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/SequenceSizeExceededLimitErrorBuilder.scala b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/SequenceSizeExceededLimitErrorBuilder.scala index bb585d9aaf6..bffa4e91f2c 100644 --- a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/SequenceSizeExceededLimitErrorBuilder.scala +++ b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/SequenceSizeExceededLimitErrorBuilder.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,6 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/misc.scala b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/misc.scala index 9401974c1ed..c76ef8972b8 100644 --- a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/misc.scala +++ b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/misc.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanExecLikeShim.scala b/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanExecLikeShim.scala index ee9fd0b1971..38bbad0ba07 100644 --- a/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanExecLikeShim.scala +++ b/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanExecLikeShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -23,6 +23,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanUtils.scala b/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanUtils.scala index cc3b7e8cdc3..87162bcc91d 100644 --- a/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanUtils.scala +++ b/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,6 +23,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala index 331f72e571d..587280b8b54 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -16,6 +16,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims @@ -29,6 +30,15 @@ final class CudfUnsafeRow( def getVariant(ordinal: Int) = { throw new UnsupportedOperationException("VariantVal is not supported") } + + // Added in Spark 4.1.0 for spatial types + def getGeography(ordinal: Int) = { + throw new UnsupportedOperationException("GeographyVal is not supported") + } + + def getGeometry(ordinal: Int) = { + throw new UnsupportedOperationException("GeometryVal is not supported") + } } object CudfUnsafeRow extends CudfUnsafeRowTrait diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala index abcc8276b69..fc9190034d4 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GetJsonObjectShim.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GetJsonObjectShim.scala index a01b11677ee..20741bb047b 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GetJsonObjectShim.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GetJsonObjectShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. 
+ * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala index 1eb9e7996eb..714374d9bea 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,6 +16,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/LogicalPlanShims.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/LogicalPlanShims.scala index 1393206b64b..26302ca5e88 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/LogicalPlanShims.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/LogicalPlanShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/MapInArrowExecShims.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/MapInArrowExecShims.scala index ae6106e1dbe..42491b3e9c3 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/MapInArrowExecShims.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/MapInArrowExecShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/NullIntolerantShim.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/NullIntolerantShim.scala index 31a4aa023a8..23693d06925 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/NullIntolerantShim.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/NullIntolerantShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/OperatorsUtilShims.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/OperatorsUtilShims.scala index e7b1efaf80d..5805473d65b 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/OperatorsUtilShims.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/OperatorsUtilShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala index 7fcb95724d7..afbb691abed 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,6 +18,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/Spark400PlusCommonShims.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/Spark400PlusCommonShims.scala index c1eb8c3070f..7353b1e379a 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/Spark400PlusCommonShims.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/Spark400PlusCommonShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala index 1c03ebc7665..a203b8e4eaa 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.v2.rapids diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala index a52144c5c7b..27a6a8ea99b 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.v2.rapids diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala index b932ed724e5..6136e95ca0f 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala index 6a64edfe79b..4b890de8176 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/nvidia/DFUDFShims.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/nvidia/DFUDFShims.scala index 5a93f159436..741889e3ad2 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/nvidia/DFUDFShims.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/nvidia/DFUDFShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.nvidia diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala index 13ab2eb8dcb..7c50cd08fdb 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,6 +16,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala index 342296e8488..3cf27e223d0 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala index 0b6ef6704c9..64a03f19624 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/GpuMapInArrowExecMeta.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/GpuMapInArrowExecMeta.scala index f604f28a3cf..3a69475b9c7 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/GpuMapInArrowExecMeta.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/GpuMapInArrowExecMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/InvokeExprMeta.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/InvokeExprMeta.scala index 655d7e0d95c..04b99e42b2a 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/InvokeExprMeta.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/InvokeExprMeta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala index 906d4c6f702..87c7b50b72d 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/SparkSessionUtils.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/SparkSessionUtils.scala index 6ec6a927c2c..ba92d3db78e 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/SparkSessionUtils.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/SparkSessionUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala index 08d6facc899..f9cbbb3abda 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/test/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimsSuite.scala b/sql-plugin/src/test/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimsSuite.scala index 7e0d19e8ce8..cd2130ef66e 100644 --- a/sql-plugin/src/test/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimsSuite.scala +++ b/sql-plugin/src/test/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimsSuite.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. From dde1282b65e57d1a3015a766a80401ee8d01d1da Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 14:59:52 +0800 Subject: [PATCH 04/59] Fix StoragePartitionJoinParams package moved for Spark 410 shim Closes #14103 --- .../shims/StoragePartitionJoinShims.scala | 36 +++++++++++++++++++ .../shims/StoragePartitionJoinShims.scala | 34 ++++++++++++++++++ 2 files changed, 70 insertions(+) create mode 100644 sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala new file mode 100644 index 00000000000..e5d4bc72d34 --- /dev/null +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2025-2026, NVIDIA CORPORATION. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "350db143"} +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +// Spark 3.5.0-db143, 4.0.x: StoragePartitionJoinParams is in datasources.v2 package +import org.apache.spark.sql.execution.datasources.v2.StoragePartitionJoinParams + +/** + * Shim for StoragePartitionJoinParams to handle package location change. + * In Spark 3.5.0-db143 and 4.0.x, it's in org.apache.spark.sql.execution.datasources.v2 + * In Spark 4.1.0+, it moved to org.apache.spark.sql.execution.joins + */ +object StoragePartitionJoinShims { + type SpjParams = StoragePartitionJoinParams + + def default(): SpjParams = StoragePartitionJoinParams() +} diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala new file mode 100644 index 00000000000..0842f3e21f6 --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2025-2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +// Spark 4.1.0+: StoragePartitionJoinParams moved to joins package +import org.apache.spark.sql.execution.joins.StoragePartitionJoinParams + +/** + * Shim for StoragePartitionJoinParams to handle package location change. + * In Spark 4.0.x, it's in org.apache.spark.sql.execution.datasources.v2 + * In Spark 4.1.0+, it moved to org.apache.spark.sql.execution.joins + */ +object StoragePartitionJoinShims { + type SpjParams = StoragePartitionJoinParams + + def default(): SpjParams = StoragePartitionJoinParams() +} From 5c1453bdd89da80203ef92db8c37135beadce1a3 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 15:00:08 +0800 Subject: [PATCH 05/59] Fix MAX_BROADCAST_TABLE_BYTES removed for Spark 410 shim Closes #14102 --- .../rapids/shims/BroadcastExchangeShims.scala | 60 +++++++++++++++++++ .../rapids/shims/BroadcastExchangeShims.scala | 30 ++++++++++ 2 files changed, 90 insertions(+) create mode 100644 sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala new file mode 100644 index 00000000000..97f4b370a1d --- /dev/null +++ 
b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2025-2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import org.apache.spark.sql.execution.exchange.BroadcastExchangeExec + +/** + * Shim for MAX_BROADCAST_TABLE_BYTES which was removed in Spark 4.1.0 + */ +object BroadcastExchangeShims { + val MAX_BROADCAST_TABLE_BYTES: Long = BroadcastExchangeExec.MAX_BROADCAST_TABLE_BYTES +} diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala new file mode 100644 index 00000000000..3a449b12bb6 --- /dev/null +++ 
b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2025-2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +/** + * Shim for MAX_BROADCAST_TABLE_BYTES which was removed in Spark 4.1.0. + * The constant was 8GB (8L << 30) and is now configurable via conf.maxBroadcastTableSizeInBytes. + * We keep the old hardcoded value for backwards compatibility. 
+ */ +object BroadcastExchangeShims { + // 8GB - the original hardcoded value from Spark + val MAX_BROADCAST_TABLE_BYTES: Long = 8L << 30 +} From d2eed069127e7f22b475de28b89cd7a8803e4a21 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 15:00:22 +0800 Subject: [PATCH 06/59] Fix TimeAdd renamed to TimestampAddInterval for Spark 410 shim Closes #14111 --- .../spark/rapids/shims/TimeAddShims.scala | 62 +++++++++++++ .../spark/rapids/shims/TimeAddShims.scala | 86 +++++++++++++++++++ .../rapids/shims/DayTimeIntervalShims.scala | 61 +++++++++++++ .../spark/rapids/shims/TimeAddShims.scala | 31 +++++++ 4 files changed, 240 insertions(+) create mode 100644 sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala create mode 100644 sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala new file mode 100644 index 00000000000..5fa548331dc --- /dev/null +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids._ +import org.apache.spark.sql.catalyst.expressions.{Expression, TimeAdd} +import org.apache.spark.sql.rapids.shims.GpuTimeAdd +import org.apache.spark.unsafe.types.CalendarInterval + +/** + * TimeAdd expression support for versions before it was renamed to TimestampAddInterval. + * TimeAdd was renamed in Spark 4.1 (and likely backported to Databricks 17.3). + * See: https://github.com/apache/spark/commit/059b395c8cbfe1b0bdc614e6006939e3ac538b13 + */ +object TimeAddShims { + val exprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = { + Seq( + GpuOverrides.expr[TimeAdd]( + "Adds interval to timestamp", + ExprChecks.binaryProject(TypeSig.TIMESTAMP, TypeSig.TIMESTAMP, + ("start", TypeSig.TIMESTAMP, TypeSig.TIMESTAMP), + ("interval", TypeSig.lit(TypeEnum.CALENDAR) + .withPsNote(TypeEnum.CALENDAR, "month intervals are not supported"), + TypeSig.CALENDAR)), + (timeAdd, conf, p, r) => new BinaryExprMeta[TimeAdd](timeAdd, conf, p, r) { + override def tagExprForGpu(): Unit = { + GpuOverrides.extractLit(timeAdd.interval).foreach { lit => + val intvl = lit.value.asInstanceOf[CalendarInterval] + if (intvl.months != 0) { + willNotWorkOnGpu("interval months isn't supported") + } + } + } + + override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression = + GpuTimeAdd(lhs, rhs) + }) + ).map(r => (r.getClassFor.asSubclass(classOf[Expression]), r)).toMap + } +} diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala new file mode 100644 index 00000000000..79f3c8efd06 --- 
/dev/null +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2022-2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids._ + +import org.apache.spark.sql.catalyst.expressions.{Expression, TimeAdd} +import org.apache.spark.sql.rapids.shims.GpuTimeAdd +import org.apache.spark.sql.types.{CalendarIntervalType, DayTimeIntervalType} +import org.apache.spark.unsafe.types.CalendarInterval + +/** + * TimeAdd expression support for Spark 3.3.0+ with DAYTIME interval support. + * TimeAdd was renamed to TimestampAddInterval in Spark 4.1. 
+ */ +object TimeAddShims { + val exprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = Seq( + GpuOverrides.expr[TimeAdd]( + "Adds interval to timestamp", + ExprChecks.binaryProject(TypeSig.TIMESTAMP, TypeSig.TIMESTAMP, + ("start", TypeSig.TIMESTAMP, TypeSig.TIMESTAMP), + // interval support DAYTIME column or CALENDAR literal + ("interval", TypeSig.DAYTIME + TypeSig.lit(TypeEnum.CALENDAR) + .withPsNote(TypeEnum.CALENDAR, "month intervals are not supported"), + TypeSig.DAYTIME + TypeSig.CALENDAR)), + (timeAdd, conf, p, r) => new BinaryExprMeta[TimeAdd](timeAdd, conf, p, r) { + override def tagExprForGpu(): Unit = { + GpuOverrides.extractLit(timeAdd.interval).foreach { lit => + lit.dataType match { + case CalendarIntervalType => + val intvl = lit.value.asInstanceOf[CalendarInterval] + if (intvl.months != 0) { + willNotWorkOnGpu("interval months isn't supported") + } + case _: DayTimeIntervalType => // Supported + } + } + } + + override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression = + GpuTimeAdd(lhs, rhs) + }) + ).map(r => (r.getClassFor.asSubclass(classOf[Expression]), r)).toMap +} diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala new file mode 100644 index 00000000000..85723ec699e --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2022-2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids._ + +import org.apache.spark.sql.catalyst.expressions._ +import org.apache.spark.sql.rapids.shims.GpuTimestampAddInterval +import org.apache.spark.sql.types.{CalendarIntervalType, DayTimeIntervalType} +import org.apache.spark.unsafe.types.CalendarInterval + +/** + * DayTimeInterval shims for Spark 4.1.0+ + * TimeAdd was renamed to TimestampAddInterval in Spark 4.1.0 + */ +object DayTimeIntervalShims { + def exprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = Seq( + GpuOverrides.expr[TimestampAddInterval]( + "Adds interval to timestamp", + ExprChecks.binaryProject(TypeSig.TIMESTAMP, TypeSig.TIMESTAMP, + ("start", TypeSig.TIMESTAMP, TypeSig.TIMESTAMP), + // interval support DAYTIME column or CALENDAR literal + ("interval", TypeSig.DAYTIME + TypeSig.lit(TypeEnum.CALENDAR) + .withPsNote(TypeEnum.CALENDAR, "month intervals are not supported"), + TypeSig.DAYTIME + TypeSig.CALENDAR)), + (timeAdd, conf, p, r) => new BinaryExprMeta[TimestampAddInterval](timeAdd, conf, p, r) { + override def tagExprForGpu(): Unit = { + GpuOverrides.extractLit(timeAdd.interval).foreach { lit => + lit.dataType match { + case CalendarIntervalType => + val intvl = lit.value.asInstanceOf[CalendarInterval] + if (intvl.months != 0) { + willNotWorkOnGpu("interval months isn't supported") + } + case _: DayTimeIntervalType => // Supported + } + } + } + + override def convertToGpu(lhs: Expression, rhs: 
Expression): GpuExpression = + GpuTimestampAddInterval(lhs, rhs) + }) + ).map(r => (r.getClassFor.asSubclass(classOf[Expression]), r)).toMap +} diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala new file mode 100644 index 00000000000..b04d7e43bbe --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids._ +import org.apache.spark.sql.catalyst.expressions.Expression + +/** + * Empty TimeAddShims for Spark 4.1.0+. + * TimeAdd was renamed to TimestampAddInterval and is handled by DayTimeIntervalShims. 
+ */ +object TimeAddShims { + val exprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = Map.empty +} From fb1fb4972a06d91b94dac89a9619b93adbfc7a49 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 15:00:31 +0800 Subject: [PATCH 07/59] Fix evalMode access changed to evalContext.evalMode for Spark 410 shim Closes #14107 --- .../spark/rapids/shims/TryModeShim.scala | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala new file mode 100644 index 00000000000..4ed9d1a7441 --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import org.apache.spark.sql.catalyst.expressions.{Add, Divide, EvalMode, Expression, Multiply, Remainder, Subtract} +import org.apache.spark.sql.catalyst.expressions.aggregate.{Average, Sum} + +/** + * Spark 4.1.0 version where evalMode was changed to evalContext.evalMode for arithmetic expressions. 
+ * See: https://github.com/apache/spark/commit/a96e9ca81518bff31b0089d459fe78804ca1aa38 + */ +object TryModeShim { + def isTryMode(expr: Expression): Boolean = { + val evalMode = expr match { + case add: Add => add.evalContext.evalMode + case sub: Subtract => sub.evalContext.evalMode + case mul: Multiply => mul.evalContext.evalMode + case div: Divide => div.evalContext.evalMode + case mod: Remainder => mod.evalContext.evalMode + case avg: Average => avg.evalMode // Average still uses evalMode directly as a parameter + case sum: Sum => sum.evalContext.evalMode // Sum uses evalContext.evalMode + case _ => throw new RuntimeException(s"Unsupported expression $expr in TRY mode") + } + evalMode == EvalMode.TRY + } +} From f6d99c67a054471efcea2e7acc29de0cb284b6ad Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 15:00:40 +0800 Subject: [PATCH 08/59] Fix ShowNamespacesExec removed for Spark 410 shim Closes #14104 --- .../shims/ShowNamespacesExecShims.scala | 62 +++++++++++++++++++ .../shims/ShowNamespacesExecShims.scala | 32 ++++++++++ 2 files changed, 94 insertions(+) create mode 100644 sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala new file mode 100644 index 00000000000..13988e8b1a6 --- /dev/null +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids.{ExecRule, GpuOverrides} + +import org.apache.spark.sql.execution.SparkPlan +import org.apache.spark.sql.execution.datasources.v2.ShowNamespacesExec + +object ShowNamespacesExecShims { + val neverReplaceExec: Option[ExecRule[_ <: SparkPlan]] = Some( + GpuOverrides.neverReplaceExec[ShowNamespacesExec]("Namespace metadata operation") + ) +} diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala new file mode 100644 index 00000000000..68d558d8c21 --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids.ExecRule + +import org.apache.spark.sql.execution.SparkPlan + +/** + * ShowNamespacesExec was removed/renamed in Spark 4.1.0. + * Return None to skip this exec rule. + */ +object ShowNamespacesExecShims { + val neverReplaceExec: Option[ExecRule[_ <: SparkPlan]] = None +} From 89039afe27d6d25ded8175ec7447ac1460de1797 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 15:00:59 +0800 Subject: [PATCH 09/59] Fix AggregateInPandasExec renamed to ArrowAggregatePythonExec for Spark 410 shim Part of #14036 --- .../shims/AggregateInPandasExecShims.scala | 75 +++++++++++++++++++ .../shims/AggregateInPandasExecShims.scala | 37 +++++++++ 2 files changed, 112 insertions(+) create mode 100644 sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala new file mode 100644 index 00000000000..7581b29b560 --- /dev/null +++ 
b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids.{ExecChecks, ExecRule, GpuExec, GpuOverrides, TypeSig} + +import org.apache.spark.sql.catalyst.expressions.{Expression, NamedExpression} +import org.apache.spark.sql.execution.SparkPlan +import org.apache.spark.sql.execution.python.AggregateInPandasExec + +object AggregateInPandasExecShims { + val execRule: Option[ExecRule[_ <: SparkPlan]] = Some( + GpuOverrides.exec[AggregateInPandasExec]( + "The backend for an Aggregation Pandas UDF." 
+ + " This accelerates the data transfer between the Java process and the Python process." + + " It also supports scheduling GPU resources for the Python process" + + " when enabled.", + ExecChecks(TypeSig.commonCudfTypes, TypeSig.all), + (aggPy, conf, p, r) => new GpuAggregateInPandasExecMeta(aggPy, conf, p, r)) + ) + + def isAggregateInPandasExec(plan: SparkPlan): Boolean = plan.isInstanceOf[AggregateInPandasExec] + + def getGroupingExpressions(plan: SparkPlan): Seq[NamedExpression] = { + plan.asInstanceOf[AggregateInPandasExec].groupingExpressions + } +} diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala new file mode 100644 index 00000000000..2bcebffdbd4 --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids.ExecRule + +import org.apache.spark.sql.catalyst.expressions.NamedExpression +import org.apache.spark.sql.execution.SparkPlan + +/** + * AggregateInPandasExec was renamed to ArrowAggregatePythonExec in Spark 4.1.0. 
+ * Return None to skip this exec rule for now. + */ +object AggregateInPandasExecShims { + val execRule: Option[ExecRule[_ <: SparkPlan]] = None + + def isAggregateInPandasExec(plan: SparkPlan): Boolean = false + + def getGroupingExpressions(plan: SparkPlan): Seq[NamedExpression] = Seq.empty +} From cdaa3617506e3ecd2f5e8f80e31042038346e52d Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 15:01:10 +0800 Subject: [PATCH 10/59] Fix WindowInPandasExec renamed to ArrowWindowPythonExec for Spark 410 shim Closes #14036 --- .../shims/WindowInPandasExecShims.scala | 90 +++++++++++++++++++ .../rapids/shims/WindowInPandasShims.scala | 62 +++++++++++++ .../shims/WindowInPandasExecTypeShim.scala | 57 ++++++++++++ .../shims/WindowInPandasExecShims.scala | 32 +++++++ .../rapids/shims/WindowInPandasShims.scala | 32 +++++++ .../shims/WindowInPandasExecTypeShim.scala | 30 +++++++ 6 files changed, 303 insertions(+) create mode 100644 sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala create mode 100644 sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala create mode 100644 sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala new file mode 100644 index 00000000000..f0694b88950 --- /dev/null +++ 
b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2025-2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids._ +import org.apache.spark.sql.catalyst.expressions.{Expression, NamedExpression} +import org.apache.spark.sql.execution.SparkPlan +import org.apache.spark.sql.execution.python.WindowInPandasExec +import org.apache.spark.sql.rapids.execution.python.GpuWindowInPandasExecMetaBase + +/** + * Exec rules for WindowInPandasExec (exists in Spark versions before the rename to ArrowWindowPythonExec). 
+ */ +object WindowInPandasExecShims { + val execs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]] = { + Seq( + GpuOverrides.exec[WindowInPandasExec]( + "The backend for Window Aggregation Pandas UDF, Accelerates the data transfer between" + + " the Java process and the Python process. It also supports scheduling GPU resources" + + " for the Python process when enabled. For now it only supports row based window frame.", + ExecChecks( + (TypeSig.commonCudfTypes + TypeSig.ARRAY).nested(TypeSig.commonCudfTypes), + TypeSig.all), + (winPy, conf, p, r) => new GpuWindowInPandasExecMetaBase(winPy, conf, p, r) { + override val windowExpressions: Seq[BaseExprMeta[NamedExpression]] = + SparkShimImpl.getWindowExpressions(winPy).map(GpuOverrides.wrapExpr(_, this.conf, Some(this))) + + override def convertToGpu(): GpuExec = { + val windowExprGpu = windowExpressions.map(_.convertToGpu()) + val partitionGpu = partitionSpec.map(_.convertToGpu()) + GpuWindowInPandasExec( + windowExprGpu, + partitionGpu, + // leave ordering expression on the CPU, it's not used for GPU computation + winPy.orderSpec, + childPlans.head.convertIfNeeded() + )(winPy.partitionSpec) + } + }).disabledByDefault("it only supports row based frame for now") + ).map(r => (r.getClassFor.asSubclass(classOf[SparkPlan]), r)).toMap + } +} diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala new file mode 100644 index 00000000000..a08010b9699 --- /dev/null +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2025-2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import org.apache.spark.sql.catalyst.expressions.NamedExpression +import org.apache.spark.sql.execution.python.WindowInPandasExec + +/** + * Trait providing getWindowExpressions method for versions where WindowInPandasExec exists. 
+ */ +trait WindowInPandasShims { + def getWindowExpressions(winPy: WindowInPandasExec): Seq[NamedExpression] = + winPy.projectList +} diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala new file mode 100644 index 00000000000..67cd8bd179f --- /dev/null +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.execution.python.shims + +import org.apache.spark.sql.execution.python.WindowInPandasExec + +object WindowInPandasExecTypeShim { + type WindowInPandasExecType = WindowInPandasExec +} diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala new file mode 100644 index 00000000000..f6ee5c738ae --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids._ +import org.apache.spark.sql.execution.SparkPlan + +/** + * WindowInPandasExec was renamed to ArrowWindowPythonExec in Spark 4.1. + * This shim provides an empty implementation for 4.1+. + */ +object WindowInPandasExecShims { + // Empty map - WindowInPandasExec doesn't exist in Spark 4.1+ + val execs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]] = Map.empty +} diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala new file mode 100644 index 00000000000..5b4b4d4775a --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import org.apache.spark.sql.catalyst.expressions.NamedExpression +import org.apache.spark.sql.execution.python.ArrowWindowPythonExec + +/** + * WindowInPandasExec was renamed to ArrowWindowPythonExec in Spark 4.1. + * This trait provides the implementation for 4.1+. 
+ */ +trait WindowInPandasShims { + def getWindowExpressions(winPy: ArrowWindowPythonExec): Seq[NamedExpression] = + winPy.windowExpression +} diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala new file mode 100644 index 00000000000..b3b524d09f0 --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.execution.python.shims + +import org.apache.spark.sql.execution.python.ArrowWindowPythonExec + +/** + * WindowInPandasExec was renamed to ArrowWindowPythonExec in Spark 4.1.0. + * Use the new class name as the type alias. 
+ */ +object WindowInPandasExecTypeShim { + type WindowInPandasExecType = ArrowWindowPythonExec +} From deda01a27d273edf019d5cff48e2d5a6720cfc46 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 15:01:22 +0800 Subject: [PATCH 11/59] Fix ParquetColumnVector constructor changed for Spark 410 shim Closes #14113 --- .../parquet/rapids/shims/ParquetCVShims.scala | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala new file mode 100644 index 00000000000..390e7bffa3d --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2023-2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.execution.datasources.parquet + +import org.apache.spark.memory.MemoryMode +import org.apache.spark.sql.catalyst.util.ResolveDefaultColumns +import org.apache.spark.sql.execution.vectorized.WritableColumnVector +import org.apache.spark.sql.types.StructType + +object ParquetCVShims { + + def newParquetCV( + sparkSchema: StructType, + idx: Int, + column: ParquetColumn, + vector: WritableColumnVector, + capacity: Int, + memoryMode: MemoryMode, // Ignored in Spark 4.1.0+ + missingColumns: java.util.Set[ParquetColumn], + isTopLevel: Boolean): ParquetColumnVector = { + val defaultValue = if (sparkSchema != null) { + ResolveDefaultColumns.getExistenceDefaultValues(sparkSchema)(idx) + } else null + // Spark 4.1.0 removed memoryMode parameter + new ParquetColumnVector(column, vector, capacity, missingColumns, isTopLevel, defaultValue) + } +} From 10e302900495f453951425f5e8c276d3a8c87fdc Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 15:01:34 +0800 Subject: [PATCH 12/59] Fix FileStreamSink/MetadataLogFileIndex package moved for Spark 410 shim Closes #14112 --- .../rapids/shims/FileStreamSinkShims.scala | 71 +++++++++++++++++++ .../rapids/shims/FileStreamSinkShims.scala | 41 +++++++++++ 2 files changed, 112 insertions(+) create mode 100644 sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala new file mode 100644 index 00000000000..f713e55ac6f --- /dev/null +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala @@ -0,0 +1,71 @@ +/* + * 
Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.shims + +import org.apache.hadoop.conf.Configuration + +import org.apache.spark.sql.SparkSession +import org.apache.spark.sql.execution.streaming.{FileStreamSink, MetadataLogFileIndex} +import org.apache.spark.sql.types.StructType + +object FileStreamSinkShims { + def hasMetadata(paths: Seq[String], hadoopConf: Configuration, sqlConf: org.apache.spark.sql.internal.SQLConf): Boolean = { + FileStreamSink.hasMetadata(paths, hadoopConf, sqlConf) + } + + def newMetadataLogFileIndex( + sparkSession: SparkSession, + basePath: org.apache.hadoop.fs.Path, + options: Map[String, String], + userSpecifiedSchema: Option[StructType]): MetadataLogFileIndex = { + new 
MetadataLogFileIndex(sparkSession, basePath, options, userSpecifiedSchema) + } +} diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala new file mode 100644 index 00000000000..fdfefc50ff3 --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.shims + +import org.apache.hadoop.conf.Configuration + +import org.apache.spark.sql.SparkSession +import org.apache.spark.sql.execution.streaming.runtime.MetadataLogFileIndex +import org.apache.spark.sql.execution.streaming.sinks.FileStreamSink +import org.apache.spark.sql.types.StructType + +object FileStreamSinkShims { + def hasMetadata(paths: Seq[String], hadoopConf: Configuration, sqlConf: org.apache.spark.sql.internal.SQLConf): Boolean = { + FileStreamSink.hasMetadata(paths, hadoopConf, sqlConf) + } + + def newMetadataLogFileIndex( + sparkSession: SparkSession, + basePath: org.apache.hadoop.fs.Path, + options: Map[String, String], + userSpecifiedSchema: Option[StructType]): MetadataLogFileIndex = { + new MetadataLogFileIndex(sparkSession, basePath, options, userSpecifiedSchema) + } +} From 387f54f585b5a9fb056bc754daaf3c6e36035489 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 15:01:54 +0800 Subject: [PATCH 13/59] Add test files and misc fixes for Spark 410 shim --- .../spark/rapids/shims/SparkShims.scala | 3 +- .../spark401/SparkShimServiceProvider.scala | 3 +- .../shims/spark410/SparkShimsSuite.scala | 35 +++++++++++++++++++ .../spark/rapids/shims/OrcStatisticShim.scala | 3 +- .../sql/rapids/GpuInSubqueryExecSuite.scala | 3 +- .../shuffle/RapidsShuffleTestHelper.scala | 3 +- .../spark/rapids/ToPrettyStringSuite.scala | 3 +- ...eDataSourceTableAsSelectCommandSuite.scala | 3 +- 8 files changed, 49 insertions(+), 7 deletions(-) create mode 100644 sql-plugin/src/test/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimsSuite.scala diff --git a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala index eaeb1cb4c96..5c50b1af570 100644 --- 
a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,6 +16,7 @@ /*** spark-rapids-shim-json-lines {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala index 18ab73fd79c..f6af7f622e3 100644 --- a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala +++ b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,6 +16,7 @@ /*** spark-rapids-shim-json-lines {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.spark401 diff --git a/sql-plugin/src/test/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimsSuite.scala b/sql-plugin/src/test/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimsSuite.scala new file mode 100644 index 00000000000..dd69a11d180 --- /dev/null +++ b/sql-plugin/src/test/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimsSuite.scala @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2025-2026, NVIDIA CORPORATION. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims.spark410 + +import com.nvidia.spark.rapids._ +import org.scalatest.funsuite.AnyFunSuite + +class SparkShimsSuite extends AnyFunSuite with FQSuiteName { + test("spark shims version") { + assert(ShimLoader.getShimVersion === SparkShimVersion(4, 1, 0)) + } + + test("shuffle manager class") { + assert(ShimLoader.getRapidsShuffleManagerClass === + classOf[com.nvidia.spark.rapids.spark410.RapidsShuffleManager].getCanonicalName) + } + +} diff --git a/tests/src/test/spark320/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala b/tests/src/test/spark320/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala index ddf7eea363a..cb1231fac66 100644 --- a/tests/src/test/spark320/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala +++ b/tests/src/test/spark320/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -44,6 +44,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/tests/src/test/spark330/scala/org/apache/spark/sql/rapids/GpuInSubqueryExecSuite.scala b/tests/src/test/spark330/scala/org/apache/spark/sql/rapids/GpuInSubqueryExecSuite.scala index 03da7f6920a..7141191fe59 100644 --- a/tests/src/test/spark330/scala/org/apache/spark/sql/rapids/GpuInSubqueryExecSuite.scala +++ b/tests/src/test/spark330/scala/org/apache/spark/sql/rapids/GpuInSubqueryExecSuite.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ {"spark": "354"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/tests/src/test/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala b/tests/src/test/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala index 8b7bcc156e3..13b09bef7d8 100644 --- a/tests/src/test/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala +++ b/tests/src/test/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shuffle diff --git a/tests/src/test/spark341db/scala/com/nvidia/spark/rapids/ToPrettyStringSuite.scala b/tests/src/test/spark341db/scala/com/nvidia/spark/rapids/ToPrettyStringSuite.scala index 851ca264efc..7f1def8ab94 100644 --- a/tests/src/test/spark341db/scala/com/nvidia/spark/rapids/ToPrettyStringSuite.scala +++ b/tests/src/test/spark341db/scala/com/nvidia/spark/rapids/ToPrettyStringSuite.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -27,6 +27,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/tests/src/test/spark350/scala/org/apache/spark/sql/rapids/GpuCreateDataSourceTableAsSelectCommandSuite.scala b/tests/src/test/spark350/scala/org/apache/spark/sql/rapids/GpuCreateDataSourceTableAsSelectCommandSuite.scala index 00ca072140b..297306b59e6 100644 --- a/tests/src/test/spark350/scala/org/apache/spark/sql/rapids/GpuCreateDataSourceTableAsSelectCommandSuite.scala +++ b/tests/src/test/spark350/scala/org/apache/spark/sql/rapids/GpuCreateDataSourceTableAsSelectCommandSuite.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2024-2025, NVIDIA CORPORATION. + * Copyright (c) 2024-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -25,6 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "410"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids From 5e0e9427bac54fa139af5b22f9237bac1b767057 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 16:31:31 +0800 Subject: [PATCH 14/59] Exclude Delta Lake from Spark 4.1.0 build Use delta-stub instead of delta-40x for Spark 4.1.0 because io.delta:delta-spark is not yet compatible with Spark 4.1.0. CheckpointFileManager moved packages in Spark 4.1.0. Contributes to https://github.com/NVIDIA/spark-rapids/issues/14119 --- scala2.13/pom.xml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index fa3963d3e52..c4568026ddd 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -784,7 +784,9 @@ ${spark410.version} ${spark410.version} 1.13.1 - rapids-4-spark-delta-40x + + + rapids-4-spark-delta-stub 2.0.7 [17,) Support for Spark ${spark.version} is only available with Java 17+ @@ -806,7 +808,7 @@ - delta-lake/delta-40x + delta-lake/delta-stub From cb90f650396216786ab40c78ee761f53e40ee7d5 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 16:50:34 +0800 Subject: [PATCH 15/59] Add InvalidateCacheShims for AtomicReplaceTableAsSelectExec callback change In Spark 4.1.0, AtomicReplaceTableAsSelectExec.invalidateCache callback signature changed from (TableCatalog, Table, Identifier) => Unit to (TableCatalog, Identifier) => Unit. 
Create shims to handle this API change: - spark400/InvalidateCacheShims.scala for Spark 4.0.x (3-arg callback) - spark410/InvalidateCacheShims.scala for Spark 4.1.0+ (2-arg callback) - spark410/GpuAtomicReplaceTableAsSelectExec.scala for 4.1.0+ exec Contributes to https://github.com/NVIDIA/spark-rapids/issues/14119 --- .../delta/common/DeltaProviderBase.scala | 5 +- .../rapids/shims/InvalidateCacheShims.scala | 37 ++++++++ .../rapids/shims/InvalidateCacheShims.scala | 36 ++++++++ .../GpuAtomicReplaceTableAsSelectExec.scala | 91 +++++++++++++++++++ 4 files changed, 167 insertions(+), 2 deletions(-) create mode 100644 sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala diff --git a/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/DeltaProviderBase.scala b/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/DeltaProviderBase.scala index e86f33299c2..474f79ff6e3 100644 --- a/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/DeltaProviderBase.scala +++ b/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/DeltaProviderBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,6 +20,7 @@ import com.nvidia.spark.rapids._ import com.nvidia.spark.rapids.RapidsPluginImplicits._ import com.nvidia.spark.rapids.delta.{DeltaIOProvider, GpuDeltaDataSource, RapidsDeltaUtils} import com.nvidia.spark.rapids.shims._ +import com.nvidia.spark.rapids.shims.InvalidateCacheShims import org.apache.hadoop.fs.Path import org.apache.spark.sql.SparkSession @@ -132,7 +133,7 @@ abstract class DeltaProviderBase extends DeltaIOProvider { cpuExec.tableSpec, cpuExec.writeOptions, cpuExec.orCreate, - cpuExec.invalidateCache) + InvalidateCacheShims.getInvalidateCache(cpuExec.invalidateCache)) } diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala new file mode 100644 index 00000000000..c503278815f --- /dev/null +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import org.apache.spark.sql.connector.catalog.{Identifier, Table, TableCatalog} + +/** + * Shim for invalidateCache callback signature differences between Spark versions. 
+ * In Spark 4.0.x: (TableCatalog, Table, Identifier) => Unit + * In Spark 4.1.0: (TableCatalog, Identifier) => Unit + */ +object InvalidateCacheShims { + type InvalidateCacheType = (TableCatalog, Table, Identifier) => Unit + + def getInvalidateCache( + cpuInvalidateCache: (TableCatalog, Table, Identifier) => Unit): InvalidateCacheType = { + cpuInvalidateCache + } +} diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala new file mode 100644 index 00000000000..263cb4fb2c3 --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import org.apache.spark.sql.connector.catalog.{Identifier, TableCatalog} + +/** + * Shim for invalidateCache callback signature differences between Spark versions. 
+ * In Spark 4.0.x: (TableCatalog, Table, Identifier) => Unit + * In Spark 4.1.0: (TableCatalog, Identifier) => Unit + */ +object InvalidateCacheShims { + type InvalidateCacheType = (TableCatalog, Identifier) => Unit + + def getInvalidateCache( + cpuInvalidateCache: (TableCatalog, Identifier) => Unit): InvalidateCacheType = { + cpuInvalidateCache + } +} diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala new file mode 100644 index 00000000000..4a19a476b8b --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.execution.datasources.v2.rapids + +import scala.collection.JavaConverters._ + +import com.nvidia.spark.rapids.GpuExec + +import org.apache.spark.rdd.RDD +import org.apache.spark.sql.catalyst.InternalRow +import org.apache.spark.sql.catalyst.analysis.NoSuchTableException +import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, TableSpec} +import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Identifier, StagingTableCatalog, TableCatalog} +import org.apache.spark.sql.connector.expressions.Transform +import org.apache.spark.sql.errors.QueryCompilationErrors +import org.apache.spark.sql.execution.datasources.v2.V2CreateTableAsSelectBaseExec +import org.apache.spark.sql.vectorized.ColumnarBatch + +/** + * GPU version of AtomicReplaceTableAsSelectExec for Spark 4.1.0+. + * + * Physical plan node for v2 replace table as select when the catalog supports staging + * table replacement. + * + * A new table will be created using the schema of the query, and rows from the query are appended. + * If the table exists, its contents and schema should be replaced with the schema and the contents + * of the query. This implementation is atomic. The table replacement is staged, and the commit + * operation at the end should perform the replacement of the table's metadata and contents. If the + * write fails, the table is instructed to roll back staged changes and any previously written table + * is left untouched. + * + * Note: In Spark 4.1.0, invalidateCache signature changed from (TableCatalog, Table, Identifier) + * to (TableCatalog, Identifier). 
+ */ +case class GpuAtomicReplaceTableAsSelectExec( + catalog: StagingTableCatalog, + ident: Identifier, + partitioning: Seq[Transform], + query: LogicalPlan, + tableSpec: TableSpec, + writeOptions: Map[String, String], + orCreate: Boolean, + invalidateCache: (TableCatalog, Identifier) => Unit) + extends V2CreateTableAsSelectBaseExec with GpuExec { + + val properties = CatalogV2Util.convertTableProperties(tableSpec) + + override def supportsColumnar: Boolean = false + + override protected def run(): Seq[InternalRow] = { + val columns = getV2Columns(query.schema, catalog.useNullableQuerySchema) + if (catalog.tableExists(ident)) { + invalidateCache(catalog, ident) + } + val staged = if (orCreate) { + catalog.stageCreateOrReplace( + ident, columns, partitioning.toArray, properties.asJava) + } else if (catalog.tableExists(ident)) { + try { + catalog.stageReplace( + ident, columns, partitioning.toArray, properties.asJava) + } catch { + case e: NoSuchTableException => + throw QueryCompilationErrors.cannotReplaceMissingTableError(ident, Some(e)) + } + } else { + throw QueryCompilationErrors.cannotReplaceMissingTableError(ident) + } + writeToTable(catalog, staged, writeOptions, ident, query, overwrite = true) + } + + override protected def internalDoExecuteColumnar(): RDD[ColumnarBatch] = + throw new IllegalStateException("Columnar execution not supported") +} From 421ae8ac0f36c150cffadd1adc32b1495cb0cf99 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 9 Jan 2026 16:50:46 +0800 Subject: [PATCH 16/59] Add generated files for Spark 4.1.0 shim Contributes to https://github.com/NVIDIA/spark-rapids/issues/14056 --- tools/generated_files/410/operatorsScore.csv | 320 +++++++ .../410/supportedDataSource.csv | 14 + tools/generated_files/410/supportedExecs.csv | 62 ++ tools/generated_files/410/supportedExprs.csv | 860 ++++++++++++++++++ 4 files changed, 1256 insertions(+) create mode 100644 tools/generated_files/410/operatorsScore.csv create mode 100644 
tools/generated_files/410/supportedDataSource.csv create mode 100644 tools/generated_files/410/supportedExecs.csv create mode 100644 tools/generated_files/410/supportedExprs.csv diff --git a/tools/generated_files/410/operatorsScore.csv b/tools/generated_files/410/operatorsScore.csv new file mode 100644 index 00000000000..1ddea11f3d4 --- /dev/null +++ b/tools/generated_files/410/operatorsScore.csv @@ -0,0 +1,320 @@ +CPUOperator,Score +CoalesceExec,3.0 +CollectLimitExec,3.0 +ExpandExec,3.0 +FileSourceScanExec,3.0 +FilterExec,2.8 +GenerateExec,3.0 +GlobalLimitExec,3.0 +LocalLimitExec,3.0 +ProjectExec,3.0 +RangeExec,3.0 +SampleExec,3.0 +SortExec,8.0 +SubqueryBroadcastExec,3.0 +TakeOrderedAndProjectExec,3.0 +UnionExec,3.0 +AQEShuffleReadExec,3.0 +TableCacheQueryStageExec,3.0 +HashAggregateExec,4.5 +ObjectHashAggregateExec,3.0 +SortAggregateExec,3.0 +InMemoryTableScanExec,3.0 +DataWritingCommandExec,3.0 +ExecutedCommandExec,3.0 +WriteFilesExec,3.0 +AppendDataExec,3.0 +AppendDataExecV1,3.0 +AtomicCreateTableAsSelectExec,3.0 +AtomicReplaceTableAsSelectExec,3.0 +BatchScanExec,3.0 +MergeRowsExec,3.0 +OverwriteByExpressionExec,3.0 +OverwriteByExpressionExecV1,3.0 +OverwritePartitionsDynamicExec,3.0 +ReplaceDataExec,3.0 +WriteDeltaExec,3.0 +BroadcastExchangeExec,3.0 +ShuffleExchangeExec,4.2 +BroadcastHashJoinExec,5.1 +BroadcastNestedLoopJoinExec,3.0 +CartesianProductExec,3.0 +ShuffledHashJoinExec,3.0 +SortMergeJoinExec,22.7 +ArrowEvalPythonExec,1.2 +FlatMapCoGroupsInPandasExec,3.0 +FlatMapGroupsInPandasExec,1.2 +MapInArrowExec,3.0 +MapInPandasExec,1.2 +WindowExec,3.0 +WindowGroupLimitExec,3.0 +HiveTableScanExec,3.0 +Abs,4 +Acos,4 +Acosh,4 +Add,4 +AggregateExpression,4 +Alias,4 +And,4 +ApproximatePercentile,4 +ArrayContains,4 +ArrayDistinct,4 +ArrayExcept,4 +ArrayExists,4 +ArrayFilter,4 +ArrayIntersect,4 +ArrayJoin,4 +ArrayMax,4 +ArrayMin,4 +ArrayPosition,4 +ArrayRemove,4 +ArrayRepeat,4 +ArrayTransform,4 +ArrayUnion,4 +ArraysOverlap,4 +ArraysZip,4 +Ascii,4 +Asin,4 +Asinh,4 
+AtLeastNNonNulls,4 +Atan,4 +Atanh,4 +AttributeReference,4 +Average,4 +BRound,4 +Bin,4 +BitAndAgg,4 +BitLength,4 +BitOrAgg,4 +BitXorAgg,4 +BitwiseAnd,4 +BitwiseCount,4 +BitwiseNot,4 +BitwiseOr,4 +BitwiseXor,4 +BloomFilterAggregate,4 +BloomFilterMightContain,4 +BoundReference,4 +CaseWhen,4 +Cbrt,4 +Ceil,4 +CheckOverflowInTableInsert,4 +Coalesce,4 +CollationAwareMurmur3Hash,4 +CollationAwareXxHash64,4 +CollectList,4 +CollectSet,4 +Concat,4 +ConcatWs,4 +Contains,4 +Conv,4 +Cos,4 +Cosh,4 +Cot,4 +Count,4 +CreateArray,4 +CreateMap,4 +CreateNamedStruct,4 +CurrentRow$,4 +DateAdd,4 +DateAddInterval,4 +DateDiff,4 +DateFormatClass,4 +DateSub,4 +DayOfMonth,4 +DayOfWeek,4 +DayOfYear,4 +DenseRank,4 +Discard,4 +Divide,4 +DivideYMInterval,4 +DynamicPruningExpression,4 +ElementAt,4 +Empty2Null,4 +EndsWith,4 +EqualNullSafe,4 +EqualTo,4 +Exp,4 +Explode,4 +Expm1,4 +First,4 +Flatten,4 +Floor,4 +FormatNumber,4 +FromUTCTimestamp,4 +FromUnixTime,4 +GetArrayItem,4 +GetArrayStructFields,4 +GetJsonObject,4 +GetMapValue,4 +GetStructField,4 +GetTimestamp,4 +GreaterThan,4 +GreaterThanOrEqual,4 +Greatest,4 +HiveGenericUDF,4 +HiveHash,4 +HiveSimpleUDF,4 +Hour,4 +HyperLogLogPlusPlus,4 +Hypot,4 +If,4 +In,4 +InSet,4 +InSubqueryExec,4 +InitCap,4 +InputFileBlockLength,4 +InputFileBlockStart,4 +InputFileName,4 +IntegralDivide,4 +Invoke,4 +IsNaN,4 +IsNotNull,4 +IsNull,4 +JsonToStructs,4 +JsonTuple,4 +Keep,4 +KnownFloatingPointNormalized,4 +KnownNotNull,4 +KnownNullable,4 +Lag,4 +LambdaFunction,4 +Last,4 +LastDay,4 +Lead,4 +Least,4 +Length,4 +LessThan,4 +LessThanOrEqual,4 +Like,4 +Literal,4 +Log,4 +Log10,4 +Log1p,4 +Log2,4 +Logarithm,4 +Lower,4 +MakeDecimal,4 +MapConcat,4 +MapEntries,4 +MapFilter,4 +MapFromArrays,4 +MapFromEntries,4 +MapKeys,4 +MapValues,4 +MapZipWith,4 +Max,4 +MaxBy,4 +Md5,4 +MicrosToTimestamp,4 +MillisToTimestamp,4 +Min,4 +MinBy,4 +Minute,4 +MonotonicallyIncreasingID,4 +Month,4 +MonthsBetween,4 +Multiply,4 +MultiplyYMInterval,4 +Murmur3Hash,4 +NaNvl,4 +NamedLambdaVariable,4 
+NormalizeNaNAndZero,4 +Not,4 +NthValue,4 +OctetLength,4 +Or,4 +ParseUrl,4 +PercentRank,4 +Percentile,4 +PivotFirst,4 +Pmod,4 +PosExplode,4 +Pow,4 +PreciseTimestampConversion,4 +PythonUDAF,4 +PythonUDF,4 +Quarter,4 +RLike,4 +RaiseError,4 +Rand,4 +Rank,4 +RegExpExtract,4 +RegExpExtractAll,4 +RegExpReplace,4 +Remainder,4 +ReplicateRows,4 +Reverse,4 +Rint,4 +Round,4 +RoundCeil,4 +RoundFloor,4 +RowNumber,4 +ScalaUDF,4 +ScalarSubquery,4 +Second,4 +SecondsToTimestamp,4 +Sequence,4 +Sha1,4 +ShiftLeft,4 +ShiftRight,4 +ShiftRightUnsigned,4 +Signum,4 +Sin,4 +Sinh,4 +Size,4 +Slice,4 +SortArray,4 +SortOrder,4 +SparkPartitionID,4 +SpecifiedWindowFrame,4 +Split,4 +Sqrt,4 +Stack,4 +StartsWith,4 +StaticInvoke,4 +StddevPop,4 +StddevSamp,4 +StringInstr,4 +StringLPad,4 +StringLocate,4 +StringRPad,4 +StringRepeat,4 +StringReplace,4 +StringSplit,4 +StringToMap,4 +StringTranslate,4 +StringTrim,4 +StringTrimLeft,4 +StringTrimRight,4 +StructsToJson,4 +Substring,4 +SubstringIndex,4 +Subtract,4 +Sum,4 +Tan,4 +Tanh,4 +TimestampAddInterval,4 +ToDegrees,4 +ToRadians,4 +ToUTCTimestamp,4 +ToUnixTimestamp,4 +TransformKeys,4 +TransformValues,4 +TruncDate,4 +TruncTimestamp,4 +UnaryMinus,4 +UnaryPositive,4 +UnboundedFollowing$,4 +UnboundedPreceding$,4 +UnixTimestamp,4 +UnscaledValue,4 +Upper,4 +Uuid,4 +VariancePop,4 +VarianceSamp,4 +WeekDay,4 +WindowExpression,4 +WindowSpecDefinition,4 +XxHash64,4 +Year,4 diff --git a/tools/generated_files/410/supportedDataSource.csv b/tools/generated_files/410/supportedDataSource.csv new file mode 100644 index 00000000000..67669d28435 --- /dev/null +++ b/tools/generated_files/410/supportedDataSource.csv @@ -0,0 +1,14 @@ +Format,Direction,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH +Avro,read,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO +CSV,read,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,NA,NA,NA,NA,NA,NA +Delta,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S 
+Delta,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S +HiveText,read,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS +HiveText,write,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS +Iceberg,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S +Iceberg,write,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO +JSON,read,S,S,S,S,S,S,S,PS,PS,S,S,NA,NS,NA,PS,NS,PS,NS,NA,NA +ORC,read,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,PS,PS,PS,NS,NA,NA +ORC,write,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,PS,PS,PS,NS,NA,NA +Parquet,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S +Parquet,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S diff --git a/tools/generated_files/410/supportedExecs.csv b/tools/generated_files/410/supportedExecs.csv new file mode 100644 index 00000000000..afa89d9ea74 --- /dev/null +++ b/tools/generated_files/410/supportedExecs.csv @@ -0,0 +1,62 @@ +Exec,Supported,Notes,Params,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH +CoalesceExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +CollectLimitExec,NS,This is disabled by default because Collect Limit replacement can be slower on the GPU; if huge number of rows in a batch it could help by limiting the number of rows transferred from GPU to CPU,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +ExpandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +FileSourceScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +FilterExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +GenerateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +GlobalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +LocalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +ProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S 
+RangeExec,S,None,Input/Output,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SampleExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,S,S +SortExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +SubqueryBroadcastExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +TakeOrderedAndProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +UnionExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +AQEShuffleReadExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +TableCacheQueryStageExec,NS,This is disabled by default because Table cache query stage that wraps InMemoryTableScan for AQE,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +HashAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +ObjectHashAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,PS,NS,PS,PS,PS,NS,NS,NS +SortAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +InMemoryTableScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,PS,PS,PS,NS,S,S +DataWritingCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,PS,NS,S,NS,PS,PS,PS,NS,S,S +ExecutedCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +WriteFilesExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AppendDataExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +AppendDataExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +AtomicCreateTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +AtomicReplaceTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +BatchScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +MergeRowsExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S 
+OverwriteByExpressionExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +OverwriteByExpressionExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +OverwritePartitionsDynamicExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +ReplaceDataExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +WriteDeltaExec,NS,This is disabled by default because Merge on read support for iceberg is experimental,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +BroadcastExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +ShuffleExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +BroadcastHashJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS +BroadcastHashJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS +BroadcastHashJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BroadcastHashJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +BroadcastNestedLoopJoinExec,S,None,condition(A non-inner join only is supported if the condition expression can be converted to a GPU AST expression),S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BroadcastNestedLoopJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CartesianProductExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +ShuffledHashJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS +ShuffledHashJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS +ShuffledHashJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShuffledHashJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +SortMergeJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS 
+SortMergeJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS +SortMergeJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SortMergeJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +ArrowEvalPythonExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +FlatMapCoGroupsInPandasExec,NS,This is disabled by default because Performance is not ideal with many small groups,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +FlatMapGroupsInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +MapInArrowExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +MapInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +WindowExec,S,None,partitionSpec,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS,NS,NS +WindowExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +WindowGroupLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +HiveTableScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS diff --git a/tools/generated_files/410/supportedExprs.csv b/tools/generated_files/410/supportedExprs.csv new file mode 100644 index 00000000000..9b5978383c7 --- /dev/null +++ b/tools/generated_files/410/supportedExprs.csv @@ -0,0 +1,860 @@ +Expression,Supported,SQL Func,Notes,Context,Params,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH +Abs,S,`abs`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Abs,S,`abs`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Abs,S,`abs`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Abs,S,`abs`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Acos,S,`acos`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acos,S,`acos`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acos,S,`acos`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acos,S,`acos`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acosh,S,`acosh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acosh,S,`acosh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acosh,S,`acosh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Acosh,S,`acosh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Add,S,`+`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Add,S,`+`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Add,S,`+`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Add,S,`+`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Add,S,`+`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Add,S,`+`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Alias,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +Alias,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +Alias,S, ,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S +Alias,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S +And,S,`and`,None,project,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +And,S,`and`,None,project,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +And,S,`and`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +And,S,`and`,None,AST,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +And,S,`and`,None,AST,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+And,S,`and`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayContains,S,`array_contains`,None,project,array,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayContains,S,`array_contains`,None,project,key,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +ArrayContains,S,`array_contains`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayDistinct,S,`array_distinct`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayDistinct,S,`array_distinct`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayExcept,S,`array_except`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayExcept,S,`array_except`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayExcept,S,`array_except`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). 
Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayExists,S,`exists`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayExists,S,`exists`,None,project,function,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayExists,S,`exists`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayFilter,S,`filter`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayFilter,S,`filter`,None,project,function,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayFilter,S,`filter`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayIntersect,S,`array_intersect`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayIntersect,S,`array_intersect`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. 
We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayIntersect,S,`array_intersect`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayJoin,S,`array_join`,None,project,array,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +ArrayJoin,S,`array_join`,None,project,delimiter,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayJoin,S,`array_join`,None,project,nullReplacement,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayJoin,S,`array_join`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayMax,S,`array_max`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayMax,S,`array_max`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +ArrayMin,S,`array_min`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayMin,S,`array_min`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +ArrayPosition,S,`array_position`,None,project,array,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayPosition,S,`array_position`,None,project,key,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,NA,PS,NS,NA,NA +ArrayPosition,S,`array_position`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayRemove,S,`array_remove`,None,project,array,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS,PS,NS,NS,NS,NS,NS 
+ArrayRemove,S,`array_remove`,None,project,element,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +ArrayRemove,S,`array_remove`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayRepeat,S,`array_repeat`,None,project,left,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +ArrayRepeat,S,`array_repeat`,None,project,right,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArrayRepeat,S,`array_repeat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayTransform,S,`transform`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayTransform,S,`transform`,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +ArrayTransform,S,`transform`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayUnion,S,`array_union`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayUnion,S,`array_union`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. 
We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArrayUnion,S,`array_union`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArraysOverlap,S,`arrays_overlap`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArraysOverlap,S,`arrays_overlap`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. 
We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,array2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArraysOverlap,S,`arrays_overlap`,This is not 100% compatible with the Spark version because the GPU implementation treats -0.0 and 0.0 as equal; but the CPU implementation currently does not (see SPARK-39845). Also; Apache Spark 3.1.3 fixed issue SPARK-36741 where NaNs in these set like operators were not treated as being equal. We have chosen to break with compatibility for the older versions of Spark in this instance and handle NaNs the same as 3.1.3+,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ArraysZip,S,`arrays_zip`,None,project,children,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +ArraysZip,S,`arrays_zip`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Ascii,NS,`ascii`,This is disabled by default because it only supports strings starting with ASCII or Latin-1 characters after Spark 3.2.3; 3.3.1 and 3.4.0. Otherwise the results will not match the CPU.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Ascii,NS,`ascii`,This is disabled by default because it only supports strings starting with ASCII or Latin-1 characters after Spark 3.2.3; 3.3.1 and 3.4.0. 
Otherwise the results will not match the CPU.,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asin,S,`asin`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asin,S,`asin`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asin,S,`asin`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asin,S,`asin`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asinh,S,`asinh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asinh,S,`asinh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asinh,S,`asinh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Asinh,S,`asinh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AtLeastNNonNulls,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +AtLeastNNonNulls,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atan,S,`atan`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atan,S,`atan`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atan,S,`atan`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atan,S,`atan`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atanh,S,`atanh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atanh,S,`atanh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atanh,S,`atanh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Atanh,S,`atanh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AttributeReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +AttributeReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S 
+BRound,S,`bround`,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +BRound,S,`bround`,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BRound,S,`bround`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Bin,S,`bin`,None,project,input,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Bin,S,`bin`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitLength,S,`bit_length`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA +BitLength,S,`bit_length`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,project,lhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,project,rhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,AST,lhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,AST,rhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseAnd,S,`&`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseCount,S,`bit_count`,None,project,input,S,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseCount,S,`bit_count`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseNot,S,`~`,None,project,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseNot,S,`~`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseNot,S,`~`,None,AST,input,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseNot,S,`~`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseOr,S,`\|`,None,project,lhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseOr,S,`\|`,None,project,rhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+BitwiseOr,S,`\|`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseOr,S,`\|`,None,AST,lhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseOr,S,`\|`,None,AST,rhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseOr,S,`\|`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,project,lhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,project,rhs,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,AST,lhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,AST,rhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitwiseXor,S,`^`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BloomFilterMightContain,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA +BloomFilterMightContain,S, ,None,project,rhs,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA +BloomFilterMightContain,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BoundReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +BoundReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S +CaseWhen,S,`when`,None,project,predicate,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +CaseWhen,S,`when`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CaseWhen,S,`when`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Cbrt,S,`cbrt`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cbrt,S,`cbrt`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cbrt,S,`cbrt`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Cbrt,S,`cbrt`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Ceil,S, ,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Ceil,S, ,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +CheckOverflowInTableInsert,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +CheckOverflowInTableInsert,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Coalesce,S,`coalesce`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +Coalesce,S,`coalesce`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +CollationAwareMurmur3Hash,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +CollationAwareMurmur3Hash,S, ,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +CollationAwareXxHash64,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +CollationAwareXxHash64,S, ,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Concat,S,`concat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,PS,NA,NA,NA,NA,NA +Concat,S,`concat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,PS,NA,NA,NA,NA,NA +ConcatWs,S,`concat_ws`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +ConcatWs,S,`concat_ws`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Contains,S, ,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Contains,S, ,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Contains,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Conv,S,`conv`,None,project,num,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Conv,S,`conv`,None,project,from_base,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Conv,S,`conv`,None,project,to_base,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Conv,S,`conv`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cos,S,`cos`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cos,S,`cos`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cos,S,`cos`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cos,S,`cos`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cosh,S,`cosh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cosh,S,`cosh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cosh,S,`cosh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cosh,S,`cosh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cot,S,`cot`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cot,S,`cot`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cot,S,`cot`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Cot,S,`cot`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +CreateArray,S,`array`,None,project,arg,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,NS,PS,NS,NS,NS +CreateArray,S,`array`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CreateMap,S,`map`,None,project,key,S,S,S,S,S,S,S,S,PS,S,S,S,NA,NA,PS,NA,PS,NA,NA,NA +CreateMap,S,`map`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,NA,NA,PS,PS,PS,NA,NA,NA +CreateNamedStruct,S,`named_struct`; `struct`,None,project,name,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +CreateNamedStruct,S,`named_struct`; `struct`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CreateNamedStruct,S,`named_struct`; `struct`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA 
+CurrentRow$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA +DateAdd,S,`date_add`; `dateadd`,None,project,startDate,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateAdd,S,`date_add`; `dateadd`,None,project,days,NA,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateAdd,S,`date_add`; `dateadd`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateAddInterval,S, ,None,project,start,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateAddInterval,S, ,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA +DateAddInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateDiff,S,`date_diff`; `datediff`,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateDiff,S,`date_diff`; `datediff`,None,project,rhs,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateDiff,S,`date_diff`; `datediff`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateFormatClass,S,`date_format`,None,project,timestamp,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateFormatClass,S,`date_format`,None,project,strfmt,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateFormatClass,S,`date_format`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateSub,S,`date_sub`,None,project,startDate,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateSub,S,`date_sub`,None,project,days,NA,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DateSub,S,`date_sub`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DayOfMonth,S,`day`; `dayofmonth`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DayOfMonth,S,`day`; `dayofmonth`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+DayOfWeek,S,`dayofweek`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DayOfWeek,S,`dayofweek`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DayOfYear,S,`dayofyear`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DayOfYear,S,`dayofyear`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DenseRank,S,`dense_rank`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +DenseRank,S,`dense_rank`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Divide,S,`/`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Divide,S,`/`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Divide,S,`/`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +DivideYMInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S +DivideYMInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +DivideYMInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S +DynamicPruningExpression,S, ,None,project,input,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +DynamicPruningExpression,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +ElementAt,S,`element_at`,None,project,array/map,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA,NA,NA +ElementAt,S,`element_at`,None,project,index/key,PS,PS,PS,S,PS,PS,PS,PS,PS,PS,PS,NS,NS,NS,NS,NS,NS,NS,NS,NS +ElementAt,S,`element_at`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Empty2Null,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Empty2Null,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +EndsWith,S, ,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +EndsWith,S, 
,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +EndsWith,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +EqualNullSafe,S,`<=>`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +EqualNullSafe,S,`<=>`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +EqualNullSafe,S,`<=>`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +EqualTo,S,`==`; `=`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +EqualTo,S,`==`; `=`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +EqualTo,S,`==`; `=`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +EqualTo,S,`==`; `=`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +EqualTo,S,`==`; `=`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +EqualTo,S,`==`; `=`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Exp,S,`exp`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Exp,S,`exp`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Exp,S,`exp`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Exp,S,`exp`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Explode,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA,NA,NA +Explode,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Expm1,S,`expm1`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Expm1,S,`expm1`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Expm1,S,`expm1`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Expm1,S,`expm1`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Flatten,S,`flatten`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Flatten,S,`flatten`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Floor,S, ,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Floor,S, ,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +FormatNumber,S,`format_number`,None,project,x,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +FormatNumber,S,`format_number`,None,project,d,NA,NA,NA,PS,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FormatNumber,S,`format_number`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUTCTimestamp,S,`from_utc_timestamp`,None,project,timestamp,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUTCTimestamp,S,`from_utc_timestamp`,None,project,timezone,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUTCTimestamp,S,`from_utc_timestamp`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUnixTime,S,`from_unixtime`,None,project,sec,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUnixTime,S,`from_unixtime`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +FromUnixTime,S,`from_unixtime`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetArrayItem,S, ,None,project,array,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +GetArrayItem,S, ,None,project,ordinal,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetArrayItem,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +GetArrayStructFields,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +GetArrayStructFields,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +GetJsonObject,S,`get_json_object`,None,project,json,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+GetJsonObject,S,`get_json_object`,None,project,path,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetJsonObject,S,`get_json_object`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetMapValue,S, ,None,project,map,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +GetMapValue,S, ,None,project,key,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS +GetMapValue,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +GetStructField,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA +GetStructField,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +GetTimestamp,S, ,None,project,timeExp,NA,NA,NA,NA,NA,NA,NA,S,PS,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetTimestamp,S, ,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GetTimestamp,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GreaterThan,S,`>`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +GreaterThan,S,`>`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +GreaterThan,S,`>`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GreaterThan,S,`>`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +GreaterThan,S,`>`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +GreaterThan,S,`>`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GreaterThanOrEqual,S,`>=`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +GreaterThanOrEqual,S,`>=`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +GreaterThanOrEqual,S,`>=`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +GreaterThanOrEqual,S,`>=`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +GreaterThanOrEqual,S,`>=`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA 
+GreaterThanOrEqual,S,`>=`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Greatest,S,`greatest`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Greatest,S,`greatest`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +HiveHash,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,NS,S,NS,NS,PS,NS,PS,NS,NS,NS +HiveHash,S, ,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Hour,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Hour,S, ,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Hypot,S,`hypot`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Hypot,S,`hypot`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Hypot,S,`hypot`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +If,S,`if`,None,project,predicate,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +If,S,`if`,None,project,trueValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +If,S,`if`,None,project,falseValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +If,S,`if`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +In,S,`in`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +In,S,`in`,None,project,list,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,NS,NS,NS,NS,NA,NS,NS,NA,NA +In,S,`in`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InSet,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +InSet,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InitCap,S,`initcap`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InitCap,S,`initcap`,This is not 100% 
compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InputFileBlockLength,S,`input_file_block_length`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InputFileBlockStart,S,`input_file_block_start`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InputFileName,S,`input_file_name`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IntegralDivide,S,`div`,None,project,lhs,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +IntegralDivide,S,`div`,None,project,rhs,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +IntegralDivide,S,`div`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IsNaN,S,`isnan`,None,project,input,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IsNaN,S,`isnan`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IsNotNull,S,`isnotnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,NS +IsNotNull,S,`isnotnull`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IsNotNull,S,`isnotnull`,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +IsNotNull,S,`isnotnull`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IsNull,S,`isnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,NS +IsNull,S,`isnull`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +IsNull,S,`isnull`,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +IsNull,S,`isnull`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +JsonToStructs,S,`from_json`,None,project,jsonStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+JsonToStructs,S,`from_json`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NS,PS,PS,NA,NA,NA +JsonTuple,S,`json_tuple`,None,project,json,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +JsonTuple,S,`json_tuple`,None,project,field,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +JsonTuple,S,`json_tuple`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +KnownFloatingPointNormalized,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +KnownFloatingPointNormalized,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +KnownNotNull,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,NS,S,S,PS,PS,PS,NS,NS,NS +KnownNotNull,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,NS,S,S,PS,PS,PS,NS,NS,NS +KnownNullable,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +KnownNullable,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Lag,S,`lag`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +Lag,S,`lag`,None,window,offset,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Lag,S,`lag`,None,window,default,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +Lag,S,`lag`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +LambdaFunction,S, ,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +LambdaFunction,S, ,None,project,arguments,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +LambdaFunction,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +LastDay,S,`last_day`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +LastDay,S,`last_day`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Lead,S,`lead`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +Lead,S,`lead`,None,window,offset,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Lead,S,`lead`,None,window,default,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +Lead,S,`lead`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +Least,S,`least`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Least,S,`least`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Length,S,`char_length`; `character_length`; `len`; `length`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA +Length,S,`char_length`; `character_length`; `len`; `length`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +LessThan,S,`<`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +LessThan,S,`<`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +LessThan,S,`<`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +LessThan,S,`<`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +LessThan,S,`<`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +LessThan,S,`<`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +LessThanOrEqual,S,`<=`,None,project,lhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +LessThanOrEqual,S,`<=`,None,project,rhs,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NA,NA +LessThanOrEqual,S,`<=`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +LessThanOrEqual,S,`<=`,None,AST,lhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +LessThanOrEqual,S,`<=`,None,AST,rhs,S,S,S,S,S,NS,NS,S,PS,S,NS,NS,NS,NS,NS,NA,NS,NS,NA,NA +LessThanOrEqual,S,`<=`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Like,S,`like`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Like,S,`like`,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Like,S,`like`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Literal,S, 
,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,S,S +Literal,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +Log,S,`ln`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log,S,`ln`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log10,S,`log10`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log10,S,`log10`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log1p,S,`log1p`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log1p,S,`log1p`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log2,S,`log2`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Log2,S,`log2`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Logarithm,S,`log`,None,project,value,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Logarithm,S,`log`,None,project,base,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Logarithm,S,`log`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Lower,S,`lcase`; `lower`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Lower,S,`lcase`; `lower`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MakeDecimal,S, ,None,project,input,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MakeDecimal,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+MapConcat,S,`map_concat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapConcat,S,`map_concat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapEntries,S,`map_entries`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapEntries,S,`map_entries`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +MapFilter,S,`map_filter`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapFilter,S,`map_filter`,None,project,function,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MapFilter,S,`map_filter`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapFromArrays,S,`map_from_arrays`,None,project,keys,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +MapFromArrays,S,`map_from_arrays`,None,project,values,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +MapFromArrays,S,`map_from_arrays`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapFromEntries,S,`map_from_entries`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +MapFromEntries,S,`map_from_entries`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapKeys,S,`map_keys`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapKeys,S,`map_keys`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +MapValues,S,`map_values`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapValues,S,`map_values`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +MapZipWith,S,`map_zip_with`,None,project,argument1,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +MapZipWith,S,`map_zip_with`,None,project,argument2,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA 
+MapZipWith,S,`map_zip_with`,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +MapZipWith,S,`map_zip_with`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +Md5,S,`md5`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA +Md5,S,`md5`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MicrosToTimestamp,S,`timestamp_micros`,None,project,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MicrosToTimestamp,S,`timestamp_micros`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MillisToTimestamp,S,`timestamp_millis`,None,project,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MillisToTimestamp,S,`timestamp_millis`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Minute,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Minute,S, ,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MonotonicallyIncreasingID,S,`monotonically_increasing_id`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Month,S,`month`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Month,S,`month`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MonthsBetween,S,`months_between`,None,project,timestamp1,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MonthsBetween,S,`months_between`,None,project,timestamp2,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MonthsBetween,S,`months_between`,None,project,round,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +MonthsBetween,S,`months_between`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Multiply,S,`*`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Multiply,S,`*`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Multiply,S,`*`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Multiply,S,`*`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Multiply,S,`*`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Multiply,S,`*`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +MultiplyYMInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S +MultiplyYMInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +MultiplyYMInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S +Murmur3Hash,S,`hash`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +Murmur3Hash,S,`hash`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NaNvl,S,`nanvl`,None,project,lhs,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NaNvl,S,`nanvl`,None,project,rhs,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NaNvl,S,`nanvl`,None,project,result,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NamedLambdaVariable,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +Not,S,`!`; `not`,None,project,input,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Not,S,`!`; `not`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Not,S,`!`; `not`,None,AST,input,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Not,S,`!`; `not`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NthValue,S,`nth_value`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +NthValue,S,`nth_value`,None,window,offset,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NthValue,S,`nth_value`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS 
+OctetLength,S,`octet_length`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA +OctetLength,S,`octet_length`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,project,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,project,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,AST,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,AST,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Or,S,`or`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ParseUrl,S,`parse_url`,None,project,url,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ParseUrl,S,`parse_url`,None,project,partToExtract,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ParseUrl,S,`parse_url`,None,project,key,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ParseUrl,S,`parse_url`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +PercentRank,S,`percent_rank`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +PercentRank,S,`percent_rank`,None,window,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pmod,S,`pmod`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pmod,S,`pmod`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pmod,S,`pmod`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +PosExplode,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA,NA,NA +PosExplode,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Pow,S,`pow`; `power`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pow,S,`pow`; 
`power`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pow,S,`pow`; `power`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pow,S,`pow`; `power`,None,AST,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pow,S,`pow`; `power`,None,AST,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Pow,S,`pow`; `power`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +PreciseTimestampConversion,S, ,None,project,input,NA,NA,NA,NA,S,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +PreciseTimestampConversion,S, ,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +PythonUDAF,S, ,None,aggregation,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDAF,S, ,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +PythonUDAF,S, ,None,reduction,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDAF,S, ,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +PythonUDAF,S, ,None,window,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDAF,S, ,None,window,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +PythonUDAF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDAF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +PythonUDF,S, ,None,aggregation,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDF,S, ,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +PythonUDF,S, ,None,reduction,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDF,S, ,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +PythonUDF,S, ,None,window,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDF,S, ,None,window,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +PythonUDF,S, 
,None,project,param,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NA,PS,NS,PS,NA,NA,NA +Quarter,S,`quarter`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Quarter,S,`quarter`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RLike,S,`regexp_like`; `regexp`; `rlike`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RLike,S,`regexp_like`; `regexp`; `rlike`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RLike,S,`regexp_like`; `regexp`; `rlike`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RaiseError,S, ,None,project,errorClass,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RaiseError,S, ,None,project,errorParams,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA +RaiseError,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA +Rand,S,`rand`; `random`,None,project,seed,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Rand,S,`rand`; `random`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Rank,S,`rank`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +Rank,S,`rank`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtract,S,`regexp_extract`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtract,S,`regexp_extract`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtract,S,`regexp_extract`,None,project,idx,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtract,S,`regexp_extract`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtractAll,S,`regexp_extract_all`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+RegExpExtractAll,S,`regexp_extract_all`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtractAll,S,`regexp_extract_all`,None,project,idx,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpExtractAll,S,`regexp_extract_all`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +RegExpReplace,S,`regexp_replace`,None,project,regex,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpReplace,S,`regexp_replace`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpReplace,S,`regexp_replace`,None,project,pos,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpReplace,S,`regexp_replace`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RegExpReplace,S,`regexp_replace`,None,project,rep,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Remainder,S,`%`; `mod`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Remainder,S,`%`; `mod`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Remainder,S,`%`; `mod`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +ReplicateRows,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +ReplicateRows,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Reverse,S,`reverse`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Reverse,S,`reverse`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Rint,S,`rint`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Rint,S,`rint`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Rint,S,`rint`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Rint,S,`rint`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Round,S,`round`,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Round,S,`round`,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Round,S,`round`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundCeil,S, ,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundCeil,S, ,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundCeil,S, ,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundFloor,S, ,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundFloor,S, ,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundFloor,S, ,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +RowNumber,S,`row_number`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +RowNumber,S,`row_number`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ScalaUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS +ScalaUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS +Second,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Second,S, ,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SecondsToTimestamp,S,`timestamp_seconds`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +SecondsToTimestamp,S,`timestamp_seconds`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sequence,S,`sequence`,None,project,start,NA,S,S,S,S,NA,NA,NS,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sequence,S,`sequence`,None,project,stop,NA,S,S,S,S,NA,NA,NS,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sequence,S,`sequence`,None,project,step,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA +Sequence,S,`sequence`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA 
+Sha1,S,`sha1`; `sha`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA +Sha1,S,`sha1`; `sha`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftLeft,S,`<<`; `shiftleft`,None,project,value,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftLeft,S,`<<`; `shiftleft`,None,project,amount,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftLeft,S,`<<`; `shiftleft`,None,project,result,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftRight,S,`>>`; `shiftright`,None,project,value,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftRight,S,`>>`; `shiftright`,None,project,amount,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftRight,S,`>>`; `shiftright`,None,project,result,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftRightUnsigned,S,`>>>`; `shiftrightunsigned`,None,project,value,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftRightUnsigned,S,`>>>`; `shiftrightunsigned`,None,project,amount,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ShiftRightUnsigned,S,`>>>`; `shiftrightunsigned`,None,project,result,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Signum,S,`sign`; `signum`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Signum,S,`sign`; `signum`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sin,S,`sin`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sin,S,`sin`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sin,S,`sin`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sin,S,`sin`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sinh,S,`sinh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Sinh,S,`sinh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sinh,S,`sinh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sinh,S,`sinh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Size,S,`cardinality`; `size`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA,NA,NA +Size,S,`cardinality`; `size`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Slice,S,`slice`,None,project,x,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Slice,S,`slice`,None,project,start,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Slice,S,`slice`,None,project,length,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Slice,S,`slice`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +SortArray,S,`sort_array`,None,project,array,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +SortArray,S,`sort_array`,None,project,ascendingOrder,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SortArray,S,`sort_array`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +SortOrder,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +SortOrder,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +SparkPartitionID,S,`spark_partition_id`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SpecifiedWindowFrame,S, ,None,project,lower,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,S,NA,NA,NA,NA,S,NS +SpecifiedWindowFrame,S, ,None,project,upper,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,S,NA,NA,NA,NA,S,NS +SpecifiedWindowFrame,S, ,None,project,result,NA,S,S,S,S,NS,NS,NA,NA,NA,NS,NA,NA,S,NA,NA,NA,NA,S,NS +Sqrt,S,`sqrt`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sqrt,S,`sqrt`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Sqrt,S,`sqrt`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sqrt,S,`sqrt`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Stack,S,`stack`,None,project,n,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Stack,S,`stack`,None,project,expr,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +Stack,S,`stack`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +StartsWith,S, ,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StartsWith,S, ,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StartsWith,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringInstr,S,`instr`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringInstr,S,`instr`,None,project,substr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringInstr,S,`instr`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S, ,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S, ,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S, ,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLocate,S,`locate`; `position`,None,project,substr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLocate,S,`locate`; `position`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLocate,S,`locate`; `position`,None,project,start,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLocate,S,`locate`; `position`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S, ,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S, 
,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S, ,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRepeat,S,`repeat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRepeat,S,`repeat`,None,project,repeatTimes,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRepeat,S,`repeat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringReplace,S,`replace`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringReplace,S,`replace`,None,project,search,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringReplace,S,`replace`,None,project,replace,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringReplace,S,`replace`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringSplit,S,`split`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringSplit,S,`split`,None,project,regexp,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringSplit,S,`split`,None,project,limit,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringSplit,S,`split`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +StringToMap,S,`str_to_map`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringToMap,S,`str_to_map`,None,project,pairDelim,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringToMap,S,`str_to_map`,None,project,keyValueDelim,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringToMap,S,`str_to_map`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA +StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. 
In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,from,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,to,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTranslate,S,`translate`,This is not 100% compatible with the Spark version because the GPU implementation supports all unicode code points. 
In Spark versions < 3.2.0; translate() does not support unicode characters with code point >= U+10000 (See SPARK-34094),project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrim,S,`trim`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrim,S,`trim`,None,project,trimStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrim,S,`trim`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimLeft,S,`ltrim`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimLeft,S,`ltrim`,None,project,trimStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimLeft,S,`ltrim`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimRight,S,`rtrim`,None,project,src,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimRight,S,`rtrim`,None,project,trimStr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringTrimRight,S,`rtrim`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StructsToJson,NS,`to_json`,This is disabled by default because it is currently in beta and undergoes continuous enhancements. Please consult the [compatibility documentation](../compatibility.md#json-supporting-types) to determine whether you can enable this configuration for your use case,project,struct,S,S,S,S,S,S,S,S,PS,S,S,NA,NA,NA,PS,PS,PS,NA,NA,NA +StructsToJson,NS,`to_json`,This is disabled by default because it is currently in beta and undergoes continuous enhancements. 
Please consult the [compatibility documentation](../compatibility.md#json-supporting-types) to determine whether you can enable this configuration for your use case,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Substring,S,`substr`; `substring`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA +Substring,S,`substr`; `substring`,None,project,pos,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Substring,S,`substr`; `substring`,None,project,len,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Substring,S,`substr`; `substring`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA +SubstringIndex,S,`substring_index`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SubstringIndex,S,`substring_index`,None,project,delim,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SubstringIndex,S,`substring_index`,None,project,count,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +SubstringIndex,S,`substring_index`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Subtract,S,`-`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Subtract,S,`-`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Subtract,S,`-`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Subtract,S,`-`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Subtract,S,`-`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Subtract,S,`-`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +Tan,S,`tan`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tan,S,`tan`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tan,S,`tan`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Tan,S,`tan`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tanh,S,`tanh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tanh,S,`tanh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tanh,S,`tanh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Tanh,S,`tanh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +TimestampAddInterval,S, ,None,project,start,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +TimestampAddInterval,S, ,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,S,NA +TimestampAddInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToDegrees,S,`degrees`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToDegrees,S,`degrees`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToRadians,S,`radians`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToRadians,S,`radians`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToUTCTimestamp,S,`to_utc_timestamp`,None,project,timestamp,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToUTCTimestamp,S,`to_utc_timestamp`,None,project,timezone,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToUTCTimestamp,S,`to_utc_timestamp`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToUnixTimestamp,S,`to_unix_timestamp`,None,project,timeExp,NA,NA,NA,NA,NA,NA,NA,S,PS,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToUnixTimestamp,S,`to_unix_timestamp`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ToUnixTimestamp,S,`to_unix_timestamp`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+TransformKeys,S,`transform_keys`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +TransformKeys,S,`transform_keys`,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NS,NS +TransformKeys,S,`transform_keys`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +TransformValues,S,`transform_values`,None,project,argument,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +TransformValues,S,`transform_values`,None,project,function,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +TransformValues,S,`transform_values`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA +TruncDate,S,`trunc`,None,project,date,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +TruncDate,S,`trunc`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +TruncDate,S,`trunc`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +TruncTimestamp,S,`date_trunc`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +TruncTimestamp,S,`date_trunc`,None,project,date,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +TruncTimestamp,S,`date_trunc`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +UnaryMinus,S,`negative`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +UnaryMinus,S,`negative`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +UnaryMinus,S,`negative`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryMinus,S,`negative`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryPositive,S,`positive`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +UnaryPositive,S,`positive`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +UnaryPositive,S,`positive`,None,AST,input,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,S,S 
+UnaryPositive,S,`positive`,None,AST,result,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,S,S +UnboundedFollowing$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA +UnboundedPreceding$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA +UnixTimestamp,S,`unix_timestamp`,None,project,timeExp,NA,NA,NA,NA,NA,NA,NA,S,PS,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +UnixTimestamp,S,`unix_timestamp`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +UnixTimestamp,S,`unix_timestamp`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +UnscaledValue,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA +UnscaledValue,S, ,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Upper,S,`ucase`; `upper`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Upper,S,`ucase`; `upper`,This is not 100% compatible with the Spark version because the Unicode version used by cuDF and the JVM may differ; resulting in some corner-case characters not changing case correctly.,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Uuid,S,`uuid`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +WeekDay,S,`weekday`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +WeekDay,S,`weekday`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +WindowExpression,S, ,None,window,windowFunction,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +WindowExpression,S, ,None,window,windowSpec,NA,S,S,S,S,NS,NS,NA,NA,NA,PS,NA,NA,S,NA,NA,NA,NA,S,NS +WindowExpression,S, ,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S 
+WindowSpecDefinition,S, ,None,project,partition,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS,NS,NS +WindowSpecDefinition,S, ,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS,NS,NS +WindowSpecDefinition,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS,NS,NS +XxHash64,S,`xxhash64`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +XxHash64,S,`xxhash64`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Year,S,`year`,None,project,input,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Year,S,`year`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AggregateExpression,S, ,None,aggregation,aggFunc,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AggregateExpression,S, ,None,aggregation,filter,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AggregateExpression,S, ,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AggregateExpression,S, ,None,reduction,aggFunc,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AggregateExpression,S, ,None,reduction,filter,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AggregateExpression,S, ,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AggregateExpression,S, ,None,window,aggFunc,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +AggregateExpression,S, ,None,window,filter,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +AggregateExpression,S, ,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,aggregation,input,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not 
bit-for-bit compatible with Apache Spark,aggregation,percentage,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,aggregation,accuracy,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,aggregation,result,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,PS,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,input,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,percentage,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,accuracy,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ApproximatePercentile,S,`approx_percentile`; `percentile_approx`,This is not 100% compatible with the Spark version because the GPU implementation of approx_percentile is not bit-for-bit compatible with Apache Spark,reduction,result,NA,S,S,S,S,S,S,NS,NS,NA,S,NA,NA,NA,PS,NA,NA,NA,NA,NA +Average,S,`avg`; `mean`,None,aggregation,input,NA,S,S,S,S,S,S,NA,NA,NA,S,S,NA,NS,NA,NA,NA,NA,NS,NS +Average,S,`avg`; 
`mean`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Average,S,`avg`; `mean`,None,reduction,input,NA,S,S,S,S,S,S,NA,NA,NA,S,S,NA,NS,NA,NA,NA,NA,NS,NS +Average,S,`avg`; `mean`,None,reduction,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Average,S,`avg`; `mean`,None,window,input,NA,S,S,S,S,S,S,NA,NA,NA,S,S,NA,NS,NA,NA,NA,NA,NS,NS +Average,S,`avg`; `mean`,None,window,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitAndAgg,S,`bit_and`,None,aggregation,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitAndAgg,S,`bit_and`,None,aggregation,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitAndAgg,S,`bit_and`,None,reduction,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitAndAgg,S,`bit_and`,None,reduction,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitOrAgg,S,`bit_or`,None,aggregation,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitOrAgg,S,`bit_or`,None,aggregation,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitOrAgg,S,`bit_or`,None,reduction,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitOrAgg,S,`bit_or`,None,reduction,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitXorAgg,S,`bit_xor`,None,aggregation,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitXorAgg,S,`bit_xor`,None,aggregation,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitXorAgg,S,`bit_xor`,None,reduction,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BitXorAgg,S,`bit_xor`,None,reduction,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BloomFilterAggregate,S, ,None,reduction,child,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BloomFilterAggregate,S, ,None,reduction,estimatedItems,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BloomFilterAggregate,S, 
,None,reduction,numBits,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BloomFilterAggregate,S, ,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA +CollectList,S,`array_agg`; `collect_list`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`array_agg`; `collect_list`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectList,S,`array_agg`; `collect_list`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`array_agg`; `collect_list`,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectList,S,`array_agg`; `collect_list`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`array_agg`; `collect_list`,None,window,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectSet,S,`collect_set`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +CollectSet,S,`collect_set`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectSet,S,`collect_set`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +CollectSet,S,`collect_set`,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectSet,S,`collect_set`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS +CollectSet,S,`collect_set`,None,window,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +Count,S,`count`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Count,S,`count`,None,aggregation,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Count,S,`count`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Count,S,`count`,None,reduction,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Count,S,`count`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S 
+Count,S,`count`,None,window,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +First,S,`first_value`; `first`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +First,S,`first_value`; `first`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +First,S,`first_value`; `first`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +First,S,`first_value`; `first`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +First,S,`first_value`; `first`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +First,S,`first_value`; `first`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +HyperLogLogPlusPlus,S,`approx_count_distinct`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +HyperLogLogPlusPlus,S,`approx_count_distinct`,None,aggregation,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +HyperLogLogPlusPlus,S,`approx_count_distinct`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +HyperLogLogPlusPlus,S,`approx_count_distinct`,None,reduction,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Last,S,`last_value`; `last`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Last,S,`last_value`; `last`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Last,S,`last_value`; `last`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Last,S,`last_value`; `last`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Last,S,`last_value`; `last`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Last,S,`last_value`; `last`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Max,S,`max`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Max,S,`max`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA 
+Max,S,`max`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Max,S,`max`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Max,S,`max`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Max,S,`max`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +MaxBy,S,`max_by`,None,aggregation,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +MaxBy,S,`max_by`,None,aggregation,ordering,S,S,S,S,S,NS,NS,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +MaxBy,S,`max_by`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +MaxBy,S,`max_by`,None,reduction,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +MaxBy,S,`max_by`,None,reduction,ordering,S,S,S,S,S,NS,NS,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +MaxBy,S,`max_by`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Min,S,`min`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Min,S,`min`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Min,S,`min`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Min,S,`min`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +Min,S,`min`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +Min,S,`min`,None,window,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +MinBy,S,`min_by`,None,aggregation,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +MinBy,S,`min_by`,None,aggregation,ordering,S,S,S,S,S,NS,NS,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +MinBy,S,`min_by`,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +MinBy,S,`min_by`,None,reduction,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +MinBy,S,`min_by`,None,reduction,ordering,S,S,S,S,S,NS,NS,S,PS,S,S,S,NS,NS,PS,NA,PS,NS,NA,NA +MinBy,S,`min_by`,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS 
+Percentile,S,`percentile`,None,aggregation,input,NA,S,S,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,aggregation,percentage,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,aggregation,frequency,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,reduction,input,NA,S,S,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,reduction,percentage,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,reduction,frequency,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +Percentile,S,`percentile`,None,reduction,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA +PivotFirst,S, ,None,aggregation,pivotColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +PivotFirst,S, ,None,aggregation,valueColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +PivotFirst,S, ,None,aggregation,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,NS,NS,NS,NS +PivotFirst,S, ,None,reduction,pivotColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +PivotFirst,S, ,None,reduction,valueColumn,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS +PivotFirst,S, ,None,reduction,result,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,NS,NS,NS,NS +StddevPop,S,`stddev_pop`,None,reduction,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevPop,S,`stddev_pop`,None,reduction,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevPop,S,`stddev_pop`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevPop,S,`stddev_pop`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevPop,S,`stddev_pop`,None,window,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+StddevPop,S,`stddev_pop`,None,window,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,reduction,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,reduction,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,window,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StddevSamp,S,`std`; `stddev_samp`; `stddev`,None,window,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,aggregation,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,aggregation,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,reduction,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,reduction,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,window,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sum,S,`sum`,None,window,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +VariancePop,S,`var_pop`,None,reduction,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VariancePop,S,`var_pop`,None,reduction,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VariancePop,S,`var_pop`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VariancePop,S,`var_pop`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VariancePop,S,`var_pop`,None,window,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+VariancePop,S,`var_pop`,None,window,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,reduction,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,reduction,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,aggregation,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,aggregation,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,window,input,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +VarianceSamp,S,`var_samp`; `variance`,None,window,result,NA,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Invoke,S, ,The supported types are not deterministic since it's a dynamic expression,project,result,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS +StaticInvoke,S, ,The supported types are not deterministic since it's a dynamic expression,project,result,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS +NormalizeNaNAndZero,S, ,None,project,input,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +NormalizeNaNAndZero,S, ,None,project,result,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Discard,S, ,None,project,condition,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Discard,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Keep,S, ,None,project,condition,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Keep,S, ,None,project,outputs,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Keep,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Split,S, ,None,project,condition,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Split,S, ,None,project,outputs,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +Split,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S +InSubqueryExec,S, 
,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +InSubqueryExec,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +ScalarSubquery,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +HiveGenericUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS +HiveGenericUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS +HiveSimpleUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS +HiveSimpleUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS From 97721ec013f15b33c4c3fb268869629512481cd6 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Mon, 12 Jan 2026 10:45:28 +0800 Subject: [PATCH 17/59] Format code Signed-off-by: Chong Gao --- .../src/main/scala/com/nvidia/spark/rapids/RapidsMeta.scala | 3 +-- .../sql/rapids/execution/GpuBroadcastExchangeExec.scala | 4 ++-- .../scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala | 1 + .../nvidia/spark/rapids/shims/WindowInPandasExecShims.scala | 6 ++++-- .../apache/spark/sql/rapids/shims/FileStreamSinkShims.scala | 5 ++++- .../scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala | 1 + .../scala/com/nvidia/spark/rapids/shims/TryModeShim.scala | 2 +- .../nvidia/spark/rapids/shims/WindowInPandasExecShims.scala | 1 + .../apache/spark/sql/rapids/shims/FileStreamSinkShims.scala | 5 ++++- 9 files changed, 19 insertions(+), 9 deletions(-) diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsMeta.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsMeta.scala index 4390c1874b4..6ae4c8bc7f7 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsMeta.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsMeta.scala @@ -22,7 +22,7 @@ import scala.collection.mutable import com.nvidia.spark.rapids.GpuTypedImperativeSupportedAggregateExecMeta.{preRowToColProjection, readBufferConverter} import 
com.nvidia.spark.rapids.RapidsMeta.noNeedToReplaceReason -import com.nvidia.spark.rapids.shims.{DistributionUtil, SparkShimImpl} +import com.nvidia.spark.rapids.shims.{AggregateInPandasExecShims, DistributionUtil, SparkShimImpl} import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, BinaryExpression, Cast, ComplexTypeMergingExpression, Expression, QuaternaryExpression, RuntimeReplaceable, String2TrimExpression, TernaryExpression, TimeZoneAwareExpression, UnaryExpression, UTCTimestamp, WindowExpression, WindowFunction} import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, AggregateFunction, ImperativeAggregate, TypedImperativeAggregate} @@ -35,7 +35,6 @@ import org.apache.spark.sql.execution.aggregate.BaseAggregateExec import org.apache.spark.sql.execution.command.{DataWritingCommand, RunnableCommand} import org.apache.spark.sql.execution.exchange.ShuffleExchangeExec import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, BroadcastNestedLoopJoinExec} -import com.nvidia.spark.rapids.shims.AggregateInPandasExecShims import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.rapids.TimeZoneDB import org.apache.spark.sql.rapids.aggregate.{CpuToGpuAggregateBufferConverter, GpuToCpuAggregateBufferConverter} diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala index b3275d2c9c2..f3c03253c93 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala @@ -33,7 +33,8 @@ import com.nvidia.spark.rapids.GpuMetric._ import com.nvidia.spark.rapids.RapidsPluginImplicits._ import com.nvidia.spark.rapids.lore.{GpuLoreDumpRDD, SimpleRDD} import com.nvidia.spark.rapids.lore.GpuLore.LORE_DUMP_RDD_TAG -import 
com.nvidia.spark.rapids.shims.{ShimBroadcastExchangeLike, ShimUnaryExecNode, SparkShimImpl} +import com.nvidia.spark.rapids.shims.{BroadcastExchangeShims, ShimBroadcastExchangeLike, ShimUnaryExecNode, SparkShimImpl} +import com.nvidia.spark.rapids.shims.BroadcastExchangeShims.MAX_BROADCAST_TABLE_BYTES import org.apache.spark.SparkException import org.apache.spark.broadcast.Broadcast @@ -46,7 +47,6 @@ import org.apache.spark.sql.catalyst.plans.logical.Statistics import org.apache.spark.sql.catalyst.plans.physical.{BroadcastMode, BroadcastPartitioning, Partitioning} import org.apache.spark.sql.execution.{SparkPlan, SQLExecution} import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, Exchange} -import com.nvidia.spark.rapids.shims.BroadcastExchangeShims.MAX_BROADCAST_TABLE_BYTES import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, BroadcastNestedLoopJoinExec} import org.apache.spark.sql.execution.metric.SQLMetrics import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf} diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala index 5fa548331dc..2c3ab7ab3fa 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala @@ -25,6 +25,7 @@ spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims import com.nvidia.spark.rapids._ + import org.apache.spark.sql.catalyst.expressions.{Expression, TimeAdd} import org.apache.spark.sql.rapids.shims.GpuTimeAdd import org.apache.spark.unsafe.types.CalendarInterval diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala index f0694b88950..92cf2cd9b92 100644 --- 
a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala @@ -51,13 +51,14 @@ spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims import com.nvidia.spark.rapids._ + import org.apache.spark.sql.catalyst.expressions.{Expression, NamedExpression} import org.apache.spark.sql.execution.SparkPlan import org.apache.spark.sql.execution.python.WindowInPandasExec import org.apache.spark.sql.rapids.execution.python.GpuWindowInPandasExecMetaBase /** - * Exec rules for WindowInPandasExec (exists in Spark versions before the rename to ArrowWindowPythonExec). + * Exec rules for WindowInPandasExec (Spark versions before ArrowWindowPythonExec rename). */ object WindowInPandasExecShims { val execs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]] = { @@ -71,7 +72,8 @@ object WindowInPandasExecShims { TypeSig.all), (winPy, conf, p, r) => new GpuWindowInPandasExecMetaBase(winPy, conf, p, r) { override val windowExpressions: Seq[BaseExprMeta[NamedExpression]] = - SparkShimImpl.getWindowExpressions(winPy).map(GpuOverrides.wrapExpr(_, this.conf, Some(this))) + SparkShimImpl.getWindowExpressions(winPy).map( + GpuOverrides.wrapExpr(_, this.conf, Some(this))) override def convertToGpu(): GpuExec = { val windowExprGpu = windowExpressions.map(_.convertToGpu()) diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala index f713e55ac6f..238cc9cac2d 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala @@ -57,7 +57,10 @@ import org.apache.spark.sql.execution.streaming.{FileStreamSink, MetadataLogFile import 
org.apache.spark.sql.types.StructType object FileStreamSinkShims { - def hasMetadata(paths: Seq[String], hadoopConf: Configuration, sqlConf: org.apache.spark.sql.internal.SQLConf): Boolean = { + def hasMetadata( + paths: Seq[String], + hadoopConf: Configuration, + sqlConf: org.apache.spark.sql.internal.SQLConf): Boolean = { FileStreamSink.hasMetadata(paths, hadoopConf, sqlConf) } diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala index b04d7e43bbe..602215f0e0d 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala @@ -20,6 +20,7 @@ spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims import com.nvidia.spark.rapids._ + import org.apache.spark.sql.catalyst.expressions.Expression /** diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala index 4ed9d1a7441..5baab0e2766 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala @@ -23,7 +23,7 @@ import org.apache.spark.sql.catalyst.expressions.{Add, Divide, EvalMode, Express import org.apache.spark.sql.catalyst.expressions.aggregate.{Average, Sum} /** - * Spark 4.1.0 version where evalMode was changed to evalContext.evalMode for arithmetic expressions. + * Spark 4.1.0 version where evalMode changed to evalContext.evalMode for arithmetic. 
* See: https://github.com/apache/spark/commit/a96e9ca81518bff31b0089d459fe78804ca1aa38 */ object TryModeShim { diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala index f6ee5c738ae..c3dbf496826 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala @@ -20,6 +20,7 @@ spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims import com.nvidia.spark.rapids._ + import org.apache.spark.sql.execution.SparkPlan /** diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala index fdfefc50ff3..4577fd7a46a 100644 --- a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala +++ b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala @@ -27,7 +27,10 @@ import org.apache.spark.sql.execution.streaming.sinks.FileStreamSink import org.apache.spark.sql.types.StructType object FileStreamSinkShims { - def hasMetadata(paths: Seq[String], hadoopConf: Configuration, sqlConf: org.apache.spark.sql.internal.SQLConf): Boolean = { + def hasMetadata( + paths: Seq[String], + hadoopConf: Configuration, + sqlConf: org.apache.spark.sql.internal.SQLConf): Boolean = { FileStreamSink.hasMetadata(paths, hadoopConf, sqlConf) } From bf5c7e709480330a3008188fecdf31b889e6a4c9 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Mon, 12 Jan 2026 11:28:04 +0800 Subject: [PATCH 18/59] Fix one line Signed-off-by: Chong Gao --- .../scala/com/nvidia/spark/rapids/shims/TryModeShim.scala | 1 - 1 file changed, 1 deletion(-) diff --git 
a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala index 6eb41cc2e8e..f8b6901d152 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala @@ -34,7 +34,6 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims From 4dab1d8f7883eed931ffe810dd0674ae58df3484 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Mon, 12 Jan 2026 14:57:02 +0800 Subject: [PATCH 19/59] Fix unit test cases Signed-off-by: Chong Gao --- .../spark/rapids/GpuParquetFileFormat.scala | 4 +- .../sql/rapids/GpuFileFormatDataWriter.scala | 17 ++-- .../rapids/shims/ParquetVariantShims.scala | 68 +++++++++++++++ .../shims/FileCommitProtocolShims.scala | 85 +++++++++++++++++++ .../rapids/shims/ParquetVariantShims.scala | 37 ++++++++ .../shims/FileCommitProtocolShims.scala | 48 +++++++++++ 6 files changed, 248 insertions(+), 11 deletions(-) create mode 100644 sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala create mode 100644 sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala create mode 100644 sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuParquetFileFormat.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuParquetFileFormat.scala index 82675f3b448..bc67feb0f60 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuParquetFileFormat.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/GpuParquetFileFormat.scala @@ -1,5 +1,5 @@ /* 
- * Copyright (c) 2019-2025, NVIDIA CORPORATION. + * Copyright (c) 2019-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -251,6 +251,8 @@ class GpuParquetFileFormat extends ColumnarFileFormat with Logging { ParquetTimestampNTZShims.setupTimestampNTZConfig(conf, sqlConf) + ParquetVariantShims.setupParquetVariantConfig(conf, sqlConf) + // Sets compression scheme conf.set(ParquetOutputFormat.COMPRESSION, parquetOptions.compressionCodecClassName) diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuFileFormatDataWriter.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuFileFormatDataWriter.scala index b51450c3ee0..f003f65c20e 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuFileFormatDataWriter.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuFileFormatDataWriter.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019-2025, NVIDIA CORPORATION. + * Copyright (c) 2019-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,6 +33,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext import org.apache.spark.internal.Logging import org.apache.spark.internal.io.FileCommitProtocol +import org.apache.spark.sql.rapids.shims.FileCommitProtocolShims import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.catalog.{BucketSpec, ExternalCatalogUtils} import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec @@ -256,16 +257,14 @@ class GpuSingleDirectoryDataWriter( // Initialize currentWriter and statsTrackers newOutputWriter() - @scala.annotation.nowarn( - "msg=method newTaskTempFile in class FileCommitProtocol is deprecated" - ) private def newOutputWriter(): Unit = { currentWriterStatus.recordsInFile = 0 val fileCounter = currentWriterStatus.fileCounter releaseResources() val ext = description.outputWriterFactory.getFileExtension(taskAttemptContext) - val currentPath = committer.newTaskTempFile( + val currentPath = FileCommitProtocolShims.newTaskTempFile( + committer, taskAttemptContext, None, f"-c$fileCounter%03d" + ext) @@ -591,9 +590,6 @@ class GpuDynamicPartitionDataSingleWriter( * currently does not support `bucketId`, it's always None * @param fileCounter integer indicating the number of files to be written to `partDir` */ - @scala.annotation.nowarn( - "msg=method newTaskTempFile.* in class FileCommitProtocol is deprecated" - ) def newWriter(partValues: Option[InternalRow], bucketId: Option[Int], fileCounter: Int): ColumnarOutputWriter = { val partDir = partValues.map(getPartitionPath(_)) @@ -610,9 +606,10 @@ class GpuDynamicPartitionDataSingleWriter( } val currentPath = if (customPath.isDefined) { - committer.newTaskTempFileAbsPath(taskAttemptContext, customPath.get, ext) + FileCommitProtocolShims.newTaskTempFileAbsPath( + committer, taskAttemptContext, customPath.get, ext) } else { - committer.newTaskTempFile(taskAttemptContext, partDir, ext) + FileCommitProtocolShims.newTaskTempFile(committer, taskAttemptContext, partDir, 
ext) } val debugOutputPath = debugOutputBasePath.map { base => diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala new file mode 100644 index 00000000000..f66d93e379e --- /dev/null +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "400db173"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import org.apache.hadoop.conf.Configuration + +import org.apache.spark.sql.internal.SQLConf + +/** + * Shim for Parquet variant-related configurations. 
+ * In Spark <= 4.0.x, PARQUET_ANNOTATE_VARIANT_LOGICAL_TYPE doesn't exist. + * In Spark 4.1.0+, we need to set this configuration for ParquetWriteSupport. + */ +object ParquetVariantShims { + def setupParquetVariantConfig(conf: Configuration, sqlConf: SQLConf): Unit = { + // No-op for Spark versions before 4.1.0 + // PARQUET_ANNOTATE_VARIANT_LOGICAL_TYPE doesn't exist + } +} diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala new file mode 100644 index 00000000000..ea4243d1d8f --- /dev/null +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "400db173"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.shims + +import org.apache.hadoop.mapreduce.TaskAttemptContext + +import org.apache.spark.internal.io.FileCommitProtocol + +/** + * Shim for FileCommitProtocol.newTaskTempFile API. + * In Spark <= 4.0.x, we use the deprecated (ext: String) signature. + * In Spark 4.1.0+, we use the new (spec: FileNameSpec) signature. 
+ */ +object FileCommitProtocolShims { + @scala.annotation.nowarn( + "msg=method newTaskTempFile in class FileCommitProtocol is deprecated" + ) + def newTaskTempFile( + committer: FileCommitProtocol, + taskContext: TaskAttemptContext, + dir: Option[String], + ext: String): String = { + committer.newTaskTempFile(taskContext, dir, ext) + } + + @scala.annotation.nowarn( + "msg=method newTaskTempFileAbsPath in class FileCommitProtocol is deprecated" + ) + def newTaskTempFileAbsPath( + committer: FileCommitProtocol, + taskContext: TaskAttemptContext, + absoluteDir: String, + ext: String): String = { + committer.newTaskTempFileAbsPath(taskContext, absoluteDir, ext) + } +} diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala new file mode 100644 index 00000000000..04552935fd7 --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import org.apache.hadoop.conf.Configuration + +import org.apache.spark.sql.internal.SQLConf + +/** + * Shim for Parquet variant-related configurations in Spark 4.1.0+. 
+ * Sets PARQUET_ANNOTATE_VARIANT_LOGICAL_TYPE which is required by ParquetWriteSupport. + */ +object ParquetVariantShims { + def setupParquetVariantConfig(conf: Configuration, sqlConf: SQLConf): Unit = { + // Set the variant annotation config that SparkToParquetSchemaConverter requires + conf.set( + SQLConf.PARQUET_ANNOTATE_VARIANT_LOGICAL_TYPE.key, + sqlConf.parquetAnnotateVariantLogicalType.toString) + } +} diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala new file mode 100644 index 00000000000..9402b7736fa --- /dev/null +++ b/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "410"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.shims + +import org.apache.hadoop.mapreduce.TaskAttemptContext + +import org.apache.spark.internal.io.{FileCommitProtocol, FileNameSpec} + +/** + * Shim for FileCommitProtocol.newTaskTempFile API in Spark 4.1.0+. + * Uses the new (spec: FileNameSpec) signature instead of deprecated (ext: String). 
+ */ +object FileCommitProtocolShims { + def newTaskTempFile( + committer: FileCommitProtocol, + taskContext: TaskAttemptContext, + dir: Option[String], + ext: String): String = { + // FileNameSpec(prefix, suffix) - we put ext as suffix with empty prefix + committer.newTaskTempFile(taskContext, dir, FileNameSpec("", ext)) + } + + def newTaskTempFileAbsPath( + committer: FileCommitProtocol, + taskContext: TaskAttemptContext, + absoluteDir: String, + ext: String): String = { + // FileNameSpec(prefix, suffix) - we put ext as suffix with empty prefix + committer.newTaskTempFileAbsPath(taskContext, absoluteDir, FileNameSpec("", ext)) + } +} From de7311faf4fcdc300b638144d010330a08d3acb2 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Mon, 12 Jan 2026 15:14:45 +0800 Subject: [PATCH 20/59] Fix unit test cases Signed-off-by: Chong Gao --- .../scala/com/nvidia/spark/rapids/ParquetWriterSuite.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/src/test/scala/com/nvidia/spark/rapids/ParquetWriterSuite.scala b/tests/src/test/scala/com/nvidia/spark/rapids/ParquetWriterSuite.scala index d382c84b75a..10ad51d7148 100644 --- a/tests/src/test/scala/com/nvidia/spark/rapids/ParquetWriterSuite.scala +++ b/tests/src/test/scala/com/nvidia/spark/rapids/ParquetWriterSuite.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019-2025, NVIDIA CORPORATION. + * Copyright (c) 2019-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -310,7 +310,10 @@ class ParquetWriterSuite extends SparkQueryCompareTestSuite { spark.sql("DROP TABLE IF EXISTS t") spark.sql("DROP TABLE IF EXISTS tempmetricstable") } - }, new SparkConf().set("spark.sql.sources.commitProtocolClass", slowCommitClass)) + }, new SparkConf() + .set("spark.sql.sources.commitProtocolClass", slowCommitClass) + // OneRowRelationExec cannot run on GPU - it's a CPU-only leaf node for single-row SELECT + .set(RapidsConf.TEST_ALLOWED_NONGPU.key, "OneRowRelationExec")) } } From c4d6dee598412aca5800f277ed3422a9c40e88ae Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Tue, 13 Jan 2026 15:03:27 +0800 Subject: [PATCH 21/59] Fix building errors for Scala 2.12 Signed-off-by: Chong Gao --- .../advanced_configs.md | 22 +- docs/supported_ops.md | 2704 ++++++----------- .../execution/GpuBroadcastExchangeExec.scala | 2 +- .../python/GpuWindowInPandasExecBase.scala | 1 - .../shims/AggregateInPandasExecShims.scala | 4 +- .../rapids/shims/Spark320PlusShims.scala | 20 +- .../spark/rapids/shims/TimeAddShims.scala | 42 +- .../shims/WindowInPandasExecShims.scala | 2 +- .../rapids/shims/WindowInPandasShims.scala | 2 +- tools/generated_files/321/supportedExprs.csv | 2 +- tools/generated_files/operatorsScore.csv | 10 - tools/generated_files/supportedDataSource.csv | 10 +- tools/generated_files/supportedExecs.csv | 27 +- tools/generated_files/supportedExprs.csv | 127 +- 14 files changed, 1101 insertions(+), 1874 deletions(-) diff --git a/docs/additional-functionality/advanced_configs.md b/docs/additional-functionality/advanced_configs.md index 000f9c33772..96a843f0b16 100644 --- a/docs/additional-functionality/advanced_configs.md +++ b/docs/additional-functionality/advanced_configs.md @@ -221,12 +221,11 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.BitwiseNot|`~`|Returns the bitwise NOT of the operands|true|None| spark.rapids.sql.expression.BitwiseOr|`\|`|Returns the bitwise OR of the operands|true|None| 
spark.rapids.sql.expression.BitwiseXor|`^`|Returns the bitwise XOR of the operands|true|None| -spark.rapids.sql.expression.BloomFilterMightContain| |Bloom filter query|true|None| spark.rapids.sql.expression.BoundReference| |Reference to a bound variable|true|None| spark.rapids.sql.expression.CaseWhen|`when`|CASE WHEN expression|true|None| spark.rapids.sql.expression.Cast|`bigint`, `binary`, `boolean`, `cast`, `date`, `decimal`, `double`, `float`, `int`, `smallint`, `string`, `timestamp`, `tinyint`|Convert a column of one type of data into another type|true|None| spark.rapids.sql.expression.Cbrt|`cbrt`|Cube root|true|None| -spark.rapids.sql.expression.Ceil| |Ceiling of a number|true|None| +spark.rapids.sql.expression.Ceil|`ceil`, `ceiling`|Ceiling of a number|true|None| spark.rapids.sql.expression.CheckOverflow| |CheckOverflow after arithmetic operations between DecimalType data|true|None| spark.rapids.sql.expression.Coalesce|`coalesce`|Returns the first non-null argument if exists. Otherwise, null|true|None| spark.rapids.sql.expression.Concat|`concat`|List/String concatenate|true|None| @@ -250,8 +249,6 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.DayOfYear|`dayofyear`|Returns the day of the year from a date or timestamp|true|None| spark.rapids.sql.expression.DenseRank|`dense_rank`|Window function that returns the dense rank value within the aggregation window|true|None| spark.rapids.sql.expression.Divide|`/`|Division|true|None| -spark.rapids.sql.expression.DivideDTInterval| |Day-time interval * operator|true|None| -spark.rapids.sql.expression.DivideYMInterval| |Year-month interval * operator|true|None| spark.rapids.sql.expression.DynamicPruningExpression| |Dynamic pruning expression marker|true|None| spark.rapids.sql.expression.ElementAt|`element_at`|Returns element of array at given(1-based) index in value if column is array. 
Returns value for the given key in value if column is map.|true|None| spark.rapids.sql.expression.EndsWith| |Ends with|true|None| @@ -261,7 +258,7 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.Explode|`explode_outer`, `explode`|Given an input array produces a sequence of rows for each value in the array|true|None| spark.rapids.sql.expression.Expm1|`expm1`|Euler's number e raised to a power minus 1|true|None| spark.rapids.sql.expression.Flatten|`flatten`|Creates a single array from an array of arrays|true|None| -spark.rapids.sql.expression.Floor| |Floor of a number|true|None| +spark.rapids.sql.expression.Floor|`floor`|Floor of a number|true|None| spark.rapids.sql.expression.FormatNumber|`format_number`|Formats the number x like '#,###,###.##', rounded to d decimal places.|true|None| spark.rapids.sql.expression.FromUTCTimestamp|`from_utc_timestamp`|Render the input UTC timestamp in the input timezone|true|None| spark.rapids.sql.expression.FromUnixTime|`from_unixtime`|Get the string from a unix timestamp|true|None| @@ -325,8 +322,6 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.Month|`month`|Returns the month from a date or timestamp|true|None| spark.rapids.sql.expression.MonthsBetween|`months_between`|If `timestamp1` is later than `timestamp2`, then the result is positive. If `timestamp1` and `timestamp2` are on the same day of month, or both are the last day of month, time of day will be ignored. 
Otherwise, the difference is calculated based on 31 days per month, and rounded to 8 digits unless roundOff=false.|true|None| spark.rapids.sql.expression.Multiply|`*`|Multiplication|true|None| -spark.rapids.sql.expression.MultiplyDTInterval| |Day-time interval * number|true|None| -spark.rapids.sql.expression.MultiplyYMInterval| |Year-month interval * number|true|None| spark.rapids.sql.expression.Murmur3Hash|`hash`|Murmur3 hash operator|true|None| spark.rapids.sql.expression.NaNvl|`nanvl`|Evaluates to `left` iff left is not NaN, `right` otherwise|true|None| spark.rapids.sql.expression.NamedLambdaVariable| |A parameter to a higher order SQL function|true|None| @@ -355,8 +350,6 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.Reverse|`reverse`|Returns a reversed string or an array with reverse order of elements|true|None| spark.rapids.sql.expression.Rint|`rint`|Rounds up a double value to the nearest double equal to an integer|true|None| spark.rapids.sql.expression.Round|`round`|Round an expression to d decimal places using HALF_UP rounding mode|true|None| -spark.rapids.sql.expression.RoundCeil| |Computes the ceiling of the given expression to d decimal places|true|None| -spark.rapids.sql.expression.RoundFloor| |Computes the floor of the given expression to d decimal places|true|None| spark.rapids.sql.expression.RowNumber|`row_number`|Window function that returns the index for the row within the aggregation window|true|None| spark.rapids.sql.expression.ScalaUDF| |User Defined Function, the UDF can choose to implement a RAPIDS accelerated interface to get better performance.|true|None| spark.rapids.sql.expression.Second|`second`|Returns the second component of the string/timestamp|true|None| @@ -379,9 +372,9 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.Stack|`stack`|Separates expr1, ..., exprk into n rows.|true|None| spark.rapids.sql.expression.StartsWith| |Starts with|true|None| 
spark.rapids.sql.expression.StringInstr|`instr`|Instr string operator|true|None| -spark.rapids.sql.expression.StringLPad| |Pad a string on the left|true|None| +spark.rapids.sql.expression.StringLPad|`lpad`|Pad a string on the left|true|None| spark.rapids.sql.expression.StringLocate|`locate`, `position`|Substring search operator|true|None| -spark.rapids.sql.expression.StringRPad| |Pad a string on the right|true|None| +spark.rapids.sql.expression.StringRPad|`rpad`|Pad a string on the right|true|None| spark.rapids.sql.expression.StringRepeat|`repeat`|StringRepeat operator that repeats the given strings with numbers of times given by repeatTimes|true|None| spark.rapids.sql.expression.StringReplace|`replace`|StringReplace operator|true|None| spark.rapids.sql.expression.StringSplit|`split`|Splits `str` around occurrences that match `regex`|true|None| @@ -424,8 +417,7 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.BitAndAgg|`bit_and`|Returns the bitwise AND of all non-null input values|true|None| spark.rapids.sql.expression.BitOrAgg|`bit_or`|Returns the bitwise OR of all non-null input values|true|None| spark.rapids.sql.expression.BitXorAgg|`bit_xor`|Returns the bitwise XOR of all non-null input values|true|None| -spark.rapids.sql.expression.BloomFilterAggregate| |Bloom filter build|true|None| -spark.rapids.sql.expression.CollectList|`array_agg`, `collect_list`|Collect a list of non-unique elements, not supported in reduction|true|None| +spark.rapids.sql.expression.CollectList|`collect_list`|Collect a list of non-unique elements, not supported in reduction|true|None| spark.rapids.sql.expression.CollectSet|`collect_set`|Collect a set of unique elements, not supported in reduction|true|None| spark.rapids.sql.expression.Count|`count`|Count aggregate operator|true|None| spark.rapids.sql.expression.First|`first_value`, `first`|first aggregate operator|true|None| @@ -444,7 +436,6 @@ Name | SQL Function(s) | Description | Default 
Value | Notes spark.rapids.sql.expression.VarianceSamp|`var_samp`, `variance`|Aggregation computing sample variance|true|None| spark.rapids.sql.expression.StaticInvoke| |StaticInvoke|true|The supported types are not deterministic since it's a dynamic expression| spark.rapids.sql.expression.NormalizeNaNAndZero| |Normalize NaN and zero|true|None| -spark.rapids.sql.expression.InSubqueryExec| |Evaluates to true if values are in a subquery's result set|true|None| spark.rapids.sql.expression.ScalarSubquery| |Subquery that will return only one row and one column|true|None| spark.rapids.sql.expression.HiveGenericUDF| |Hive Generic UDF, the UDF can choose to implement a RAPIDS accelerated interface to get better performance|true|None| spark.rapids.sql.expression.HiveSimpleUDF| |Hive UDF, the UDF can choose to implement a RAPIDS accelerated interface to get better performance|true|None| @@ -487,12 +478,11 @@ Name | Description | Default Value | Notes spark.rapids.sql.exec.CartesianProductExec|Implementation of join using brute force|true|None| spark.rapids.sql.exec.ShuffledHashJoinExec|Implementation of join using hashed shuffled data|true|None| spark.rapids.sql.exec.SortMergeJoinExec|Sort merge join, replacing with shuffled hash join|true|None| -spark.rapids.sql.exec.AggregateInPandasExec|The backend for an Aggregation Pandas UDF, this accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|true|None| +spark.rapids.sql.exec.AggregateInPandasExec|The backend for an Aggregation Pandas UDF. This accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|true|None| spark.rapids.sql.exec.ArrowEvalPythonExec|The backend of the Scalar Pandas UDFs. Accelerates the data transfer between the Java process and the Python process. 
It also supports scheduling GPU resources for the Python process when enabled|true|None| spark.rapids.sql.exec.FlatMapCoGroupsInPandasExec|The backend for CoGrouped Aggregation Pandas UDF. Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|false|This is disabled by default because Performance is not ideal with many small groups| spark.rapids.sql.exec.FlatMapGroupsInPandasExec|The backend for Flat Map Groups Pandas UDF, Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|true|None| spark.rapids.sql.exec.MapInPandasExec|The backend for Map Pandas Iterator UDF. Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|true|None| -spark.rapids.sql.exec.PythonMapInArrowExec|The backend for Map Arrow Iterator UDF. Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|true|None| spark.rapids.sql.exec.WindowInPandasExec|The backend for Window Aggregation Pandas UDF, Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled. For now it only supports row based window frame.|false|This is disabled by default because it only supports row based frame for now| spark.rapids.sql.exec.WindowExec|Window-operator backend|true|None| spark.rapids.sql.exec.HiveTableScanExec|Scan Exec to read Hive delimited text tables|true|None| diff --git a/docs/supported_ops.md b/docs/supported_ops.md index d3118010574..16deb3b8458 100644 --- a/docs/supported_ops.md +++ b/docs/supported_ops.md @@ -9,7 +9,7 @@ support all data types. 
The RAPIDS Accelerator for Apache Spark has further restrictions on what types are supported for processing. This tries to document what operations are supported and what data types each operation supports. Because Apache Spark is under active development too and this document was generated -against version 3.3.0 of Spark. Most of this should still +against version 3.2.1 of Spark. Most of this should still apply to other versions of Spark, but there may be slight changes. # General limitations @@ -150,12 +150,12 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S CollectLimitExec @@ -228,12 +228,12 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S FilterExec @@ -254,12 +254,12 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S GenerateExec @@ -358,12 +358,12 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S RangeExec @@ -410,12 +410,12 @@ Accelerator supports are described below. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S SortExec @@ -670,12 +670,12 @@ Accelerator supports are described below. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S DataWritingCommandExec @@ -696,12 +696,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S ExecutedCommandExec @@ -748,12 +748,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S AtomicCreateTableAsSelectExec @@ -774,12 +774,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S AtomicReplaceTableAsSelectExec @@ -800,12 +800,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S BatchScanExec @@ -826,12 +826,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S OverwriteByExpressionExecV1 @@ -852,12 +852,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S BroadcastExchangeExec @@ -904,12 +904,12 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
Round-robin partitioning is not supported if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
Round-robin partitioning is not supported for nested structs if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
Round-robin partitioning is not supported if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
Round-robin partitioning is not supported for nested structs if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S BroadcastHashJoinExec @@ -1299,7 +1299,7 @@ Accelerator supports are described below. AggregateInPandasExec -The backend for an Aggregation Pandas UDF, this accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled. +The backend for an ideAggregateInPandasExec Aggregation Pandas UDF. This accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled. None Input/Output S @@ -1454,32 +1454,6 @@ Accelerator supports are described below. NS -PythonMapInArrowExec -The backend for Map Arrow Iterator UDF. Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled. -None -Input/Output -S -S -S -S -S -S -S -S -PS
UTC is only supported TZ for TIMESTAMP
-S -NS -NS -NS -NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
-NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
-NS -NS -NS - - WindowInPandasExec The backend for Window Aggregation Pandas UDF, Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled. For now it only supports row based window frame. This is disabled by default because it only supports row based frame for now @@ -1670,8 +1644,8 @@ are limited. -S -S + + result @@ -1693,8 +1667,8 @@ are limited. -S -S + + AST @@ -1717,8 +1691,8 @@ are limited. -NS -NS + + result @@ -1740,8 +1714,8 @@ are limited. -NS -NS + + Acos @@ -1964,8 +1938,8 @@ are limited. -S -S +NS +NS rhs @@ -1987,8 +1961,8 @@ are limited. -S -S +NS +NS result @@ -2010,8 +1984,8 @@ are limited. -S -S +NS +NS AST @@ -2132,12 +2106,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S result @@ -2155,12 +2129,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S AST @@ -2183,8 +2157,8 @@ are limited. NS NS NS -S -S +NS +NS result @@ -2206,8 +2180,8 @@ are limited. NS NS NS -S -S +NS +NS And @@ -4095,12 +4069,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S AST @@ -4123,8 +4097,8 @@ are limited. NS NS NS -S -S +NS +NS BRound @@ -4940,114 +4914,12 @@ are limited. -BloomFilterMightContain - -Bloom filter query -None -project -lhs - - - - - - - - - - - -S -S - - - - - - - - - -rhs - - - - -S - - - - - - -S - - - - - - - - - - -result -S - - - - - - - - - - - - - - - - - - - - - -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - -BoundReference - -Reference to a bound variable -None -project -result +BoundReference + +Reference to a bound variable +None +project +result S S S @@ -5062,12 +4934,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S AST @@ -5090,8 +4962,8 @@ are limited. NS NS NS -S -S +NS +NS CaseWhen @@ -5168,6 +5040,34 @@ are limited. NS +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Cbrt `cbrt` Cube root @@ -5267,7 +5167,7 @@ are limited. Ceil - +`ceil`, `ceiling` Ceiling of a number None project @@ -5388,12 +5288,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S result @@ -5411,40 +5311,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S - - -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH Concat @@ -5623,6 +5495,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Conv `conv` Convert string representing a number from one base to another @@ -5818,34 +5718,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - Cosh `cosh` Hyperbolic cosine @@ -6042,6 +5914,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + CreateArray `array` Returns an array with the given elements @@ -6218,34 +6118,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - CurrentRow$ Special boundary for a window frame, indicating stopping at the current row @@ -6496,6 +6368,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + DateFormatClass `date_format` Converts timestamp to a value of string in the format specified by the date format @@ -6644,34 +6544,6 @@ are limited. 
-Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - DayOfMonth `day`, `dayofmonth` Returns the day of the month from a date or timestamp @@ -6950,154 +6822,6 @@ are limited. -DivideDTInterval - -Day-time interval * operator -None -project -lhs - - - - - - - - - - - - - - - - - - -S - - - -rhs - -S -S -S -S -S -S - - - -NS - - - - - - - - - - - -result - - - - - - - - - - - - - - - - - - -S - - - -DivideYMInterval - -Year-month interval * operator -None -project -lhs - - - - - - - - - - - - - - - - - - - -S - - -rhs - -S -S -S -S -S -S - - - -NS - - - - - - - - - - - -result - - - - - - - - - - - - - - - - - - - -S - - Expression SQL Functions(s) Description @@ -7870,7 +7594,7 @@ are limited. Floor - +`floor` Floor of a number None project @@ -9182,12 +8906,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S falseValue @@ -9205,12 +8929,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S result @@ -9228,12 +8952,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS +NS NS -S -S In @@ -9669,11 +9393,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS NS -S NS @@ -9795,11 +9519,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+NS NS -S NS @@ -12507,154 +12231,6 @@ are limited. -MultiplyDTInterval - -Day-time interval * number -None -project -lhs - - - - - - - - - - - - - - - - - - -S - - - -rhs - -S -S -S -S -S -S - - - -NS - - - - - - - - - - - -result - - - - - - - - - - - - - - - - - - -S - - - -MultiplyYMInterval - -Year-month interval * number -None -project -lhs - - - - - - - - - - - - - - - - - - - -S - - -rhs - -S -S -S -S -S -S - - - -NS - - - - - - - - - - - -result - - - - - - - - - - - - - - - - - - - -S - - Murmur3Hash `hash` Murmur3 hash operator @@ -12780,34 +12356,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - NamedLambdaVariable A parameter to a higher order SQL function @@ -12934,6 +12482,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + NthValue `nth_value` nth window operator @@ -13203,34 +12779,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - ParseUrl `parse_url` Extracts a part from a URL @@ -13328,6 +12876,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + PercentRank `percent_rank` Window function that returns the percent rank value within the aggregation window @@ -13648,34 +13224,6 @@ are limited. 
-Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - PreciseTimestampConversion Expression used internally to convert the TimestampType to Long and back without losing precision, i.e. in microseconds. Used in time windowing @@ -13727,6 +13275,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + PromotePrecision PromotePrecision before arithmetic operations between DecimalType data @@ -14966,35 +14542,35 @@ are limited. -RoundCeil - -Computes the ceiling of the given expression to d decimal places -None -project -value - +RowNumber +`row_number` +Window function that returns the index for the row within the aggregation window +None +window +ordering S S S S -PS
result may round slightly differently
-PS
result may round slightly differently
- - - S - - - - - - - - - +S +S +S +PS
UTC is only supported TZ for TIMESTAMP
+S +S +S +NS +NS +NS +NS +NS +NS +NS +NS -scale +result @@ -15017,29 +14593,6 @@ are limited. -result - -S -S -S -S -S -S - - - -S - - - - - - - - - - - Expression SQL Functions(s) Description @@ -15068,131 +14621,6 @@ are limited. YEARMONTH -RoundFloor - -Computes the floor of the given expression to d decimal places -None -project -value - -S -S -S -S -PS
result may round slightly differently
-PS
result may round slightly differently
- - - -S - - - - - - - - - - - -scale - - - -S - - - - - - - - - - - - - - - - - - -result - -S -S -S -S -S -S - - - -S - - - - - - - - - - - -RowNumber -`row_number` -Window function that returns the index for the row within the aggregation window -None -window -ordering -S -S -S -S -S -S -S -S -PS
UTC is only supported TZ for TIMESTAMP
-S -S -S -NS -NS -NS -NS -NS -NS -NS -NS - - -result - - - -S - - - - - - - - - - - - - - - - - - ScalaUDF User Defined Function, the UDF can choose to implement a RAPIDS accelerated interface to get better performance. @@ -15443,34 +14871,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - Sha1 `sha1`, `sha` Sha1 hash operator @@ -15596,6 +14996,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + ShiftRight `shiftright` Bitwise shift right (>>) @@ -15875,50 +15303,22 @@ are limited. - - -S - - - - - - - - - - - - - - - -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH + + +S + + + + + + + + + + + + + Sinh @@ -16019,6 +15419,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Size `cardinality`, `size` The size of an array or a map @@ -16292,34 +15720,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - SparkPartitionID `spark_partition_id` Returns the current partition id @@ -16422,6 +15822,34 @@ are limited. 
NS +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Sqrt `sqrt` Square root @@ -16742,36 +16170,8 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - StringLPad - +`lpad` Pad a string on the left None project @@ -16867,6 +16267,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + StringLocate `locate`, `position` Substring search operator @@ -16965,7 +16393,7 @@ are limited. StringRPad - +`rpad` Pad a string on the right None project @@ -17135,34 +16563,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - StringReplace `replace` StringReplace operator @@ -17260,6 +16660,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + StringSplit `split` Splits `str` around occurrences that match `regex` @@ -17551,34 +16979,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - StringTrim `trim` StringTrim operator @@ -17653,6 +17053,34 @@ are limited. 
+Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + StringTrimLeft `ltrim` StringTrimLeft operator @@ -17949,34 +17377,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - SubstringIndex `substring_index` substring_index operator @@ -18074,6 +17474,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Subtract `-` Subtraction @@ -18098,8 +17526,8 @@ are limited. -S -S +NS +NS rhs @@ -18121,8 +17549,8 @@ are limited. -S -S +NS +NS result @@ -18144,8 +17572,8 @@ are limited. -S -S +NS +NS AST @@ -18414,34 +17842,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - TimeAdd Adds interval to timestamp @@ -18489,7 +17889,7 @@ are limited. -S + @@ -18516,6 +17916,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + ToDegrees `degrees` Converts radians to degrees @@ -18840,34 +18268,6 @@ are limited. 
-Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - TransformValues `transform_values` Transform values in a map using a transform function @@ -18942,6 +18342,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + TruncDate `trunc` Truncate the date to the unit specified by the given string format @@ -19114,8 +18542,8 @@ are limited. -S -S +NS +NS result @@ -19137,8 +18565,8 @@ are limited. -S -S +NS +NS AST @@ -19212,8 +18640,8 @@ are limited. -S -S +NS +NS result @@ -19235,8 +18663,8 @@ are limited. -S -S +NS +NS AST @@ -19259,8 +18687,8 @@ are limited. -S -S +NS +NS result @@ -19282,36 +18710,8 @@ are limited. -S -S - - -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH +NS +NS UnboundedFollowing$ @@ -19342,6 +18742,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + UnboundedPreceding$ Special boundary for a window frame, indicating all rows preceding the current row @@ -19699,34 +19127,6 @@ are limited. 
S -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - WindowSpecDefinition Specification of a window function, indicating the partitioning-expression, the row ordering, and the width of the window @@ -19801,6 +19201,34 @@ are limited. NS +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + XxHash64 `xxhash64` xxhash64 hash operator @@ -20117,34 +19545,6 @@ are limited. S -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - ApproximatePercentile `approx_percentile`, `percentile_approx` Approximate percentile @@ -20335,6 +19735,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Average `avg`, `mean` Average aggregate operator @@ -20578,34 +20006,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - BitOrAgg `bit_or` Returns the bitwise OR of all non-null input values @@ -20736,106 +20136,8 @@ are limited. 
S S -S -S - - - - - - - - - - - - - - - - - -reduction -input - -S -S -S -S - - - - - - - - - - - - - - - - - -result - -S -S -S -S - - - - - - - - - - - - - - - - - -BloomFilterAggregate - -Bloom filter build -None -reduction -child - - - - -S - - - - - - - - - - - - - - - - - -estimatedItems - - - - -S +S +S @@ -20853,12 +20155,13 @@ are limited. -numBits - - - +reduction +input S +S +S +S @@ -20878,6 +20181,10 @@ are limited. result +S +S +S +S @@ -20889,18 +20196,42 @@ are limited. -S - - - +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + CollectList -`array_agg`, `collect_list` +`collect_list` Collect a list of non-unique elements, not supported in reduction None aggregation @@ -21044,34 +20375,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - CollectSet `collect_set` Collect a set of unique elements, not supported in reduction @@ -21362,6 +20665,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + First `first_value`, `first` first aggregate operator @@ -21507,34 +20838,6 @@ are limited. NS -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - HyperLogLogPlusPlus `approx_count_distinct` Aggregation approximate count distinct @@ -21778,6 +21081,34 @@ are limited. 
NS +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Max `max` Max aggregate operator @@ -21923,34 +21254,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - MaxBy `max_by` MaxBy aggregate operator. It may produce different results than CPU when multiple rows in a group have same minimum value in the ordering column and different associated values in the value column. @@ -22240,6 +21543,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + MinBy `min_by` MinBy aggregate operator. It may produce different results than CPU when multiple rows in a group have same minimum value in the ordering column and different associated values in the value column. @@ -22384,34 +21715,6 @@ are limited. NS -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - Percentile `percentile` Aggregation computing exact percentile @@ -22746,6 +22049,34 @@ are limited. NS +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + StddevPop `stddev_pop` Aggregation computing population standard deviation @@ -22891,34 +22222,6 @@ are limited. 
-Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - StddevSamp `std`, `stddev_samp`, `stddev` Aggregation computing sample standard deviation @@ -23209,6 +22512,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + VariancePop `var_pop` Aggregation computing population variance @@ -23354,34 +22685,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - VarianceSamp `var_samp`, `variance` Aggregation computing sample variance @@ -23606,55 +22909,32 @@ are limited. -InSubqueryExec - -Evaluates to true if values are in a subquery's result set -None -project -input -S -S -S -S -S -S -S -S -PS
UTC is only supported TZ for TIMESTAMP
-S -S -S -NS -NS -NS - -NS -NS - - - - -result -S - - - - - - - - - - - - - - - - - - - +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH ScalarSubquery @@ -24055,7 +23335,7 @@ and the accelerator produces the same result. -NS + @@ -24245,33 +23525,33 @@ and the accelerator produces the same result. DAYTIME -S -S -S -S -S +NS -S + + + + +NS YEARMONTH -S -S -S -S + + + + @@ -24359,8 +23639,8 @@ and the accelerator produces the same result. -S -S + + SHORT @@ -24382,8 +23662,8 @@ and the accelerator produces the same result. -S -S + + INT @@ -24405,8 +23685,8 @@ and the accelerator produces the same result. -S -S + + LONG @@ -24428,8 +23708,8 @@ and the accelerator produces the same result. -S -S + + FLOAT @@ -24543,7 +23823,7 @@ and the accelerator produces the same result. -S + @@ -24654,7 +23934,7 @@ and the accelerator produces the same result. -PS
The array's child type must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
The array's child type must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
@@ -24678,7 +23958,7 @@ and the accelerator produces the same result. -PS
the map's key and value must also support being cast to the desired child types;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
the map's key and value must also support being cast to the desired child types;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
@@ -24702,7 +23982,7 @@ and the accelerator produces the same result. -PS
the struct's children must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
the struct's children must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
@@ -24733,33 +24013,33 @@ and the accelerator produces the same result. DAYTIME -S -S -S -S -S +NS + -S + + + +NS YEARMONTH -S -S -S -S + + + + @@ -25065,8 +24345,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS -S -S + + Write @@ -25088,8 +24368,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS -S -S + + HiveText @@ -25159,8 +24439,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS -S -S + + Write @@ -25182,8 +24462,8 @@ dates or timestamps, or for a lack of type coercion support. NS NS NS -S -S + + JSON @@ -25300,8 +24580,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS -S -S + + Write @@ -25323,8 +24603,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS -S -S + + diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala index f3c03253c93..d054c7570e9 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala @@ -33,7 +33,7 @@ import com.nvidia.spark.rapids.GpuMetric._ import com.nvidia.spark.rapids.RapidsPluginImplicits._ import com.nvidia.spark.rapids.lore.{GpuLoreDumpRDD, SimpleRDD} import com.nvidia.spark.rapids.lore.GpuLore.LORE_DUMP_RDD_TAG -import com.nvidia.spark.rapids.shims.{BroadcastExchangeShims, ShimBroadcastExchangeLike, ShimUnaryExecNode, SparkShimImpl} +import com.nvidia.spark.rapids.shims.{ShimBroadcastExchangeLike, ShimUnaryExecNode, SparkShimImpl} import com.nvidia.spark.rapids.shims.BroadcastExchangeShims.MAX_BROADCAST_TABLE_BYTES import org.apache.spark.SparkException diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala index ed025d31d12..6632fcacc7a 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala @@ -34,7 +34,6 @@ import org.apache.spark.api.python.{ChainedPythonFunctions, PythonEvalType} import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.plans.physical.{AllTuples, ClusteredDistribution, Distribution, Partitioning} -import org.apache.spark.sql.execution.python._ import org.apache.spark.sql.rapids.aggregate.GpuAggregateExpression import 
org.apache.spark.sql.rapids.execution.python.shims.{GpuArrowPythonRunner, PythonArgumentUtils} import org.apache.spark.sql.rapids.execution.python.shims.WindowInPandasExecTypeShim.WindowInPandasExecType diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala index 7581b29b560..19daa5a3b3b 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala @@ -50,9 +50,9 @@ spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims -import com.nvidia.spark.rapids.{ExecChecks, ExecRule, GpuExec, GpuOverrides, TypeSig} +import com.nvidia.spark.rapids.{ExecChecks, ExecRule, GpuOverrides, TypeSig} -import org.apache.spark.sql.catalyst.expressions.{Expression, NamedExpression} +import org.apache.spark.sql.catalyst.expressions.NamedExpression import org.apache.spark.sql.execution.SparkPlan import org.apache.spark.sql.execution.python.AggregateInPandasExec diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala index ef327b7f5e7..7cc8b83f4a4 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala @@ -71,18 +71,14 @@ import org.apache.spark.sql.execution.datasources.v2.orc.OrcScan import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetScan import org.apache.spark.sql.execution.exchange.BroadcastExchangeExec import org.apache.spark.sql.execution.joins._ -import org.apache.spark.sql.execution.python._ import org.apache.spark.sql.execution.window.WindowExecBase import 
org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.rapids._ import org.apache.spark.sql.rapids.aggregate._ import org.apache.spark.sql.rapids.execution._ -import org.apache.spark.sql.rapids.execution.python._ import org.apache.spark.sql.rapids.shims._ import org.apache.spark.sql.rapids.shims.SparkSessionUtils import org.apache.spark.sql.rapids.shims.TrampolineConnectShims.SparkSession -import org.apache.spark.sql.types._ -import org.apache.spark.unsafe.types.CalendarInterval /** * Shim base class that can be compiled with every supported 3.2.0+ @@ -151,8 +147,9 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with WindowInPandasS override def isWindowFunctionExec(plan: SparkPlan): Boolean = plan.isInstanceOf[WindowExecBase] - override def getExprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = Seq( - GpuOverrides.expr[Cast]( + override def getExprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = { + val baseExprs: Seq[ExprRule[_ <: Expression]] = Seq( + GpuOverrides.expr[Cast]( "Convert a column of one type of data into another type", new CastChecks(), (cast, conf, p, r) => { @@ -203,9 +200,7 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with WindowInPandasS // ANSI support for ABS was added in 3.2.0 SPARK-33275 override def convertToGpu(child: Expression): GpuExpression = GpuAbs(child, ansiEnabled) - }) - // TimeAdd moved to TimeAddShims to handle version differences - ).map(r => (r.getClassFor.asSubclass(classOf[Expression]), r)).toMap ++ TimeAddShims.exprs ++ Seq( + }), GpuOverrides.expr[SpecifiedWindowFrame]( "Specification of the width of the group (or \"frame\") of input rows " + "around which a window function is evaluated", @@ -233,7 +228,12 @@ trait Spark320PlusShims extends SparkShims with RebaseShims with WindowInPandasS TypeSig.CALENDAR + TypeSig.NULL + TypeSig.integral + TypeSig.DECIMAL_64 + TypeSig.DAYTIME, TypeSig.numericAndInterval))), (windowExpression, conf, p, r) => new 
GpuWindowExpressionMeta(windowExpression, conf, p, r)) - ).map(r => (r.getClassFor.asSubclass(classOf[Expression]), r)).toMap + ) + val result = new scala.collection.mutable.HashMap[ + Class[_ <: Expression], ExprRule[_ <: Expression]]() + baseExprs.foreach(r => result(r.getClassFor.asSubclass(classOf[Expression])) = r) + result.toMap ++ TimeAddShims.exprs + } override def getExecs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]] = { val maps: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]] = Seq( diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala index 2c3ab7ab3fa..49d01c6806a 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2026, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -31,33 +31,29 @@ import org.apache.spark.sql.rapids.shims.GpuTimeAdd import org.apache.spark.unsafe.types.CalendarInterval /** - * TimeAdd expression support for versions before it was renamed to TimestampAddInterval. - * TimeAdd was renamed in Spark 4.1 (and likely backported to Databricks 17.3). - * See: https://github.com/apache/spark/commit/059b395c8cbfe1b0bdc614e6006939e3ac538b13 + * TimeAdd expression support for Spark 3.2.x (CalendarInterval only, no DayTimeIntervalType). 
*/ object TimeAddShims { - val exprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = { - Seq( - GpuOverrides.expr[TimeAdd]( - "Adds interval to timestamp", - ExprChecks.binaryProject(TypeSig.TIMESTAMP, TypeSig.TIMESTAMP, - ("start", TypeSig.TIMESTAMP, TypeSig.TIMESTAMP), - ("interval", TypeSig.lit(TypeEnum.CALENDAR) + val exprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = Seq( + GpuOverrides.expr[TimeAdd]( + "Adds interval to timestamp", + ExprChecks.binaryProject(TypeSig.TIMESTAMP, TypeSig.TIMESTAMP, + ("start", TypeSig.TIMESTAMP, TypeSig.TIMESTAMP), + ("interval", TypeSig.lit(TypeEnum.CALENDAR) .withPsNote(TypeEnum.CALENDAR, "month intervals are not supported"), TypeSig.CALENDAR)), - (timeAdd, conf, p, r) => new BinaryExprMeta[TimeAdd](timeAdd, conf, p, r) { - override def tagExprForGpu(): Unit = { - GpuOverrides.extractLit(timeAdd.interval).foreach { lit => - val intvl = lit.value.asInstanceOf[CalendarInterval] - if (intvl.months != 0) { - willNotWorkOnGpu("interval months isn't supported") - } + (timeAdd, conf, p, r) => new BinaryExprMeta[TimeAdd](timeAdd, conf, p, r) { + override def tagExprForGpu(): Unit = { + GpuOverrides.extractLit(timeAdd.interval).foreach { lit => + val intvl = lit.value.asInstanceOf[CalendarInterval] + if (intvl.months != 0) { + willNotWorkOnGpu("interval months isn't supported") } } + } - override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression = - GpuTimeAdd(lhs, rhs) - }) - ).map(r => (r.getClassFor.asSubclass(classOf[Expression]), r)).toMap - } + override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression = + GpuTimeAdd(lhs, rhs) + }) + ).map(r => (r.getClassFor.asSubclass(classOf[Expression]), r)).toMap } diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala index 92cf2cd9b92..0dea7ace89d 100644 --- 
a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala @@ -52,7 +52,7 @@ package com.nvidia.spark.rapids.shims import com.nvidia.spark.rapids._ -import org.apache.spark.sql.catalyst.expressions.{Expression, NamedExpression} +import org.apache.spark.sql.catalyst.expressions.NamedExpression import org.apache.spark.sql.execution.SparkPlan import org.apache.spark.sql.execution.python.WindowInPandasExec import org.apache.spark.sql.rapids.execution.python.GpuWindowInPandasExecMetaBase diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala index a08010b9699..c6a1fded30b 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala @@ -58,5 +58,5 @@ import org.apache.spark.sql.execution.python.WindowInPandasExec */ trait WindowInPandasShims { def getWindowExpressions(winPy: WindowInPandasExec): Seq[NamedExpression] = - winPy.projectList + winPy.windowExpression } diff --git a/tools/generated_files/321/supportedExprs.csv b/tools/generated_files/321/supportedExprs.csv index 6317a529044..46ef1c066c5 100644 --- a/tools/generated_files/321/supportedExprs.csv +++ b/tools/generated_files/321/supportedExprs.csv @@ -614,7 +614,7 @@ Tanh,S,`tanh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA Tanh,S,`tanh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Tanh,S,`tanh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA TimeAdd,S, ,None,project,start,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -TimeAdd,S, 
,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,PS,NA +TimeAdd,S, ,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA TimeAdd,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ToDegrees,S,`degrees`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ToDegrees,S,`degrees`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA diff --git a/tools/generated_files/operatorsScore.csv b/tools/generated_files/operatorsScore.csv index 5a39b48a2e0..935ca9cb6bc 100644 --- a/tools/generated_files/operatorsScore.csv +++ b/tools/generated_files/operatorsScore.csv @@ -38,7 +38,6 @@ ArrowEvalPythonExec,1.2 FlatMapCoGroupsInPandasExec,3.0 FlatMapGroupsInPandasExec,1.2 MapInPandasExec,1.2 -PythonMapInArrowExec,3.0 WindowInPandasExec,1.2 WindowExec,3.0 HiveTableScanExec,3.0 @@ -85,8 +84,6 @@ BitwiseCount,4 BitwiseNot,4 BitwiseOr,4 BitwiseXor,4 -BloomFilterAggregate,4 -BloomFilterMightContain,4 BoundReference,4 CaseWhen,4 Cbrt,4 @@ -117,8 +114,6 @@ DayOfWeek,4 DayOfYear,4 DenseRank,4 Divide,4 -DivideDTInterval,4 -DivideYMInterval,4 DynamicPruningExpression,4 ElementAt,4 EndsWith,4 @@ -151,7 +146,6 @@ Hypot,4 If,4 In,4 InSet,4 -InSubqueryExec,4 InitCap,4 InputFileBlockLength,4 InputFileBlockStart,4 @@ -202,8 +196,6 @@ MonotonicallyIncreasingID,4 Month,4 MonthsBetween,4 Multiply,4 -MultiplyDTInterval,4 -MultiplyYMInterval,4 Murmur3Hash,4 NaNvl,4 NamedLambdaVariable,4 @@ -235,8 +227,6 @@ ReplicateRows,4 Reverse,4 Rint,4 Round,4 -RoundCeil,4 -RoundFloor,4 RowNumber,4 ScalaUDF,4 ScalarSubquery,4 diff --git a/tools/generated_files/supportedDataSource.csv b/tools/generated_files/supportedDataSource.csv index 67669d28435..2c29ce6ef6a 100644 --- a/tools/generated_files/supportedDataSource.csv +++ b/tools/generated_files/supportedDataSource.csv @@ -1,14 +1,14 @@ 
Format,Direction,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH Avro,read,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO CSV,read,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,NA,NA,NA,NA,NA,NA -Delta,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S -Delta,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S +Delta,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA +Delta,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA HiveText,read,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS HiveText,write,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS -Iceberg,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S +Iceberg,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA Iceberg,write,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO JSON,read,S,S,S,S,S,S,S,PS,PS,S,S,NA,NS,NA,PS,NS,PS,NS,NA,NA ORC,read,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,PS,PS,PS,NS,NA,NA ORC,write,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,PS,PS,PS,NS,NA,NA -Parquet,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S -Parquet,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S +Parquet,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA +Parquet,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA diff --git a/tools/generated_files/supportedExecs.csv b/tools/generated_files/supportedExecs.csv index 8afd7c5d795..317fb6c5ca3 100644 --- a/tools/generated_files/supportedExecs.csv +++ b/tools/generated_files/supportedExecs.csv @@ -1,15 +1,15 @@ Exec,Supported,Notes,Params,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH -CoalesceExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +CoalesceExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS CollectLimitExec,NS,This is disabled by default because Collect Limit replacement can be slower on the GPU; if huge number of rows in a batch it could help 
by limiting the number of rows transferred from GPU to CPU,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS ExpandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS -FileSourceScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S -FilterExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +FileSourceScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +FilterExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS GenerateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS GlobalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS LocalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS -ProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +ProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS RangeExec,S,None,Input/Output,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -SampleExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,S,S +SampleExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS SortExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS SubqueryBroadcastExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S TakeOrderedAndProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS @@ -18,16 +18,16 @@ AQEShuffleReadExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS HashAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS ObjectHashAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,PS,NS,PS,PS,PS,NS,NS,NS SortAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -InMemoryTableScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,PS,PS,PS,NS,S,S -DataWritingCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,PS,NS,S,NS,PS,PS,PS,NS,S,S 
+InMemoryTableScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,PS,PS,PS,NS,NS,NS +DataWritingCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,PS,NS,S,NS,PS,PS,PS,NS,NS,NS ExecutedCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S -AppendDataExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S -AtomicCreateTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S -AtomicReplaceTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S -BatchScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S -OverwriteByExpressionExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +AppendDataExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS +AtomicCreateTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS +AtomicReplaceTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS +BatchScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS +OverwriteByExpressionExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS BroadcastExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -ShuffleExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +ShuffleExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS BroadcastHashJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS BroadcastHashJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS BroadcastHashJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -48,7 +48,6 @@ ArrowEvalPythonExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,P FlatMapCoGroupsInPandasExec,NS,This is disabled by default because Performance is not ideal with many small 
groups,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS FlatMapGroupsInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS MapInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS -PythonMapInArrowExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS WindowInPandasExec,NS,This is disabled by default because it only supports row based frame for now,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,NS,NS,NS,NS WindowExec,S,None,partitionSpec,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS,NS,NS WindowExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS diff --git a/tools/generated_files/supportedExprs.csv b/tools/generated_files/supportedExprs.csv index 9c53ec84f52..46ef1c066c5 100644 --- a/tools/generated_files/supportedExprs.csv +++ b/tools/generated_files/supportedExprs.csv @@ -1,8 +1,8 @@ Expression,Supported,SQL Func,Notes,Context,Params,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH -Abs,S,`abs`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,S -Abs,S,`abs`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,S -Abs,S,`abs`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NS,NS -Abs,S,`abs`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NS,NS +Abs,S,`abs`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Abs,S,`abs`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Abs,S,`abs`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Abs,S,`abs`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA Acos,S,`acos`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Acos,S,`acos`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
Acos,S,`acos`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -11,16 +11,16 @@ Acosh,S,`acosh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,N Acosh,S,`acosh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Acosh,S,`acosh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Acosh,S,`acosh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Add,S,`+`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S -Add,S,`+`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S -Add,S,`+`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Add,S,`+`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Add,S,`+`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Add,S,`+`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS Add,S,`+`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS Add,S,`+`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS Add,S,`+`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS -Alias,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S -Alias,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S -Alias,S, ,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S -Alias,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S +Alias,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Alias,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Alias,S, ,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +Alias,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS And,S,`and`,None,project,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA And,S,`and`,None,project,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
And,S,`and`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -92,8 +92,8 @@ Atanh,S,`atanh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,N Atanh,S,`atanh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Atanh,S,`atanh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Atanh,S,`atanh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -AttributeReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S -AttributeReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S +AttributeReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +AttributeReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS BRound,S,`bround`,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA BRound,S,`bround`,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BRound,S,`bround`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -125,11 +125,8 @@ BitwiseXor,S,`^`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BitwiseXor,S,`^`,None,AST,lhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BitwiseXor,S,`^`,None,AST,rhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BitwiseXor,S,`^`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BloomFilterMightContain,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA -BloomFilterMightContain,S, ,None,project,rhs,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA -BloomFilterMightContain,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BoundReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S -BoundReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S +BoundReference,S, 
,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +BoundReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS CaseWhen,S,`when`,None,project,predicate,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA CaseWhen,S,`when`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS CaseWhen,S,`when`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS @@ -137,12 +134,12 @@ Cbrt,S,`cbrt`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA, Cbrt,S,`cbrt`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Cbrt,S,`cbrt`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Cbrt,S,`cbrt`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Ceil,S, ,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -Ceil,S, ,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Ceil,S,`ceil`; `ceiling`,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Ceil,S,`ceil`; `ceiling`,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA CheckOverflow,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA CheckOverflow,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -Coalesce,S,`coalesce`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S -Coalesce,S,`coalesce`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +Coalesce,S,`coalesce`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Coalesce,S,`coalesce`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS Concat,S,`concat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,PS,NA,NA,NA,NA,NA Concat,S,`concat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,PS,NA,NA,NA,NA,NA 
ConcatWs,S,`concat_ws`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,S,NA,NA,NA,NA,NA @@ -200,12 +197,6 @@ DenseRank,S,`dense_rank`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,N Divide,S,`/`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA Divide,S,`/`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA Divide,S,`/`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -DivideDTInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA -DivideDTInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA -DivideDTInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA -DivideYMInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S -DivideYMInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA -DivideYMInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S DynamicPruningExpression,S, ,None,project,input,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA DynamicPruningExpression,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S ElementAt,S,`element_at`,None,project,array/map,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA,NA,NA @@ -235,8 +226,8 @@ Expm1,S,`expm1`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Expm1,S,`expm1`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Flatten,S,`flatten`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA Flatten,S,`flatten`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA -Floor,S, ,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -Floor,S, ,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA 
+Floor,S,`floor`,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Floor,S,`floor`,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA FormatNumber,S,`format_number`,None,project,x,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA FormatNumber,S,`format_number`,None,project,d,NA,NA,NA,PS,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA FormatNumber,S,`format_number`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -284,9 +275,9 @@ Hypot,S,`hypot`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA, Hypot,S,`hypot`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Hypot,S,`hypot`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA If,S,`if`,None,project,predicate,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -If,S,`if`,None,project,trueValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S -If,S,`if`,None,project,falseValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S -If,S,`if`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +If,S,`if`,None,project,trueValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +If,S,`if`,None,project,falseValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +If,S,`if`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS In,S,`in`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA In,S,`in`,None,project,list,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,NS,NS,NS,NS,NA,NS,NS,NA,NA In,S,`in`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -302,11 +293,11 @@ IntegralDivide,S,`div`,None,project,rhs,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,S,NA,NA,NA, IntegralDivide,S,`div`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA IsNaN,S,`isnan`,None,project,input,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
IsNaN,S,`isnan`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -IsNotNull,S,`isnotnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,NS +IsNotNull,S,`isnotnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS IsNotNull,S,`isnotnull`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA IsNotNull,S,`isnotnull`,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS IsNotNull,S,`isnotnull`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -IsNull,S,`isnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,NS +IsNull,S,`isnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS IsNull,S,`isnull`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA IsNull,S,`isnull`,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS IsNull,S,`isnull`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -409,12 +400,6 @@ Multiply,S,`*`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,N Multiply,S,`*`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA Multiply,S,`*`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA Multiply,S,`*`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA -MultiplyDTInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA -MultiplyDTInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA -MultiplyDTInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA -MultiplyYMInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S -MultiplyYMInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA -MultiplyYMInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S 
Murmur3Hash,S,`hash`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS Murmur3Hash,S,`hash`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA NaNvl,S,`nanvl`,None,project,lhs,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -503,12 +488,6 @@ Rint,S,`rint`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA, Round,S,`round`,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA Round,S,`round`,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Round,S,`round`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -RoundCeil,S, ,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -RoundCeil,S, ,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -RoundCeil,S, ,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -RoundFloor,S, ,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -RoundFloor,S, ,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -RoundFloor,S, ,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA RowNumber,S,`row_number`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS RowNumber,S,`row_number`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ScalaUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS @@ -570,18 +549,18 @@ StartsWith,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,N StringInstr,S,`instr`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringInstr,S,`instr`,None,project,substr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringInstr,S,`instr`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S, ,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S, 
,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S, ,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S,`lpad`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S,`lpad`,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S,`lpad`,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S,`lpad`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringLocate,S,`locate`; `position`,None,project,substr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringLocate,S,`locate`; `position`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringLocate,S,`locate`; `position`,None,project,start,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringLocate,S,`locate`; `position`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S, ,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S, ,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S, ,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S,`rpad`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S,`rpad`,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S,`rpad`,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S,`rpad`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringRepeat,S,`repeat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
StringRepeat,S,`repeat`,None,project,repeatTimes,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringRepeat,S,`repeat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -620,9 +599,9 @@ SubstringIndex,S,`substring_index`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S SubstringIndex,S,`substring_index`,None,project,delim,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA SubstringIndex,S,`substring_index`,None,project,count,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA SubstringIndex,S,`substring_index`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Subtract,S,`-`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S -Subtract,S,`-`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S -Subtract,S,`-`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Subtract,S,`-`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Subtract,S,`-`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Subtract,S,`-`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS Subtract,S,`-`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS Subtract,S,`-`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS Subtract,S,`-`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS @@ -635,7 +614,7 @@ Tanh,S,`tanh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA Tanh,S,`tanh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Tanh,S,`tanh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA TimeAdd,S, ,None,project,start,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -TimeAdd,S, ,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,S,NA +TimeAdd,S, ,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA TimeAdd,S, 
,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ToDegrees,S,`degrees`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ToDegrees,S,`degrees`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -659,14 +638,14 @@ TruncDate,S,`trunc`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA TruncTimestamp,S,`date_trunc`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA TruncTimestamp,S,`date_trunc`,None,project,date,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA TruncTimestamp,S,`date_trunc`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -UnaryMinus,S,`negative`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S -UnaryMinus,S,`negative`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +UnaryMinus,S,`negative`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryMinus,S,`negative`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS UnaryMinus,S,`negative`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS UnaryMinus,S,`negative`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS -UnaryPositive,S,`positive`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S -UnaryPositive,S,`positive`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S -UnaryPositive,S,`positive`,None,AST,input,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,S,S -UnaryPositive,S,`positive`,None,AST,result,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,S,S +UnaryPositive,S,`positive`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryPositive,S,`positive`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryPositive,S,`positive`,None,AST,input,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS 
+UnaryPositive,S,`positive`,None,AST,result,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS UnboundedFollowing$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA UnboundedPreceding$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA UnixTimestamp,S,`unix_timestamp`,None,project,timeExp,NA,NA,NA,NA,NA,NA,NA,S,PS,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -724,16 +703,12 @@ BitXorAgg,S,`bit_xor`,None,aggregation,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA, BitXorAgg,S,`bit_xor`,None,aggregation,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BitXorAgg,S,`bit_xor`,None,reduction,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BitXorAgg,S,`bit_xor`,None,reduction,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BloomFilterAggregate,S, ,None,reduction,child,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BloomFilterAggregate,S, ,None,reduction,estimatedItems,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BloomFilterAggregate,S, ,None,reduction,numBits,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BloomFilterAggregate,S, ,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA -CollectList,S,`array_agg`; `collect_list`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -CollectList,S,`array_agg`; `collect_list`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA -CollectList,S,`array_agg`; `collect_list`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -CollectList,S,`array_agg`; `collect_list`,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA -CollectList,S,`array_agg`; `collect_list`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -CollectList,S,`array_agg`; `collect_list`,None,window,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA 
+CollectList,S,`collect_list`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`collect_list`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectList,S,`collect_list`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`collect_list`,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectList,S,`collect_list`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`collect_list`,None,window,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA CollectSet,S,`collect_set`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS CollectSet,S,`collect_set`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA CollectSet,S,`collect_set`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS @@ -833,8 +808,6 @@ VarianceSamp,S,`var_samp`; `variance`,None,window,result,NA,NA,NA,NA,NA,NA,NS,NA StaticInvoke,S, ,The supported types are not deterministic since it's a dynamic expression,project,result,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS NormalizeNaNAndZero,S, ,None,project,input,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA NormalizeNaNAndZero,S, ,None,project,result,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -InSubqueryExec,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA -InSubqueryExec,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ScalarSubquery,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS HiveGenericUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS HiveGenericUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS From 352ef8d6b5a5b82a98b3ede7300372b7da3f3cdd Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Tue, 13 Jan 2026 15:28:38 +0800 Subject: [PATCH 
22/59] Switch from Spark 410 shim to 411 Signed-off-by: Chong Gao --- .../sql/tests/datagen/DataGenExprShims.scala | 2 +- .../sql/rapids/shims/TrampolineUtilShim.scala | 2 +- pom.xml | 12 +++--- scala2.13/pom.xml | 43 +++++++++---------- .../nvidia/spark/rapids/shims/AQEUtils.scala | 2 +- .../rapids/shims/AggregationTagging.scala | 2 +- .../rapids/shims/CudfUnsafeRowBase.scala | 2 +- .../spark/rapids/shims/DeltaLakeUtils.scala | 2 +- .../rapids/shims/FileSourceScanExecMeta.scala | 2 +- .../rapids/shims/GpuBatchScanExecBase.scala | 2 +- .../shims/GpuFileFormatDataWriterShim.scala | 2 +- .../shims/GpuOrcDataReader320Plus.scala | 2 +- .../rapids/shims/GpuOrcDataReaderBase.scala | 2 +- .../spark/rapids/shims/GpuParquetCrypto.scala | 2 +- .../rapids/shims/GpuWindowInPandasExec.scala | 2 +- .../nvidia/spark/rapids/shims/HashUtils.scala | 2 +- .../shims/HybridFileSourceScanExecMeta.scala | 2 +- .../shims/OffsetWindowFunctionMeta.scala | 2 +- .../spark/rapids/shims/OrcCastingShims.scala | 2 +- .../shims/OrcShims320untilAllBase.scala | 2 +- .../rapids/shims/RapidsCsvScanMeta.scala | 2 +- .../spark/rapids/shims/RebaseShims.scala | 2 +- .../rapids/shims/ShimAQEShuffleReadExec.scala | 2 +- .../rapids/shims/ShimBaseSubqueryExec.scala | 2 +- .../shims/ShimBroadcastExchangeLike.scala | 2 +- .../spark/rapids/shims/ShimLeafExecNode.scala | 2 +- .../rapids/shims/ShimPredicateHelper.scala | 2 +- .../rapids/shims/Spark320PlusNonDBShims.scala | 2 +- .../rapids/shims/Spark320PlusShims.scala | 2 +- .../rapids/shims/StaticPartitionShims.scala | 2 +- .../nvidia/spark/rapids/shims/TreeNode.scala | 2 +- .../spark/rapids/shims/XxHash64Shims.scala | 2 +- .../spark/rapids/shims/YearParseUtil.scala | 2 +- .../rapids/shims/extractValueShims.scala | 2 +- .../spark/rapids/shims/gpuWindows.scala | 2 +- .../spark/rapids/v1FallbackWriters.scala | 2 +- .../shims/GpuShuffleBlockResolver.scala | 2 +- .../rapids/shims/GpuShuffleExchangeExec.scala | 2 +- .../rapids/shims/ShuffledBatchRDDUtil.scala | 2 +- 
.../shims/storage/ShimDiskBlockManager.scala | 2 +- .../shims/GpuRowBasedHiveGenericUDFShim.scala | 2 +- .../sql/rapids/RapidsShuffleWriter.scala | 2 +- .../shims/GpuGroupedPythonRunnerFactory.scala | 2 +- .../spark/sql/rapids/shims/AvroUtils.scala | 2 +- .../rapids/shims/RapidsQueryErrorUtils.scala | 2 +- .../shims/RapidsShuffleThreadedWriter.scala | 2 +- .../sql/rapids/shims/Spark32XShimsUtils.scala | 2 +- .../storage/RapidsPushBasedFetchHelper.scala | 2 +- .../RapidsShuffleBlockFetcherIterator.scala | 2 +- .../rapids/shims/Spark321PlusShims.scala | 2 +- .../spark/sql/rapids/shims/GpuAscii.scala | 2 +- .../nvidia/spark/rapids/GpuBloomFilter.scala | 2 +- .../rapids/GpuBloomFilterMightContain.scala | 2 +- .../spark/rapids/GpuInSubqueryExec.scala | 2 +- .../nvidia/spark/rapids/shims/AnsiUtil.scala | 2 +- .../spark/rapids/shims/BloomFilterShims.scala | 2 +- .../rapids/shims/BucketingUtilsShim.scala | 2 +- .../rapids/shims/CharVarcharUtilsShims.scala | 2 +- .../rapids/shims/DayTimeIntervalShims.scala | 2 +- .../spark/rapids/shims/DistributionUtil.scala | 2 +- .../rapids/shims/FilteredPartitions.scala | 2 +- .../spark/rapids/shims/GpuDataSourceRDD.scala | 2 +- .../rapids/shims/GpuHashPartitioning.scala | 2 +- .../rapids/shims/GpuIntervalUtilsBase.scala | 2 +- .../rapids/shims/GpuRangePartitioning.scala | 2 +- .../spark/rapids/shims/GpuTypeShims.scala | 2 +- .../spark/rapids/shims/InSubqueryShims.scala | 2 +- .../spark/rapids/shims/OrcReadingShims.scala | 2 +- .../nvidia/spark/rapids/shims/OrcShims.scala | 2 +- .../shims/RapidsFileSourceMetaUtils.scala | 2 +- .../rapids/shims/RapidsOrcScanMeta.scala | 2 +- .../rapids/shims/RapidsParquetScanMeta.scala | 2 +- .../spark/rapids/shims/RoundingShims.scala | 2 +- .../spark/rapids/shims/ScanExecShims.scala | 2 +- .../rapids/shims/Spark330PlusNonDBShims.scala | 2 +- .../rapids/shims/Spark330PlusShims.scala | 2 +- .../shims/parquet/ParquetFieldIdShims.scala | 2 +- .../parquet/ParquetSchemaClipShims.scala | 2 +- 
.../RapidsVectorizedColumnReader.scala | 2 +- .../parquet/ShimCurrentBatchIterator.scala | 2 +- .../aggregate/GpuBloomFilterAggregate.scala | 2 +- .../shims/RapidsErrorUtilsFor330plus.scala | 2 +- .../shims/RapidsShuffleThreadedReader.scala | 2 +- .../rapids/shims/intervalExpressions.scala | 2 +- .../spark/rapids/shims/AnsiCastShim.scala | 2 +- .../rapids/shims/CastingConfigShim.scala | 2 +- .../shims/ColumnDefaultValuesShims.scala | 2 +- .../shims/DecimalArithmeticOverrides.scala | 2 +- .../spark/rapids/shims/GetMapValueMeta.scala | 2 +- .../spark/rapids/shims/GpuCastShims.scala | 2 +- .../ShimFilePartitionReaderFactory.scala | 2 +- .../spark/rapids/shims/TypeUtilsShims.scala | 2 +- .../parquet/ParquetStringPredShims.scala | 2 +- .../rapids/DataSourceStrategyUtils.scala | 2 +- .../GpuCheckOverflowInTableInsert.scala | 2 +- .../rapids/aggregate/aggregateFunctions.scala | 2 +- .../apache/spark/sql/rapids/arithmetic.scala | 2 +- .../rapids/shims/Spark331PlusNonDBShims.scala | 2 +- ...aSourceTableAsSelectCommandMetaShims.scala | 2 +- .../rapids/shims/GpuInsertIntoHiveTable.scala | 2 +- .../spark/rapids/shims/GpuKnownNullable.scala | 2 +- ...dCreateHiveTableAsSelectCommandShims.scala | 2 +- .../execution/datasources/GpuWriteFiles.scala | 2 +- .../sql/hive/rapids/shims/HiveFileUtil.scala | 2 +- .../rapids/shims/HiveProviderCmdShims.scala | 2 +- .../sql/rapids/GpuFileFormatWriter.scala | 2 +- .../shims/GpuCastToNumberErrorShim.scala | 2 +- ...eDataSourceTableAsSelectCommandShims.scala | 2 +- .../sql/rapids/shims/GpuDataSource.scala | 2 +- .../sql/rapids/shims/SchemaUtilsShims.scala | 2 +- .../shims/SparkDateTimeExceptionShims.scala | 2 +- .../shims/SparkUpgradeExceptionShims.scala | 2 +- .../spark/rapids/shims/GetSequenceSize.scala | 2 +- .../spark/rapids/shims/CastCheckShims.scala | 2 +- .../nvidia/spark/rapids/shims/ConvShim.scala | 2 +- .../spark/rapids/shims/GlobalLimitShims.scala | 2 +- .../rapids/shims/GpuBroadcastJoinMeta.scala | 2 +- 
.../rapids/shims/OrcProtoWriterShim.scala | 2 +- .../shims/PartitionedFileUtilsShimBase.scala | 2 +- .../rapids/shims/ShuffleOriginUtil.scala | 2 +- .../rapids/shims/Spark340PlusNonDBShims.scala | 2 +- .../shims/TagScanForRuntimeFiltering.scala | 2 +- .../ParquetLegacyNanoAsLongShims.scala | 2 +- .../ParquetTimestampAnnotationShims.scala | 2 +- .../parquet/ParquetTimestampNTZShims.scala | 2 +- .../shuffle/RapidsShuffleIterator.scala | 2 +- .../spark/sql/catalyst/csv/GpuCsvUtils.scala | 2 +- .../sql/catalyst/json/GpuJsonUtils.scala | 2 +- .../apache/spark/sql/errors/ConvUtils.scala | 2 +- .../sql/rapids/RapidsCachingReader.scala | 2 +- .../execution/GpuBroadcastHashJoinExec.scala | 2 +- .../GpuBroadcastNestedLoopJoinExec.scala | 2 +- .../rapids/execution/ShimTrampolineUtil.scala | 2 +- .../rapids/shims/GpuJsonToStructsShim.scala | 2 +- .../shims/RapidsErrorUtils340PlusBase.scala | 2 +- .../rapids/shims/GpuToPrettyString.scala | 2 +- .../shims/GpuWindowGroupLimitExec.scala | 2 +- .../spark/rapids/shims/PlanShimsImpl.scala | 2 +- .../spark/rapids/shims/PythonUDFShim.scala | 2 +- .../execution/rapids/shims/SplitFiles.scala | 2 +- .../hive/rapids/shims/CreateFunctions.scala | 2 +- .../hive/rapids/shims/FileSinkDescShim.scala | 2 +- .../rapids/shims/HiveInspectorsShim.scala | 2 +- .../python/shims/GpuArrowPythonOutput.scala | 2 +- .../python/shims/GpuArrowPythonRunner.scala | 2 +- .../shims/GpuCoGroupedArrowPythonRunner.scala | 2 +- .../rapids/shims/DecimalMultiply128.scala | 2 +- .../nvidia/spark/rapids/GpuDeltaWrite.scala | 2 +- .../spark/rapids/GpuMergeRowsExecMeta.scala | 2 +- .../rapids/shims/BatchScanExecMetaBase.scala | 2 +- .../rapids/shims/ExternalSourceShim.scala | 2 +- .../spark/rapids/shims/GpuIntervalUtils.scala | 2 +- .../shims/KeyGroupedPartitioningShim.scala | 2 +- .../shims/LegacyBehaviorPolicyShim.scala | 2 +- .../rapids/shims/NullOutputStreamShim.scala | 2 +- .../rapids/shims/Spark350PlusNonDBShims.scala | 2 +- .../shims/v2WriteCommandMetasShim.scala | 
2 +- .../catalyst/GpuProjectingColumnarBatch.scala | 2 +- .../shims/ShimVectorizedColumnReader.scala | 2 +- .../datasources/v2/GpuMergeRowsExec.scala | 2 +- .../v2/WriteToDataSourceV2Exec.scala | 2 +- .../sql/rapids/execution/GpuShuffleMeta.scala | 2 +- .../sql/rapids/shims/ArrowUtilsShim.scala | 2 +- .../sql/rapids/shims/DataTypeUtilsShim.scala | 2 +- .../rapids/shims/GpuMapInPandasExecMeta.scala | 2 +- .../rapids/shims/SchemaMetadataShims.scala | 2 +- .../spark/rapids/RapidsShuffleManager.scala | 2 +- .../rapids/shims/BatchScanExecMeta.scala | 2 +- .../rapids/shims/CastTimeToIntShim.scala | 2 +- .../spark/rapids/shims/GpuBatchScanExec.scala | 2 +- .../spark/rapids/shims/RaiseErrorShim.scala | 2 +- .../shims/ShuffleManagerShimUtils.scala | 2 +- .../python/shims/PythonArgumentsUtils.scala | 2 +- .../ArrayInvalidArgumentErrorUtils.scala | 2 +- ...equenceSizeExceededLimitErrorBuilder.scala | 2 +- .../apache/spark/sql/rapids/shims/misc.scala | 2 +- .../shims/InMemoryTableScanExecLikeShim.scala | 2 +- .../rapids/shims/InMemoryTableScanUtils.scala | 2 +- .../spark/rapids/shims/CudfUnsafeRow.scala | 2 +- .../rapids/shims/DateTimeUtilsShims.scala | 2 +- .../rapids/shims/GetJsonObjectShim.scala | 2 +- .../spark/rapids/shims/GpuOrcDataReader.scala | 2 +- .../spark/rapids/shims/LogicalPlanShims.scala | 2 +- .../rapids/shims/MapInArrowExecShims.scala | 2 +- .../rapids/shims/NullIntolerantShim.scala | 2 +- .../rapids/shims/OperatorsUtilShims.scala | 2 +- .../shims/PartitionedFileUtilsShim.scala | 2 +- .../shims/Spark400PlusCommonShims.scala | 2 +- .../GpuAtomicCreateTableAsSelectExec.scala | 2 +- .../GpuAtomicReplaceTableAsSelectExec.scala | 2 +- .../rapids/shims/FilePartitionShims.scala | 2 +- .../hive/rapids/shims/CommandUtilsShim.scala | 2 +- .../apache/spark/sql/nvidia/DFUDFShims.scala | 2 +- .../execution/GpuSubqueryBroadcastMeta.scala | 2 +- .../python/shims/GpuBasePythonRunner.scala | 2 +- .../python/shims/WritePythonUDFUtils.scala | 2 +- 
.../rapids/shims/GpuMapInArrowExecMeta.scala | 2 +- .../sql/rapids/shims/InvokeExprMeta.scala | 2 +- .../sql/rapids/shims/RapidsErrorUtils.scala | 2 +- .../sql/rapids/shims/SparkSessionUtils.scala | 2 +- .../rapids/shims/TrampolineConnectShims.scala | 2 +- .../spark/rapids/shims/SparkShims.scala | 2 +- .../spark401/SparkShimServiceProvider.scala | 2 +- .../shims/AggregateInPandasExecShims.scala | 2 +- .../rapids/shims/BroadcastExchangeShims.scala | 2 +- .../rapids/shims/DayTimeIntervalShims.scala | 2 +- .../rapids/shims/InvalidateCacheShims.scala | 2 +- .../rapids/shims/ParquetVariantShims.scala | 2 +- .../shims/ShowNamespacesExecShims.scala | 2 +- .../spark/rapids/shims/SparkShims.scala | 2 +- .../shims/StoragePartitionJoinShims.scala | 2 +- .../spark/rapids/shims/TimeAddShims.scala | 2 +- .../spark/rapids/shims/TryModeShim.scala | 2 +- .../shims/WindowInPandasExecShims.scala | 2 +- .../rapids/shims/WindowInPandasShims.scala | 2 +- .../spark411}/SparkShimServiceProvider.scala | 2 +- .../parquet/rapids/shims/ParquetCVShims.scala | 2 +- .../GpuAtomicReplaceTableAsSelectExec.scala | 2 +- .../shims/WindowInPandasExecTypeShim.scala | 2 +- .../shims/FileCommitProtocolShims.scala | 2 +- .../rapids/shims/FileStreamSinkShims.scala | 2 +- .../rapids/shims/datetimeExpressions.scala | 2 +- .../shims/spark411}/SparkShimsSuite.scala | 2 +- .../spark/rapids/shims/OrcStatisticShim.scala | 2 +- .../sql/rapids/GpuInSubqueryExecSuite.scala | 2 +- .../shuffle/RapidsShuffleTestHelper.scala | 2 +- .../spark/rapids/ToPrettyStringSuite.scala | 2 +- ...eDataSourceTableAsSelectCommandSuite.scala | 2 +- .../{410 => 411}/operatorsScore.csv | 0 .../{410 => 411}/supportedDataSource.csv | 0 .../{410 => 411}/supportedExecs.csv | 0 .../{410 => 411}/supportedExprs.csv | 0 232 files changed, 252 insertions(+), 255 deletions(-) rename sql-plugin/src/main/{spark410 => spark411}/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala (98%) rename sql-plugin/src/main/{spark410 => 
spark411}/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala (98%) rename sql-plugin/src/main/{spark410 => spark411}/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala (99%) rename sql-plugin/src/main/{spark410 => spark411}/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala (98%) rename sql-plugin/src/main/{spark410 => spark411}/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala (98%) rename sql-plugin/src/main/{spark410 => spark411}/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala (98%) rename sql-plugin/src/main/{spark410 => spark411}/scala/com/nvidia/spark/rapids/shims/SparkShims.scala (99%) rename sql-plugin/src/main/{spark410 => spark411}/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala (98%) rename sql-plugin/src/main/{spark410 => spark411}/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala (98%) rename sql-plugin/src/main/{spark410 => spark411}/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala (99%) rename sql-plugin/src/main/{spark410 => spark411}/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala (98%) rename sql-plugin/src/main/{spark410 => spark411}/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala (98%) rename sql-plugin/src/main/{spark410/scala/com/nvidia/spark/rapids/shims/spark410 => spark411/scala/com/nvidia/spark/rapids/shims/spark411}/SparkShimServiceProvider.scala (98%) rename sql-plugin/src/main/{spark410 => spark411}/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala (98%) rename sql-plugin/src/main/{spark410 => spark411}/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala (99%) rename sql-plugin/src/main/{spark410 => spark411}/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala (98%) rename sql-plugin/src/main/{spark410 => spark411}/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala 
(99%) rename sql-plugin/src/main/{spark410 => spark411}/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala (98%) rename sql-plugin/src/main/{spark410 => spark411}/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala (99%) rename sql-plugin/src/test/{spark410/scala/com/nvidia/spark/rapids/shims/spark410 => spark411/scala/com/nvidia/spark/rapids/shims/spark411}/SparkShimsSuite.scala (98%) rename tools/generated_files/{410 => 411}/operatorsScore.csv (100%) rename tools/generated_files/{410 => 411}/supportedDataSource.csv (100%) rename tools/generated_files/{410 => 411}/supportedExecs.csv (100%) rename tools/generated_files/{410 => 411}/supportedExprs.csv (100%) diff --git a/datagen/src/main/spark400/scala/org/apache/spark/sql/tests/datagen/DataGenExprShims.scala b/datagen/src/main/spark400/scala/org/apache/spark/sql/tests/datagen/DataGenExprShims.scala index 2eeac0d839b..ad42aa6bfdd 100644 --- a/datagen/src/main/spark400/scala/org/apache/spark/sql/tests/datagen/DataGenExprShims.scala +++ b/datagen/src/main/spark400/scala/org/apache/spark/sql/tests/datagen/DataGenExprShims.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.tests.datagen diff --git a/integration_tests/src/test/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineUtilShim.scala b/integration_tests/src/test/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineUtilShim.scala index 31670d66696..61924942c45 100644 --- a/integration_tests/src/test/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineUtilShim.scala +++ b/integration_tests/src/test/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineUtilShim.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git 
a/pom.xml b/pom.xml index c689559b10f..dce045c416e 100644 --- a/pom.xml +++ b/pom.xml @@ -770,19 +770,19 @@
- release410 + release411 buildver - 410 + 411 - 410 + 411 17 2.13.17 - ${spark410.version} - ${spark410.version} + ${spark411.version} + ${spark411.version} 1.13.1 rapids-4-spark-delta-40x 2.0.7 @@ -1006,7 +1006,7 @@ 3.5.7 4.0.0 4.0.1 - 4.1.0 + 4.1.1 3.12.4 4.9.2 diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index c4568026ddd..d99a94fc0c3 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -770,19 +770,18 @@ - release410 + release411 buildver - 410 + 411 - 410 - 17 + 411 2.13.17 - ${spark410.version} - ${spark410.version} + ${spark411.version} + ${spark411.version} 1.13.1 @@ -791,25 +790,23 @@ [17,) Support for Spark ${spark.version} is only available with Java 17+ - - - - net.alchim31.maven - scala-maven-plugin - - - -release:17 - -feature - -unchecked - -deprecation - - - - - delta-lake/delta-stub + + + + + net.alchim31.maven + scala-maven-plugin + + + ${java.major.version} + + + + + @@ -1008,7 +1005,7 @@ 3.5.7 4.0.0 4.0.1 - 4.1.0 + 4.1.1 3.12.4 4.9.2 diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala index a32f58b90ed..2e41e9ce8e4 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AQEUtils.scala @@ -43,7 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala index d9529c81289..148790fe6c2 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregationTagging.scala @@ -43,7 +43,7 @@ {"spark": 
"357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala index 28cc0207d0d..ef28867c66c 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRowBase.scala @@ -46,7 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala index 981b2b8b210..791d5eb6932 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/DeltaLakeUtils.scala @@ -43,7 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/FileSourceScanExecMeta.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/FileSourceScanExecMeta.scala index 59a5a594def..6f49ced200a 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/FileSourceScanExecMeta.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/FileSourceScanExecMeta.scala @@ -43,7 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git 
a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExecBase.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExecBase.scala index 2d112a03d2d..815efb2be3f 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExecBase.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExecBase.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala index 11a57daf18b..f4b3a725c46 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuFileFormatDataWriterShim.scala @@ -44,7 +44,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader320Plus.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader320Plus.scala index 25e14b2d7be..e532f8940b6 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader320Plus.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader320Plus.scala @@ -43,7 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReaderBase.scala 
b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReaderBase.scala index 61cc691efda..fbbf51fa8f3 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReaderBase.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReaderBase.scala @@ -46,7 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala index 17ab82c5296..ced20391e48 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuParquetCrypto.scala @@ -43,7 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala index 25422d6cf43..01f08260a19 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/GpuWindowInPandasExec.scala @@ -43,7 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HashUtils.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HashUtils.scala index fffffa29ea0..df5db8b45d0 100644 --- 
a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HashUtils.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HashUtils.scala @@ -46,7 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HybridFileSourceScanExecMeta.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HybridFileSourceScanExecMeta.scala index 5f74f150587..40350dae7b0 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HybridFileSourceScanExecMeta.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/HybridFileSourceScanExecMeta.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala index 4f4d0d2bd21..642d56b7d82 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OffsetWindowFunctionMeta.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala index 4d57a3ef8d9..e7d01b217f0 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala +++ 
b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcCastingShims.scala @@ -44,7 +44,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcShims320untilAllBase.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcShims320untilAllBase.scala index f026cd91173..a6f997b47e2 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcShims320untilAllBase.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/OrcShims320untilAllBase.scala @@ -44,7 +44,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RapidsCsvScanMeta.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RapidsCsvScanMeta.scala index 3b616189fc4..8d4e4eb4390 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RapidsCsvScanMeta.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RapidsCsvScanMeta.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RebaseShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RebaseShims.scala index 2cc0483dd53..f5f743e8659 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RebaseShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/RebaseShims.scala @@ -46,7 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} 
spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala index 7ffebc5c5ae..a14b245d8ea 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimAQEShuffleReadExec.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala index f2bd28c402b..963b0f2821c 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBaseSubqueryExec.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala index 5f9d691cb63..df7c1421953 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimBroadcastExchangeLike.scala @@ -43,7 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git 
a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala index 54dc8cd4194..21223185fb8 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimLeafExecNode.scala @@ -43,7 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala index 725bc6baedc..882d8d5d66a 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/ShimPredicateHelper.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusNonDBShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusNonDBShims.scala index 67da94119cb..18db4b1aada 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusNonDBShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusNonDBShims.scala @@ -42,7 +42,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala 
index 7cc8b83f4a4..cac21123d07 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/StaticPartitionShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/StaticPartitionShims.scala index 572e87d59bf..380f83a715a 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/StaticPartitionShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/StaticPartitionShims.scala @@ -45,7 +45,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TreeNode.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TreeNode.scala index 15a6c0b773d..0d424672390 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TreeNode.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/TreeNode.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/XxHash64Shims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/XxHash64Shims.scala index 9594e46c6a9..f93cc93b49c 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/XxHash64Shims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/XxHash64Shims.scala @@ -47,7 +47,7 @@ 
{"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala index b1949da0346..520d143ba1e 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/YearParseUtil.scala @@ -46,7 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala index 5ee2d1bfdcd..b899c61b8cb 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/extractValueShims.scala @@ -43,7 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala index 626f9acebdc..bc4bfade839 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/gpuWindows.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git 
a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/v1FallbackWriters.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/v1FallbackWriters.scala index 6dee91a0150..2a82a65bac4 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/v1FallbackWriters.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/v1FallbackWriters.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala index 47668945435..223561fc6d0 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleBlockResolver.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala index e198a3d8824..428d265fc31 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/GpuShuffleExchangeExec.scala @@ -43,7 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala 
index 81fc7fef03d..dc4a7f6a834 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/ShuffledBatchRDDUtil.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala index 250a3ea7e23..476e2d6cb20 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/rapids/shims/storage/ShimDiskBlockManager.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.rapids.shims.storage diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala index 1d9811514e6..c032bd2d47a 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/hive/rapids/shims/GpuRowBasedHiveGenericUDFShim.scala @@ -42,7 +42,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/RapidsShuffleWriter.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/RapidsShuffleWriter.scala index f5a76967aeb..93e3b307af4 100644 --- 
a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/RapidsShuffleWriter.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/RapidsShuffleWriter.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala index 20ba333a7be..5fff48ae5bc 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala @@ -43,7 +43,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala index 882122a17ac..272550c0215 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/AvroUtils.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala index 746b6fe65cc..f20f70b954d 100644 --- 
a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsQueryErrorUtils.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala index 82e243b8380..5d2c28f86a8 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedWriter.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/Spark32XShimsUtils.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/Spark32XShimsUtils.scala index 86ea5c2e7ad..473f38fb230 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/Spark32XShimsUtils.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/shims/Spark32XShimsUtils.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsPushBasedFetchHelper.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsPushBasedFetchHelper.scala index 6e2bae374c6..83ba97429c0 100644 --- 
a/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsPushBasedFetchHelper.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsPushBasedFetchHelper.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.storage diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala index 9a3a2d6adc8..620f2bb38bd 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/storage/RapidsShuffleBlockFetcherIterator.scala @@ -47,7 +47,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.storage diff --git a/sql-plugin/src/main/spark321/scala/com/nvidia/spark/rapids/shims/Spark321PlusShims.scala b/sql-plugin/src/main/spark321/scala/com/nvidia/spark/rapids/shims/Spark321PlusShims.scala index d4b881b157a..768d385eb86 100644 --- a/sql-plugin/src/main/spark321/scala/com/nvidia/spark/rapids/shims/Spark321PlusShims.scala +++ b/sql-plugin/src/main/spark321/scala/com/nvidia/spark/rapids/shims/Spark321PlusShims.scala @@ -46,7 +46,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark323/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala b/sql-plugin/src/main/spark323/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala index 7935e788efa..f97fe3063de 100644 --- a/sql-plugin/src/main/spark323/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala +++ 
b/sql-plugin/src/main/spark323/scala/org/apache/spark/sql/rapids/shims/GpuAscii.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilter.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilter.scala index bf630527569..d8db0c0c48b 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilter.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilter.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilterMightContain.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilterMightContain.scala index 81a26c5d4b4..c0f4b36e929 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilterMightContain.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuBloomFilterMightContain.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuInSubqueryExec.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuInSubqueryExec.scala index e7da3e77af2..eb9e72a3b5a 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuInSubqueryExec.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/GpuInSubqueryExec.scala @@ -37,7 +37,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff 
--git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala index a95d625605f..1c400ec02b8 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/AnsiUtil.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala index 9a3b367d4ab..7a9c1360460 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BloomFilterShims.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala index c94a365b013..c150ee997be 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/BucketingUtilsShim.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala index 429c3984afa..9e7f216ce4b 100644 --- 
a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/CharVarcharUtilsShims.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala index 22a7441be0c..a29ae81dcfd 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala index db55affc2eb..95b9eaca7c6 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DistributionUtil.scala @@ -37,7 +37,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/FilteredPartitions.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/FilteredPartitions.scala index d87b551762c..41bfb0fcfe3 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/FilteredPartitions.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/FilteredPartitions.scala 
@@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala index 8f05c92a97e..3fd8fd92afe 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuDataSourceRDD.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala index dc870ad7fab..a8d357d5492 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuHashPartitioning.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtilsBase.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtilsBase.scala index 3e75e6bc684..bb71d54e734 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtilsBase.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtilsBase.scala @@ -40,7 +40,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git 
a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala index 45dac2ff61e..d57cb99e929 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuRangePartitioning.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala index 34ff51c54d5..31be102f38e 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/GpuTypeShims.scala @@ -40,7 +40,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala index d39228e4d63..f221308e28a 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/InSubqueryShims.scala @@ -37,7 +37,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala index 62d76b8412b..2e761588670 100644 --- 
a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcReadingShims.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcShims.scala index 4cb2948f6ad..7cabfdd533b 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/OrcShims.scala @@ -38,7 +38,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala index 5ddc99136ad..47e6ebc13ed 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsFileSourceMetaUtils.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala index 9e43541d186..07555a654ed 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsOrcScanMeta.scala @@ -41,7 +41,7 @@ {"spark": 
"357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala index 50f08afe247..4216546333f 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RapidsParquetScanMeta.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RoundingShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RoundingShims.scala index b5ea21d4b24..56c010696f9 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RoundingShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/RoundingShims.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/ScanExecShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/ScanExecShims.scala index 364cf622a6c..fb45424acf3 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/ScanExecShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/ScanExecShims.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git 
a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusNonDBShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusNonDBShims.scala index d6f249685fd..bf2d0e1da18 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusNonDBShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusNonDBShims.scala @@ -37,7 +37,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusShims.scala index d936a6bd796..6277dee9823 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/Spark330PlusShims.scala @@ -37,7 +37,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetFieldIdShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetFieldIdShims.scala index e4e21bb6e3b..f4a28888b29 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetFieldIdShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetFieldIdShims.scala @@ -40,7 +40,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetSchemaClipShims.scala 
b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetSchemaClipShims.scala index b11ec25b85a..d5051b3b497 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetSchemaClipShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/parquet/ParquetSchemaClipShims.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/RapidsVectorizedColumnReader.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/RapidsVectorizedColumnReader.scala index d2e8226eb5a..45a2b6b2010 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/RapidsVectorizedColumnReader.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/RapidsVectorizedColumnReader.scala @@ -40,7 +40,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.parquet diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala index e69656f0dc0..13c56b590ef 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/execution/datasources/parquet/ShimCurrentBatchIterator.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package 
org.apache.spark.sql.execution.datasources.parquet diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/aggregate/GpuBloomFilterAggregate.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/aggregate/GpuBloomFilterAggregate.scala index bb226092afe..31605ecf0df 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/aggregate/GpuBloomFilterAggregate.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/aggregate/GpuBloomFilterAggregate.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.aggregate diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtilsFor330plus.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtilsFor330plus.scala index 1bc42508e0d..2fa244b2859 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtilsFor330plus.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtilsFor330plus.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala index 988bbeb51b0..35edb17e51d 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/RapidsShuffleThreadedReader.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package 
org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/intervalExpressions.scala b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/intervalExpressions.scala index 8e7d461aee1..429ef2d01d5 100644 --- a/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/intervalExpressions.scala +++ b/sql-plugin/src/main/spark330/scala/org/apache/spark/sql/rapids/shims/intervalExpressions.scala @@ -41,7 +41,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala index 0e9e317549a..d652a0b8eb3 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/AnsiCastShim.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala index 9efb4ee67b9..14682e77f4e 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/CastingConfigShim.scala @@ -34,7 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala 
b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala index a025f2026d3..e82df102794 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ColumnDefaultValuesShims.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala index afe3e82324e..396c6181add 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/DecimalArithmeticOverrides.scala @@ -34,7 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala index 5a6cf3c026a..39f26a1b91f 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GetMapValueMeta.scala @@ -34,7 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala index 988658d320b..47b33fdebb4 100644 --- 
a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/GpuCastShims.scala @@ -34,7 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala index daa25d7dded..e3ff25f642f 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/ShimFilePartitionReaderFactory.scala @@ -34,7 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala index b135bfeda1e..56ba82286c7 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/TypeUtilsShims.scala @@ -34,7 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/parquet/ParquetStringPredShims.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/parquet/ParquetStringPredShims.scala index f90f06bb3e0..dcd5d161168 100644 --- a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/parquet/ParquetStringPredShims.scala +++ 
b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/parquet/ParquetStringPredShims.scala @@ -34,7 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/execution/datasources/rapids/DataSourceStrategyUtils.scala b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/execution/datasources/rapids/DataSourceStrategyUtils.scala index 024b32029bb..749f0bc5265 100644 --- a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/execution/datasources/rapids/DataSourceStrategyUtils.scala +++ b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/execution/datasources/rapids/DataSourceStrategyUtils.scala @@ -34,7 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.rapids diff --git a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/GpuCheckOverflowInTableInsert.scala b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/GpuCheckOverflowInTableInsert.scala index c215fe7b296..57ed2db6c21 100644 --- a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/GpuCheckOverflowInTableInsert.scala +++ b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/GpuCheckOverflowInTableInsert.scala @@ -39,7 +39,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala index e5a45b80f34..15038b0f234 100644 --- 
a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala +++ b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/aggregate/aggregateFunctions.scala @@ -34,7 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.aggregate diff --git a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/arithmetic.scala b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/arithmetic.scala index ef280f41a64..2ccd50756de 100644 --- a/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/arithmetic.scala +++ b/sql-plugin/src/main/spark330db/scala/org/apache/spark/sql/rapids/arithmetic.scala @@ -34,7 +34,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark331/scala/com/nvidia/spark/rapids/shims/Spark331PlusNonDBShims.scala b/sql-plugin/src/main/spark331/scala/com/nvidia/spark/rapids/shims/Spark331PlusNonDBShims.scala index 50874f08c87..117b00d7a1e 100644 --- a/sql-plugin/src/main/spark331/scala/com/nvidia/spark/rapids/shims/Spark331PlusNonDBShims.scala +++ b/sql-plugin/src/main/spark331/scala/com/nvidia/spark/rapids/shims/Spark331PlusNonDBShims.scala @@ -35,7 +35,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala index bda176311db..2247b0ff5f6 100644 --- a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala +++ 
b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/CreateDataSourceTableAsSelectCommandMetaShims.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuInsertIntoHiveTable.scala b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuInsertIntoHiveTable.scala index 38c62e1b2cd..3a3077682e2 100644 --- a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuInsertIntoHiveTable.scala +++ b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuInsertIntoHiveTable.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuKnownNullable.scala b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuKnownNullable.scala index 7b1be5fde11..fb1e6979825 100644 --- a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuKnownNullable.scala +++ b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuKnownNullable.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala index b500f164292..a8ab4a70fc7 100644 --- a/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala +++ 
b/sql-plugin/src/main/spark332db/scala/com/nvidia/spark/rapids/shims/GpuOptimizedCreateHiveTableAsSelectCommandShims.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/execution/datasources/GpuWriteFiles.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/execution/datasources/GpuWriteFiles.scala index 6315f4d4d98..a9d510ed20a 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/execution/datasources/GpuWriteFiles.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/execution/datasources/GpuWriteFiles.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveFileUtil.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveFileUtil.scala index ce2928d00d0..385c1f36a87 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveFileUtil.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveFileUtil.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala index 7aad54b061f..3ff97a85025 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala +++ 
b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/hive/rapids/shims/HiveProviderCmdShims.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala index 68eb854231a..60d4d9dbad2 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/GpuFileFormatWriter.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCastToNumberErrorShim.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCastToNumberErrorShim.scala index 7bc5c4b4914..a69edc89afa 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCastToNumberErrorShim.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCastToNumberErrorShim.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala index 99f4c428af8..787be00eb8d 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala +++ 
b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuCreateDataSourceTableAsSelectCommandShims.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala index 662e5e845be..d62c7bdc709 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/GpuDataSource.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala index 892dd306443..6740fe0cb4b 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SchemaUtilsShims.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkDateTimeExceptionShims.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkDateTimeExceptionShims.scala index b5651a1fdad..faef3cbc868 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkDateTimeExceptionShims.scala +++ 
b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkDateTimeExceptionShims.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala index 475f383d031..8796d78b2ec 100644 --- a/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala +++ b/sql-plugin/src/main/spark332db/scala/org/apache/spark/sql/rapids/shims/SparkUpgradeExceptionShims.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark334/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala b/sql-plugin/src/main/spark334/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala index f431fbadc32..7f659dd1e29 100644 --- a/sql-plugin/src/main/spark334/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala +++ b/sql-plugin/src/main/spark334/scala/com/nvidia/spark/rapids/shims/GetSequenceSize.scala @@ -28,7 +28,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala index 86b72246c9e..01045a44e06 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/CastCheckShims.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": 
"400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ConvShim.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ConvShim.scala index f36b11762c7..312da67bee6 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ConvShim.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ConvShim.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala index 292e681e0e7..f14a1a6fdcb 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GlobalLimitShims.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala index b177984ca91..34e5d633118 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/GpuBroadcastJoinMeta.scala @@ -29,7 +29,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git 
a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala index 68261f636f1..0197014347b 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/OrcProtoWriterShim.scala @@ -33,7 +33,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShimBase.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShimBase.scala index 55650f62e0e..ef9413ca96d 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShimBase.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShimBase.scala @@ -30,7 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala index 4e673296dbe..f07eb31313a 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/ShuffleOriginUtil.scala @@ -30,7 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/Spark340PlusNonDBShims.scala 
b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/Spark340PlusNonDBShims.scala index b7e8137454a..c9251004312 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/Spark340PlusNonDBShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/Spark340PlusNonDBShims.scala @@ -30,7 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/TagScanForRuntimeFiltering.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/TagScanForRuntimeFiltering.scala index 05a816e34f5..f10b1472e99 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/TagScanForRuntimeFiltering.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/TagScanForRuntimeFiltering.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetLegacyNanoAsLongShims.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetLegacyNanoAsLongShims.scala index e5d54d0d848..8df32098a14 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetLegacyNanoAsLongShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetLegacyNanoAsLongShims.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampAnnotationShims.scala 
b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampAnnotationShims.scala index cbdd1fade9f..af3ebe1da97 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampAnnotationShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampAnnotationShims.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampNTZShims.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampNTZShims.scala index 8fc96095899..c5350ac30b7 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampNTZShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/parquet/ParquetTimestampNTZShims.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.parquet diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala index daf66422bef..093c7f56a90 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleIterator.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shuffle diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala 
b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala index 8fcb9a3f973..3cd3b99a6ed 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/csv/GpuCsvUtils.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.catalyst.csv diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala index 4bf54eb08cb..bde3c26a3dd 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/catalyst/json/GpuJsonUtils.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.catalyst.json diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/errors/ConvUtils.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/errors/ConvUtils.scala index c7778902a7a..707ce91687f 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/errors/ConvUtils.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/errors/ConvUtils.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.errors diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala index 9738e655324..ed1a1d93f58 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala +++ 
b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/RapidsCachingReader.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala index d8fb5c1c2e9..ef3edfd130a 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastHashJoinExec.scala @@ -30,7 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala index c2a9ef5fc66..4e5288cb53c 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastNestedLoopJoinExec.scala @@ -30,7 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala index 2a20f8e6977..40eb8ee6998 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala +++ 
b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/execution/ShimTrampolineUtil.scala @@ -30,7 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala index f520d5b34ea..c124a8fe981 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/GpuJsonToStructsShim.scala @@ -31,7 +31,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils340PlusBase.scala b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils340PlusBase.scala index a080c25de09..3cceda61a1e 100644 --- a/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils340PlusBase.scala +++ b/sql-plugin/src/main/spark340/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils340PlusBase.scala @@ -30,7 +30,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuToPrettyString.scala b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuToPrettyString.scala index 19ca30b8505..b600cbc601e 100644 --- a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuToPrettyString.scala +++ 
b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuToPrettyString.scala @@ -27,7 +27,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuWindowGroupLimitExec.scala b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuWindowGroupLimitExec.scala index 9685275367d..f0871cb03e5 100644 --- a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuWindowGroupLimitExec.scala +++ b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/GpuWindowGroupLimitExec.scala @@ -27,7 +27,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala index 25818de0b0c..b9a5f27f320 100644 --- a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala +++ b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PlanShimsImpl.scala @@ -19,7 +19,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala index c8601afd63f..6008a3dc539 100644 --- a/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala +++ b/sql-plugin/src/main/spark341db/scala/com/nvidia/spark/rapids/shims/PythonUDFShim.scala @@ -27,7 +27,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} 
+{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/execution/rapids/shims/SplitFiles.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/execution/rapids/shims/SplitFiles.scala index dfb9371985e..b90bbde0eed 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/execution/rapids/shims/SplitFiles.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/execution/rapids/shims/SplitFiles.scala @@ -28,7 +28,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/CreateFunctions.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/CreateFunctions.scala index 72fe3f5604f..e98d292614b 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/CreateFunctions.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/CreateFunctions.scala @@ -27,7 +27,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala index 938e2d29723..b30cac05725 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/FileSinkDescShim.scala @@ -28,7 +28,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package 
org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala index fc48b548ef5..1cdb5625bb8 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/hive/rapids/shims/HiveInspectorsShim.scala @@ -28,7 +28,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala index 12c8bef827f..551a748258e 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonOutput.scala @@ -19,7 +19,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala index 47306245636..62fc67a09a3 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuArrowPythonRunner.scala @@ -18,7 +18,7 @@ {"spark": "350db143"} {"spark": 
"400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala index 4b7d9bf2b5a..19e0181cd3f 100644 --- a/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala +++ b/sql-plugin/src/main/spark341db/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuCoGroupedArrowPythonRunner.scala @@ -19,7 +19,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark342/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala b/sql-plugin/src/main/spark342/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala index d9f1762d94a..e77c38b78b7 100644 --- a/sql-plugin/src/main/spark342/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala +++ b/sql-plugin/src/main/spark342/scala/com/nvidia/spark/rapids/shims/DecimalMultiply128.scala @@ -28,7 +28,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuDeltaWrite.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuDeltaWrite.scala index 2cbf276dbbd..1b56b4a10d9 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuDeltaWrite.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuDeltaWrite.scala @@ -25,7 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": 
"411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuMergeRowsExecMeta.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuMergeRowsExecMeta.scala index 5458deaefa2..db7b8db9cc5 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuMergeRowsExecMeta.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/GpuMergeRowsExecMeta.scala @@ -25,7 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/BatchScanExecMetaBase.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/BatchScanExecMetaBase.scala index 7449c283428..35063d34944 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/BatchScanExecMetaBase.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/BatchScanExecMetaBase.scala @@ -26,7 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/ExternalSourceShim.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/ExternalSourceShim.scala index aeeb65195ad..6f430e7d0d6 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/ExternalSourceShim.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/ExternalSourceShim.scala @@ -26,7 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala 
b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala index 104e49bcaf7..688892d73d1 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/GpuIntervalUtils.scala @@ -24,7 +24,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/KeyGroupedPartitioningShim.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/KeyGroupedPartitioningShim.scala index 60979e1a939..06f794f44c9 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/KeyGroupedPartitioningShim.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/KeyGroupedPartitioningShim.scala @@ -24,7 +24,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala index c07b687c10a..753f5a2783e 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/LegacyBehaviorPolicyShim.scala @@ -26,7 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala index 7c4ab68164f..673d4f92f83 100644 --- 
a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/NullOutputStreamShim.scala @@ -26,7 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/Spark350PlusNonDBShims.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/Spark350PlusNonDBShims.scala index 6f1e64fd3b1..3d272fa4ce6 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/Spark350PlusNonDBShims.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/Spark350PlusNonDBShims.scala @@ -25,7 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/v2WriteCommandMetasShim.scala b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/v2WriteCommandMetasShim.scala index db5bef6934f..b85196eba89 100644 --- a/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/v2WriteCommandMetasShim.scala +++ b/sql-plugin/src/main/spark350/scala/com/nvidia/spark/rapids/shims/v2WriteCommandMetasShim.scala @@ -26,7 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/catalyst/GpuProjectingColumnarBatch.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/catalyst/GpuProjectingColumnarBatch.scala index 7ecb157a968..34f1d2f1791 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/catalyst/GpuProjectingColumnarBatch.scala +++ 
b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/catalyst/GpuProjectingColumnarBatch.scala @@ -25,7 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.catalyst diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala index 7183644fbe8..4f46eb7c7df 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ShimVectorizedColumnReader.scala @@ -25,7 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.parquet.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/GpuMergeRowsExec.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/GpuMergeRowsExec.scala index 2063a5f4793..add1466950b 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/GpuMergeRowsExec.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/GpuMergeRowsExec.scala @@ -25,7 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.v2 diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala index 
ba4ed142a5e..efdb1698e00 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala @@ -26,7 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.v2 diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala index 895b2c6aebb..4499856b297 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/execution/GpuShuffleMeta.scala @@ -25,7 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala index f135c1a2a06..63331e00dad 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/ArrowUtilsShim.scala @@ -26,7 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala index 18a845f39d6..910d4419ef0 100644 --- 
a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/DataTypeUtilsShim.scala @@ -26,7 +26,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala index 548376119d9..2d0e134a854 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/GpuMapInPandasExecMeta.scala @@ -25,7 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/SchemaMetadataShims.scala b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/SchemaMetadataShims.scala index 93c76bd1d4b..a520a3e2f17 100644 --- a/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/SchemaMetadataShims.scala +++ b/sql-plugin/src/main/spark350/scala/org/apache/spark/sql/rapids/shims/SchemaMetadataShims.scala @@ -25,7 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/RapidsShuffleManager.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/RapidsShuffleManager.scala index 554332eb7d7..082effb7b18 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/RapidsShuffleManager.scala +++ 
b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/RapidsShuffleManager.scala @@ -18,7 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.$_spark.version.classifier_ diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala index d993133b4e6..8c5097947e4 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/BatchScanExecMeta.scala @@ -18,7 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/CastTimeToIntShim.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/CastTimeToIntShim.scala index b349ee2bc7a..c13917827af 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/CastTimeToIntShim.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/CastTimeToIntShim.scala @@ -17,7 +17,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala index c7bf3ffb640..33e01c055f5 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala @@ -18,7 +18,7 @@ 
{"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala index bf94e2cd571..77c692e5ca2 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/RaiseErrorShim.scala @@ -17,7 +17,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/ShuffleManagerShimUtils.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/ShuffleManagerShimUtils.scala index c205b3c13e6..e5446131b52 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/ShuffleManagerShimUtils.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/ShuffleManagerShimUtils.scala @@ -18,7 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/execution/python/shims/PythonArgumentsUtils.scala b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/execution/python/shims/PythonArgumentsUtils.scala index ee377048107..36bad6b7929 100644 --- a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/execution/python/shims/PythonArgumentsUtils.scala +++ b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/execution/python/shims/PythonArgumentsUtils.scala @@ -18,7 +18,7 @@ {"spark": "350db143"} 
{"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/ArrayInvalidArgumentErrorUtils.scala b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/ArrayInvalidArgumentErrorUtils.scala index 3742812cc80..3df2036a545 100644 --- a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/ArrayInvalidArgumentErrorUtils.scala +++ b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/ArrayInvalidArgumentErrorUtils.scala @@ -18,7 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/SequenceSizeExceededLimitErrorBuilder.scala b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/SequenceSizeExceededLimitErrorBuilder.scala index bffa4e91f2c..d8f8adb8a87 100644 --- a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/SequenceSizeExceededLimitErrorBuilder.scala +++ b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/SequenceSizeExceededLimitErrorBuilder.scala @@ -18,7 +18,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/misc.scala b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/misc.scala index c76ef8972b8..65f2514fe5d 100644 --- a/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/misc.scala +++ b/sql-plugin/src/main/spark350db143/scala/org/apache/spark/sql/rapids/shims/misc.scala @@ -17,7 
+17,7 @@ {"spark": "350db143"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanExecLikeShim.scala b/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanExecLikeShim.scala index 38bbad0ba07..3c6833473ac 100644 --- a/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanExecLikeShim.scala +++ b/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanExecLikeShim.scala @@ -23,7 +23,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanUtils.scala b/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanUtils.scala index 87162bcc91d..0cae8ac08d9 100644 --- a/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanUtils.scala +++ b/sql-plugin/src/main/spark352/scala/com/nvidia/spark/rapids/shims/InMemoryTableScanUtils.scala @@ -23,7 +23,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala index 587280b8b54..e68ecefa93d 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala @@ -16,7 +16,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines 
***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala index fc9190034d4..865f71c81a8 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/DateTimeUtilsShims.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GetJsonObjectShim.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GetJsonObjectShim.scala index 20741bb047b..ce6b346b079 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GetJsonObjectShim.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GetJsonObjectShim.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala index 714374d9bea..943c3b568f3 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/GpuOrcDataReader.scala @@ -16,7 +16,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/LogicalPlanShims.scala 
b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/LogicalPlanShims.scala index 26302ca5e88..6d94f7a40b4 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/LogicalPlanShims.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/LogicalPlanShims.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/MapInArrowExecShims.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/MapInArrowExecShims.scala index 42491b3e9c3..bb1eb86b374 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/MapInArrowExecShims.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/MapInArrowExecShims.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/NullIntolerantShim.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/NullIntolerantShim.scala index 23693d06925..2491c7446a1 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/NullIntolerantShim.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/NullIntolerantShim.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/OperatorsUtilShims.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/OperatorsUtilShims.scala index 5805473d65b..4a2bc138d89 100644 --- 
a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/OperatorsUtilShims.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/OperatorsUtilShims.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala index afbb691abed..a7dcc9dcd89 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/PartitionedFileUtilsShim.scala @@ -18,7 +18,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/Spark400PlusCommonShims.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/Spark400PlusCommonShims.scala index 7353b1e379a..984b53474a7 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/Spark400PlusCommonShims.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/Spark400PlusCommonShims.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala index a203b8e4eaa..1813b788d49 100644 --- 
a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.v2.rapids diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala index 27a6a8ea99b..c878d435e16 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.v2.rapids diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala index 6136e95ca0f..147232509f1 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/rapids/shims/FilePartitionShims.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.rapids.shims diff --git 
a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala index 4b890de8176..848c1b17353 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/hive/rapids/shims/CommandUtilsShim.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.hive.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/nvidia/DFUDFShims.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/nvidia/DFUDFShims.scala index 741889e3ad2..68a1412016c 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/nvidia/DFUDFShims.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/nvidia/DFUDFShims.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.nvidia diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala index 7c50cd08fdb..b783ee3b7d9 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/GpuSubqueryBroadcastMeta.scala @@ -16,7 +16,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution diff --git 
a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala index 3cf27e223d0..e59935b0596 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuBasePythonRunner.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala index 64a03f19624..ca756c71ee6 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/execution/python/shims/WritePythonUDFUtils.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/GpuMapInArrowExecMeta.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/GpuMapInArrowExecMeta.scala index 3a69475b9c7..3c6859773a4 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/GpuMapInArrowExecMeta.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/GpuMapInArrowExecMeta.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": 
"410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/InvokeExprMeta.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/InvokeExprMeta.scala index 04b99e42b2a..10dc60be81f 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/InvokeExprMeta.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/InvokeExprMeta.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala index 87c7b50b72d..046ce5d353c 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/RapidsErrorUtils.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/SparkSessionUtils.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/SparkSessionUtils.scala index ba92d3db78e..4ac54515a9f 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/SparkSessionUtils.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/SparkSessionUtils.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff 
--git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala index f9cbbb3abda..8d79cbf0073 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala index 5c50b1af570..39bcd6c5f80 100644 --- a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala @@ -16,7 +16,7 @@ /*** spark-rapids-shim-json-lines {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala index f6af7f622e3..f6d9cb85dc5 100644 --- a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala +++ b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala @@ -16,7 +16,7 @@ /*** spark-rapids-shim-json-lines {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.spark401 diff --git 
a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala index 2bcebffdbd4..e34ec1f7807 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala index 3a449b12bb6..121c2f5c993 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala similarity index 99% rename from 
sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala index 85723ec699e..103bd353ddc 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala index 263cb4fb2c3..01ec9f78f1d 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala index 04552935fd7..6e093cc4299 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala +++ 
b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/ParquetVariantShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala index 68d558d8c21..46a7c8d854a 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/ShowNamespacesExecShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala similarity index 99% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/SparkShims.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala index 81f72bcf35e..4ab04c7b69f 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala 
b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala index 0842f3e21f6..2bfe24ebcf9 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/StoragePartitionJoinShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala index 602215f0e0d..000b39e7a81 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/TimeAddShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala similarity index 99% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala index 5baab0e2766..907bcbdb861 100644 --- 
a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/TryModeShim.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala index c3dbf496826..72218f7f27b 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala index 5b4b4d4775a..ec4ada6a0b7 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff 
--git a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimServiceProvider.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimServiceProvider.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimServiceProvider.scala rename to sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimServiceProvider.scala index fa41297f65c..df6ce70dffa 100644 --- a/sql-plugin/src/main/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimServiceProvider.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimServiceProvider.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.spark410 diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala rename to sql-plugin/src/main/spark411/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala index 390e7bffa3d..6c835cb9f85 100644 --- a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala +++ b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/execution/datasources/parquet/rapids/shims/ParquetCVShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.parquet diff --git 
a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala similarity index 99% rename from sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala rename to sql-plugin/src/main/spark411/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala index 4a19a476b8b..d355a6b51bd 100644 --- a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala +++ b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.v2.rapids diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala rename to sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala index b3b524d09f0..a8c7d28b630 100644 --- a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala +++ b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowInPandasExecTypeShim.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ 
package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala similarity index 99% rename from sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala rename to sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala index 9402b7736fa..9f664bdb705 100644 --- a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala +++ b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/shims/FileCommitProtocolShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala similarity index 98% rename from sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala rename to sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala index 4577fd7a46a..cd98593fe81 100644 --- a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala +++ b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/shims/FileStreamSinkShims.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala similarity index 99% 
rename from sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala rename to sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala index d6a42db838b..7b82072b5db 100644 --- a/sql-plugin/src/main/spark410/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala +++ b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/shims/datetimeExpressions.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.shims diff --git a/sql-plugin/src/test/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimsSuite.scala b/sql-plugin/src/test/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala similarity index 98% rename from sql-plugin/src/test/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimsSuite.scala rename to sql-plugin/src/test/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala index dd69a11d180..de782eb2325 100644 --- a/sql-plugin/src/test/spark410/scala/com/nvidia/spark/rapids/shims/spark410/SparkShimsSuite.scala +++ b/sql-plugin/src/test/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala @@ -15,7 +15,7 @@ */ /*** spark-rapids-shim-json-lines -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.spark410 diff --git a/tests/src/test/spark320/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala b/tests/src/test/spark320/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala index cb1231fac66..ed096b78435 100644 --- a/tests/src/test/spark320/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala +++ b/tests/src/test/spark320/scala/com/nvidia/spark/rapids/shims/OrcStatisticShim.scala @@ -44,7 +44,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} 
spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/tests/src/test/spark330/scala/org/apache/spark/sql/rapids/GpuInSubqueryExecSuite.scala b/tests/src/test/spark330/scala/org/apache/spark/sql/rapids/GpuInSubqueryExecSuite.scala index 7141191fe59..e792bac67ed 100644 --- a/tests/src/test/spark330/scala/org/apache/spark/sql/rapids/GpuInSubqueryExecSuite.scala +++ b/tests/src/test/spark330/scala/org/apache/spark/sql/rapids/GpuInSubqueryExecSuite.scala @@ -34,7 +34,7 @@ {"spark": "354"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/tests/src/test/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala b/tests/src/test/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala index 13b09bef7d8..83cc8479ae9 100644 --- a/tests/src/test/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala +++ b/tests/src/test/spark340/scala/com/nvidia/spark/rapids/shuffle/RapidsShuffleTestHelper.scala @@ -32,7 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shuffle diff --git a/tests/src/test/spark341db/scala/com/nvidia/spark/rapids/ToPrettyStringSuite.scala b/tests/src/test/spark341db/scala/com/nvidia/spark/rapids/ToPrettyStringSuite.scala index 7f1def8ab94..8e72a96f8ff 100644 --- a/tests/src/test/spark341db/scala/com/nvidia/spark/rapids/ToPrettyStringSuite.scala +++ b/tests/src/test/spark341db/scala/com/nvidia/spark/rapids/ToPrettyStringSuite.scala @@ -27,7 +27,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids diff --git a/tests/src/test/spark350/scala/org/apache/spark/sql/rapids/GpuCreateDataSourceTableAsSelectCommandSuite.scala 
b/tests/src/test/spark350/scala/org/apache/spark/sql/rapids/GpuCreateDataSourceTableAsSelectCommandSuite.scala index 297306b59e6..7b6e8add4a9 100644 --- a/tests/src/test/spark350/scala/org/apache/spark/sql/rapids/GpuCreateDataSourceTableAsSelectCommandSuite.scala +++ b/tests/src/test/spark350/scala/org/apache/spark/sql/rapids/GpuCreateDataSourceTableAsSelectCommandSuite.scala @@ -25,7 +25,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "410"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids diff --git a/tools/generated_files/410/operatorsScore.csv b/tools/generated_files/411/operatorsScore.csv similarity index 100% rename from tools/generated_files/410/operatorsScore.csv rename to tools/generated_files/411/operatorsScore.csv diff --git a/tools/generated_files/410/supportedDataSource.csv b/tools/generated_files/411/supportedDataSource.csv similarity index 100% rename from tools/generated_files/410/supportedDataSource.csv rename to tools/generated_files/411/supportedDataSource.csv diff --git a/tools/generated_files/410/supportedExecs.csv b/tools/generated_files/411/supportedExecs.csv similarity index 100% rename from tools/generated_files/410/supportedExecs.csv rename to tools/generated_files/411/supportedExecs.csv diff --git a/tools/generated_files/410/supportedExprs.csv b/tools/generated_files/411/supportedExprs.csv similarity index 100% rename from tools/generated_files/410/supportedExprs.csv rename to tools/generated_files/411/supportedExprs.csv From 479ded4470c984234745f91a352719e6a348dae2 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Tue, 13 Jan 2026 15:42:27 +0800 Subject: [PATCH 23/59] Use Java 17 release Signed-off-by: Chong Gao --- scala2.13/pom.xml | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index d99a94fc0c3..3dd715cc67e 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -779,6 +779,7 @@ 411 + 17 
2.13.17 ${spark411.version} ${spark411.version} @@ -790,23 +791,25 @@ [17,) Support for Spark ${spark.version} is only available with Java 17+ + + + + net.alchim31.maven + scala-maven-plugin + + + -release:17 + -feature + -unchecked + -deprecation + + + + + delta-lake/delta-stub - - - - - net.alchim31.maven - scala-maven-plugin - - - ${java.major.version} - - - - - From 855edbc362b1b2c6856897097c039740b4785b5a Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Tue, 13 Jan 2026 15:53:57 +0800 Subject: [PATCH 24/59] Fix: Change version from 410 to 411 Signed-off-by: Chong Gao --- .../nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql-plugin/src/test/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala b/sql-plugin/src/test/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala index de782eb2325..295ba2341d2 100644 --- a/sql-plugin/src/test/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala +++ b/sql-plugin/src/test/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala @@ -24,7 +24,7 @@ import org.scalatest.funsuite.AnyFunSuite class SparkShimsSuite extends AnyFunSuite with FQSuiteName { test("spark shims version") { - assert(ShimLoader.getShimVersion === SparkShimVersion(4, 1, 0)) + assert(ShimLoader.getShimVersion === SparkShimVersion(4, 1, 1)) } test("shuffle manager class") { From 33c45ece64a50745dac1fcbd4ae85d23e33aabca Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Tue, 13 Jan 2026 15:56:01 +0800 Subject: [PATCH 25/59] Fix Signed-off-by: Chong Gao --- .../nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql-plugin/src/test/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala b/sql-plugin/src/test/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala index 295ba2341d2..c114e819013 
100644 --- a/sql-plugin/src/test/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala +++ b/sql-plugin/src/test/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimsSuite.scala @@ -29,7 +29,7 @@ class SparkShimsSuite extends AnyFunSuite with FQSuiteName { test("shuffle manager class") { assert(ShimLoader.getRapidsShuffleManagerClass === - classOf[com.nvidia.spark.rapids.spark410.RapidsShuffleManager].getCanonicalName) + classOf[com.nvidia.spark.rapids.spark411.RapidsShuffleManager].getCanonicalName) } } From 2069825e936d1838a67ee19794c0b972c216fd51 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Tue, 13 Jan 2026 15:57:02 +0800 Subject: [PATCH 26/59] Fix Signed-off-by: Chong Gao --- .../spark/rapids/shims/spark411/SparkShimServiceProvider.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimServiceProvider.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimServiceProvider.scala index df6ce70dffa..4a6e1a6fb39 100644 --- a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimServiceProvider.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimServiceProvider.scala @@ -22,7 +22,7 @@ package com.nvidia.spark.rapids.shims.spark410 import com.nvidia.spark.rapids.SparkShimVersion object SparkShimServiceProvider { - val VERSION = SparkShimVersion(4, 1, 0) + val VERSION = SparkShimVersion(4, 1, 1) val VERSIONNAMES = Seq(s"$VERSION") } From 7df38c839beae3b3cecae168deb9e7e9c66ad84e Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Tue, 13 Jan 2026 16:41:33 +0800 Subject: [PATCH 27/59] Fix Signed-off-by: Chong Gao --- .../spark/rapids/shims/spark411/SparkShimServiceProvider.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimServiceProvider.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimServiceProvider.scala index 4a6e1a6fb39..bd5a848206a 100644 --- a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimServiceProvider.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/spark411/SparkShimServiceProvider.scala @@ -17,7 +17,7 @@ /*** spark-rapids-shim-json-lines {"spark": "411"} spark-rapids-shim-json-lines ***/ -package com.nvidia.spark.rapids.shims.spark410 +package com.nvidia.spark.rapids.shims.spark411 import com.nvidia.spark.rapids.SparkShimVersion From b5cb2109e0d0f993cc98adecbb669ebce0e2b16c Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Tue, 13 Jan 2026 16:43:40 +0800 Subject: [PATCH 28/59] Fix Signed-off-by: Chong Gao --- .../spark/rapids/shims/spark401/SparkShimServiceProvider.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala index f6d9cb85dc5..1779ea4f649 100644 --- a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala +++ b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimServiceProvider.scala @@ -16,7 +16,6 @@ /*** spark-rapids-shim-json-lines {"spark": "401"} -{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims.spark401 From 4041e205bc1046b708b70cd624a6f39e153de04f Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Tue, 13 Jan 2026 17:10:39 +0800 Subject: [PATCH 29/59] Fix ITs: Spark 4.1.0+ returns bytes instead of bytearray for binary data Signed-off-by: Chong Gao --- integration_tests/src/main/python/asserts.py | 5 ++++- 1 file changed, 4 insertions(+), 1 
deletion(-) diff --git a/integration_tests/src/main/python/asserts.py b/integration_tests/src/main/python/asserts.py index b211b8b2bcc..cc9013cd845 100644 --- a/integration_tests/src/main/python/asserts.py +++ b/integration_tests/src/main/python/asserts.py @@ -1,4 +1,4 @@ -# Copyright (c) 2020-2025, NVIDIA CORPORATION. +# Copyright (c) 2020-2026, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -95,6 +95,9 @@ def _assert_equal(cpu, gpu, float_check, path): assert cpu == gpu, f"GPU ({gpu}) and CPU ({cpu}) decimal values are different at {path}" elif isinstance(cpu, bytearray): assert cpu == gpu, f"GPU ({gpu}) and CPU ({cpu}) bytearray values are different at {path}" + elif isinstance(cpu, bytes): + # Spark 4.1.0+ returns bytes instead of bytearray for binary data + assert cpu == gpu, f"GPU ({gpu}) and CPU ({cpu}) bytes values are different at {path}" elif isinstance(cpu, timedelta): # Used by interval type DayTimeInterval for Pyspark 3.3.0+ assert cpu == gpu, f"GPU ({gpu}) and CPU ({cpu}) timedelta values are different at {path}" From 4b16f3857dac0794c4e481268dece237d3f2eb68 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Tue, 13 Jan 2026 18:05:27 +0800 Subject: [PATCH 30/59] Format code Signed-off-by: Chong Gao --- .../org/apache/spark/sql/rapids/GpuFileFormatDataWriter.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuFileFormatDataWriter.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuFileFormatDataWriter.scala index f003f65c20e..923f34b3d65 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuFileFormatDataWriter.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/GpuFileFormatDataWriter.scala @@ -33,7 +33,6 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext import org.apache.spark.internal.Logging import 
org.apache.spark.internal.io.FileCommitProtocol -import org.apache.spark.sql.rapids.shims.FileCommitProtocolShims import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.catalog.{BucketSpec, ExternalCatalogUtils} import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec @@ -42,6 +41,7 @@ import org.apache.spark.sql.connector.write.DataWriter import org.apache.spark.sql.execution.datasources.{BucketingUtils, PartitioningUtils, WriteTaskResult} import org.apache.spark.sql.rapids.GpuFileFormatDataWriter._ import org.apache.spark.sql.rapids.GpuFileFormatWriter.GpuConcurrentOutputWriterSpec +import org.apache.spark.sql.rapids.shims.FileCommitProtocolShims import org.apache.spark.sql.types._ import org.apache.spark.sql.vectorized.ColumnarBatch import org.apache.spark.util.SerializableConfiguration From 0c8dd92c8cadc0c54ffdb9b8cdb88ff19c688c5c Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Wed, 14 Jan 2026 11:36:04 +0800 Subject: [PATCH 31/59] Update pom Signed-off-by: Chong Gao --- pom.xml | 4 ++-- scala2.13/pom.xml | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index dce045c416e..2639852ebb2 100644 --- a/pom.xml +++ b/pom.xml @@ -784,7 +784,7 @@ ${spark411.version} ${spark411.version} 1.13.1 - rapids-4-spark-delta-40x + rapids-4-spark-delta-stub 2.0.7 [17,) Support for Spark ${spark.version} is only available with Java 17+ @@ -806,7 +806,7 @@ - delta-lake/delta-40x + delta-lake/delta-stub --> diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index 3dd715cc67e..167dcc342a1 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -784,8 +784,6 @@ ${spark411.version} ${spark411.version} 1.13.1 - - rapids-4-spark-delta-stub 2.0.7 [17,) From 6a6efdf2ab8d42b0c5bdd98c28d6637c4d8f5e83 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Wed, 14 Jan 2026 13:57:23 +0800 Subject: [PATCH 32/59] 411 docs Signed-off-by: Chong Gao --- tools/generated_files/411/operatorsScore.csv | 1 + 
tools/generated_files/411/supportedDataSource.csv | 2 +- tools/generated_files/411/supportedExprs.csv | 3 +++ 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/tools/generated_files/411/operatorsScore.csv b/tools/generated_files/411/operatorsScore.csv index 1ddea11f3d4..ef316b38590 100644 --- a/tools/generated_files/411/operatorsScore.csv +++ b/tools/generated_files/411/operatorsScore.csv @@ -256,6 +256,7 @@ Second,4 SecondsToTimestamp,4 Sequence,4 Sha1,4 +Sha2,4 ShiftLeft,4 ShiftRight,4 ShiftRightUnsigned,4 diff --git a/tools/generated_files/411/supportedDataSource.csv b/tools/generated_files/411/supportedDataSource.csv index 67669d28435..577555d6de0 100644 --- a/tools/generated_files/411/supportedDataSource.csv +++ b/tools/generated_files/411/supportedDataSource.csv @@ -6,7 +6,7 @@ Delta,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S HiveText,read,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS HiveText,write,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS Iceberg,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S -Iceberg,write,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO +Iceberg,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,NS,NS,NS,NS,S,S JSON,read,S,S,S,S,S,S,S,PS,PS,S,S,NA,NS,NA,PS,NS,PS,NS,NA,NA ORC,read,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,PS,PS,PS,NS,NA,NA ORC,write,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,PS,PS,PS,NS,NA,NA diff --git a/tools/generated_files/411/supportedExprs.csv b/tools/generated_files/411/supportedExprs.csv index 9b5978383c7..fc88e99d754 100644 --- a/tools/generated_files/411/supportedExprs.csv +++ b/tools/generated_files/411/supportedExprs.csv @@ -532,6 +532,9 @@ Sequence,S,`sequence`,None,project,step,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NS,NA Sequence,S,`sequence`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA Sha1,S,`sha1`; `sha`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA Sha1,S,`sha1`; 
`sha`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sha2,S,`sha2`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA +Sha2,S,`sha2`,None,project,bitLength,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +Sha2,S,`sha2`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ShiftLeft,S,`<<`; `shiftleft`,None,project,value,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ShiftLeft,S,`<<`; `shiftleft`,None,project,amount,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ShiftLeft,S,`<<`; `shiftleft`,None,project,result,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA From 6856dfb624c5fe7fb6fd0a6865ef9d8a7724694f Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Wed, 14 Jan 2026 14:14:25 +0800 Subject: [PATCH 33/59] Fix DayTimeInterval shims for Spark 4.1.1 - Remove 411 marker from spark330/DayTimeIntervalShims.scala - Add Abs, MultiplyDTInterval, DivideDTInterval to spark411/DayTimeIntervalShims.scala Fixes DayTimeInterval operations (abs, multiply, divide) that were not being converted to GPU in Spark 4.1.1 builds. 
--- .../rapids/shims/DayTimeIntervalShims.scala | 1 - .../rapids/shims/DayTimeIntervalShims.scala | 47 ++++++++++++++++++- tools/generated_files/411/operatorsScore.csv | 2 + tools/generated_files/411/supportedExprs.csv | 14 ++++-- 4 files changed, 57 insertions(+), 7 deletions(-) diff --git a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala index a29ae81dcfd..567ffffe840 100644 --- a/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala +++ b/sql-plugin/src/main/spark330/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala @@ -41,7 +41,6 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala index 103bd353ddc..cc225c0acd0 100644 --- a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/DayTimeIntervalShims.scala @@ -22,16 +22,59 @@ package com.nvidia.spark.rapids.shims import com.nvidia.spark.rapids._ import org.apache.spark.sql.catalyst.expressions._ -import org.apache.spark.sql.rapids.shims.GpuTimestampAddInterval +import org.apache.spark.sql.internal.SQLConf +import org.apache.spark.sql.rapids._ +import org.apache.spark.sql.rapids.shims.{GpuDivideDTInterval, GpuMultiplyDTInterval, + GpuTimestampAddInterval} import org.apache.spark.sql.types.{CalendarIntervalType, DayTimeIntervalType} import org.apache.spark.unsafe.types.CalendarInterval /** - * DayTimeInterval shims for Spark 4.1.0+ + * DayTimeInterval shims for Spark 4.1.1+ * TimeAdd was renamed to TimestampAddInterval in Spark 4.1.0 */ 
object DayTimeIntervalShims { def exprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = Seq( + GpuOverrides.expr[Abs]( + "Absolute value", + ExprChecks.unaryProjectAndAstInputMatchesOutput( + TypeSig.implicitCastsAstTypes, + TypeSig.gpuNumeric + GpuTypeShims.additionalArithmeticSupportedTypes, + TypeSig.cpuNumeric + GpuTypeShims.additionalArithmeticSupportedTypes), + (a, conf, p, r) => new UnaryAstExprMeta[Abs](a, conf, p, r) { + val ansiEnabled = SQLConf.get.ansiEnabled + + override def tagSelfForAst(): Unit = { + if (ansiEnabled && GpuAnsi.needBasicOpOverflowCheck(a.dataType)) { + willNotWorkInAst("AST unary minus does not support ANSI mode.") + } + } + + // ANSI support for ABS was added in 3.2.0 SPARK-33275 + override def convertToGpu(child: Expression): GpuExpression = GpuAbs(child, ansiEnabled) + }), + GpuOverrides.expr[MultiplyDTInterval]( + "Day-time interval * number", + ExprChecks.binaryProject( + TypeSig.DAYTIME, + TypeSig.DAYTIME, + ("lhs", TypeSig.DAYTIME, TypeSig.DAYTIME), + ("rhs", TypeSig.gpuNumeric - TypeSig.DECIMAL_128, TypeSig.gpuNumeric)), + (a, conf, p, r) => new BinaryExprMeta[MultiplyDTInterval](a, conf, p, r) { + override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression = + GpuMultiplyDTInterval(lhs, rhs) + }), + GpuOverrides.expr[DivideDTInterval]( + "Day-time interval / number", + ExprChecks.binaryProject( + TypeSig.DAYTIME, + TypeSig.DAYTIME, + ("lhs", TypeSig.DAYTIME, TypeSig.DAYTIME), + ("rhs", TypeSig.gpuNumeric - TypeSig.DECIMAL_128, TypeSig.gpuNumeric)), + (a, conf, p, r) => new BinaryExprMeta[DivideDTInterval](a, conf, p, r) { + override def convertToGpu(lhs: Expression, rhs: Expression): GpuExpression = + GpuDivideDTInterval(lhs, rhs) + }), GpuOverrides.expr[TimestampAddInterval]( "Adds interval to timestamp", ExprChecks.binaryProject(TypeSig.TIMESTAMP, TypeSig.TIMESTAMP, diff --git a/tools/generated_files/411/operatorsScore.csv b/tools/generated_files/411/operatorsScore.csv index ef316b38590..9e2af70c529 
100644 --- a/tools/generated_files/411/operatorsScore.csv +++ b/tools/generated_files/411/operatorsScore.csv @@ -127,6 +127,7 @@ DayOfYear,4 DenseRank,4 Discard,4 Divide,4 +DivideDTInterval,4 DivideYMInterval,4 DynamicPruningExpression,4 ElementAt,4 @@ -215,6 +216,7 @@ MonotonicallyIncreasingID,4 Month,4 MonthsBetween,4 Multiply,4 +MultiplyDTInterval,4 MultiplyYMInterval,4 Murmur3Hash,4 NaNvl,4 diff --git a/tools/generated_files/411/supportedExprs.csv b/tools/generated_files/411/supportedExprs.csv index fc88e99d754..3b227815554 100644 --- a/tools/generated_files/411/supportedExprs.csv +++ b/tools/generated_files/411/supportedExprs.csv @@ -1,8 +1,8 @@ Expression,Supported,SQL Func,Notes,Context,Params,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH -Abs,S,`abs`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -Abs,S,`abs`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -Abs,S,`abs`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA -Abs,S,`abs`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Abs,S,`abs`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,S +Abs,S,`abs`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,S +Abs,S,`abs`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NS,NS +Abs,S,`abs`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NS,NS Acos,S,`acos`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Acos,S,`acos`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Acos,S,`acos`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -204,6 +204,9 @@ DenseRank,S,`dense_rank`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,N Divide,S,`/`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA 
Divide,S,`/`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA Divide,S,`/`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +DivideDTInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA +DivideDTInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +DivideDTInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA DivideYMInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S DivideYMInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA DivideYMInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S @@ -414,6 +417,9 @@ Multiply,S,`*`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,N Multiply,S,`*`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA Multiply,S,`*`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA Multiply,S,`*`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +MultiplyDTInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA +MultiplyDTInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +MultiplyDTInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA MultiplyYMInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S MultiplyYMInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA MultiplyYMInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S From f48291729ad35e9680fba65f15eb3f5561e7ab6d Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Wed, 14 Jan 2026 17:01:47 +0800 Subject: [PATCH 34/59] Add Gpu version for OneRowRelationExec Signed-off-by: Chong Gao --- .../spark/rapids/shims/SparkShims.scala | 1 - 
.../rapids/shims/GpuOneRowRelationExec.scala | 83 +++++++++++++++++++ .../spark/rapids/shims/SparkShims.scala | 20 ++++- .../spark/rapids/ParquetWriterSuite.scala | 4 +- tools/generated_files/411/operatorsScore.csv | 1 + tools/generated_files/411/supportedExecs.csv | 1 + 6 files changed, 103 insertions(+), 7 deletions(-) create mode 100644 sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuOneRowRelationExec.scala diff --git a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala index 858a2b5be2b..b5878058927 100644 --- a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala @@ -16,7 +16,6 @@ /*** spark-rapids-shim-json-lines {"spark": "401"} -{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuOneRowRelationExec.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuOneRowRelationExec.scala new file mode 100644 index 00000000000..a4bf2aa9359 --- /dev/null +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuOneRowRelationExec.scala @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "411"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids.{DataFromReplacementRule, GpuExec, GpuMetric, RapidsConf, + RapidsMeta, SparkPlanMeta} +import com.nvidia.spark.rapids.GpuMetric.NUM_OUTPUT_ROWS + +import org.apache.spark.rdd.RDD +import org.apache.spark.sql.catalyst.InternalRow +import org.apache.spark.sql.catalyst.expressions.Attribute +import org.apache.spark.sql.execution.{LeafExecNode, OneRowRelationExec} +import org.apache.spark.sql.vectorized.ColumnarBatch + +/** + * GPU version of OneRowRelationExec. + * OneRowRelationExec is used for queries like "SELECT 1" that have no FROM clause. + * It produces a single row with no columns. + * + * This GPU version produces a single ColumnarBatch with one row and zero columns, + * which uses no GPU memory since there are no actual columns. + */ +case class GpuOneRowRelationExec() extends LeafExecNode with GpuExec { + + override val nodeName: String = "GpuScan OneRowRelation" + + override val output: Seq[Attribute] = Nil + + override protected def internalDoExecuteColumnar(): RDD[ColumnarBatch] = { + val numOutputRows = gpuLongMetric(NUM_OUTPUT_ROWS) + sparkContext.parallelize(Seq(null), 1).mapPartitions { _ => + // Create a ColumnarBatch with 1 row and 0 columns + val batch = new ColumnarBatch(Array.empty, 1) + numOutputRows += 1 + Iterator.single(batch) + } + } + + // Row-based execution fallback + override protected def doExecute(): RDD[InternalRow] = { + throw new IllegalStateException(s"Row-based execution should not occur for $this") + } + + // Override makeCopy to handle Spark's TreeNode reflection issue with no-arg case classes + override def makeCopy(newArgs: Array[AnyRef]): GpuOneRowRelationExec = { + GpuOneRowRelationExec() + } + + // Override doCanonicalize to avoid reflection issues during plan canonicalization + override protected def doCanonicalize(): GpuOneRowRelationExec = { + 
GpuOneRowRelationExec() + } +} + +/** + * Meta class for OneRowRelationExec to convert it to GPU. + */ +class GpuOneRowRelationExecMeta( + exec: OneRowRelationExec, + conf: RapidsConf, + parent: Option[RapidsMeta[_, _, _]], + rule: DataFromReplacementRule) + extends SparkPlanMeta[OneRowRelationExec](exec, conf, parent, rule) { + + override def convertToGpu(): GpuExec = GpuOneRowRelationExec() +} diff --git a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala index 4ab04c7b69f..5258335519b 100644 --- a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala @@ -22,12 +22,14 @@ package com.nvidia.spark.rapids.shims import com.nvidia.spark.rapids._ import com.nvidia.spark.rapids.{HashExprChecks, Murmur3HashExprMeta, XxHash64ExprMeta} -import org.apache.spark.sql.catalyst.expressions.{CollationAwareMurmur3Hash, CollationAwareXxHash64, Expression} +import org.apache.spark.sql.catalyst.expressions.{CollationAwareMurmur3Hash, CollationAwareXxHash64, + Expression} +import org.apache.spark.sql.execution.{OneRowRelationExec, SparkPlan} import org.apache.spark.sql.rapids.{GpuMurmur3Hash, GpuXxHash64} /** - * SparkShimImpl for Spark 4.1.0 - * Extends Spark400PlusCommonShims with 4.1.0 specific overrides. + * SparkShimImpl for Spark 4.1.1 + * Extends Spark400PlusCommonShims with 4.1.1 specific overrides. 
*/ object SparkShimImpl extends Spark400PlusCommonShims with RebaseShims { override def getExprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = { @@ -46,4 +48,16 @@ object SparkShimImpl extends Spark400PlusCommonShims with RebaseShims { // Include TimeAddShims for TimestampAddInterval support in 4.1.0 super.getExprs ++ shimExprs ++ TimeAddShims.exprs } + + override def getExecs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]] = { + val shimExecs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]] = Seq( + // OneRowRelationExec is a new class in Spark 4.1.0 for single-row queries (e.g. SELECT 1) + // GPU version produces a single ColumnarBatch with one row and zero columns + GpuOverrides.exec[OneRowRelationExec]( + "Single row relation for literal queries without FROM clause", + ExecChecks(TypeSig.all, TypeSig.all), + (exec, conf, parent, rule) => new GpuOneRowRelationExecMeta(exec, conf, parent, rule)) + ).map(r => (r.getClassFor.asSubclass(classOf[SparkPlan]), r)).toMap + super.getExecs ++ shimExecs + } } diff --git a/tests/src/test/scala/com/nvidia/spark/rapids/ParquetWriterSuite.scala b/tests/src/test/scala/com/nvidia/spark/rapids/ParquetWriterSuite.scala index 10ad51d7148..1688e22ba84 100644 --- a/tests/src/test/scala/com/nvidia/spark/rapids/ParquetWriterSuite.scala +++ b/tests/src/test/scala/com/nvidia/spark/rapids/ParquetWriterSuite.scala @@ -311,9 +311,7 @@ class ParquetWriterSuite extends SparkQueryCompareTestSuite { spark.sql("DROP TABLE IF EXISTS tempmetricstable") } }, new SparkConf() - .set("spark.sql.sources.commitProtocolClass", slowCommitClass) - // OneRowRelationExec cannot run on GPU - it's a CPU-only leaf node for single-row SELECT - .set(RapidsConf.TEST_ALLOWED_NONGPU.key, "OneRowRelationExec")) + .set("spark.sql.sources.commitProtocolClass", slowCommitClass)) } } diff --git a/tools/generated_files/411/operatorsScore.csv b/tools/generated_files/411/operatorsScore.csv index 9e2af70c529..a935cd83fd7 100644 --- 
a/tools/generated_files/411/operatorsScore.csv +++ b/tools/generated_files/411/operatorsScore.csv @@ -7,6 +7,7 @@ FilterExec,2.8 GenerateExec,3.0 GlobalLimitExec,3.0 LocalLimitExec,3.0 +OneRowRelationExec,3.0 ProjectExec,3.0 RangeExec,3.0 SampleExec,3.0 diff --git a/tools/generated_files/411/supportedExecs.csv b/tools/generated_files/411/supportedExecs.csv index afa89d9ea74..4cc6945b174 100644 --- a/tools/generated_files/411/supportedExecs.csv +++ b/tools/generated_files/411/supportedExecs.csv @@ -7,6 +7,7 @@ FilterExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S GenerateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS GlobalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS LocalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +OneRowRelationExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S ProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S RangeExec,S,None,Input/Output,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA SampleExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,S,S From a310cadc4dccaac3b718baafa4eb5895a2262efc Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Thu, 15 Jan 2026 11:07:51 +0800 Subject: [PATCH 35/59] Fix shim bug: missing for some Spark versions Signed-off-by: Chong Gao --- .../rapids/shims/InvalidateCacheShims.scala | 23 +++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) rename sql-plugin/src/main/{spark400 => spark320}/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala (72%) diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala similarity index 72% rename from sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala rename to 
sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala index c503278815f..c1db231c20e 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala @@ -15,6 +15,25 @@ */ /*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "331"} +{"spark": "332"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "342"} +{"spark": "343"} +{"spark": "350"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} {"spark": "400"} {"spark": "401"} spark-rapids-shim-json-lines ***/ @@ -24,8 +43,8 @@ import org.apache.spark.sql.connector.catalog.{Identifier, Table, TableCatalog} /** * Shim for invalidateCache callback signature differences between Spark versions. - * In Spark 4.0.x: (TableCatalog, Table, Identifier) => Unit - * In Spark 4.1.0: (TableCatalog, Identifier) => Unit + * In Spark 3.x and 4.0.x: (TableCatalog, Table, Identifier) => Unit + * In Spark 4.1.x: (TableCatalog, Identifier) => Unit */ object InvalidateCacheShims { type InvalidateCacheType = (TableCatalog, Table, Identifier) => Unit From 4e5fa8dbb5f1272421d39d5b09b4a11938cb6945 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Thu, 15 Jan 2026 11:48:09 +0800 Subject: [PATCH 36/59] Fix bug in make-scala-version-build-files.sh Signed-off-by: Chong Gao --- build/make-scala-version-build-files.sh | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/build/make-scala-version-build-files.sh b/build/make-scala-version-build-files.sh index 21bf4471147..7b6eafe1eb3 100755 --- a/build/make-scala-version-build-files.sh +++ b/build/make-scala-version-build-files.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash # -# Copyright (c) 2023-2025, NVIDIA CORPORATION. All rights reserved. 
+# Copyright (c) 2023-2026, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -91,11 +91,12 @@ for f in $(git ls-files '**pom.xml'); do done # Update spark.version to spark350.version for Scala 2.13 +# Only update the default spark.version in main section (after spark-rapids-jni.version), +# not the ones inside sections +# The pattern matches spark330.version (from 2.12 pom) and replaces with spark350.version SPARK_VERSION=${DEFAULT_SPARK[$TO_VERSION]} -sed_i '//,/\${spark[0-9]\+\.version}\${spark[0-9]\+\.version}\${'$SPARK_VERSION'.version}/,/\${spark[0-9]\+\.version}\${spark[0-9]\+\.version}\${'$SPARK_VERSION'.version}/,/\${'$FROM_SPARK_VERSION'\.version}\${'$FROM_SPARK_VERSION'\.version}\${'$SPARK_VERSION'.version} in parent POM From cd01ef74232e825ec76b8b3f87ace402ef0520af Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Thu, 15 Jan 2026 13:45:11 +0800 Subject: [PATCH 37/59] Fix shim bug: missed some Spark versions Signed-off-by: Chong Gao --- .../com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala index c1db231c20e..ce4e83a9913 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/InvalidateCacheShims.scala @@ -29,11 +29,15 @@ {"spark": "341"} {"spark": "342"} {"spark": "343"} +{"spark": "344"} {"spark": "350"} {"spark": "351"} {"spark": "352"} {"spark": "353"} {"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} {"spark": "400"} {"spark": "401"} spark-rapids-shim-json-lines ***/ From a84280952ba685ef40ccf0fb6b40ab0d7a753a50 Mon Sep 17 00:00:00 2001 From: 
Chong Gao Date: Thu, 15 Jan 2026 14:36:00 +0800 Subject: [PATCH 38/59] Revert "Fix bug in make-scala-version-build-files.sh" This reverts commit 4e5fa8dbb5f1272421d39d5b09b4a11938cb6945. --- build/make-scala-version-build-files.sh | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/build/make-scala-version-build-files.sh b/build/make-scala-version-build-files.sh index 7b6eafe1eb3..21bf4471147 100755 --- a/build/make-scala-version-build-files.sh +++ b/build/make-scala-version-build-files.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash # -# Copyright (c) 2023-2026, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2023-2025, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -91,12 +91,11 @@ for f in $(git ls-files '**pom.xml'); do done # Update spark.version to spark350.version for Scala 2.13 -# Only update the default spark.version in main section (after spark-rapids-jni.version), -# not the ones inside sections -# The pattern matches spark330.version (from 2.12 pom) and replaces with spark350.version SPARK_VERSION=${DEFAULT_SPARK[$TO_VERSION]} -FROM_SPARK_VERSION=${DEFAULT_SPARK[$FROM_VERSION]} -sed_i '//,/\${'$FROM_SPARK_VERSION'\.version}\${'$FROM_SPARK_VERSION'\.version}\${'$SPARK_VERSION'.version}/,/\${spark[0-9]\+\.version}\${spark[0-9]\+\.version}\${'$SPARK_VERSION'.version}/,/\${spark[0-9]\+\.version}\${spark[0-9]\+\.version}\${'$SPARK_VERSION'.version} in parent POM From b5f21011d68193cbcfaf5d18ee9a632b95992cff Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Thu, 15 Jan 2026 14:37:57 +0800 Subject: [PATCH 39/59] Fix make-scala-version-build-files.sh: reorder properties in release411 profile Move before in the release411 profile so that the sed pattern in make-scala-version-build-files.sh does not incorrectly match and replace the spark.version inside profiles. 
The sed pattern uses //,// range, which now only matches in the default properties section where 8 comes before . --- pom.xml | 2 +- scala2.13/pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 9298d657088..bc957949359 100644 --- a/pom.xml +++ b/pom.xml @@ -779,9 +779,9 @@ 411 + ${spark411.version} 17 2.13.17 - ${spark411.version} ${spark411.version} 1.13.1 rapids-4-spark-delta-stub diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index 144eb096038..1251e6bb81b 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -779,9 +779,9 @@ 411 + ${spark411.version} 17 2.13.17 - ${spark411.version} ${spark411.version} 1.13.1 rapids-4-spark-delta-stub From eab7b21aa16ee81c2d072b958ad1f244a9dc1ca1 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Thu, 15 Jan 2026 14:51:03 +0800 Subject: [PATCH 40/59] Fix WindowInPandasShims for Databricks: use projectList instead of windowExpression On Databricks, WindowInPandasExec uses projectList instead of windowExpression. 
Split WindowInPandasShims into: - spark320/WindowInPandasShims.scala: non-Databricks (uses windowExpression) - spark330db/WindowInPandasShims.scala: Databricks (uses projectList) --- .../rapids/shims/WindowInPandasShims.scala | 4 --- .../rapids/shims/WindowInPandasShims.scala | 35 +++++++++++++++++++ 2 files changed, 35 insertions(+), 4 deletions(-) create mode 100644 sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala index c6a1fded30b..e80b26627d6 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala @@ -23,21 +23,17 @@ {"spark": "324"} {"spark": "330"} {"spark": "330cdh"} -{"spark": "330db"} {"spark": "331"} {"spark": "332"} {"spark": "332cdh"} -{"spark": "332db"} {"spark": "333"} {"spark": "334"} {"spark": "340"} {"spark": "341"} -{"spark": "341db"} {"spark": "342"} {"spark": "343"} {"spark": "344"} {"spark": "350"} -{"spark": "350db143"} {"spark": "351"} {"spark": "352"} {"spark": "353"} diff --git a/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala new file mode 100644 index 00000000000..de08285146c --- /dev/null +++ b/sql-plugin/src/main/spark330db/scala/com/nvidia/spark/rapids/shims/WindowInPandasShims.scala @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2025-2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "330db"} +{"spark": "332db"} +{"spark": "341db"} +{"spark": "350db143"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import org.apache.spark.sql.catalyst.expressions.NamedExpression +import org.apache.spark.sql.execution.python.WindowInPandasExec + +/** + * Trait providing getWindowExpressions method for Databricks versions. + * On Databricks, WindowInPandasExec uses projectList instead of windowExpression. + */ +trait WindowInPandasShims { + def getWindowExpressions(winPy: WindowInPandasExec): Seq[NamedExpression] = + winPy.projectList +} From 9105d8122bdb6d486a4b82d57b50a537f59a5330 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Thu, 15 Jan 2026 15:40:01 +0800 Subject: [PATCH 41/59] Fix import conflict: remove redundant StoragePartitionJoinShims import The import is unnecessary because StoragePartitionJoinShims is in the same package. The import causes a conflict with the spark411 version of StoragePartitionJoinShims when compiling for spark350db143 with 411 shim markers. 
--- .../scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala index 33e01c055f5..a902bc7991a 100644 --- a/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala +++ b/sql-plugin/src/main/spark350db143/scala/com/nvidia/spark/rapids/shims/GpuBatchScanExec.scala @@ -24,7 +24,6 @@ package com.nvidia.spark.rapids.shims import com.google.common.base.Objects import com.nvidia.spark.rapids.GpuScan -import com.nvidia.spark.rapids.shims.StoragePartitionJoinShims import org.apache.spark.SparkException import org.apache.spark.rdd.RDD From e572077dab8e5c2c37737bb40126eef8185c4c20 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Thu, 15 Jan 2026 16:33:12 +0800 Subject: [PATCH 42/59] Fix Window UDF protocol for Spark 4.1.1 Spark 4.1.1 changed the Python UDF protocol for SQL_WINDOW_AGG_PANDAS_UDF: - Old (Spark 3.x, 4.0.x): Uses ArrowStreamPandasUDFSerializer with config key 'pandas_window_bound_types' and no grouped markers - New (Spark 4.1.x): Uses GroupPandasUDFSerializer with config key 'window_bound_types' and expects 1/0 protocol markers Changes: - Add WindowBoundTypeConfShims for config key shimming - Add GpuWindowArrowPythonRunner for Spark 4.1.1 grouped protocol - Add GpuWindowPythonRunnerFactory shim to create appropriate runner - Update GpuWindowInPandasExecBase to use the factory - Update WindowInPandasExecShims for ArrowWindowPythonExec - Enable ArrowWindowPythonExec in test config --- integration_tests/src/main/python/udf_test.py | 4 +- .../python/GpuWindowInPandasExecBase.scala | 7 +- .../shims/GpuWindowPythonRunnerFactory.scala | 88 +++++++++++++++ .../shims/WindowBoundTypeConfShims.scala | 60 ++++++++++ .../shims/WindowInPandasExecShims.scala | 36 +++++- 
.../shims/GpuWindowArrowPythonRunner.scala | 105 ++++++++++++++++++ .../shims/GpuWindowPythonRunnerFactory.scala | 61 ++++++++++ .../shims/WindowBoundTypeConfShims.scala | 28 +++++ tools/generated_files/411/operatorsScore.csv | 1 + tools/generated_files/411/supportedExecs.csv | 1 + 10 files changed, 383 insertions(+), 8 deletions(-) create mode 100644 sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowPythonRunnerFactory.scala create mode 100644 sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowBoundTypeConfShims.scala create mode 100644 sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowArrowPythonRunner.scala create mode 100644 sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowPythonRunnerFactory.scala create mode 100644 sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowBoundTypeConfShims.scala diff --git a/integration_tests/src/main/python/udf_test.py b/integration_tests/src/main/python/udf_test.py index 4f883b9318e..efbdb0cd677 100644 --- a/integration_tests/src/main/python/udf_test.py +++ b/integration_tests/src/main/python/udf_test.py @@ -1,4 +1,4 @@ -# Copyright (c) 2020-2025, NVIDIA CORPORATION. +# Copyright (c) 2020-2026, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -49,6 +49,8 @@ arrow_udf_conf = { 'spark.sql.execution.arrow.pyspark.enabled': 'true', 'spark.rapids.sql.exec.WindowInPandasExec': 'true', + # ArrowWindowPythonExec is the new name for WindowInPandasExec in Spark 4.1+ + 'spark.rapids.sql.exec.ArrowWindowPythonExec': 'true', 'spark.rapids.sql.exec.FlatMapCoGroupsInPandasExec': 'true' } diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala index b0abb34e991..165a092b489 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala @@ -35,7 +35,7 @@ import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.plans.physical.{AllTuples, ClusteredDistribution, Distribution, Partitioning} import org.apache.spark.sql.rapids.aggregate.GpuAggregateExpression -import org.apache.spark.sql.rapids.execution.python.shims.{GpuArrowPythonRunner, PythonArgumentUtils} +import org.apache.spark.sql.rapids.execution.python.shims.{GpuWindowPythonRunnerFactory, PythonArgumentUtils} import org.apache.spark.sql.rapids.execution.python.shims.WindowInPandasExecTypeShim.WindowInPandasExecType import org.apache.spark.sql.rapids.shims.{ArrowUtilsShim, DataTypeUtilsShim} import org.apache.spark.sql.types.{IntegerType, StructField, StructType} @@ -233,7 +233,8 @@ trait GpuWindowInPandasExecBase extends ShimUnaryExecNode with GpuPythonExecBase protected object UnboundedWindow extends WindowBoundType("unbounded") protected object BoundedWindow extends WindowBoundType("bounded") - protected val windowBoundTypeConf = "pandas_window_bound_types" + protected val windowBoundTypeConf = + 
org.apache.spark.sql.rapids.execution.python.shims.WindowBoundTypeConfShims.windowBoundTypeConf protected def collectFunctions( udf: GpuPythonFunction): ((ChainedPythonFunctions, Long), Seq[Expression]) = { @@ -521,7 +522,7 @@ trait GpuWindowInPandasExecBase extends ShimUnaryExecNode with GpuPythonExecBase } if (pyInputIterator.hasNext) { - val pyRunner = new GpuArrowPythonRunner( + val pyRunner = GpuWindowPythonRunnerFactory.createRunner( pyFuncs, PythonEvalType.SQL_WINDOW_AGG_PANDAS_UDF, udfArgs.argOffsets, diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowPythonRunnerFactory.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowPythonRunnerFactory.scala new file mode 100644 index 00000000000..754081622fa --- /dev/null +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowPythonRunnerFactory.scala @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.execution.python.shims + +import org.apache.spark.api.python.ChainedPythonFunctions +import org.apache.spark.sql.rapids.execution.python.GpuArrowOutput +import org.apache.spark.sql.types.StructType +import org.apache.spark.sql.vectorized.ColumnarBatch + +/** + * Factory object to create Python runner for Window UDFs. + * This shim allows different Spark versions to use different runner implementations. + * + * In Spark 3.x and 4.0.x, the Python worker uses ArrowStreamPandasUDFSerializer for + * SQL_WINDOW_AGG_PANDAS_UDF, which doesn't require grouped protocol markers. 
+ */ +object GpuWindowPythonRunnerFactory { + def createRunner( + funcs: Seq[(ChainedPythonFunctions, Long)], + evalType: Int, + argOffsets: Array[Array[Int]], + pythonInSchema: StructType, + timeZoneId: String, + conf: Map[String, String], + batchSize: Long, + pythonOutSchema: StructType, + argNames: Option[Array[Array[Option[String]]]] + ): GpuBasePythonRunner[ColumnarBatch] with GpuArrowOutput = { + new GpuArrowPythonRunner( + funcs, + evalType, + argOffsets, + pythonInSchema, + timeZoneId, + conf, + batchSize, + pythonOutSchema, + argNames) + } +} diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowBoundTypeConfShims.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowBoundTypeConfShims.scala new file mode 100644 index 00000000000..54a57089055 --- /dev/null +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowBoundTypeConfShims.scala @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "320"} +{"spark": "321"} +{"spark": "321cdh"} +{"spark": "322"} +{"spark": "323"} +{"spark": "324"} +{"spark": "330"} +{"spark": "330cdh"} +{"spark": "330db"} +{"spark": "331"} +{"spark": "332"} +{"spark": "332cdh"} +{"spark": "332db"} +{"spark": "333"} +{"spark": "334"} +{"spark": "340"} +{"spark": "341"} +{"spark": "341db"} +{"spark": "342"} +{"spark": "343"} +{"spark": "344"} +{"spark": "350"} +{"spark": "350db143"} +{"spark": "351"} +{"spark": "352"} +{"spark": "353"} +{"spark": "354"} +{"spark": "355"} +{"spark": "356"} +{"spark": "357"} +{"spark": "400"} +{"spark": "401"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.execution.python.shims + +/** + * Shim for window bound type config key. + * In Spark 3.x/4.0.x: "pandas_window_bound_types" + * In Spark 4.1.x: "window_bound_types" + */ +object WindowBoundTypeConfShims { + val windowBoundTypeConf: String = "pandas_window_bound_types" +} diff --git a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala index 72218f7f27b..44486add156 100644 --- a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/WindowInPandasExecShims.scala @@ -21,13 +21,41 @@ package com.nvidia.spark.rapids.shims import com.nvidia.spark.rapids._ +import org.apache.spark.sql.catalyst.expressions.NamedExpression import org.apache.spark.sql.execution.SparkPlan +import org.apache.spark.sql.execution.python.ArrowWindowPythonExec +import org.apache.spark.sql.rapids.execution.python.GpuWindowInPandasExecMetaBase /** - * WindowInPandasExec was renamed to ArrowWindowPythonExec in Spark 4.1. - * This shim provides an empty implementation for 4.1+. 
+ * Exec rules for ArrowWindowPythonExec (Spark 4.1+ - renamed from WindowInPandasExec). */ object WindowInPandasExecShims { - // Empty map - WindowInPandasExec doesn't exist in Spark 4.1+ - val execs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]] = Map.empty + val execs: Map[Class[_ <: SparkPlan], ExecRule[_ <: SparkPlan]] = { + Seq( + GpuOverrides.exec[ArrowWindowPythonExec]( + "The backend for Window Aggregation Pandas UDF, Accelerates the data transfer between" + + " the Java process and the Python process. It also supports scheduling GPU resources" + + " for the Python process when enabled. For now it only supports row based window frame.", + ExecChecks( + (TypeSig.commonCudfTypes + TypeSig.ARRAY).nested(TypeSig.commonCudfTypes), + TypeSig.all), + (winPy, conf, p, r) => new GpuWindowInPandasExecMetaBase(winPy, conf, p, r) { + override val windowExpressions: Seq[BaseExprMeta[NamedExpression]] = + SparkShimImpl.getWindowExpressions(winPy).map( + GpuOverrides.wrapExpr(_, this.conf, Some(this))) + + override def convertToGpu(): GpuExec = { + val windowExprGpu = windowExpressions.map(_.convertToGpu()) + val partitionGpu = partitionSpec.map(_.convertToGpu()) + GpuWindowInPandasExec( + windowExprGpu, + partitionGpu, + // leave ordering expression on the CPU, it's not used for GPU computation + winPy.orderSpec, + childPlans.head.convertIfNeeded() + )(winPy.partitionSpec) + } + }).disabledByDefault("it only supports row based frame for now") + ).map(r => (r.getClassFor.asSubclass(classOf[SparkPlan]), r)).toMap + } } diff --git a/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowArrowPythonRunner.scala b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowArrowPythonRunner.scala new file mode 100644 index 00000000000..abf3ad7e7aa --- /dev/null +++ b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowArrowPythonRunner.scala @@ -0,0 
+1,105 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "411"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.execution.python.shims + +import java.io.DataOutputStream + +import com.nvidia.spark.rapids.GpuSemaphore + +import org.apache.spark.{SparkEnv, TaskContext} +import org.apache.spark.api.python._ +import org.apache.spark.sql.rapids.execution.python.{GpuArrowPythonWriter, GpuPythonRunnerCommon} +import org.apache.spark.sql.types.StructType +import org.apache.spark.sql.vectorized.ColumnarBatch + +/** + * Python runner for Window UDFs in Spark 4.1.x. + * + * In Spark 4.1.x, the Python worker uses GroupPandasUDFSerializer for SQL_WINDOW_AGG_PANDAS_UDF, + * which expects the grouped protocol: + * - Send 1 before each batch to indicate more data is coming + * - Create a new Arrow Stream for each batch + * - Send 0 to indicate end of data + * + * This is different from earlier Spark versions which used ArrowStreamPandasUDFSerializer + * that didn't require the 1/0 markers. 
+ */ +class GpuWindowArrowPythonRunner( + funcs: Seq[(ChainedPythonFunctions, Long)], + evalType: Int, + argOffsets: Array[Array[Int]], + pythonInSchema: StructType, + timeZoneId: String, + conf: Map[String, String], + batchSize: Long, + override val pythonOutSchema: StructType, + argNames: Option[Array[Array[Option[String]]]] = None, + jobArtifactUUID: Option[String] = None) + extends GpuBasePythonRunner[ColumnarBatch](funcs.map(_._1), evalType, argOffsets, + jobArtifactUUID) with GpuArrowPythonOutput with GpuPythonRunnerCommon { + + protected override def newWriter( + env: SparkEnv, + worker: PythonWorker, + inputIterator: Iterator[ColumnarBatch], + partitionIndex: Int, + context: TaskContext): Writer = { + new Writer(env, worker, inputIterator, partitionIndex, context) { + + val arrowWriter = new GpuArrowPythonWriter(pythonInSchema, batchSize) { + override protected def writeUDFs(dataOut: DataOutputStream): Unit = { + WritePythonUDFUtils.writeUDFs(dataOut, funcs, argOffsets, argNames) + } + } + + protected override def writeCommand(dataOut: DataOutputStream): Unit = { + arrowWriter.writeCommand(dataOut, conf) + } + + override def writeNextInputToStream(dataOut: DataOutputStream): Boolean = { + try { + if (inputIterator.hasNext) { + // Send 1 to indicate there's more data + dataOut.writeInt(1) + arrowWriter.start(dataOut) + arrowWriter.writeAndClose(inputIterator.next()) + // Reset the writer to start a new Arrow stream for the next batch + arrowWriter.reset() + dataOut.flush() + true + } else { + // Release semaphore before blocking operation + GpuSemaphore.releaseIfNecessary(TaskContext.get()) + // Send 0 to indicate end of data + dataOut.writeInt(0) + dataOut.flush() + false + } + } catch { + case t: Throwable => + arrowWriter.close() + // Release semaphore in case of exception + GpuSemaphore.releaseIfNecessary(TaskContext.get()) + throw t + } + } + } + } +} diff --git 
a/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowPythonRunnerFactory.scala b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowPythonRunnerFactory.scala new file mode 100644 index 00000000000..9e7110e1b9b --- /dev/null +++ b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuWindowPythonRunnerFactory.scala @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "411"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.execution.python.shims + +import org.apache.spark.api.python.ChainedPythonFunctions +import org.apache.spark.sql.rapids.execution.python.GpuArrowOutput +import org.apache.spark.sql.types.StructType +import org.apache.spark.sql.vectorized.ColumnarBatch + +/** + * Factory object to create Python runner for Window UDFs in Spark 4.1.x. + * + * In Spark 4.1.x, the Python worker uses GroupPandasUDFSerializer for SQL_WINDOW_AGG_PANDAS_UDF, + * which expects the grouped protocol: + * - Send 1 before each batch + * - Create new Arrow stream for each batch + * - Send 0 to indicate end of data + * + * This is different from earlier Spark versions which used ArrowStreamPandasUDFSerializer. 
+ */ +object GpuWindowPythonRunnerFactory { + def createRunner( + funcs: Seq[(ChainedPythonFunctions, Long)], + evalType: Int, + argOffsets: Array[Array[Int]], + pythonInSchema: StructType, + timeZoneId: String, + conf: Map[String, String], + batchSize: Long, + pythonOutSchema: StructType, + argNames: Option[Array[Array[Option[String]]]] + ): GpuBasePythonRunner[ColumnarBatch] with GpuArrowOutput = { + new GpuWindowArrowPythonRunner( + funcs, + evalType, + argOffsets, + pythonInSchema, + timeZoneId, + conf, + batchSize, + pythonOutSchema, + argNames) + } +} diff --git a/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowBoundTypeConfShims.scala b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowBoundTypeConfShims.scala new file mode 100644 index 00000000000..652673924da --- /dev/null +++ b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/WindowBoundTypeConfShims.scala @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "411"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.execution.python.shims + +/** + * Shim for window bound type config key. + * In Spark 4.1.x, the config key changed from "pandas_window_bound_types" to "window_bound_types". 
+ */ +object WindowBoundTypeConfShims { + val windowBoundTypeConf: String = "window_bound_types" +} diff --git a/tools/generated_files/411/operatorsScore.csv b/tools/generated_files/411/operatorsScore.csv index a935cd83fd7..537a2953902 100644 --- a/tools/generated_files/411/operatorsScore.csv +++ b/tools/generated_files/411/operatorsScore.csv @@ -43,6 +43,7 @@ CartesianProductExec,3.0 ShuffledHashJoinExec,3.0 SortMergeJoinExec,22.7 ArrowEvalPythonExec,1.2 +ArrowWindowPythonExec,3.0 FlatMapCoGroupsInPandasExec,3.0 FlatMapGroupsInPandasExec,1.2 MapInArrowExec,3.0 diff --git a/tools/generated_files/411/supportedExecs.csv b/tools/generated_files/411/supportedExecs.csv index 4cc6945b174..ef57bf64f79 100644 --- a/tools/generated_files/411/supportedExecs.csv +++ b/tools/generated_files/411/supportedExecs.csv @@ -53,6 +53,7 @@ SortMergeJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS SortMergeJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA SortMergeJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS ArrowEvalPythonExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +ArrowWindowPythonExec,NS,This is disabled by default because it only supports row based frame for now,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,NS,NS,NS,NS FlatMapCoGroupsInPandasExec,NS,This is disabled by default because Performance is not ideal with many small groups,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS FlatMapGroupsInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS MapInArrowExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS From 8abd36ff796bc54434add02b69c7052ff052fed5 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Thu, 15 Jan 2026 17:03:46 +0800 Subject: [PATCH 43/59] Fix Aggregate UDF protocol for Spark 4.1.1 In Spark 4.1.1, AggregateInPandasExec was renamed to ArrowAggregatePythonExec. 
The Python worker also uses GroupPandasUDFSerializer which expects the grouped protocol (1/0 markers). Changes: - Update AggregateInPandasExecShims to register ExecRule for ArrowAggregatePythonExec - Add GpuArrowAggregatePythonExecMeta for the renamed class - Create spark411-specific GpuGroupedPythonRunnerFactory that uses GpuWindowArrowPythonRunner (which sends the grouped protocol) - Remove spark411 from spark320 GpuGroupedPythonRunnerFactory shim markers --- .../shims/GpuGroupedPythonRunnerFactory.scala | 1 - .../shims/AggregateInPandasExecShims.scala | 21 ++++-- .../GpuArrowAggregatePythonExecMeta.scala | 64 +++++++++++++++++++ .../shims/GpuGroupedPythonRunnerFactory.scala | 62 ++++++++++++++++++ tools/generated_files/411/operatorsScore.csv | 1 + tools/generated_files/411/supportedExecs.csv | 1 + 6 files changed, 144 insertions(+), 6 deletions(-) create mode 100644 sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuArrowAggregatePythonExecMeta.scala create mode 100644 sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala diff --git a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala index 5fff48ae5bc..f5951bb728b 100644 --- a/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala +++ b/sql-plugin/src/main/spark320/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala @@ -43,7 +43,6 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} -{"spark": "411"} spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.rapids.execution.python.shims diff --git a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala 
b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala index e34ec1f7807..46148f178e8 100644 --- a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala @@ -19,19 +19,30 @@ spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims -import com.nvidia.spark.rapids.ExecRule +import com.nvidia.spark.rapids.{ExecChecks, ExecRule, GpuOverrides, TypeSig} import org.apache.spark.sql.catalyst.expressions.NamedExpression import org.apache.spark.sql.execution.SparkPlan +import org.apache.spark.sql.execution.python.ArrowAggregatePythonExec /** * AggregateInPandasExec was renamed to ArrowAggregatePythonExec in Spark 4.1.0. - * Return None to skip this exec rule for now. */ object AggregateInPandasExecShims { - val execRule: Option[ExecRule[_ <: SparkPlan]] = None + val execRule: Option[ExecRule[_ <: SparkPlan]] = Some( + GpuOverrides.exec[ArrowAggregatePythonExec]( + "The backend for an Aggregation Pandas UDF." + + " This accelerates the data transfer between the Java process and the Python process." 
+ + " It also supports scheduling GPU resources for the Python process" + + " when enabled.", + ExecChecks(TypeSig.commonCudfTypes, TypeSig.all), + (aggPy, conf, p, r) => new GpuArrowAggregatePythonExecMeta(aggPy, conf, p, r)) + ) - def isAggregateInPandasExec(plan: SparkPlan): Boolean = false + def isAggregateInPandasExec(plan: SparkPlan): Boolean = + plan.isInstanceOf[ArrowAggregatePythonExec] - def getGroupingExpressions(plan: SparkPlan): Seq[NamedExpression] = Seq.empty + def getGroupingExpressions(plan: SparkPlan): Seq[NamedExpression] = { + plan.asInstanceOf[ArrowAggregatePythonExec].groupingExpressions + } } diff --git a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuArrowAggregatePythonExecMeta.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuArrowAggregatePythonExecMeta.scala new file mode 100644 index 00000000000..c95ccd5b5ac --- /dev/null +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuArrowAggregatePythonExecMeta.scala @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/*** spark-rapids-shim-json-lines +{"spark": "411"} +spark-rapids-shim-json-lines ***/ +package com.nvidia.spark.rapids.shims + +import com.nvidia.spark.rapids._ + +import org.apache.spark.sql.catalyst.expressions._ +import org.apache.spark.sql.execution.python.ArrowAggregatePythonExec +import org.apache.spark.sql.rapids.execution.python.{GpuAggregateInPandasExec, GpuPythonUDAF} + +/** + * Meta class for ArrowAggregatePythonExec (renamed from AggregateInPandasExec in Spark 4.1.0). + */ +class GpuArrowAggregatePythonExecMeta( + aggPandas: ArrowAggregatePythonExec, + conf: RapidsConf, + parent: Option[RapidsMeta[_, _, _]], + rule: DataFromReplacementRule) + extends SparkPlanMeta[ArrowAggregatePythonExec](aggPandas, conf, parent, rule) { + + override def replaceMessage: String = "partially run on GPU" + override def noReplacementPossibleMessage(reasons: String): String = + s"cannot run even partially on the GPU because $reasons" + + private val groupingNamedExprs: Seq[BaseExprMeta[NamedExpression]] = + aggPandas.groupingExpressions.map(GpuOverrides.wrapExpr(_, conf, Some(this))) + + val pythonUDAFs: Seq[PythonUDAF] = + aggPandas.aggExpressions.map(_.aggregateFunction.asInstanceOf[PythonUDAF]) + + private val udfs: Seq[BaseExprMeta[PythonUDAF]] = + pythonUDAFs.map(GpuOverrides.wrapExpr(_, conf, Some(this))) + + private val resultNamedExprs: Seq[BaseExprMeta[NamedExpression]] = + aggPandas.resultExpressions.map(GpuOverrides.wrapExpr(_, conf, Some(this))) + + override val childExprs: Seq[BaseExprMeta[_]] = groupingNamedExprs ++ udfs ++ resultNamedExprs + + override def convertToGpu(): GpuExec = + GpuAggregateInPandasExec( + groupingNamedExprs.map(_.convertToGpu()).asInstanceOf[Seq[NamedExpression]], + udfs.map(_.convertToGpu()).asInstanceOf[Seq[GpuPythonUDAF]], + aggPandas.aggExpressions.map(_.resultAttribute), + resultNamedExprs.map(_.convertToGpu()).asInstanceOf[Seq[NamedExpression]], + childPlans.head.convertIfNeeded() + )(aggPandas.groupingExpressions) +} 
diff --git a/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala new file mode 100644 index 00000000000..e51f5bc5b42 --- /dev/null +++ b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/rapids/execution/python/shims/GpuGroupedPythonRunnerFactory.scala @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2026, NVIDIA CORPORATION. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*** spark-rapids-shim-json-lines +{"spark": "411"} +spark-rapids-shim-json-lines ***/ +package org.apache.spark.sql.rapids.execution.python.shims + +import org.apache.spark.api.python.ChainedPythonFunctions +import org.apache.spark.sql.rapids.execution.python.GpuArrowOutput +import org.apache.spark.sql.rapids.shims.ArrowUtilsShim +import org.apache.spark.sql.types.StructType +import org.apache.spark.sql.vectorized.ColumnarBatch + +/** + * Factory for creating Python runners for grouped UDFs in Spark 4.1.x. 
+ * + * In Spark 4.1.x, the Python worker uses GroupPandasUDFSerializer for grouped/window UDFs, + * which expects the grouped protocol: + * - Send 1 before each batch + * - Create new Arrow stream for each batch + * - Send 0 to indicate end of data + */ +case class GpuGroupedPythonRunnerFactory( + conf: org.apache.spark.sql.internal.SQLConf, + chainedFunc: Seq[(ChainedPythonFunctions, Long)], + argOffsets: Array[Array[Int]], + dedupAttrs: StructType, + pythonOutputSchema: StructType, + evalType: Int, + argNames: Option[Array[Array[Option[String]]]] = None) { + val sessionLocalTimeZone = conf.sessionLocalTimeZone + val pythonRunnerConf = ArrowUtilsShim.getPythonRunnerConfMap(conf) + + def getRunner(): GpuBasePythonRunner[ColumnarBatch] with GpuArrowOutput = { + // Use the grouped protocol runner for Spark 4.1.x + new GpuWindowArrowPythonRunner( + chainedFunc, + evalType, + argOffsets, + dedupAttrs, + sessionLocalTimeZone, + pythonRunnerConf, + // The whole group data should be written in a single call, so here is unlimited + Int.MaxValue, + pythonOutputSchema, + argNames) + } +} diff --git a/tools/generated_files/411/operatorsScore.csv b/tools/generated_files/411/operatorsScore.csv index 537a2953902..d3ea1d82643 100644 --- a/tools/generated_files/411/operatorsScore.csv +++ b/tools/generated_files/411/operatorsScore.csv @@ -42,6 +42,7 @@ BroadcastNestedLoopJoinExec,3.0 CartesianProductExec,3.0 ShuffledHashJoinExec,3.0 SortMergeJoinExec,22.7 +ArrowAggregatePythonExec,3.0 ArrowEvalPythonExec,1.2 ArrowWindowPythonExec,3.0 FlatMapCoGroupsInPandasExec,3.0 diff --git a/tools/generated_files/411/supportedExecs.csv b/tools/generated_files/411/supportedExecs.csv index ef57bf64f79..618cfe6a3c9 100644 --- a/tools/generated_files/411/supportedExecs.csv +++ b/tools/generated_files/411/supportedExecs.csv @@ -52,6 +52,7 @@ SortMergeJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS, 
SortMergeJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS SortMergeJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA SortMergeJoinExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +ArrowAggregatePythonExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS ArrowEvalPythonExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS ArrowWindowPythonExec,NS,This is disabled by default because it only supports row based frame for now,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,NS,NS,NS,NS FlatMapCoGroupsInPandasExec,NS,This is disabled by default because Performance is not ideal with many small groups,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS From dba476d9ae1921d5b2868f73778d2a3abca7139f Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Mon, 19 Jan 2026 18:10:13 +0800 Subject: [PATCH 44/59] Fix docs Signed-off-by: Chong Gao --- .../advanced_configs.md | 22 +- docs/configs.md | 2 +- docs/dev/nvtx_ranges.md | 2 +- docs/supported_ops.md | 2854 +++++++++++------ tools/generated_files/operatorsScore.csv | 10 + tools/generated_files/supportedDataSource.csv | 12 +- tools/generated_files/supportedExecs.csv | 27 +- tools/generated_files/supportedExprs.csv | 127 +- 8 files changed, 1912 insertions(+), 1144 deletions(-) diff --git a/docs/additional-functionality/advanced_configs.md b/docs/additional-functionality/advanced_configs.md index 2993923fae8..9700d4af74c 100644 --- a/docs/additional-functionality/advanced_configs.md +++ b/docs/additional-functionality/advanced_configs.md @@ -6,7 +6,7 @@ nav_order: 10 --- - > This page is generated by `RapidsConf.helpAdvanced` against Apache Spark 3.2.1. + > This page is generated by `RapidsConf.helpAdvanced` against Apache Spark 3.3.0. 
> Most of the content in this page is applicable to other versions of Apache Spark with > variations @@ -228,11 +228,12 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.BitwiseNot|`~`|Returns the bitwise NOT of the operands|true|None| spark.rapids.sql.expression.BitwiseOr|`\|`|Returns the bitwise OR of the operands|true|None| spark.rapids.sql.expression.BitwiseXor|`^`|Returns the bitwise XOR of the operands|true|None| +spark.rapids.sql.expression.BloomFilterMightContain| |Bloom filter query|true|None| spark.rapids.sql.expression.BoundReference| |Reference to a bound variable|true|None| spark.rapids.sql.expression.CaseWhen|`when`|CASE WHEN expression|true|None| spark.rapids.sql.expression.Cast|`bigint`, `binary`, `boolean`, `cast`, `date`, `decimal`, `double`, `float`, `int`, `smallint`, `string`, `timestamp`, `tinyint`|Convert a column of one type of data into another type|true|None| spark.rapids.sql.expression.Cbrt|`cbrt`|Cube root|true|None| -spark.rapids.sql.expression.Ceil|`ceil`, `ceiling`|Ceiling of a number|true|None| +spark.rapids.sql.expression.Ceil| |Ceiling of a number|true|None| spark.rapids.sql.expression.CheckOverflow| |CheckOverflow after arithmetic operations between DecimalType data|true|None| spark.rapids.sql.expression.Coalesce|`coalesce`|Returns the first non-null argument if exists. 
Otherwise, null|true|None| spark.rapids.sql.expression.Concat|`concat`|List/String concatenate|true|None| @@ -256,6 +257,8 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.DayOfYear|`dayofyear`|Returns the day of the year from a date or timestamp|true|None| spark.rapids.sql.expression.DenseRank|`dense_rank`|Window function that returns the dense rank value within the aggregation window|true|None| spark.rapids.sql.expression.Divide|`/`|Division|true|None| +spark.rapids.sql.expression.DivideDTInterval| |Day-time interval * operator|true|None| +spark.rapids.sql.expression.DivideYMInterval| |Year-month interval * operator|true|None| spark.rapids.sql.expression.DynamicPruningExpression| |Dynamic pruning expression marker|true|None| spark.rapids.sql.expression.ElementAt|`element_at`|Returns element of array at given(1-based) index in value if column is array. Returns value for the given key in value if column is map.|true|None| spark.rapids.sql.expression.EndsWith| |Ends with|true|None| @@ -265,7 +268,7 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.Explode|`explode_outer`, `explode`|Given an input array produces a sequence of rows for each value in the array|true|None| spark.rapids.sql.expression.Expm1|`expm1`|Euler's number e raised to a power minus 1|true|None| spark.rapids.sql.expression.Flatten|`flatten`|Creates a single array from an array of arrays|true|None| -spark.rapids.sql.expression.Floor|`floor`|Floor of a number|true|None| +spark.rapids.sql.expression.Floor| |Floor of a number|true|None| spark.rapids.sql.expression.FormatNumber|`format_number`|Formats the number x like '#,###,###.##', rounded to d decimal places.|true|None| spark.rapids.sql.expression.FromUTCTimestamp|`from_utc_timestamp`|Render the input UTC timestamp in the input timezone|true|None| spark.rapids.sql.expression.FromUnixTime|`from_unixtime`|Get the string from a unix timestamp|true|None| @@ -329,6 +332,8 
@@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.Month|`month`|Returns the month from a date or timestamp|true|None| spark.rapids.sql.expression.MonthsBetween|`months_between`|If `timestamp1` is later than `timestamp2`, then the result is positive. If `timestamp1` and `timestamp2` are on the same day of month, or both are the last day of month, time of day will be ignored. Otherwise, the difference is calculated based on 31 days per month, and rounded to 8 digits unless roundOff=false.|true|None| spark.rapids.sql.expression.Multiply|`*`|Multiplication|true|None| +spark.rapids.sql.expression.MultiplyDTInterval| |Day-time interval * number|true|None| +spark.rapids.sql.expression.MultiplyYMInterval| |Year-month interval * number|true|None| spark.rapids.sql.expression.Murmur3Hash|`hash`|Murmur3 hash operator|true|None| spark.rapids.sql.expression.NaNvl|`nanvl`|Evaluates to `left` iff left is not NaN, `right` otherwise|true|None| spark.rapids.sql.expression.NamedLambdaVariable| |A parameter to a higher order SQL function|true|None| @@ -357,6 +362,8 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.Reverse|`reverse`|Returns a reversed string or an array with reverse order of elements|true|None| spark.rapids.sql.expression.Rint|`rint`|Rounds up a double value to the nearest double equal to an integer|true|None| spark.rapids.sql.expression.Round|`round`|Round an expression to d decimal places using HALF_UP rounding mode|true|None| +spark.rapids.sql.expression.RoundCeil| |Computes the ceiling of the given expression to d decimal places|true|None| +spark.rapids.sql.expression.RoundFloor| |Computes the floor of the given expression to d decimal places|true|None| spark.rapids.sql.expression.RowNumber|`row_number`|Window function that returns the index for the row within the aggregation window|true|None| spark.rapids.sql.expression.ScalaUDF| |User Defined Function, the UDF can choose to 
implement a RAPIDS accelerated interface to get better performance.|true|None| spark.rapids.sql.expression.Second|`second`|Returns the second component of the string/timestamp|true|None| @@ -380,9 +387,9 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.Stack|`stack`|Separates expr1, ..., exprk into n rows.|true|None| spark.rapids.sql.expression.StartsWith| |Starts with|true|None| spark.rapids.sql.expression.StringInstr|`instr`|Instr string operator|true|None| -spark.rapids.sql.expression.StringLPad|`lpad`|Pad a string on the left|true|None| +spark.rapids.sql.expression.StringLPad| |Pad a string on the left|true|None| spark.rapids.sql.expression.StringLocate|`locate`, `position`|Substring search operator|true|None| -spark.rapids.sql.expression.StringRPad|`rpad`|Pad a string on the right|true|None| +spark.rapids.sql.expression.StringRPad| |Pad a string on the right|true|None| spark.rapids.sql.expression.StringRepeat|`repeat`|StringRepeat operator that repeats the given strings with numbers of times given by repeatTimes|true|None| spark.rapids.sql.expression.StringReplace|`replace`|StringReplace operator|true|None| spark.rapids.sql.expression.StringSplit|`split`|Splits `str` around occurrences that match `regex`|true|None| @@ -425,7 +432,8 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.BitAndAgg|`bit_and`|Returns the bitwise AND of all non-null input values|true|None| spark.rapids.sql.expression.BitOrAgg|`bit_or`|Returns the bitwise OR of all non-null input values|true|None| spark.rapids.sql.expression.BitXorAgg|`bit_xor`|Returns the bitwise XOR of all non-null input values|true|None| -spark.rapids.sql.expression.CollectList|`collect_list`|Collect a list of non-unique elements, not supported in reduction|true|None| +spark.rapids.sql.expression.BloomFilterAggregate| |Bloom filter build|true|None| +spark.rapids.sql.expression.CollectList|`array_agg`, `collect_list`|Collect a list of 
non-unique elements, not supported in reduction|true|None| spark.rapids.sql.expression.CollectSet|`collect_set`|Collect a set of unique elements, not supported in reduction|true|None| spark.rapids.sql.expression.Count|`count`|Count aggregate operator|true|None| spark.rapids.sql.expression.First|`first_value`, `first`|first aggregate operator|true|None| @@ -444,6 +452,7 @@ Name | SQL Function(s) | Description | Default Value | Notes spark.rapids.sql.expression.VarianceSamp|`var_samp`, `variance`|Aggregation computing sample variance|true|None| spark.rapids.sql.expression.StaticInvoke| |StaticInvoke|true|The supported types are not deterministic since it's a dynamic expression| spark.rapids.sql.expression.NormalizeNaNAndZero| |Normalize NaN and zero|true|None| +spark.rapids.sql.expression.InSubqueryExec| |Evaluates to true if values are in a subquery's result set|true|None| spark.rapids.sql.expression.ScalarSubquery| |Subquery that will return only one row and one column|true|None| spark.rapids.sql.expression.HiveGenericUDF| |Hive Generic UDF, the UDF can choose to implement a RAPIDS accelerated interface to get better performance|true|None| spark.rapids.sql.expression.HiveSimpleUDF| |Hive UDF, the UDF can choose to implement a RAPIDS accelerated interface to get better performance|true|None| @@ -491,6 +500,7 @@ Name | Description | Default Value | Notes spark.rapids.sql.exec.FlatMapCoGroupsInPandasExec|The backend for CoGrouped Aggregation Pandas UDF. Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|false|This is disabled by default because Performance is not ideal with many small groups| spark.rapids.sql.exec.FlatMapGroupsInPandasExec|The backend for Flat Map Groups Pandas UDF, Accelerates the data transfer between the Java process and the Python process. 
It also supports scheduling GPU resources for the Python process when enabled.|true|None| spark.rapids.sql.exec.MapInPandasExec|The backend for Map Pandas Iterator UDF. Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|true|None| +spark.rapids.sql.exec.PythonMapInArrowExec|The backend for Map Arrow Iterator UDF. Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|true|None| spark.rapids.sql.exec.WindowInPandasExec|The backend for Window Aggregation Pandas UDF, Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled. For now it only supports row based window frame.|false|This is disabled by default because it only supports row based frame for now| spark.rapids.sql.exec.WindowExec|Window-operator backend|true|None| spark.rapids.sql.exec.HiveTableScanExec|Scan Exec to read Hive delimited text tables|true|None| diff --git a/docs/configs.md b/docs/configs.md index 30a4c15ebdf..5f76a34812b 100644 --- a/docs/configs.md +++ b/docs/configs.md @@ -5,7 +5,7 @@ nav_order: 4 --- - > This page is generated by `RapidsConf.helpCommon` against Apache Spark 3.2.1. + > This page is generated by `RapidsConf.helpCommon` against Apache Spark 3.3.0. > Most of the content in this page is applicable to other versions of Apache Spark with > variations diff --git a/docs/dev/nvtx_ranges.md b/docs/dev/nvtx_ranges.md index 11dfed787df..87ee1f53dda 100644 --- a/docs/dev/nvtx_ranges.md +++ b/docs/dev/nvtx_ranges.md @@ -6,7 +6,7 @@ parent: Developer Overview --- - > This page is generated by `NvtxRangeDocs.help` against Apache Spark 3.2.1. + > This page is generated by `NvtxRangeDocs.help` against Apache Spark 3.3.0. 
> Most of the content in this page is applicable to other versions of Apache Spark with > variations diff --git a/docs/supported_ops.md b/docs/supported_ops.md index b92aaace943..69f9881854f 100644 --- a/docs/supported_ops.md +++ b/docs/supported_ops.md @@ -5,7 +5,7 @@ nav_order: 6 --- - > This page is generated by `SupportedOpsDocs.help` against Apache Spark 3.2.1. + > This page is generated by `SupportedOpsDocs.help` against Apache Spark 3.3.0. > Most of the content in this page is applicable to other versions of Apache Spark with > variations @@ -153,12 +153,12 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S CollectLimitExec @@ -231,12 +231,12 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S FilterExec @@ -257,12 +257,12 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S GenerateExec @@ -361,12 +361,12 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S RangeExec @@ -413,12 +413,12 @@ Accelerator supports are described below. S NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT
NS +S +S SortExec @@ -673,12 +673,12 @@ Accelerator supports are described below. NS NS NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, BINARY, CALENDAR, UDT
NS +S +S DataWritingCommandExec @@ -699,12 +699,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
NS +S +S ExecutedCommandExec @@ -751,12 +751,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
NS +S +S AtomicCreateTableAsSelectExec @@ -777,12 +777,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
NS +S +S AtomicReplaceTableAsSelectExec @@ -803,12 +803,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
NS +S +S BatchScanExec @@ -829,12 +829,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
NS +S +S OverwriteByExpressionExecV1 @@ -855,12 +855,12 @@ Accelerator supports are described below. NS S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types NULL, CALENDAR, UDT
NS +S +S BroadcastExchangeExec @@ -907,12 +907,12 @@ Accelerator supports are described below. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
Round-robin partitioning is not supported if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
Round-robin partitioning is not supported for nested structs if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
Round-robin partitioning is not supported if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
Round-robin partitioning is not supported for nested structs if spark.sql.execution.sortBeforeRepartition is true;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S BroadcastHashJoinExec @@ -1457,6 +1457,32 @@ Accelerator supports are described below. NS +PythonMapInArrowExec +The backend for Map Arrow Iterator UDF. Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled. +None +Input/Output +S +S +S +S +S +S +S +S +PS
UTC is only supported TZ for TIMESTAMP
+S +NS +NS +NS +NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types DECIMAL, NULL, BINARY, CALENDAR, MAP, UDT, DAYTIME, YEARMONTH
+NS +NS +NS + + WindowInPandasExec The backend for Window Aggregation Pandas UDF, Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled. For now it only supports row based window frame. This is disabled by default because it only supports row based frame for now @@ -1647,8 +1673,8 @@ are limited. - - +S +S result @@ -1670,8 +1696,8 @@ are limited. - - +S +S AST @@ -1694,8 +1720,8 @@ are limited. - - +NS +NS result @@ -1717,8 +1743,8 @@ are limited. - - +NS +NS Acos @@ -1941,8 +1967,8 @@ are limited. -NS -NS +S +S rhs @@ -1964,8 +1990,8 @@ are limited. -NS -NS +S +S result @@ -1987,8 +2013,8 @@ are limited. -NS -NS +S +S AST @@ -2109,12 +2135,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S result @@ -2132,12 +2158,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S AST @@ -2160,8 +2186,8 @@ are limited. NS NS NS -NS -NS +S +S result @@ -2183,8 +2209,8 @@ are limited. NS NS NS -NS -NS +S +S And @@ -4072,12 +4098,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S AST @@ -4100,8 +4126,8 @@ are limited. NS NS NS -NS -NS +S +S BRound @@ -4917,48 +4943,148 @@ are limited. -BoundReference - -Reference to a bound variable -None -project -result -S -S -S -S -S -S -S -S -PS
UTC is only supported TZ for TIMESTAMP
-S -S +BloomFilterMightContain + +Bloom filter query +None +project +lhs + + + + + + + + + + + S S -NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS -NS + + + + + + + -AST -result -S -S -S -S +rhs + + + + +S + + + + + + +S + + + + + + + + + + +result +S + + + + + + + + + + + + + + + + + + + + + +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + +BoundReference + +Reference to a bound variable +None +project +result +S +S +S +S S S S S PS
UTC is only supported TZ for TIMESTAMP
S +S +S +S NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S + + +AST +result +S +S +S +S +S +S +S +S +PS
UTC is only supported TZ for TIMESTAMP
+S NS NS NS @@ -4967,6 +5093,8 @@ are limited. NS NS NS +S +S CaseWhen @@ -5043,34 +5171,6 @@ are limited. NS -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - Cbrt `cbrt` Cube root @@ -5170,7 +5270,7 @@ are limited. Ceil -`ceil`, `ceiling` + Ceiling of a number None project @@ -5291,12 +5391,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S result @@ -5314,12 +5414,40 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S + + +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH Concat @@ -5498,34 +5626,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - Conv `conv` Convert string representing a number from one base to another @@ -5721,6 +5821,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Cosh `cosh` Hyperbolic cosine @@ -5917,34 +6045,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - CreateArray `array` Returns an array with the given elements @@ -6121,6 +6221,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + CurrentRow$ Special boundary for a window frame, indicating stopping at the current row @@ -6371,34 +6499,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - DateFormatClass `date_format` Converts timestamp to a value of string in the format specified by the date format @@ -6547,6 +6647,34 @@ are limited. 
+Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + DayOfMonth `day`, `dayofmonth` Returns the day of the month from a date or timestamp @@ -6732,7 +6860,154 @@ are limited. -S +S + + + + + + + + + + + + + + + + + + +Divide +`/` +Division +None +project +lhs + + + + + + +S + + + +S + + + + + + + + + + + +rhs + + + + + + +S + + + +S + + + + + + + + + + + +result + + + + + + +S + + + +S + + + + + + + + + + + +DivideDTInterval + +Day-time interval * operator +None +project +lhs + + + + + + + + + + + + + + + + + + +S + + + +rhs + +S +S +S +S +S +S + + + +NS + + + + + + + + + + + +result + + + @@ -6748,12 +7023,13 @@ are limited. +S -Divide -`/` -Division +DivideYMInterval + +Year-month interval * operator None project lhs @@ -6763,11 +7039,9 @@ are limited. -S -S @@ -6777,20 +7051,22 @@ are limited. + +S rhs - - - - - +S +S +S +S +S S -S +NS @@ -6809,11 +7085,9 @@ are limited. -S -S @@ -6823,6 +7097,8 @@ are limited. + +S Expression @@ -7597,7 +7873,7 @@ are limited. Floor -`floor` + Floor of a number None project @@ -8909,12 +9185,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S falseValue @@ -8932,12 +9208,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S result @@ -8955,12 +9231,12 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS -NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
NS +S +S In @@ -9396,11 +9672,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
NS +S NS @@ -9522,11 +9798,11 @@ are limited. S S NS -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
-NS +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
+PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, YEARMONTH
NS +S NS @@ -12002,32 +12278,227 @@ are limited. -PS
UTC is only supported TZ for TIMESTAMP
+PS
UTC is only supported TZ for TIMESTAMP
+ + + + + + + + + + + + + +round +PS
Literal value only
+ + + + + + + + + + + + + + + + + + + + + +result + + + + + + +S + + + + + + + + + + + + + + + +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + +Multiply +`*` +Multiplication +None +project +lhs + +S +S +S +S +S +S + + + +S + + + + + + + + + + + +rhs + +S +S +S +S +S +S + + + +S + + + + + + + + + + + +result + +S +S +S +S +S +S + + + +S + + + + + + + + + + + +AST +lhs + +NS +NS +S +S +S +S + + + +NS + + + + + + + +rhs +NS +NS +S +S +S +S +NS - - -round -PS
Literal value only
+ + +result +NS +NS +S +S +S +S +NS @@ -12039,14 +12510,18 @@ are limited. -result +MultiplyDTInterval + +Day-time interval * number +None +project +lhs -S @@ -12059,43 +12534,11 @@ are limited. +S -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - -Multiply -`*` -Multiplication -None -project -lhs +rhs S S @@ -12106,7 +12549,7 @@ are limited. -S +NS @@ -12118,18 +12561,10 @@ are limited. -rhs - -S -S -S -S -S -S +result -S @@ -12139,44 +12574,32 @@ are limited. - - -result -S -S -S -S -S -S -S +S + + +MultiplyYMInterval + +Year-month interval * number +None +project +lhs - - -AST -lhs -NS -NS -S -S -S -S -NS @@ -12186,12 +12609,13 @@ are limited. +S rhs -NS -NS +S +S S S S @@ -12213,16 +12637,9 @@ are limited. result -NS -NS -S -S -S -S -NS @@ -12232,6 +12649,13 @@ are limited. + + + + + + +S Murmur3Hash @@ -12359,6 +12783,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + NamedLambdaVariable A parameter to a higher order SQL function @@ -12485,34 +12937,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - NthValue `nth_value` nth window operator @@ -12782,6 +13206,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + ParseUrl `parse_url` Extracts a part from a URL @@ -12879,34 +13331,6 @@ are limited. 
-Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - PercentRank `percent_rank` Window function that returns the percent rank value within the aggregation window @@ -13227,6 +13651,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + PreciseTimestampConversion Expression used internally to convert the TimestampType to Long and back without losing precision, i.e. in microseconds. Used in time windowing @@ -13278,34 +13730,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - PromotePrecision PromotePrecision before arithmetic operations between DecimalType data @@ -14545,35 +14969,35 @@ are limited. -RowNumber -`row_number` -Window function that returns the index for the row within the aggregation window -None -window -ordering -S -S -S -S -S -S +RoundCeil + +Computes the ceiling of the given expression to d decimal places +None +project +value + S S -PS
UTC is only supported TZ for TIMESTAMP
S S +PS
result may round slightly differently
+PS
result may round slightly differently
+ + + S -NS -NS -NS -NS -NS -NS -NS -NS + + + + + + + + + -result +scale @@ -14596,6 +15020,29 @@ are limited. +result + +S +S +S +S +S +S + + + +S + + + + + + + + + + + Expression SQL Functions(s) Description @@ -14624,6 +15071,131 @@ are limited. YEARMONTH +RoundFloor + +Computes the floor of the given expression to d decimal places +None +project +value + +S +S +S +S +PS
result may round slightly differently
+PS
result may round slightly differently
+ + + +S + + + + + + + + + + + +scale + + + +S + + + + + + + + + + + + + + + + + + +result + +S +S +S +S +S +S + + + +S + + + + + + + + + + + +RowNumber +`row_number` +Window function that returns the index for the row within the aggregation window +None +window +ordering +S +S +S +S +S +S +S +S +PS
UTC is only supported TZ for TIMESTAMP
+S +S +S +NS +NS +NS +NS +NS +NS +NS +NS + + +result + + + +S + + + + + + + + + + + + + + + + + + ScalaUDF User Defined Function, the UDF can choose to implement a RAPIDS accelerated interface to get better performance. @@ -14874,6 +15446,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Sha1 `sha1`, `sha` Sha1 hash operator @@ -14999,34 +15599,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - ShiftLeft `shiftleft` Bitwise shift left (<<) @@ -15300,6 +15872,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Sin `sin` Sine @@ -15398,34 +15998,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - Sinh `sinh` Hyperbolic sine @@ -15746,6 +16318,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + SortOrder Sort order @@ -15797,34 +16397,6 @@ are limited. 
-Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - SparkPartitionID `spark_partition_id` Returns the current partition id @@ -16173,6 +16745,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + StringInstr `instr` Instr string operator @@ -16247,36 +16847,8 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - StringLPad -`lpad` + Pad a string on the left None project @@ -16470,7 +17042,7 @@ are limited. StringRPad -`rpad` + Pad a string on the right None project @@ -16566,6 +17138,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + StringRepeat `repeat` StringRepeat operator that repeats the given strings with numbers of times given by repeatTimes @@ -16640,34 +17240,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - StringReplace `replace` StringReplace operator @@ -16959,6 +17531,34 @@ are limited. 
+Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + StringTranslate `translate` StringTranslate operator @@ -17056,34 +17656,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - StringTrim `trim` StringTrim operator @@ -17357,6 +17929,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Substring `substr`, `substring` Substring operator @@ -17454,34 +18054,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - SubstringIndex `substring_index` substring_index operator @@ -17603,8 +18175,8 @@ are limited. -NS -NS +S +S rhs @@ -17626,8 +18198,8 @@ are limited. -NS -NS +S +S result @@ -17649,8 +18221,8 @@ are limited. -NS -NS +S +S AST @@ -17821,6 +18393,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Tanh `tanh` Hyperbolic tangent @@ -17919,34 +18519,6 @@ are limited. 
-Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - TimeAdd Adds interval to timestamp @@ -17994,7 +18566,7 @@ are limited. - +S @@ -18271,6 +18843,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + TransformKeys `transform_keys` Transform keys in a map using a transform function @@ -18323,54 +18923,26 @@ are limited. result - - - - - - - - - - - - - - - -PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
- - - - - - -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH + + + + + + + + + + + + + + + +PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types BINARY, CALENDAR, UDT, DAYTIME, YEARMONTH
+ + + + TransformValues @@ -18619,8 +19191,8 @@ are limited. -NS -NS +S +S result @@ -18642,8 +19214,8 @@ are limited. -NS -NS +S +S AST @@ -18693,6 +19265,34 @@ are limited. NS +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + UnaryPositive `positive` A numeric value with a + in front of it @@ -18717,8 +19317,8 @@ are limited. -NS -NS +S +S result @@ -18740,8 +19340,8 @@ are limited. -NS -NS +S +S AST @@ -18764,8 +19364,8 @@ are limited. -NS -NS +S +S result @@ -18787,36 +19387,8 @@ are limited. -NS -NS - - -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH +S +S UnboundedFollowing$ @@ -19130,6 +19702,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + WindowExpression Calculates a return value for every input row of a table based on a group (or "window") of rows @@ -19204,34 +19804,6 @@ are limited. S -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - WindowSpecDefinition Specification of a window function, indicating the partitioning-expression, the row ordering, and the width of the window @@ -20235,14 +20807,112 @@ are limited. 
- - -result + + +result + +S +S +S +S + + + + + + + + + + + + + + + + + +reduction +input + +S +S +S +S + + + + + + + + + + + + + + + + + +result + +S +S +S +S + + + + + + + + + + + + + + + + + +BloomFilterAggregate + +Bloom filter build +None +reduction +child + + + + +S + + + + + + + + + + + + + + + + + +estimatedItems + + + S -S -S -S @@ -20260,12 +20930,11 @@ are limited. -reduction -input +numBits + + + -S -S -S S @@ -20286,10 +20955,6 @@ are limited. result -S -S -S -S @@ -20301,6 +20966,10 @@ are limited. +S + + + @@ -20308,7 +20977,7 @@ are limited. CollectList -`collect_list` +`array_agg`, `collect_list` Collect a list of non-unique elements, not supported in reduction None aggregation @@ -20452,6 +21121,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + CollectSet `collect_set` Collect a set of unique elements, not supported in reduction @@ -20597,34 +21294,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - Count `count` Count aggregate operator @@ -20915,6 +21584,34 @@ are limited. NS +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + HyperLogLogPlusPlus `approx_count_distinct` Aggregation approximate count distinct @@ -21013,34 +21710,6 @@ are limited. 
-Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - Last `last_value`, `last` last aggregate operator @@ -21331,6 +22000,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + MaxBy `max_by` MaxBy aggregate operator. It may produce different results than CPU when multiple rows in a group have same minimum value in the ordering column and different associated values in the value column. @@ -21475,34 +22172,6 @@ are limited. NS -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - Min `min` Min aggregate operator @@ -21792,6 +22461,34 @@ are limited. NS +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + Percentile `percentile` Aggregation computing exact percentile @@ -21970,44 +22667,16 @@ are limited. - - - - -S - - - - - - - -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH + + + + +S + + + + + PivotFirst @@ -22299,6 +22968,34 @@ are limited. 
+Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + StddevSamp `std`, `stddev_samp`, `stddev` Aggregation computing sample standard deviation @@ -22444,34 +23141,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - Sum `sum` Sum aggregate operator @@ -22762,6 +23431,34 @@ are limited. +Expression +SQL Functions(s) +Description +Notes +Context +Param/Output +BOOLEAN +BYTE +SHORT +INT +LONG +FLOAT +DOUBLE +DATE +TIMESTAMP +STRING +DECIMAL +NULL +BINARY +CALENDAR +ARRAY +MAP +STRUCT +UDT +DAYTIME +YEARMONTH + + VarianceSamp `var_samp`, `variance` Aggregation computing sample variance @@ -22907,34 +23604,6 @@ are limited. -Expression -SQL Functions(s) -Description -Notes -Context -Param/Output -BOOLEAN -BYTE -SHORT -INT -LONG -FLOAT -DOUBLE -DATE -TIMESTAMP -STRING -DECIMAL -NULL -BINARY -CALENDAR -ARRAY -MAP -STRUCT -UDT -DAYTIME -YEARMONTH - - StaticInvoke StaticInvoke @@ -23014,6 +23683,57 @@ are limited. +InSubqueryExec + +Evaluates to true if values are in a subquery's result set +None +project +input +S +S +S +S +S +S +S +S +PS
UTC is only supported TZ for TIMESTAMP
+S +S +S +NS +NS +NS + +NS +NS + + + + +result +S + + + + + + + + + + + + + + + + + + + + + ScalarSubquery Subquery that will return only one row and one column @@ -23412,7 +24132,7 @@ and the accelerator produces the same result. - +NS @@ -23602,15 +24322,15 @@ and the accelerator produces the same result. DAYTIME +S +S +S +S - - - - -NS +S @@ -23619,16 +24339,16 @@ and the accelerator produces the same result. -NS +S YEARMONTH - - - - +S +S +S +S @@ -23716,8 +24436,8 @@ and the accelerator produces the same result. - - +S +S SHORT @@ -23739,8 +24459,8 @@ and the accelerator produces the same result. - - +S +S INT @@ -23762,8 +24482,8 @@ and the accelerator produces the same result. - - +S +S LONG @@ -23785,8 +24505,8 @@ and the accelerator produces the same result. - - +S +S FLOAT @@ -23900,7 +24620,7 @@ and the accelerator produces the same result. - +S @@ -24011,7 +24731,7 @@ and the accelerator produces the same result. -PS
The array's child type must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
The array's child type must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
@@ -24035,7 +24755,7 @@ and the accelerator produces the same result. -PS
the map's key and value must also support being cast to the desired child types;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
the map's key and value must also support being cast to the desired child types;
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
@@ -24059,7 +24779,7 @@ and the accelerator produces the same result. -PS
the struct's children must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT, DAYTIME, YEARMONTH
+PS
the struct's children must also support being cast to the desired child type(s);
UTC is only supported TZ for child TIMESTAMP;
unsupported child types CALENDAR, UDT
@@ -24090,15 +24810,15 @@ and the accelerator produces the same result. DAYTIME +S +S +S +S - - - - -NS +S @@ -24107,16 +24827,16 @@ and the accelerator produces the same result. -NS +S YEARMONTH - - - - +S +S +S +S @@ -24422,8 +25142,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS - - +S +S Write @@ -24445,8 +25165,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS - - +S +S HiveText @@ -24516,8 +25236,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS - - +S +S Write @@ -24539,8 +25259,8 @@ dates or timestamps, or for a lack of type coercion support. NS NS NS - - +S +S JSON @@ -24657,8 +25377,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS - - +S +S Write @@ -24680,8 +25400,8 @@ dates or timestamps, or for a lack of type coercion support. PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
PS
UTC is only supported TZ for child TIMESTAMP;
unsupported child types UDT
NS - - +S +S diff --git a/tools/generated_files/operatorsScore.csv b/tools/generated_files/operatorsScore.csv index 3901f4481b1..b082fb264ee 100644 --- a/tools/generated_files/operatorsScore.csv +++ b/tools/generated_files/operatorsScore.csv @@ -38,6 +38,7 @@ ArrowEvalPythonExec,1.2 FlatMapCoGroupsInPandasExec,3.0 FlatMapGroupsInPandasExec,1.2 MapInPandasExec,1.2 +PythonMapInArrowExec,3.0 WindowInPandasExec,1.2 WindowExec,3.0 HiveTableScanExec,3.0 @@ -84,6 +85,8 @@ BitwiseCount,4 BitwiseNot,4 BitwiseOr,4 BitwiseXor,4 +BloomFilterAggregate,4 +BloomFilterMightContain,4 BoundReference,4 CaseWhen,4 Cbrt,4 @@ -114,6 +117,8 @@ DayOfWeek,4 DayOfYear,4 DenseRank,4 Divide,4 +DivideDTInterval,4 +DivideYMInterval,4 DynamicPruningExpression,4 ElementAt,4 EndsWith,4 @@ -146,6 +151,7 @@ Hypot,4 If,4 In,4 InSet,4 +InSubqueryExec,4 InitCap,4 InputFileBlockLength,4 InputFileBlockStart,4 @@ -196,6 +202,8 @@ MonotonicallyIncreasingID,4 Month,4 MonthsBetween,4 Multiply,4 +MultiplyDTInterval,4 +MultiplyYMInterval,4 Murmur3Hash,4 NaNvl,4 NamedLambdaVariable,4 @@ -227,6 +235,8 @@ ReplicateRows,4 Reverse,4 Rint,4 Round,4 +RoundCeil,4 +RoundFloor,4 RowNumber,4 ScalaUDF,4 ScalarSubquery,4 diff --git a/tools/generated_files/supportedDataSource.csv b/tools/generated_files/supportedDataSource.csv index 68a56a20ae7..577555d6de0 100644 --- a/tools/generated_files/supportedDataSource.csv +++ b/tools/generated_files/supportedDataSource.csv @@ -1,14 +1,14 @@ Format,Direction,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH Avro,read,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO,CO CSV,read,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,NA,NA,NA,NA,NA,NA -Delta,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA -Delta,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA +Delta,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S +Delta,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S 
HiveText,read,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS HiveText,write,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,NS,NS,NS,NS,NS,NS -Iceberg,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA -Iceberg,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,NS,NS,NS,NS,NA,NA +Iceberg,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S +Iceberg,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,NS,NS,NS,NS,S,S JSON,read,S,S,S,S,S,S,S,PS,PS,S,S,NA,NS,NA,PS,NS,PS,NS,NA,NA ORC,read,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,PS,PS,PS,NS,NA,NA ORC,write,S,S,S,S,S,S,S,S,PS,S,S,NA,NS,NA,PS,PS,PS,NS,NA,NA -Parquet,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA -Parquet,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,NA,NA +Parquet,read,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S +Parquet,write,S,S,S,S,S,S,S,S,PS,S,S,NA,S,NA,PS,PS,PS,NS,S,S diff --git a/tools/generated_files/supportedExecs.csv b/tools/generated_files/supportedExecs.csv index 317fb6c5ca3..8afd7c5d795 100644 --- a/tools/generated_files/supportedExecs.csv +++ b/tools/generated_files/supportedExecs.csv @@ -1,15 +1,15 @@ Exec,Supported,Notes,Params,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH -CoalesceExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CoalesceExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S CollectLimitExec,NS,This is disabled by default because Collect Limit replacement can be slower on the GPU; if huge number of rows in a batch it could help by limiting the number of rows transferred from GPU to CPU,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS ExpandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS -FileSourceScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -FilterExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +FileSourceScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S 
+FilterExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S GenerateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS GlobalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS LocalLimitExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS -ProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +ProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S RangeExec,S,None,Input/Output,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -SampleExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS +SampleExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,S,S SortExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS SubqueryBroadcastExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S TakeOrderedAndProjectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,PS,PS,NS,NS,NS @@ -18,16 +18,16 @@ AQEShuffleReadExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS HashAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS ObjectHashAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,PS,NS,PS,PS,PS,NS,NS,NS SortAggregateExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -InMemoryTableScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,PS,PS,PS,NS,NS,NS -DataWritingCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,PS,NS,S,NS,PS,PS,PS,NS,NS,NS +InMemoryTableScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,NS,NS,PS,PS,PS,NS,S,S +DataWritingCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,PS,NS,S,NS,PS,PS,PS,NS,S,S ExecutedCommandExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S -AppendDataExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS 
-AtomicCreateTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS -AtomicReplaceTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS -BatchScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS -OverwriteByExpressionExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,NS,NS +AppendDataExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +AtomicCreateTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +AtomicReplaceTableAsSelectExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +BatchScanExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S +OverwriteByExpressionExecV1,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,NS,S,NS,PS,PS,PS,NS,S,S BroadcastExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -ShuffleExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +ShuffleExchangeExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S BroadcastHashJoinExec,S,None,leftKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS BroadcastHashJoinExec,S,None,rightKeys,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,PS,NS,NS,NS BroadcastHashJoinExec,S,None,condition,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -48,6 +48,7 @@ ArrowEvalPythonExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,P FlatMapCoGroupsInPandasExec,NS,This is disabled by default because Performance is not ideal with many small groups,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS FlatMapGroupsInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS MapInPandasExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS +PythonMapInArrowExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,PS,NS,NS,NS WindowInPandasExec,NS,This is disabled by default because it 
only supports row based frame for now,Input/Output,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,PS,NS,NS,NS,NS,NS WindowExec,S,None,partitionSpec,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,PS,NS,NS,NS WindowExec,S,None,Input/Output,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS diff --git a/tools/generated_files/supportedExprs.csv b/tools/generated_files/supportedExprs.csv index b2de26d9158..393ee152dd4 100644 --- a/tools/generated_files/supportedExprs.csv +++ b/tools/generated_files/supportedExprs.csv @@ -1,8 +1,8 @@ Expression,Supported,SQL Func,Notes,Context,Params,BOOLEAN,BYTE,SHORT,INT,LONG,FLOAT,DOUBLE,DATE,TIMESTAMP,STRING,DECIMAL,NULL,BINARY,CALENDAR,ARRAY,MAP,STRUCT,UDT,DAYTIME,YEARMONTH -Abs,S,`abs`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -Abs,S,`abs`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -Abs,S,`abs`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA -Abs,S,`abs`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +Abs,S,`abs`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,S +Abs,S,`abs`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,S,S +Abs,S,`abs`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NS,NS +Abs,S,`abs`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NS,NS Acos,S,`acos`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Acos,S,`acos`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Acos,S,`acos`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -11,16 +11,16 @@ Acosh,S,`acosh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,N Acosh,S,`acosh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Acosh,S,`acosh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
Acosh,S,`acosh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Add,S,`+`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS -Add,S,`+`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS -Add,S,`+`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Add,S,`+`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Add,S,`+`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Add,S,`+`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S Add,S,`+`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS Add,S,`+`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS Add,S,`+`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS -Alias,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -Alias,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -Alias,S, ,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS -Alias,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +Alias,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +Alias,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +Alias,S, ,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S +Alias,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S And,S,`and`,None,project,lhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA And,S,`and`,None,project,rhs,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA And,S,`and`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -92,8 +92,8 @@ Atanh,S,`atanh`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,N Atanh,S,`atanh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Atanh,S,`atanh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
Atanh,S,`atanh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -AttributeReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -AttributeReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +AttributeReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +AttributeReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S BRound,S,`bround`,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA BRound,S,`bround`,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BRound,S,`bround`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -125,8 +125,11 @@ BitwiseXor,S,`^`,None,project,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BitwiseXor,S,`^`,None,AST,lhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BitwiseXor,S,`^`,None,AST,rhs,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BitwiseXor,S,`^`,None,AST,result,NA,NS,NS,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -BoundReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -BoundReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS +BloomFilterMightContain,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA +BloomFilterMightContain,S, ,None,project,rhs,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA +BloomFilterMightContain,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BoundReference,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +BoundReference,S, ,None,AST,result,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,S,S CaseWhen,S,`when`,None,project,predicate,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA CaseWhen,S,`when`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS 
CaseWhen,S,`when`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS @@ -134,12 +137,12 @@ Cbrt,S,`cbrt`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA, Cbrt,S,`cbrt`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Cbrt,S,`cbrt`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Cbrt,S,`cbrt`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Ceil,S,`ceil`; `ceiling`,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -Ceil,S,`ceil`; `ceiling`,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Ceil,S, ,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Ceil,S, ,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA CheckOverflow,S, ,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA CheckOverflow,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -Coalesce,S,`coalesce`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -Coalesce,S,`coalesce`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +Coalesce,S,`coalesce`,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +Coalesce,S,`coalesce`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S Concat,S,`concat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,PS,NA,NA,NA,NA,NA Concat,S,`concat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NS,NA,PS,NA,NA,NA,NA,NA ConcatWs,S,`concat_ws`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,S,NA,NA,NA,NA,NA @@ -197,6 +200,12 @@ DenseRank,S,`dense_rank`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,N Divide,S,`/`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA Divide,S,`/`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA 
Divide,S,`/`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +DivideDTInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA +DivideDTInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +DivideDTInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA +DivideYMInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S +DivideYMInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +DivideYMInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S DynamicPruningExpression,S, ,None,project,input,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA DynamicPruningExpression,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,S,S,S ElementAt,S,`element_at`,None,project,array/map,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,PS,NA,NA,NA,NA @@ -226,8 +235,8 @@ Expm1,S,`expm1`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Expm1,S,`expm1`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Flatten,S,`flatten`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA Flatten,S,`flatten`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA -Floor,S,`floor`,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA -Floor,S,`floor`,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Floor,S, ,None,project,input,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +Floor,S, ,None,project,result,NA,NA,NA,NA,S,NA,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA FormatNumber,S,`format_number`,None,project,x,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA FormatNumber,S,`format_number`,None,project,d,NA,NA,NA,PS,NA,NA,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
FormatNumber,S,`format_number`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -275,9 +284,9 @@ Hypot,S,`hypot`,None,project,lhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA, Hypot,S,`hypot`,None,project,rhs,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Hypot,S,`hypot`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA If,S,`if`,None,project,predicate,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -If,S,`if`,None,project,trueValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -If,S,`if`,None,project,falseValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -If,S,`if`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +If,S,`if`,None,project,trueValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +If,S,`if`,None,project,falseValue,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S +If,S,`if`,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,S In,S,`in`,None,project,value,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA In,S,`in`,None,project,list,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,NS,NS,NS,NS,NA,NS,NS,NA,NA In,S,`in`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -293,11 +302,11 @@ IntegralDivide,S,`div`,None,project,rhs,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,S,NA,NA,NA, IntegralDivide,S,`div`,None,project,result,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA IsNaN,S,`isnan`,None,project,input,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA IsNaN,S,`isnan`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -IsNotNull,S,`isnotnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +IsNotNull,S,`isnotnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,NS IsNotNull,S,`isnotnull`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA IsNotNull,S,`isnotnull`,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS 
IsNotNull,S,`isnotnull`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -IsNull,S,`isnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +IsNull,S,`isnull`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,S,NS IsNull,S,`isnull`,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA IsNull,S,`isnull`,None,AST,input,S,S,S,S,S,S,S,S,PS,S,NS,NS,NS,NS,NS,NS,NS,NS,NS,NS IsNull,S,`isnull`,None,AST,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -400,6 +409,12 @@ Multiply,S,`*`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,N Multiply,S,`*`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA Multiply,S,`*`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA Multiply,S,`*`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +MultiplyDTInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA +MultiplyDTInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +MultiplyDTInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA +MultiplyYMInterval,S, ,None,project,lhs,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S +MultiplyYMInterval,S, ,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NA,NA,NA,NA,NA,NA,NA +MultiplyYMInterval,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S Murmur3Hash,S,`hash`,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS Murmur3Hash,S,`hash`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA NaNvl,S,`nanvl`,None,project,lhs,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -488,6 +503,12 @@ Rint,S,`rint`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA, Round,S,`round`,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA 
Round,S,`round`,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Round,S,`round`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundCeil,S, ,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundCeil,S, ,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundCeil,S, ,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundFloor,S, ,None,project,value,NA,S,S,S,S,PS,PS,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundFloor,S, ,None,project,scale,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +RoundFloor,S, ,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA RowNumber,S,`row_number`,None,window,ordering,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NS,NS,NS,NS,NS RowNumber,S,`row_number`,None,window,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ScalaUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS @@ -552,18 +573,18 @@ StartsWith,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,N StringInstr,S,`instr`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringInstr,S,`instr`,None,project,substr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringInstr,S,`instr`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S,`lpad`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S,`lpad`,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S,`lpad`,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringLPad,S,`lpad`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S, ,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S, ,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S, 
,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringLPad,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringLocate,S,`locate`; `position`,None,project,substr,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringLocate,S,`locate`; `position`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringLocate,S,`locate`; `position`,None,project,start,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringLocate,S,`locate`; `position`,None,project,result,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S,`rpad`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S,`rpad`,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S,`rpad`,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -StringRPad,S,`rpad`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S, ,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S, ,None,project,len,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S, ,None,project,pad,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +StringRPad,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringRepeat,S,`repeat`,None,project,input,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringRepeat,S,`repeat`,None,project,repeatTimes,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA StringRepeat,S,`repeat`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -602,9 +623,9 @@ SubstringIndex,S,`substring_index`,None,project,str,NA,NA,NA,NA,NA,NA,NA,NA,NA,S SubstringIndex,S,`substring_index`,None,project,delim,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA 
SubstringIndex,S,`substring_index`,None,project,count,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA SubstringIndex,S,`substring_index`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -Subtract,S,`-`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS -Subtract,S,`-`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS -Subtract,S,`-`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +Subtract,S,`-`,None,project,lhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Subtract,S,`-`,None,project,rhs,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +Subtract,S,`-`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S Subtract,S,`-`,None,AST,lhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS Subtract,S,`-`,None,AST,rhs,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS Subtract,S,`-`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS @@ -617,7 +638,7 @@ Tanh,S,`tanh`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA Tanh,S,`tanh`,None,AST,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA Tanh,S,`tanh`,None,AST,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA TimeAdd,S, ,None,project,start,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -TimeAdd,S, ,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA +TimeAdd,S, ,None,project,interval,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,S,NA TimeAdd,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ToDegrees,S,`degrees`,None,project,input,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ToDegrees,S,`degrees`,None,project,result,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -641,14 +662,14 @@ TruncDate,S,`trunc`,None,project,result,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA 
TruncTimestamp,S,`date_trunc`,None,project,format,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA TruncTimestamp,S,`date_trunc`,None,project,date,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA TruncTimestamp,S,`date_trunc`,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -UnaryMinus,S,`negative`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS -UnaryMinus,S,`negative`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryMinus,S,`negative`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +UnaryMinus,S,`negative`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S UnaryMinus,S,`negative`,None,AST,input,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS UnaryMinus,S,`negative`,None,AST,result,NA,NS,NS,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS -UnaryPositive,S,`positive`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS -UnaryPositive,S,`positive`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,NS,NS -UnaryPositive,S,`positive`,None,AST,input,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS -UnaryPositive,S,`positive`,None,AST,result,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,NS,NS +UnaryPositive,S,`positive`,None,project,input,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +UnaryPositive,S,`positive`,None,project,result,NA,S,S,S,S,S,S,NA,NA,NA,S,NA,NA,NS,NA,NA,NA,NA,S,S +UnaryPositive,S,`positive`,None,AST,input,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,S,S +UnaryPositive,S,`positive`,None,AST,result,NA,S,S,S,S,S,S,NA,NA,NA,NS,NA,NA,NS,NA,NA,NA,NA,S,S UnboundedFollowing$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA UnboundedPreceding$,S, ,None,project,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA 
UnixTimestamp,S,`unix_timestamp`,None,project,timeExp,NA,NA,NA,NA,NA,NA,NA,S,PS,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA @@ -706,12 +727,16 @@ BitXorAgg,S,`bit_xor`,None,aggregation,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA, BitXorAgg,S,`bit_xor`,None,aggregation,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BitXorAgg,S,`bit_xor`,None,reduction,input,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA BitXorAgg,S,`bit_xor`,None,reduction,result,NA,S,S,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA -CollectList,S,`collect_list`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -CollectList,S,`collect_list`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA -CollectList,S,`collect_list`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -CollectList,S,`collect_list`,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA -CollectList,S,`collect_list`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS -CollectList,S,`collect_list`,None,window,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +BloomFilterAggregate,S, ,None,reduction,child,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BloomFilterAggregate,S, ,None,reduction,estimatedItems,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BloomFilterAggregate,S, ,None,reduction,numBits,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +BloomFilterAggregate,S, ,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,S,NA,NA,NA,NA,NA,NA,NA +CollectList,S,`array_agg`; `collect_list`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`array_agg`; `collect_list`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectList,S,`array_agg`; `collect_list`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`array_agg`; 
`collect_list`,None,reduction,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA +CollectList,S,`array_agg`; `collect_list`,None,window,input,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS +CollectList,S,`array_agg`; `collect_list`,None,window,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA CollectSet,S,`collect_set`,None,aggregation,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS CollectSet,S,`collect_set`,None,aggregation,result,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,PS,NA,NA,NA,NA,NA CollectSet,S,`collect_set`,None,reduction,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,PS,NS,PS,NS,NS,NS @@ -811,6 +836,8 @@ VarianceSamp,S,`var_samp`; `variance`,None,window,result,NA,NA,NA,NA,NA,NA,NS,NA StaticInvoke,S, ,The supported types are not deterministic since it's a dynamic expression,project,result,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS,PS NormalizeNaNAndZero,S, ,None,project,input,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA NormalizeNaNAndZero,S, ,None,project,result,NA,NA,NA,NA,NA,S,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA +InSubqueryExec,S, ,None,project,input,S,S,S,S,S,S,S,S,PS,S,S,S,NS,NS,NS,NA,NS,NS,NA,NA +InSubqueryExec,S, ,None,project,result,S,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA,NA ScalarSubquery,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,NS,PS,PS,PS,NS,NS,NS HiveGenericUDF,S, ,None,project,param,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS HiveGenericUDF,S, ,None,project,result,S,S,S,S,S,S,S,S,PS,S,S,S,S,S,PS,PS,PS,NS,NS,NS From 404a06337a07a99d065d8f6d7ae2a78937a35410 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Mon, 19 Jan 2026 18:13:33 +0800 Subject: [PATCH 45/59] Revert a inadvertent change Signed-off-by: Chong Gao --- .../nvidia/spark/rapids/shims/spark401/SparkShimsSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql-plugin/src/test/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimsSuite.scala 
b/sql-plugin/src/test/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimsSuite.scala index cd2130ef66e..7e0d19e8ce8 100644 --- a/sql-plugin/src/test/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimsSuite.scala +++ b/sql-plugin/src/test/spark401/scala/com/nvidia/spark/rapids/shims/spark401/SparkShimsSuite.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025-2026, NVIDIA CORPORATION. + * Copyright (c) 2025, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. From 5e4bfff537cf5e14625c5327229a7d1c4df9d097 Mon Sep 17 00:00:00 2001 From: Gera Shegalov Date: Sat, 24 Jan 2026 01:43:30 -0800 Subject: [PATCH 46/59] Java8 Target Signed-off-by: Gera Shegalov --- jdk-profiles/pom.xml | 33 +++++++++++++++++-- pom.xml | 20 +---------- scala2.13/jdk-profiles/pom.xml | 33 +++++++++++++++++-- scala2.13/pom.xml | 20 +---------- .../spark/rapids/AvroDataFileReader.scala | 2 +- .../RapidsShuffleHeartbeatManager.scala | 15 ++++++--- .../rapids/python/PythonWorkerSemaphore.scala | 13 +++++--- .../GpuAtomicCreateTableAsSelectExec.scala | 6 ++-- .../spark/rapids/shims/SparkShims.scala | 4 +-- .../GpuAtomicReplaceTableAsSelectExec.scala | 13 +++++--- 10 files changed, 97 insertions(+), 62 deletions(-) diff --git a/jdk-profiles/pom.xml b/jdk-profiles/pom.xml index 3d743f857bb..157a02acdb6 100644 --- a/jdk-profiles/pom.xml +++ b/jdk-profiles/pom.xml @@ -42,7 +42,6 @@ net.alchim31.maven scala-maven-plugin - ${scala.plugin.version} ${java.major.version} @@ -54,8 +53,11 @@ jdk9plus - [9,) + + buildver + !411 + @@ -63,7 +65,6 @@ net.alchim31.maven scala-maven-plugin - ${scala.plugin.version} ${java.major.version} @@ -72,5 +73,31 @@ + + release411 + + + buildver + 411 + + + + + -Xlint:all,-serial,-path,-try,-processing|-target|${java.major.version} + + + + + + net.alchim31.maven + scala-maven-plugin + + + + + \ No newline at end of file diff --git a/pom.xml b/pom.xml index 
bc957949359..2aaf24181a2 100644 --- a/pom.xml +++ b/pom.xml @@ -780,7 +780,6 @@ 411 ${spark411.version} - 17 2.13.17 ${spark411.version} 1.13.1 @@ -789,22 +788,6 @@ [17,) Support for Spark ${spark.version} is only available with Java 17+ - - - - net.alchim31.maven - scala-maven-plugin - - - -release:17 - -feature - -unchecked - -deprecation - - - - - delta-lake/delta-stub @@ -1009,7 +992,7 @@ 4.1.1 3.12.4 - 4.9.2 + 4.9.8 3.1.1 3.3.0 2.0.2 @@ -1513,7 +1496,6 @@ This will force full Scala code rebuild in downstream modules. -Ywarn-unused:locals,patvars,privates -Wconf:cat=deprecation:e,any:e -Wconf:cat=scaladoc:wv - -Wconf:cat=lint-multiarg-infix:wv -Wconf:cat=other-nullary-override:e -Wconf:msg=^(?=.*?method|value|type|object|trait|inheritance)(?=.*?deprecated)(?=.*?since 2.13).+$:s -Wconf:msg=^(?=.*?Widening conversion from)(?=.*?is deprecated because it loses precision).+$:s diff --git a/scala2.13/jdk-profiles/pom.xml b/scala2.13/jdk-profiles/pom.xml index d20e01a02ef..c157b350773 100644 --- a/scala2.13/jdk-profiles/pom.xml +++ b/scala2.13/jdk-profiles/pom.xml @@ -42,7 +42,6 @@ net.alchim31.maven scala-maven-plugin - ${scala.plugin.version} ${java.major.version} @@ -54,8 +53,11 @@ jdk9plus - [9,) + + buildver + !411 + @@ -63,7 +65,6 @@ net.alchim31.maven scala-maven-plugin - ${scala.plugin.version} ${java.major.version} @@ -72,5 +73,31 @@ + + release411 + + + buildver + 411 + + + + + -Xlint:all,-serial,-path,-try,-processing|-target|${java.major.version} + + + + + + net.alchim31.maven + scala-maven-plugin + + + + + \ No newline at end of file diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index 1251e6bb81b..91e4b913c80 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -780,7 +780,6 @@ 411 ${spark411.version} - 17 2.13.17 ${spark411.version} 1.13.1 @@ -789,22 +788,6 @@ [17,) Support for Spark ${spark.version} is only available with Java 17+ - - - - net.alchim31.maven - scala-maven-plugin - - - -release:17 - -feature - -unchecked - -deprecation - - - - - 
delta-lake/delta-stub @@ -1009,7 +992,7 @@ 4.1.1 3.12.4 - 4.9.2 + 4.9.8 3.1.1 3.3.0 2.0.2 @@ -1513,7 +1496,6 @@ This will force full Scala code rebuild in downstream modules. -Ywarn-unused:locals,patvars,privates -Wconf:cat=deprecation:e,any:e -Wconf:cat=scaladoc:wv - -Wconf:cat=lint-multiarg-infix:wv -Wconf:cat=other-nullary-override:e -Wconf:msg=^(?=.*?method|value|type|object|trait|inheritance)(?=.*?deprecated)(?=.*?since 2.13).+$:s -Wconf:msg=^(?=.*?Widening conversion from)(?=.*?is deprecated because it loses precision).+$:s diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/AvroDataFileReader.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/AvroDataFileReader.scala index 0e88b974589..ef8b43e7849 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/AvroDataFileReader.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/AvroDataFileReader.scala @@ -185,7 +185,7 @@ abstract class AvroFileReader(si: SeekableInput) extends AutoCloseable { var l = vin.readMapStart().toInt if (l > 0) { do { - for (i <- 1 to l) { + for (_ <- 1 to l) { val key = vin.readString(null).toString val value = vin.readBytes(null) val bb = new Array[Byte](value.remaining()) diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsShuffleHeartbeatManager.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsShuffleHeartbeatManager.scala index 09d84d13696..77d6a6df52e 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsShuffleHeartbeatManager.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/RapidsShuffleHeartbeatManager.scala @@ -18,6 +18,7 @@ package com.nvidia.spark.rapids import java.util.concurrent.{Executors, ScheduledExecutorService, TimeUnit} +import scala.annotation.nowarn import scala.collection.mutable.ArrayBuffer import com.nvidia.spark.rapids.jni.RmmSpark @@ -110,7 +111,8 @@ class RapidsShuffleHeartbeatManager(heartbeatIntervalMillis: Long, while (iter.hasNext && !done) { val entry = 
iter.next() - if (entry.registrationOrder >= lastRegistration.lastRegistrationOrderSeen.getValue) { + if (entry.registrationOrder >= (lastRegistration.lastRegistrationOrderSeen.getValue: + @nowarn("msg=getValue in class MutableLong is deprecated"))) { if (entry.id != id) { logDebug(s"Found new executor (to $id): $entry while handling a heartbeat.") newExecutors += entry.id @@ -126,7 +128,8 @@ class RapidsShuffleHeartbeatManager(heartbeatIntervalMillis: Long, // update this executor's registration with a new heartbeat time, and that last order // from the executors list, indicating the order we should stop at next time lastRegistration.lastHeartbeatMillis.setValue(getCurrentTimeMillis) - lastRegistration.lastRegistrationOrderSeen.setValue(registrationOrder) + (lastRegistration.lastRegistrationOrderSeen.setValue( + registrationOrder): @nowarn("msg=getValue in class MutableLong is deprecated")) // since we updated our heartbeat, update our min-heap leastRecentHeartbeat.priorityUpdated(lastRegistration.id) @@ -143,12 +146,14 @@ class RapidsShuffleHeartbeatManager(heartbeatIntervalMillis: Long, private def removeDeadExecutors(currentTime: Long): Unit = { val leastRecentHb = leastRecentHeartbeat.peek() // look at the executor that is lagging most if (leastRecentHb != null && - isStaleHeartbeat( - executorRegistrations.get(leastRecentHb).lastHeartbeatMillis.getValue, currentTime)) { + (isStaleHeartbeat( + executorRegistrations.get(leastRecentHb).lastHeartbeatMillis.getValue, + currentTime): @nowarn("msg=getValue in class MutableLong is deprecated"))) { // make a new buffer of alive executors and replace the old one val aliveExecutors = new ArrayBuffer[ExecutorRegistration]() executors.foreach { e => - if (isStaleHeartbeat(e.lastHeartbeatMillis.getValue, currentTime)) { + if ((isStaleHeartbeat(e.lastHeartbeatMillis.getValue, + currentTime): @nowarn("msg=getValue in class MutableLong is deprecated"))) { logDebug(s"Stale exec, removing $e") executorRegistrations.remove(e.id) 
leastRecentHeartbeat.remove(e.id) diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/python/PythonWorkerSemaphore.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/python/PythonWorkerSemaphore.scala index d576a1e0958..c3961db269b 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/python/PythonWorkerSemaphore.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/python/PythonWorkerSemaphore.scala @@ -18,6 +18,8 @@ package com.nvidia.spark.rapids.python import java.util.concurrent.{ConcurrentHashMap, Semaphore} +import scala.annotation.nowarn + import com.nvidia.spark.rapids.RapidsConf import com.nvidia.spark.rapids.ScalableTaskCompletion.onTaskCompletion import com.nvidia.spark.rapids.python.PythonConfEntries.CONCURRENT_PYTHON_WORKERS @@ -117,9 +119,11 @@ private final class PythonWorkerSemaphore(tasksPerGpu: Int) extends Logging { def releaseIfNecessary(context: TaskContext): Unit = { val taskAttemptId = context.taskAttemptId() val refs = activeTasks.get(taskAttemptId) - if (refs != null && refs.getValue > 0) { + if (refs != null && + (refs.getValue > 0: @nowarn("msg=getValue in class MutableInt is deprecated"))) { logDebug(s"Task $taskAttemptId releasing GPU for python worker") - semaphore.release(refs.getValue) + (semaphore.release( + refs.getValue): @nowarn("msg=getValue in class MutableInt is deprecated")) refs.setValue(0) } } @@ -130,9 +134,10 @@ private final class PythonWorkerSemaphore(tasksPerGpu: Int) extends Logging { if (refs == null) { throw new IllegalStateException(s"Completion of unknown task $taskAttemptId") } - if (refs.getValue > 0) { + if ((refs.getValue > 0: @nowarn("msg=getValue in class MutableInt is deprecated"))) { logDebug(s"Task $taskAttemptId releasing all GPU resources for python worker") - semaphore.release(refs.getValue) + (semaphore.release( + refs.getValue): @nowarn("msg=getValue in class MutableInt is deprecated")) } } diff --git 
a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala index 1813b788d49..161388d2e0f 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicCreateTableAsSelectExec.scala @@ -21,6 +21,7 @@ spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.v2.rapids +import scala.annotation.nowarn import scala.collection.JavaConverters._ import com.nvidia.spark.rapids.GpuExec @@ -67,9 +68,10 @@ case class GpuAtomicCreateTableAsSelectExec( throw QueryCompilationErrors.tableAlreadyExistsError(ident) } - val stagedTable = catalog.stageCreate( + val stagedTable = (catalog.stageCreate( ident, getV2Columns(query.schema, catalog.useNullableQuerySchema), - partitioning.toArray, properties.asJava) + partitioning.toArray, + properties.asJava): @nowarn("msg=stageCreate in trait StagingTableCatalog is deprecated")) writeToTable(catalog, stagedTable, writeOptions, ident, query, overwrite = false) } diff --git a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala index 5258335519b..434203189da 100644 --- a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala @@ -37,12 +37,12 @@ object SparkShimImpl extends Spark400PlusCommonShims with RebaseShims { GpuOverrides.expr[CollationAwareMurmur3Hash]( "Collation-aware murmur3 hash operator", HashExprChecks.murmur3ProjectChecks, - Murmur3HashExprMeta + Murmur3HashExprMeta.apply ), 
GpuOverrides.expr[CollationAwareXxHash64]( "Collation-aware xxhash64 operator", HashExprChecks.xxhash64ProjectChecks, - XxHash64ExprMeta + XxHash64ExprMeta.apply ) ).map(r => (r.getClassFor.asSubclass(classOf[Expression]), r)).toMap // Include TimeAddShims for TimestampAddInterval support in 4.1.0 diff --git a/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala index d355a6b51bd..4cd0da6c0a4 100644 --- a/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala +++ b/sql-plugin/src/main/spark411/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala @@ -19,6 +19,7 @@ spark-rapids-shim-json-lines ***/ package org.apache.spark.sql.execution.datasources.v2.rapids +import scala.annotation.nowarn import scala.collection.JavaConverters._ import com.nvidia.spark.rapids.GpuExec @@ -70,12 +71,16 @@ case class GpuAtomicReplaceTableAsSelectExec( invalidateCache(catalog, ident) } val staged = if (orCreate) { - catalog.stageCreateOrReplace( - ident, columns, partitioning.toArray, properties.asJava) + (catalog.stageCreateOrReplace( + ident, columns, partitioning.toArray, + properties.asJava + ): @nowarn("msg=stageCreateOrReplace in trait StagingTableCatalog is deprecated")) } else if (catalog.tableExists(ident)) { try { - catalog.stageReplace( - ident, columns, partitioning.toArray, properties.asJava) + (catalog.stageReplace( + ident, columns, partitioning.toArray, + properties.asJava + ): @nowarn("msg=stageReplace in trait StagingTableCatalog is deprecated")) } catch { case e: NoSuchTableException => throw QueryCompilationErrors.cannotReplaceMissingTableError(ident, Some(e)) From 9b8131158bd9f64acbcb9ffb8ff74b8dbe1ef888 Mon Sep 17 00:00:00 2001 From: Gera Shegalov 
Date: Sat, 24 Jan 2026 11:02:04 -0800 Subject: [PATCH 47/59] jdk8 target Signed-off-by: Gera Shegalov --- jdk-profiles/pom.xml | 14 +++++++------- scala2.13/jdk-profiles/pom.xml | 14 +++++++------- scala2.13/sql-plugin-api/pom.xml | 25 +++++++++++++++++++++++++ sql-plugin-api/pom.xml | 25 +++++++++++++++++++++++++ 4 files changed, 64 insertions(+), 14 deletions(-) diff --git a/jdk-profiles/pom.xml b/jdk-profiles/pom.xml index 157a02acdb6..a8b04a0c61b 100644 --- a/jdk-profiles/pom.xml +++ b/jdk-profiles/pom.xml @@ -81,19 +81,19 @@ 411 - - - -Xlint:all,-serial,-path,-try,-processing|-target|${java.major.version} - net.alchim31.maven scala-maven-plugin + + + -Xlint:all,-serial,-path,-try,-processing|-target|${java.major.version} + diff --git a/scala2.13/jdk-profiles/pom.xml b/scala2.13/jdk-profiles/pom.xml index c157b350773..352333211d2 100644 --- a/scala2.13/jdk-profiles/pom.xml +++ b/scala2.13/jdk-profiles/pom.xml @@ -81,19 +81,19 @@ 411 - - - -Xlint:all,-serial,-path,-try,-processing|-target|${java.major.version} - net.alchim31.maven scala-maven-plugin + + + -Xlint:all,-serial,-path,-try,-processing|-target|${java.major.version} + diff --git a/scala2.13/sql-plugin-api/pom.xml b/scala2.13/sql-plugin-api/pom.xml index c3c28b8c3c9..4644031778f 100644 --- a/scala2.13/sql-plugin-api/pom.xml +++ b/scala2.13/sql-plugin-api/pom.xml @@ -61,4 +61,29 @@ + + + release411 + + + buildver + 411 + + + + + + + net.alchim31.maven + scala-maven-plugin + + ${java.major.version} + -Xlint:all,-serial,-path,-try,-processing + + + + + + + \ No newline at end of file diff --git a/sql-plugin-api/pom.xml b/sql-plugin-api/pom.xml index 549b75afe69..7fd3d79e075 100644 --- a/sql-plugin-api/pom.xml +++ b/sql-plugin-api/pom.xml @@ -61,4 +61,29 @@ + + + release411 + + + buildver + 411 + + + + + + + net.alchim31.maven + scala-maven-plugin + + ${java.major.version} + -Xlint:all,-serial,-path,-try,-processing + + + + + + + \ No newline at end of file From 0d5157a7b7a523a48ad8cce7bade59f343b65a8a Mon 
Sep 17 00:00:00 2001 From: Gera Shegalov Date: Tue, 27 Jan 2026 03:19:55 -0800 Subject: [PATCH 48/59] Upgrade to Scala 2.13.18 and modernize unused warnings configuration This commit includes the following changes: - Upgrade Scala 2.13 from version 2.13.14 to 2.13.18 - Modernize compiler warning flags by replacing the deprecated -Ywarn-unused:locals,patvars,privates with more granular -Wconf and -Wunused syntax for better control over unused code detection - Remove unused imports across Delta Lake and SQL plugin files identified by stricter compiler settings - Simplify Scala 2.13 build profile handling in buildall script by consolidating POM file selection and removing redundant profile-specific version collection logic - Update documentation references from "unshimmed-common-from-spark320.txt" to "unshimmed-common-from-single-shim.txt" to reflect generalized shim naming - Add --scala213 command-line option to buildall for explicit Scala 2.13 builds Signed-off-by: Gera Shegalov --- build/buildall | 24 ++++++++----------- .../GpuDeltaParquetFileFormatBase.scala | 1 - .../rapids/GpuOptimizeWriteExchangeExec.scala | 4 +--- .../sql/delta/hooks/GpuAutoCompact.scala | 3 --- .../sql/delta/rapids/GpuWriteIntoDelta.scala | 2 -- .../rapids/delta40x/GpuMergeIntoCommand.scala | 7 +++--- .../delta40x/GpuOptimisticTransaction.scala | 3 +-- dist/README.md | 2 +- dist/build/package-parallel-worlds.py | 9 +++---- dist/maven-antrun/build-parallel-worlds.xml | 2 +- dist/scripts/binary-dedupe.sh | 13 +++++----- ... 
=> unshimmed-common-from-single-shim.txt} | 0 pom.xml | 16 +++++++++---- scala2.13/pom.xml | 16 +++++++++---- ...mmedExecutionPlanCaptureCallbackImpl.scala | 2 +- .../rapids/shims/Spark320PlusShims.scala | 1 - .../spark/rapids/shims/RoundShims.scala | 1 + .../spark/rapids/shims/CudfUnsafeRow.scala | 2 -- .../spark/rapids/shims/SparkShims.scala | 2 -- .../rapids/shims/TrampolineConnectShims.scala | 16 ++++++------- .../spark/rapids/shims/SparkShims.scala | 1 - .../rapids/shims/GpuOneRowRelationExec.scala | 2 +- .../spark/rapids/shims/SparkShims.scala | 1 - 23 files changed, 62 insertions(+), 68 deletions(-) rename dist/{unshimmed-common-from-spark320.txt => unshimmed-common-from-single-shim.txt} (100%) diff --git a/build/buildall b/build/buildall index 4ec9e691c0d..18162fc835c 100755 --- a/build/buildall +++ b/build/buildall @@ -39,7 +39,7 @@ function print_usage() { echo " generate projects for Bloop clients: IDE (Scala Metals, IntelliJ) or Bloop CLI" echo " -p=DIST_PROFILE, --profile=DIST_PROFILE" echo " use this profile for the dist module, default: noSnapshots, also supported: snapshots, minimumFeatureVersionMix," - echo " snapshotsWithDatabricks, noSnapshotsWithDatabricks, noSnapshotsScala213, snapshotsScala213." + echo " snapshotsWithDatabricks, noSnapshotsWithDatabricks" echo " NOTE: the Databricks-related spark3XYdb shims are not built locally, the jars are fetched prebuilt from a" echo " . remote Maven repo. You can also supply a comma-separated list of build versions. E.g., --profile=330,331 will" echo " build only the distribution jar only for 3.3.0 and 3.3.1" @@ -54,6 +54,8 @@ function print_usage() { echo " use this option to build project with maven. 
E.g., --option='-Dcudf.version=cuda12'" echo " --rebuild-dist-only" echo " repackage the dist module artifact using installed dependencies" + echo " --scala213" + echo " build 2.13 shims" } function bloopInstall() { @@ -176,32 +178,26 @@ fi export MVN="mvn -Dmaven.wagon.http.retryHandler.count=3 ${MVN_OPT}" if [[ "$SCALA213" == "1" ]]; then + POM_FILE="scala2.13/pom.xml" MVN="$MVN -f scala2.13/" - DIST_PROFILE=${DIST_PROFILE:-"noSnapshotsScala213"} $(dirname $0)/make-scala-version-build-files.sh 2.13 -else - DIST_PROFILE=${DIST_PROFILE:-"noSnapshots"} +else + POM_FILE="pom.xml" fi +DIST_PROFILE=${DIST_PROFILE:-"noSnapshots"} + [[ "$MODULE" != "" ]] && MODULE_OPT="--projects $MODULE --also-make" || MODULE_OPT="" echo "Collecting Spark versions..." case $DIST_PROFILE in - snapshotsScala213) - SPARK_SHIM_VERSIONS=($(versionsFromReleaseProfiles "snap_and_no_snap" "scala2.13/pom.xml")) - ;; - - noSnapshotsScala213) - SPARK_SHIM_VERSIONS=($(versionsFromReleaseProfiles "no_snapshots" "scala2.13/pom.xml")) - ;; - snapshots?(WithDatabricks)) - SPARK_SHIM_VERSIONS=($(versionsFromReleaseProfiles "snap_and_no_snap" "pom.xml")) + SPARK_SHIM_VERSIONS=($(versionsFromReleaseProfiles "snap_and_no_snap" $POM_FILE)) ;; noSnapshots?(WithDatabricks)) - SPARK_SHIM_VERSIONS=($(versionsFromReleaseProfiles "no_snapshots" "pom.xml")) + SPARK_SHIM_VERSIONS=($(versionsFromReleaseProfiles "no_snapshots" $POM_FILE)) ;; minimumFeatureVersionMix) diff --git a/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/GpuDeltaParquetFileFormatBase.scala b/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/GpuDeltaParquetFileFormatBase.scala index c512721c5f1..0696868da30 100644 --- a/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/GpuDeltaParquetFileFormatBase.scala +++ b/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/GpuDeltaParquetFileFormatBase.scala @@ -17,7 +17,6 
@@ package com.nvidia.spark.rapids.delta.common import ai.rapids.cudf._ -import ai.rapids.cudf.HostColumnVector._ import com.nvidia.spark.rapids._ import com.nvidia.spark.rapids.Arm.withResource import com.nvidia.spark.rapids.RapidsPluginImplicits._ diff --git a/delta-lake/common/src/main/delta-33x/scala/org/apache/spark/sql/delta/rapids/GpuOptimizeWriteExchangeExec.scala b/delta-lake/common/src/main/delta-33x/scala/org/apache/spark/sql/delta/rapids/GpuOptimizeWriteExchangeExec.scala index 1f332fc66d9..15d5e77b8fd 100644 --- a/delta-lake/common/src/main/delta-33x/scala/org/apache/spark/sql/delta/rapids/GpuOptimizeWriteExchangeExec.scala +++ b/delta-lake/common/src/main/delta-33x/scala/org/apache/spark/sql/delta/rapids/GpuOptimizeWriteExchangeExec.scala @@ -26,8 +26,7 @@ import scala.concurrent.Future import scala.concurrent.duration.Duration import com.nvidia.spark.rapids.{GpuColumnarBatchSerializer, GpuExec, GpuMetric, GpuPartitioning, GpuRoundRobinPartitioning, RapidsConf} -import com.nvidia.spark.rapids.GpuMetric.{OP_TIME_NEW_SHUFFLE_READ, OP_TIME_NEW_SHUFFLE_WRITE} -import com.nvidia.spark.rapids.GpuMetric.{DESCRIPTION_OP_TIME_NEW_SHUFFLE_READ, DESCRIPTION_OP_TIME_NEW_SHUFFLE_WRITE, MODERATE_LEVEL} +import com.nvidia.spark.rapids.GpuMetric._ import com.nvidia.spark.rapids.delta.RapidsDeltaSQLConf import com.nvidia.spark.rapids.shims.GpuHashPartitioning @@ -60,7 +59,6 @@ case class GpuOptimizeWriteExchangeExec( partitioning: GpuPartitioning, override val child: SparkPlan, @transient deltaLog: DeltaLog) extends Exchange with GpuExec with DeltaLogging { - import GpuMetric._ // Use 150% of target file size hint config considering parquet compression. 
// Still the result file can be smaller/larger than the config due to data skew or diff --git a/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/hooks/GpuAutoCompact.scala b/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/hooks/GpuAutoCompact.scala index 4ac4cac83a9..767c839d1a2 100644 --- a/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/hooks/GpuAutoCompact.scala +++ b/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/hooks/GpuAutoCompact.scala @@ -23,11 +23,8 @@ package org.apache.spark.sql.delta.hooks import org.apache.spark.internal.MDC import org.apache.spark.sql.SparkSession -import org.apache.spark.sql.catalyst.catalog.CatalogTable import org.apache.spark.sql.delta._ import org.apache.spark.sql.delta.actions._ -import org.apache.spark.sql.delta.commands.DeltaOptimizeContext -import org.apache.spark.sql.delta.commands.optimize._ import org.apache.spark.sql.delta.logging.DeltaLogKeys import org.apache.spark.sql.delta.rapids.GpuOptimisticTransactionBase import org.apache.spark.sql.delta.stats.AutoCompactPartitionStats diff --git a/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/rapids/GpuWriteIntoDelta.scala b/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/rapids/GpuWriteIntoDelta.scala index 9f7bf72ec37..dab0803cd90 100644 --- a/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/rapids/GpuWriteIntoDelta.scala +++ b/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/rapids/GpuWriteIntoDelta.scala @@ -45,8 +45,6 @@ import org.apache.spark.sql.execution.datasources.LogicalRelation import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics} import org.apache.spark.sql.functions.{array, col, explode, lit, struct} import org.apache.spark.sql.internal.SQLConf -import org.apache.spark.sql.nvidia.DFUDFShims -import org.apache.spark.sql.rapids.shims.TrampolineConnectShims.SparkSession import 
org.apache.spark.sql.types.StructType /** GPU version of Delta Lake's WriteIntoDelta. */ diff --git a/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuMergeIntoCommand.scala b/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuMergeIntoCommand.scala index e491febc243..7e929afd391 100644 --- a/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuMergeIntoCommand.scala +++ b/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuMergeIntoCommand.scala @@ -30,11 +30,11 @@ import com.nvidia.spark.rapids.RapidsConf import com.nvidia.spark.rapids.delta._ import org.apache.spark.SparkContext -import org.apache.spark.sql.{DataFrame, Row, SparkSession => SqlSparkSession} +import org.apache.spark.sql.{Row, SparkSession => SqlSparkSession} import org.apache.spark.sql.catalyst.catalog.CatalogTable import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Expression, Literal, Or} import org.apache.spark.sql.catalyst.plans.logical._ -import org.apache.spark.sql.classic.{ColumnNodeToExpressionConverter, ExpressionUtils, SparkSession => ClassicSparkSession} +import org.apache.spark.sql.classic.{SparkSession => ClassicSparkSession} import org.apache.spark.sql.delta._ import org.apache.spark.sql.delta.actions.{AddFile, FileAction} import org.apache.spark.sql.delta.commands.MergeIntoCommandBase @@ -384,7 +384,7 @@ case class GpuMergeIntoCommand( } } commitAndRecordStats( - org.apache.spark.sql.classic.SparkSession.active, + ClassicSparkSession.active, gpuDeltaTxn, mergeActions, startTime, @@ -583,7 +583,6 @@ case class GpuMergeIntoCommand( val matchedRowCounts = collectTouchedFiles.groupBy(ROW_ID_COL).agg(sum("one").as("count")) // Get multiple matches and simultaneously collect (using touchedFilesAccum) the file names - import org.apache.spark.sql.delta.implicits._ val mmRow = matchedRowCounts .filter(col("count") > lit(1)) .select( diff 
--git a/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuOptimisticTransaction.scala b/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuOptimisticTransaction.scala index b0acf681806..194ca8a70f4 100644 --- a/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuOptimisticTransaction.scala +++ b/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuOptimisticTransaction.scala @@ -32,7 +32,7 @@ import org.apache.spark.sql.{DataFrame, Dataset} import org.apache.spark.sql.{SparkSession => SqlSparkSession} import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.catalog.CatalogTable -import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression, RuntimeReplaceable} +import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression} import org.apache.spark.sql.catalyst.plans.logical.LocalRelation import org.apache.spark.sql.delta._ import org.apache.spark.sql.delta.actions.{AddFile, FileAction} @@ -47,7 +47,6 @@ import org.apache.spark.sql.execution.metric.SQLMetric import org.apache.spark.sql.functions.to_json import org.apache.spark.sql.rapids.{BasicColumnarWriteJobStatsTracker, ColumnarWriteJobStatsTracker, GpuWriteJobStatsTracker} import org.apache.spark.sql.rapids.delta.GpuIdentityColumn -import org.apache.spark.sql.rapids.shims.TrampolineConnectShims import org.apache.spark.sql.rapids.shims.TrampolineConnectShims.SparkSession import org.apache.spark.sql.types.StructType import org.apache.spark.sql.vectorized.ColumnarBatch diff --git a/dist/README.md b/dist/README.md index af53b7a39e0..aa23b6a6332 100644 --- a/dist/README.md +++ b/dist/README.md @@ -30,7 +30,7 @@ for each version of Spark supported in the jar, i.e., spark330/, spark341/, etc. If you have to change the contents of the uber jar the following files control what goes into the base jar as classes that are not shaded. -1. 
`unshimmed-common-from-spark320.txt` - This has classes and files that should go into the base jar with their normal +1. `unshimmed-common-from-single-shim.txt` - This has classes and files that should go into the base jar with their normal package name (not shaded). This includes user visible classes (i.e., com/nvidia/spark/SQLPlugin), python files, and other files that aren't version specific. Uses Spark 3.2.0 built jar for these base classes as explained above. 2. `unshimmed-from-each-spark3xx.txt` - This is applied to all the individual Spark specific version jars to pull diff --git a/dist/build/package-parallel-worlds.py b/dist/build/package-parallel-worlds.py index ef64a4cd6bd..109f3b6bcca 100644 --- a/dist/build/package-parallel-worlds.py +++ b/dist/build/package-parallel-worlds.py @@ -29,6 +29,7 @@ def shell_exec(shell_cmd): artifacts = attributes.get('artifact_csv').split(',') buildver_list = re.sub(r'\s+', '', project.getProperty('included_buildvers'), flags=re.UNICODE).split(',') +buildver_list = sorted(buildver_list, reverse=True) source_basedir = project.getProperty('spark.rapids.source.basedir') project_basedir = project.getProperty('spark.rapids.project.basedir') project_version = project.getProperty('project.version') @@ -73,8 +74,8 @@ def shell_exec(shell_cmd): shell_exec(mvn_cmd) dist_dir = os.sep.join([source_basedir, 'dist']) - with open(os.sep.join([dist_dir, 'unshimmed-common-from-spark320.txt']), 'r') as f: - from_spark320 = f.read().splitlines() + with open(os.sep.join([dist_dir, 'unshimmed-common-from-single-shim.txt']), 'r') as f: + from_single_shim = f.read().splitlines() with open(os.sep.join([dist_dir, 'unshimmed-from-each-spark3xx.txt']), 'r') as f: from_each = f.read().splitlines() with zipfile.ZipFile(os.sep.join([deps_dir, art_jar]), 'r') as zip_handle: @@ -82,13 +83,13 @@ def shell_exec(shell_cmd): zip_handle.extractall(path=top_dist_jar_dir) else: zip_handle.extractall(path=os.sep.join([top_dist_jar_dir, classifier])) - # 
IMPORTANT unconditional extract from first to the top + # IMPORTANT unconditional extract from the highest Spark version to the top if bv == buildver_list[0] and art == 'sql-plugin-api': zip_handle.extractall(path=top_dist_jar_dir) # TODO deprecate namelist = zip_handle.namelist() matching_members = [] - glob_list = from_spark320 + from_each if bv == buildver_list[0] else from_each + glob_list = from_single_shim + from_each if bv == buildver_list[0] else from_each for pat in glob_list: new_matches = fnmatch.filter(namelist, pat) matching_members += new_matches diff --git a/dist/maven-antrun/build-parallel-worlds.xml b/dist/maven-antrun/build-parallel-worlds.xml index bc4d7c9991c..8d85ec38935 100644 --- a/dist/maven-antrun/build-parallel-worlds.xml +++ b/dist/maven-antrun/build-parallel-worlds.xml @@ -132,7 +132,7 @@ + includesfile="${spark.rapids.source.basedir}/${rapids.module}/unshimmed-common-from-single-shim.txt"/> diff --git a/dist/scripts/binary-dedupe.sh b/dist/scripts/binary-dedupe.sh index b761ea57826..ad319d98721 100755 --- a/dist/scripts/binary-dedupe.sh +++ b/dist/scripts/binary-dedupe.sh @@ -85,9 +85,6 @@ function retain_single_copy() { package_class_parts=("${path_parts[@]:2}") - package_len=$((${#package_class_parts[@]} - 1)) - package_parts=("${package_class_parts[@]::$package_len}") - package_class_with_spaces="${package_class_parts[*]}" # com/nvidia/spark/udf/Repr\$UnknownCapturedArg\$.class package_class="${package_class_with_spaces// //}" @@ -164,12 +161,16 @@ function verify_same_sha_for_unshimmed() { # sha1 look up if there is an entry with the unshimmed class as a suffix class_file_quoted=$(printf '%q' "$class_file") - # TODO currently RapidsShuffleManager is "removed" from /spark* by construction in # dist pom.xml via ant. We could delegate this logic to this script # and make both simmpler - if [[ ! 
"$class_file_quoted" =~ com/nvidia/spark/rapids/spark[34].*/.*ShuffleManager.class ]]; then - + # + # TODO ParquetCachedBatchSerializer is not bitwise-identical after 411, + # but it is compatible with previous versions because it merely adds a new method. + # we might need to replace this strict check with MiMa + # https://github.com/apache/spark/blob/7011706a0a8dbec6adb5b5b121921b29b314335f/sql/core/src/main/scala/org/apache/spark/sql/columnar/CachedBatchSerializer.scala#L75-L95 + if [[ ! "$class_file_quoted" =~ com/nvidia/spark/rapids/spark[34].*/.*ShuffleManager.class && \ + "$class_file_quoted" != "com/nvidia/spark/ParquetCachedBatchSerializer.class" ]]; then if ! grep -q "/spark.\+/$class_file_quoted" "$SPARK_SHARED_TXT"; then echo >&2 "$class_file is not bitwise-identical across shims" exit 255 diff --git a/dist/unshimmed-common-from-spark320.txt b/dist/unshimmed-common-from-single-shim.txt similarity index 100% rename from dist/unshimmed-common-from-spark320.txt rename to dist/unshimmed-common-from-single-shim.txt diff --git a/pom.xml b/pom.xml index 2aaf24181a2..a12a895fca1 100644 --- a/pom.xml +++ b/pom.xml @@ -780,7 +780,6 @@ 411 ${spark411.version} - 2.13.17 ${spark411.version} 1.13.1 rapids-4-spark-delta-stub @@ -817,14 +816,14 @@ scala-2.12 2.12 - 2.12.15 + 2.12.21
scala-2.13 2.13 - 2.13.14 + 2.13.18 @@ -917,7 +916,7 @@ 26.02.0-SNAPSHOT 2.12 incremental - 2.12.15 + 2.12.21 diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index 91e4b913c80..011c99781be 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -780,7 +780,6 @@ 411 ${spark411.version} - 2.13.17 ${spark411.version} 1.13.1 rapids-4-spark-delta-stub @@ -817,14 +816,14 @@ scala-2.12 2.12 - 2.12.15 + 2.12.21 scala-2.13 2.13 - 2.13.14 + 2.13.18 @@ -917,7 +916,7 @@ 26.02.0-SNAPSHOT 2.13 incremental - 2.13.14 + 2.13.18 -Xsource:2.13 - -Ywarn-unused:locals,patvars,privates -Wconf:cat=deprecation:e,any:e -Wconf:cat=scaladoc:wv -Wconf:cat=other-nullary-override:e @@ -1503,6 +1501,14 @@ This will force full Scala code rebuild in downstream modules. -Wconf:cat=unchecked&msg=outer reference:s -Wconf:cat=unchecked&msg=eliminated by erasure:s -Wconf:msg=^(?=.*?a value of type)(?=.*?cannot also be).+$:s + -Wconf:cat=unused:e + -Wconf:cat=unused-imports:e + -Wconf:cat=unused-locals:e + -Wconf:cat=unused-nowarn:e + -Wconf:cat=unused-params:e + -Wconf:cat=unused-pat-vars:e + -Wconf:cat=unused-privates:e + -Wunused:imports,locals,patvars,privates diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala index fa7f6426da4..f3b2ea1f72b 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala @@ -31,7 +31,7 @@ import org.apache.spark.sql.execution.exchange.ReusedExchangeExec /** * Note that the name is prefixed with "Shimmed" such that wildcard rules - * under unshimmed-common-from-spark320.txt don't get confused and pick this class to be + * under unshimmed-common-from-single-shim.txt don't get confused and pick this class to be * un-shimmed. 
*/ class ShimmedExecutionPlanCaptureCallbackImpl extends ExecutionPlanCaptureCallbackBase { diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala index d6ff6891841..923034f5bad 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/Spark320PlusShims.scala @@ -76,7 +76,6 @@ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.rapids._ import org.apache.spark.sql.rapids.aggregate._ import org.apache.spark.sql.rapids.execution._ -import org.apache.spark.sql.rapids.shims._ import org.apache.spark.sql.rapids.shims.SparkSessionUtils import org.apache.spark.sql.rapids.shims.TrampolineConnectShims.SparkSession diff --git a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/RoundShims.scala b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/RoundShims.scala index 97d1f0fde71..c1b71c2cd6b 100644 --- a/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/RoundShims.scala +++ b/sql-plugin/src/main/spark340/scala/com/nvidia/spark/rapids/shims/RoundShims.scala @@ -32,6 +32,7 @@ {"spark": "357"} {"spark": "400"} {"spark": "401"} +{"spark": "411"} spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala index e68ecefa93d..a050912770b 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/CudfUnsafeRow.scala @@ -21,8 +21,6 @@ spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims import org.apache.spark.sql.catalyst.expressions.Attribute 
-import org.apache.spark.unsafe.types.VariantVal - final class CudfUnsafeRow( attributes: Array[Attribute], diff --git a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/SparkShims.scala index 3e5d94ed5a4..0028dd14ede 100644 --- a/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ b/sql-plugin/src/main/spark400/scala/com/nvidia/spark/rapids/shims/SparkShims.scala @@ -19,7 +19,5 @@ spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims -import com.nvidia.spark.rapids._ - object SparkShimImpl extends Spark400PlusCommonShims { } diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala index 8d79cbf0073..a72160b9baa 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/rapids/shims/TrampolineConnectShims.scala @@ -26,30 +26,30 @@ import org.apache.avro.NameValidator import org.apache.avro.Schema import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan -import org.apache.spark.sql.classic.{DataFrame, Dataset, SparkSession} - object TrampolineConnectShims { type SparkSession = org.apache.spark.sql.classic.SparkSession type DataFrame = org.apache.spark.sql.classic.DataFrame type Dataset = org.apache.spark.sql.classic.Dataset[org.apache.spark.sql.Row] - def cleanupAnyExistingSession(): Unit = SparkSession.cleanupAnyExistingSession() + def cleanupAnyExistingSession(): Unit = { + org.apache.spark.sql.classic.SparkSession.cleanupAnyExistingSession() + } def createDataFrame(spark: SparkSession, plan: LogicalPlan): DataFrame = { - Dataset.ofRows(spark, plan) + org.apache.spark.sql.classic.Dataset.ofRows(spark, plan) } - def getBuilder(): 
SparkSession.Builder = { - SparkSession.builder() + def getBuilder(): org.apache.spark.sql.classic.SparkSession.Builder = { + org.apache.spark.sql.classic.SparkSession.builder() } def hasActiveSession: Boolean = { - SparkSession.getActiveSession.isDefined + org.apache.spark.sql.classic.SparkSession.getActiveSession.isDefined } def getActiveSession: SparkSession = { - SparkSession.getActiveSession.getOrElse( + org.apache.spark.sql.classic.SparkSession.getActiveSession.getOrElse( throw new IllegalStateException("No active SparkSession found") ) } diff --git a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala index b5878058927..accc85e5bb4 100644 --- a/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ b/sql-plugin/src/main/spark401/scala/com/nvidia/spark/rapids/shims/SparkShims.scala @@ -23,7 +23,6 @@ import com.nvidia.spark.rapids._ import com.nvidia.spark.rapids.{HashExprChecks, Murmur3HashExprMeta, XxHash64ExprMeta} import org.apache.spark.sql.catalyst.expressions.{CollationAwareMurmur3Hash, CollationAwareXxHash64, Expression} -import org.apache.spark.sql.rapids.{GpuMurmur3Hash, GpuXxHash64} object SparkShimImpl extends Spark400PlusCommonShims { override def getExprs: Map[Class[_ <: Expression], ExprRule[_ <: Expression]] = { diff --git a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuOneRowRelationExec.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuOneRowRelationExec.scala index a4bf2aa9359..ec22c7014dd 100644 --- a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuOneRowRelationExec.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/GpuOneRowRelationExec.scala @@ -19,7 +19,7 @@ spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims -import com.nvidia.spark.rapids.{DataFromReplacementRule, GpuExec, 
GpuMetric, RapidsConf, +import com.nvidia.spark.rapids.{DataFromReplacementRule, GpuExec, RapidsConf, RapidsMeta, SparkPlanMeta} import com.nvidia.spark.rapids.GpuMetric.NUM_OUTPUT_ROWS diff --git a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala index 434203189da..f3f6c6bb675 100644 --- a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/SparkShims.scala @@ -25,7 +25,6 @@ import com.nvidia.spark.rapids.{HashExprChecks, Murmur3HashExprMeta, XxHash64Exp import org.apache.spark.sql.catalyst.expressions.{CollationAwareMurmur3Hash, CollationAwareXxHash64, Expression} import org.apache.spark.sql.execution.{OneRowRelationExec, SparkPlan} -import org.apache.spark.sql.rapids.{GpuMurmur3Hash, GpuXxHash64} /** * SparkShimImpl for Spark 4.1.1 From c3bd85e093d612a6a59e3ac5b336cec3a0b80391 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Wed, 28 Jan 2026 14:47:26 +0800 Subject: [PATCH 49/59] Fix Signed-off-by: Chong Gao --- .../v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala index c878d435e16..1723c5f34f2 100644 --- a/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala +++ b/sql-plugin/src/main/spark400/scala/org/apache/spark/sql/execution/datasources/v2/rapids/GpuAtomicReplaceTableAsSelectExec.scala @@ -17,7 +17,6 @@ /*** spark-rapids-shim-json-lines {"spark": "400"} {"spark": "401"} -{"spark": "411"} spark-rapids-shim-json-lines ***/ package 
org.apache.spark.sql.execution.datasources.v2.rapids From d5e36ac33ddad3677968d9fa29a053be0dc552ba Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Wed, 28 Jan 2026 15:09:23 +0800 Subject: [PATCH 50/59] Copyright Signed-off-by: Chong Gao --- build/buildall | 2 +- .../rapids/delta/common/GpuDeltaParquetFileFormatBase.scala | 2 +- .../spark/sql/delta/rapids/GpuOptimizeWriteExchangeExec.scala | 2 +- .../scala/org/apache/spark/sql/delta/hooks/GpuAutoCompact.scala | 2 +- .../org/apache/spark/sql/delta/rapids/GpuWriteIntoDelta.scala | 2 +- .../spark/sql/delta/rapids/delta40x/GpuMergeIntoCommand.scala | 2 +- .../sql/delta/rapids/delta40x/GpuOptimisticTransaction.scala | 2 +- dist/build/package-parallel-worlds.py | 2 +- dist/maven-antrun/build-parallel-worlds.xml | 2 +- dist/scripts/binary-dedupe.sh | 2 +- jdk-profiles/pom.xml | 2 +- scala2.13/jdk-profiles/pom.xml | 2 +- scala2.13/sql-plugin-api/pom.xml | 2 +- sql-plugin-api/pom.xml | 2 +- .../main/scala/com/nvidia/spark/rapids/AvroDataFileReader.scala | 2 +- .../com/nvidia/spark/rapids/RapidsShuffleHeartbeatManager.scala | 2 +- .../com/nvidia/spark/rapids/python/PythonWorkerSemaphore.scala | 2 +- .../sql/rapids/ShimmedExecutionPlanCaptureCallbackImpl.scala | 2 +- .../scala/com/nvidia/spark/rapids/shims/SparkShims.scala | 2 +- 19 files changed, 19 insertions(+), 19 deletions(-) diff --git a/build/buildall b/build/buildall index 18162fc835c..a7e77138922 100755 --- a/build/buildall +++ b/build/buildall @@ -1,6 +1,6 @@ #!/bin/bash # -# Copyright (c) 2021-2025, NVIDIA CORPORATION. All rights reserved. +# Copyright (c) 2021-2026, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/GpuDeltaParquetFileFormatBase.scala b/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/GpuDeltaParquetFileFormatBase.scala index 0696868da30..824788c03e2 100644 --- a/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/GpuDeltaParquetFileFormatBase.scala +++ b/delta-lake/common/src/main/delta-33x-40x/scala/com/nvidia/spark/rapids/delta/common/GpuDeltaParquetFileFormatBase.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/delta-lake/common/src/main/delta-33x/scala/org/apache/spark/sql/delta/rapids/GpuOptimizeWriteExchangeExec.scala b/delta-lake/common/src/main/delta-33x/scala/org/apache/spark/sql/delta/rapids/GpuOptimizeWriteExchangeExec.scala index 15d5e77b8fd..5640376de72 100644 --- a/delta-lake/common/src/main/delta-33x/scala/org/apache/spark/sql/delta/rapids/GpuOptimizeWriteExchangeExec.scala +++ b/delta-lake/common/src/main/delta-33x/scala/org/apache/spark/sql/delta/rapids/GpuOptimizeWriteExchangeExec.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. 
* * This file was derived from OptimizeWriteExchange.scala * in the Delta Lake project at https://github.com/delta-io/delta diff --git a/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/hooks/GpuAutoCompact.scala b/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/hooks/GpuAutoCompact.scala index 767c839d1a2..eed441e4006 100644 --- a/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/hooks/GpuAutoCompact.scala +++ b/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/hooks/GpuAutoCompact.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * This file was derived from OptimisticTransaction.scala and TransactionalWrite.scala * in the Delta Lake project at https://github.com/delta-io/delta. diff --git a/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/rapids/GpuWriteIntoDelta.scala b/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/rapids/GpuWriteIntoDelta.scala index dab0803cd90..d3ee99d43b7 100644 --- a/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/rapids/GpuWriteIntoDelta.scala +++ b/delta-lake/common/src/main/delta-40x/scala/org/apache/spark/sql/delta/rapids/GpuWriteIntoDelta.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2022-2025, NVIDIA CORPORATION. + * Copyright (c) 2022-2026, NVIDIA CORPORATION. * * This file was derived from WriteIntoDelta.scala * in the Delta Lake project at https://github.com/delta-io/delta. 
diff --git a/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuMergeIntoCommand.scala b/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuMergeIntoCommand.scala index 7e929afd391..717bd641b47 100644 --- a/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuMergeIntoCommand.scala +++ b/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuMergeIntoCommand.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * This file was derived from MergeIntoCommand.scala * in the Delta Lake project at https://github.com/delta-io/delta. diff --git a/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuOptimisticTransaction.scala b/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuOptimisticTransaction.scala index 194ca8a70f4..c3ad36504b1 100644 --- a/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuOptimisticTransaction.scala +++ b/delta-lake/delta-40x/src/main/scala/org/apache/spark/sql/delta/rapids/delta40x/GpuOptimisticTransaction.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025, NVIDIA CORPORATION. + * Copyright (c) 2025-2026, NVIDIA CORPORATION. * * This file was derived from OptimisticTransaction.scala and TransactionalWrite.scala * in the Delta Lake project at https://github.com/delta-io/delta. diff --git a/dist/build/package-parallel-worlds.py b/dist/build/package-parallel-worlds.py index 109f3b6bcca..652b34410cf 100644 --- a/dist/build/package-parallel-worlds.py +++ b/dist/build/package-parallel-worlds.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023-2024, NVIDIA CORPORATION. +# Copyright (c) 2023-2026, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/dist/maven-antrun/build-parallel-worlds.xml b/dist/maven-antrun/build-parallel-worlds.xml index 8d85ec38935..9f422bc95fa 100644 --- a/dist/maven-antrun/build-parallel-worlds.xml +++ b/dist/maven-antrun/build-parallel-worlds.xml @@ -1,6 +1,6 @@ - 4.9.8 + 4.9.2 3.1.1 3.3.0 2.0.2 diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index 011c99781be..d53fea2325b 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -991,7 +991,7 @@ 4.1.1 3.12.4 - 4.9.8 + 4.9.2 3.1.1 3.3.0 2.0.2 From 24f37c9e66b5d478b684fc36bbb510246652bd3b Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Thu, 29 Jan 2026 17:26:18 +0800 Subject: [PATCH 54/59] 411 shim: get max broadcase table size from conf Signed-off-by: Chong Gao --- .../execution/GpuBroadcastExchangeExec.scala | 19 ++++++++++--------- .../rapids/shims/BroadcastExchangeShims.scala | 11 ++++++++++- .../rapids/shims/BroadcastExchangeShims.scala | 15 ++++++++++++--- .../spark/rapids/SerializationSuite.scala | 7 ++++--- 4 files changed, 36 insertions(+), 16 deletions(-) diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala index 67dcfd43750..8420a482cdf 100644 --- a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/GpuBroadcastExchangeExec.scala @@ -33,8 +33,7 @@ import com.nvidia.spark.rapids.GpuMetric._ import com.nvidia.spark.rapids.RapidsPluginImplicits._ import com.nvidia.spark.rapids.lore.{GpuLoreDumpRDD, SimpleRDD} import com.nvidia.spark.rapids.lore.GpuLore.LORE_DUMP_RDD_TAG -import com.nvidia.spark.rapids.shims.{ShimBroadcastExchangeLike, ShimUnaryExecNode, SparkShimImpl} -import com.nvidia.spark.rapids.shims.BroadcastExchangeShims.MAX_BROADCAST_TABLE_BYTES +import com.nvidia.spark.rapids.shims.{BroadcastExchangeShims, ShimBroadcastExchangeLike, 
ShimUnaryExecNode, SparkShimImpl} import org.apache.spark.SparkException import org.apache.spark.broadcast.Broadcast @@ -412,7 +411,7 @@ abstract class GpuBroadcastExchangeExecBase( } emptyRelation.getOrElse { GpuBroadcastExchangeExecBase.makeBroadcastBatch( - collected, output, numOutputBatches, numOutputRows, dataSize) + collected, output, numOutputBatches, numOutputRows, dataSize, conf) } } } @@ -549,12 +548,13 @@ object GpuBroadcastExchangeExecBase { } } - protected def checkSizeLimit(sizeInBytes: Long) = { - // Spark restricts the size of broadcast relations to be less than 8GB - if (sizeInBytes >= MAX_BROADCAST_TABLE_BYTES) { + protected def checkSizeLimit(sizeInBytes: Long, conf: SQLConf) = { + // Spark restricts the size of broadcast relations + val maxBytes = BroadcastExchangeShims.getMaxBroadcastTableBytes(conf) + if (sizeInBytes >= maxBytes) { throw new SparkException( s"Cannot broadcast the table that is larger than" + - s"${MAX_BROADCAST_TABLE_BYTES >> 30}GB: ${sizeInBytes >> 30} GB") + s"${maxBytes >> 30}GB: ${sizeInBytes >> 30} GB") } } @@ -569,7 +569,8 @@ object GpuBroadcastExchangeExecBase { output: Seq[Attribute], numOutputBatches: GpuMetric, numOutputRows: GpuMetric, - dataSize: GpuMetric): SerializeConcatHostBuffersDeserializeBatch = { + dataSize: GpuMetric, + conf: SQLConf): SerializeConcatHostBuffersDeserializeBatch = { val rowsOnly = buffers.isEmpty || buffers.head.header.getNumColumns == 0 var numRows = 0 var dataLen: Long = 0 @@ -589,7 +590,7 @@ object GpuBroadcastExchangeExecBase { } closeOnExcept(hostConcatResult) { _ => checkRowLimit(hostConcatResult.getTableHeader.getNumRows) - checkSizeLimit(hostConcatResult.getTableHeader.getDataLen) + checkSizeLimit(hostConcatResult.getTableHeader.getDataLen, conf) } // this result will be GC'ed later, so we mark it as such hostConcatResult.getHostBuffer.noWarnLeakExpected() diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala 
b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala index 97f4b370a1d..20fbd95d500 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala +++ b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025-2026, NVIDIA CORPORATION. + * Copyright (c) 2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -51,10 +51,19 @@ spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims import org.apache.spark.sql.execution.exchange.BroadcastExchangeExec +import org.apache.spark.sql.internal.SQLConf /** * Shim for MAX_BROADCAST_TABLE_BYTES which was removed in Spark 4.1.0 */ object BroadcastExchangeShims { val MAX_BROADCAST_TABLE_BYTES: Long = BroadcastExchangeExec.MAX_BROADCAST_TABLE_BYTES + + /** + * Get the maximum broadcast table size in bytes. + * In Spark <= 4.0.x, this returns the hardcoded constant. + */ + def getMaxBroadcastTableBytes(conf: SQLConf): Long = { + MAX_BROADCAST_TABLE_BYTES + } } diff --git a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala index 121c2f5c993..e7415fb2b63 100644 --- a/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala +++ b/sql-plugin/src/main/spark411/scala/com/nvidia/spark/rapids/shims/BroadcastExchangeShims.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2025-2026, NVIDIA CORPORATION. + * Copyright (c) 2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,12 +19,21 @@ spark-rapids-shim-json-lines ***/ package com.nvidia.spark.rapids.shims +import org.apache.spark.sql.internal.SQLConf + /** * Shim for MAX_BROADCAST_TABLE_BYTES which was removed in Spark 4.1.0. * The constant was 8GB (8L << 30) and is now configurable via conf.maxBroadcastTableSizeInBytes. - * We keep the old hardcoded value for backwards compatibility. */ object BroadcastExchangeShims { - // 8GB - the original hardcoded value from Spark + // 8GB - the original hardcoded value from Spark (kept for backwards compatibility) val MAX_BROADCAST_TABLE_BYTES: Long = 8L << 30 + + /** + * Get the maximum broadcast table size in bytes. + * In Spark 4.1.0+, this reads from the configurable value. + */ + def getMaxBroadcastTableBytes(conf: SQLConf): Long = { + conf.maxBroadcastTableSizeInBytes + } } diff --git a/tests/src/test/scala/com/nvidia/spark/rapids/SerializationSuite.scala b/tests/src/test/scala/com/nvidia/spark/rapids/SerializationSuite.scala index c6c251aeabb..c9b6d5d1814 100644 --- a/tests/src/test/scala/com/nvidia/spark/rapids/SerializationSuite.scala +++ b/tests/src/test/scala/com/nvidia/spark/rapids/SerializationSuite.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2024, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -28,6 +28,7 @@ import org.scalatest.funsuite.AnyFunSuite import org.apache.spark.SparkConf import org.apache.spark.sql.catalyst.expressions.AttributeReference +import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.rapids.execution.{GpuBroadcastExchangeExecBase, SerializeBatchDeserializeHostBuffer, SerializeConcatHostBuffersDeserializeBatch} import org.apache.spark.sql.types.{DoubleType, FloatType, IntegerType, StringType} import org.apache.spark.sql.vectorized.{ColumnarBatch, ColumnVector} @@ -101,7 +102,7 @@ class SerializationSuite extends AnyFunSuite val attrs = GpuColumnVector.extractTypes(gpuBatch).map(t => AttributeReference("", t)()) if (gpuBatch.numRows() == 0 && gpuBatch.numCols == 0) { GpuBroadcastExchangeExecBase.makeBroadcastBatch( - Array.empty, Seq.empty, NoopMetric, NoopMetric, NoopMetric) + Array.empty, Seq.empty, NoopMetric, NoopMetric, NoopMetric, SQLConf.get) } else if (gpuBatch.numCols() == 0) { new SerializeConcatHostBuffersDeserializeBatch( null, @@ -112,7 +113,7 @@ class SerializationSuite extends AnyFunSuite val buffer = createDeserializedHostBuffer(gpuBatch) // makeBroadcastBatch consumes `buffer` GpuBroadcastExchangeExecBase.makeBroadcastBatch( - Array(buffer), attrs, NoopMetric, NoopMetric, NoopMetric) + Array(buffer), attrs, NoopMetric, NoopMetric, NoopMetric, SQLConf.get) } } From abe75a6a56fd801382ac1355a6f559d2f61d8420 Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Thu, 29 Jan 2026 17:46:33 +0800 Subject: [PATCH 55/59] Fix comments Signed-off-by: Chong Gao --- .../rapids/execution/python/GpuWindowInPandasExecBase.scala | 5 ++--- .../spark/rapids/shims/AggregateInPandasExecShims.scala | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala index 165a092b489..7ff5aa840a2 100644 --- 
a/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala +++ b/sql-plugin/src/main/scala/org/apache/spark/sql/rapids/execution/python/GpuWindowInPandasExecBase.scala @@ -35,7 +35,7 @@ import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.plans.physical.{AllTuples, ClusteredDistribution, Distribution, Partitioning} import org.apache.spark.sql.rapids.aggregate.GpuAggregateExpression -import org.apache.spark.sql.rapids.execution.python.shims.{GpuWindowPythonRunnerFactory, PythonArgumentUtils} +import org.apache.spark.sql.rapids.execution.python.shims.{GpuWindowPythonRunnerFactory, PythonArgumentUtils, WindowBoundTypeConfShims} import org.apache.spark.sql.rapids.execution.python.shims.WindowInPandasExecTypeShim.WindowInPandasExecType import org.apache.spark.sql.rapids.shims.{ArrowUtilsShim, DataTypeUtilsShim} import org.apache.spark.sql.types.{IntegerType, StructField, StructType} @@ -233,8 +233,7 @@ trait GpuWindowInPandasExecBase extends ShimUnaryExecNode with GpuPythonExecBase protected object UnboundedWindow extends WindowBoundType("unbounded") protected object BoundedWindow extends WindowBoundType("bounded") - protected val windowBoundTypeConf = - org.apache.spark.sql.rapids.execution.python.shims.WindowBoundTypeConfShims.windowBoundTypeConf + protected val windowBoundTypeConf = WindowBoundTypeConfShims.windowBoundTypeConf protected def collectFunctions( udf: GpuPythonFunction): ((ChainedPythonFunctions, Long), Seq[Expression]) = { diff --git a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala index 19daa5a3b3b..658f4c935b6 100644 --- a/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala +++ 
b/sql-plugin/src/main/spark320/scala/com/nvidia/spark/rapids/shims/AggregateInPandasExecShims.scala @@ -59,7 +59,7 @@ import org.apache.spark.sql.execution.python.AggregateInPandasExec object AggregateInPandasExecShims { val execRule: Option[ExecRule[_ <: SparkPlan]] = Some( GpuOverrides.exec[AggregateInPandasExec]( - "The backend for an ideAggregateInPandasExec Aggregation Pandas UDF." + + "The backend for an Aggregation Pandas UDF." + " This accelerates the data transfer between the Java process and the Python process." + " It also supports scheduling GPU resources for the Python process" + " when enabled.", From e2eb4819a00d8201b747661a998c9364758fe66a Mon Sep 17 00:00:00 2001 From: Gera Shegalov Date: Thu, 29 Jan 2026 02:16:46 -0800 Subject: [PATCH 56/59] Refactor build scripts and update dependencies - Modified the buildall script to ensure the MVN variable is correctly exported with options. - Moved user-facing ParquetCachedBatchSerializer class to sql-plugin-api. - Updated integration test requirements to include pytz. 
--- build/buildall | 11 +++++------ dist/unshimmed-common-from-single-shim.txt | 2 -- integration_tests/requirements.txt | 1 + scala2.13/pom.xml | 2 -- .../nvidia/spark/ParquetCachedBatchSerializer.scala | 4 ++-- .../scala/com/nvidia/spark/rapids/ShimLoader.scala | 7 +++++++ .../com/nvidia/spark/rapids/ShimLoaderTemp.scala | 6 ------ 7 files changed, 15 insertions(+), 18 deletions(-) rename {sql-plugin => sql-plugin-api}/src/main/scala/com/nvidia/spark/ParquetCachedBatchSerializer.scala (98%) diff --git a/build/buildall b/build/buildall index 9fa7df8e40f..087e6aabbc3 100755 --- a/build/buildall +++ b/build/buildall @@ -38,7 +38,7 @@ function print_usage() { echo " -gb, --generate-bloop" echo " generate projects for Bloop clients: IDE (Scala Metals, IntelliJ) or Bloop CLI" echo " -p=DIST_PROFILE, --profile=DIST_PROFILE" - echo " use this profile for the dist module, default: noSnapshots, also supported: snapshots, minimumFeatureVersionMix," + echo " use this profile for the dist module, default: noSnapshots, also supported: snapshots," echo " snapshotsWithDatabricks, noSnapshotsWithDatabricks" echo " NOTE: the Databricks-related spark3XYdb shims are not built locally, the jars are fetched prebuilt from a" echo " . remote Maven repo. You can also supply a comma-separated list of build versions. 
E.g., --profile=330,331 will" @@ -154,7 +154,7 @@ case "$1" in ;; -o=*|--option=*) - MVN_OPT="${1#*=}" + export MVN_OPT="${1#*=}" ;; *) @@ -180,11 +180,12 @@ export MVN="$MVN -Dmaven.wagon.http.retryHandler.count=3 ${MVN_OPT}" if [[ "$SCALA213" == "1" ]]; then POM_FILE="scala2.13/pom.xml" - MVN="$MVN -f scala2.13/" + export MVN="$MVN -f scala2.13/" $(dirname $0)/make-scala-version-build-files.sh 2.13 else POM_FILE="pom.xml" fi + DIST_PROFILE=${DIST_PROFILE:-"noSnapshots"} @@ -201,9 +202,7 @@ case $DIST_PROFILE in SPARK_SHIM_VERSIONS=($(versionsFromReleaseProfiles "no_snapshots" $POM_FILE)) ;; - minimumFeatureVersionMix) - SPARK_SHIM_VERSIONS=($(versionsFromDistProfile "minimumFeatureVersionMix")) - ;; + [34]*) <<< $DIST_PROFILE IFS="," read -ra SPARK_SHIM_VERSIONS diff --git a/dist/unshimmed-common-from-single-shim.txt b/dist/unshimmed-common-from-single-shim.txt index 3871188e1fc..81a5c61003c 100644 --- a/dist/unshimmed-common-from-single-shim.txt +++ b/dist/unshimmed-common-from-single-shim.txt @@ -1,8 +1,6 @@ META-INF/DEPENDENCIES META-INF/LICENSE META-INF/NOTICE -com/nvidia/spark/GpuCachedBatchSerializer* -com/nvidia/spark/ParquetCachedBatchSerializer* com/nvidia/spark/rapids/ExplainPlan.class com/nvidia/spark/rapids/ExplainPlan$.class com/nvidia/spark/rapids/ExplainPlanBase.class diff --git a/integration_tests/requirements.txt b/integration_tests/requirements.txt index 616e9318949..ce0fc4318ec 100644 --- a/integration_tests/requirements.txt +++ b/integration_tests/requirements.txt @@ -17,6 +17,7 @@ pandas pyarrow == 17.0.0 ; python_version == '3.8' pyarrow == 19.0.1 ; python_version >= '3.9' pytest-xdist >= 2.0.0 +pytz findspark fsspec == 2025.3.0 fastparquet == 2024.5.0 ; python_version >= '3.9' diff --git a/scala2.13/pom.xml b/scala2.13/pom.xml index d53fea2325b..0c208bd2230 100644 --- a/scala2.13/pom.xml +++ b/scala2.13/pom.xml @@ -784,8 +784,6 @@ 1.13.1 rapids-4-spark-delta-stub 2.0.7 - [17,) - Support for Spark ${spark.version} is only available with Java 
17+
delta-lake/delta-stub diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/ParquetCachedBatchSerializer.scala b/sql-plugin-api/src/main/scala/com/nvidia/spark/ParquetCachedBatchSerializer.scala similarity index 98% rename from sql-plugin/src/main/scala/com/nvidia/spark/ParquetCachedBatchSerializer.scala rename to sql-plugin-api/src/main/scala/com/nvidia/spark/ParquetCachedBatchSerializer.scala index b27c0da634a..697a299d08b 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/ParquetCachedBatchSerializer.scala +++ b/sql-plugin-api/src/main/scala/com/nvidia/spark/ParquetCachedBatchSerializer.scala @@ -16,7 +16,7 @@ package com.nvidia.spark -import com.nvidia.spark.rapids.ShimLoaderTemp +import com.nvidia.spark.rapids.ShimLoader import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.InternalRow @@ -41,7 +41,7 @@ trait GpuCachedBatchSerializer extends CachedBatchSerializer { */ class ParquetCachedBatchSerializer extends GpuCachedBatchSerializer { - private lazy val realImpl = ShimLoaderTemp.newParquetCachedBatchSerializer() + private lazy val realImpl = ShimLoader.newParquetCachedBatchSerializer() /** * Can `convertColumnarBatchToCachedBatch()` be called instead of diff --git a/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala b/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala index 22e2c1c4d69..d58cacb1d21 100644 --- a/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala +++ b/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala @@ -21,6 +21,7 @@ import java.net.URL import scala.collection.JavaConverters.enumerationAsScalaIteratorConverter import scala.util.Try +import com.nvidia.spark.GpuCachedBatchSerializer import org.apache.commons.lang3.reflect.MethodUtils import org.apache.spark.{SPARK_BRANCH, SPARK_BUILD_DATE, SPARK_BUILD_USER, SPARK_REPO_URL, SPARK_REVISION, SPARK_VERSION, SparkConf, SparkEnv} @@ -382,4 +383,10 @@ object ShimLoader { def loadGpuColumnVector(): 
Class[_] = { ShimReflectionUtils.loadClass("com.nvidia.spark.rapids.GpuColumnVector") } + + def newParquetCachedBatchSerializer(): GpuCachedBatchSerializer = { + ShimReflectionUtils.newInstanceOf( + "com.nvidia.spark.rapids.parquet.ParquetCachedBatchSerializer") + } + } diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoaderTemp.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoaderTemp.scala index 5faef61f8fb..456d623b96b 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoaderTemp.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoaderTemp.scala @@ -16,7 +16,6 @@ package com.nvidia.spark.rapids -import com.nvidia.spark.GpuCachedBatchSerializer import com.nvidia.spark.rapids.delta.DeltaProbe import com.nvidia.spark.rapids.iceberg.IcebergProvider @@ -32,11 +31,6 @@ object ShimLoaderTemp { ShimReflectionUtils.newInstanceOf[Optimizer](className) } - def newParquetCachedBatchSerializer(): GpuCachedBatchSerializer = { - ShimReflectionUtils.newInstanceOf( - "com.nvidia.spark.rapids.parquet.ParquetCachedBatchSerializer") - } - def newExplainPlan(): ExplainPlanBase = { ShimReflectionUtils.newInstanceOf[ExplainPlanBase]("com.nvidia.spark.rapids.ExplainPlanImpl") } From 991e73e9f22ca9b193f00f9da2ccb249309abcda Mon Sep 17 00:00:00 2001 From: Gera Shegalov Date: Thu, 29 Jan 2026 03:13:36 -0800 Subject: [PATCH 57/59] Update copyright years in multiple files to 2026 and add ParquetVariantShims setup in ParquetCachedBatchSerializer --- .../com/nvidia/spark/ParquetCachedBatchSerializer.scala | 2 +- .../src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala | 2 +- .../main/scala/com/nvidia/spark/rapids/ShimLoaderTemp.scala | 2 +- .../spark/rapids/parquet/ParquetCachedBatchSerializer.scala | 5 ++++- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/sql-plugin-api/src/main/scala/com/nvidia/spark/ParquetCachedBatchSerializer.scala 
b/sql-plugin-api/src/main/scala/com/nvidia/spark/ParquetCachedBatchSerializer.scala index 697a299d08b..9188e10acf7 100644 --- a/sql-plugin-api/src/main/scala/com/nvidia/spark/ParquetCachedBatchSerializer.scala +++ b/sql-plugin-api/src/main/scala/com/nvidia/spark/ParquetCachedBatchSerializer.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2021-2023, NVIDIA CORPORATION. + * Copyright (c) 2021-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala b/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala index d58cacb1d21..7917cee9d65 100644 --- a/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala +++ b/sql-plugin-api/src/main/scala/com/nvidia/spark/rapids/ShimLoader.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2020-2025, NVIDIA CORPORATION. + * Copyright (c) 2020-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoaderTemp.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoaderTemp.scala index 456d623b96b..dc35d6ae9e7 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoaderTemp.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/ShimLoaderTemp.scala @@ -1,5 +1,5 @@ /* - * Copyright (c) 2023-2025, NVIDIA CORPORATION. + * Copyright (c) 2023-2026, NVIDIA CORPORATION. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/parquet/ParquetCachedBatchSerializer.scala b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/parquet/ParquetCachedBatchSerializer.scala index 67fedcc2bbe..6f8ef5f3670 100644 --- a/sql-plugin/src/main/scala/com/nvidia/spark/rapids/parquet/ParquetCachedBatchSerializer.scala +++ b/sql-plugin/src/main/scala/com/nvidia/spark/rapids/parquet/ParquetCachedBatchSerializer.scala @@ -31,7 +31,7 @@ import com.nvidia.spark.rapids.Arm.withResource import com.nvidia.spark.rapids.GpuColumnVector.GpuColumnarBatchBuilder import com.nvidia.spark.rapids.RapidsPluginImplicits._ import com.nvidia.spark.rapids.ScalableTaskCompletion.onTaskCompletion -import com.nvidia.spark.rapids.shims.{LegacyBehaviorPolicyShim, SparkShimImpl} +import com.nvidia.spark.rapids.shims.{LegacyBehaviorPolicyShim, ParquetVariantShims, SparkShimImpl} import com.nvidia.spark.rapids.shims.parquet.{ParquetFieldIdShims, ParquetLegacyNanoAsLongShims, ParquetTimestampNTZShims} import org.apache.commons.io.output.ByteArrayOutputStream import org.apache.hadoop.conf.Configuration @@ -1304,6 +1304,9 @@ class ParquetCachedBatchSerializer extends GpuCachedBatchSerializer { // From 3.3.2, Spark schema converter needs this conf ParquetLegacyNanoAsLongShims.setupLegacyParquetNanosAsLongForPCBS(hadoopConf) + // From 4.1.1, Spark will check this variant config + ParquetVariantShims.setupParquetVariantConfig(hadoopConf, sqlConf) + hadoopConf } From d2319448236891593a56adef780f4dceaa037f76 Mon Sep 17 00:00:00 2001 From: Gera Shegalov Date: Thu, 29 Jan 2026 03:14:48 -0800 Subject: [PATCH 58/59] sign Signed-off-by: Gera Shegalov From cc1ae8147458ce76dafaca792ce79f320ea0d58a Mon Sep 17 00:00:00 2001 From: Chong Gao Date: Fri, 30 Jan 2026 13:20:16 +0800 Subject: [PATCH 59/59] Revert inadvertent changes for two doc files Signed-off-by: Chong Gao --- docs/additional-functionality/advanced_configs.md | 2 +- docs/supported_ops.md | 2 +- 2 files changed, 2 insertions(+), 2 
deletions(-) diff --git a/docs/additional-functionality/advanced_configs.md b/docs/additional-functionality/advanced_configs.md index 9700d4af74c..f7878cf44a4 100644 --- a/docs/additional-functionality/advanced_configs.md +++ b/docs/additional-functionality/advanced_configs.md @@ -495,7 +495,7 @@ Name | Description | Default Value | Notes spark.rapids.sql.exec.CartesianProductExec|Implementation of join using brute force|true|None| spark.rapids.sql.exec.ShuffledHashJoinExec|Implementation of join using hashed shuffled data|true|None| spark.rapids.sql.exec.SortMergeJoinExec|Sort merge join, replacing with shuffled hash join|true|None| -spark.rapids.sql.exec.AggregateInPandasExec|The backend for an ideAggregateInPandasExec Aggregation Pandas UDF. This accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|true|None| +spark.rapids.sql.exec.AggregateInPandasExec|The backend for an Aggregation Pandas UDF, this accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|true|None| spark.rapids.sql.exec.ArrowEvalPythonExec|The backend of the Scalar Pandas UDFs. Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled|true|None| spark.rapids.sql.exec.FlatMapCoGroupsInPandasExec|The backend for CoGrouped Aggregation Pandas UDF. Accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled.|false|This is disabled by default because Performance is not ideal with many small groups| spark.rapids.sql.exec.FlatMapGroupsInPandasExec|The backend for Flat Map Groups Pandas UDF, Accelerates the data transfer between the Java process and the Python process. 
It also supports scheduling GPU resources for the Python process when enabled.|true|None| diff --git a/docs/supported_ops.md b/docs/supported_ops.md index 69f9881854f..0241f5eae58 100644 --- a/docs/supported_ops.md +++ b/docs/supported_ops.md @@ -1302,7 +1302,7 @@ Accelerator supports are described below. AggregateInPandasExec -The backend for an ideAggregateInPandasExec Aggregation Pandas UDF. This accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled. +The backend for an Aggregation Pandas UDF, this accelerates the data transfer between the Java process and the Python process. It also supports scheduling GPU resources for the Python process when enabled. None Input/Output S