Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 2 additions & 4 deletions datafusion-cli/tests/cli_integration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -414,14 +414,12 @@ fn test_backtrace_output(#[case] query: &str) {
let output = cmd.output().expect("Failed to execute command");
let stdout = String::from_utf8_lossy(&output.stdout);
let stderr = String::from_utf8_lossy(&output.stderr);
let combined_output = format!("{}{}", stdout, stderr);
let combined_output = format!("{stdout}{stderr}");

// Assert that the output includes literal 'backtrace'
assert!(
combined_output.to_lowercase().contains("backtrace"),
"Expected output to contain 'backtrace', but got stdout: '{}' stderr: '{}'",
stdout,
stderr
"Expected output to contain 'backtrace', but got stdout: '{stdout}' stderr: '{stderr}'"
);
}

Expand Down
1 change: 0 additions & 1 deletion datafusion/common/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1274,7 +1274,6 @@ mod test {
// To pass the test the environment variable RUST_BACKTRACE should be set to 1 to enforce backtrace
#[cfg(feature = "backtrace")]
#[test]
#[expect(clippy::unnecessary_literal_unwrap)]
fn test_enabled_backtrace() {
match std::env::var("RUST_BACKTRACE") {
Ok(val) if val == "1" => {}
Expand Down
6 changes: 6 additions & 0 deletions datafusion/common/src/hash_utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,15 @@

use arrow::array::types::{IntervalDayTime, IntervalMonthDayNano};
use arrow::array::*;
#[cfg(not(feature = "force_hash_collisions"))]
use arrow::compute::take;
use arrow::datatypes::*;
#[cfg(not(feature = "force_hash_collisions"))]
use arrow::{downcast_dictionary_array, downcast_primitive_array};
use foldhash::fast::FixedState;
#[cfg(not(feature = "force_hash_collisions"))]
use itertools::Itertools;
#[cfg(not(feature = "force_hash_collisions"))]
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash, Hasher};

Expand Down Expand Up @@ -198,6 +201,7 @@ hash_float_value!((half::f16, u16), (f32, u32), (f64, u64));
/// Create a `SeedableRandomState` whose per-hasher seed incorporates `seed`.
/// This folds the previous hash into the hasher's initial state so only the
/// new value needs to pass through the hash function — same cost as `hash_one`.
#[cfg(not(feature = "force_hash_collisions"))]
#[inline]
fn seeded_state(seed: u64) -> foldhash::fast::SeedableRandomState {
foldhash::fast::SeedableRandomState::with_seed(
Expand Down Expand Up @@ -303,6 +307,7 @@ fn hash_array<T>(
/// HAS_NULLS: do we have to check null in the inner loop
/// HAS_BUFFERS: if true, array has external buffers; if false, all strings are inlined / less than 12 bytes
/// REHASH: if true, combining with existing hash, otherwise initializing
#[cfg(not(feature = "force_hash_collisions"))]
#[inline(never)]
fn hash_string_view_array_inner<
T: ByteViewType,
Expand Down Expand Up @@ -429,6 +434,7 @@ fn hash_generic_byte_view_array<T: ByteViewType>(
/// - `HAS_NULL_KEYS`: Whether to check for null dictionary keys
/// - `HAS_NULL_VALUES`: Whether to check for null dictionary values
/// - `MULTI_COL`: Whether to combine with existing hash (true) or initialize (false)
#[cfg(not(feature = "force_hash_collisions"))]
#[inline(never)]
fn hash_dictionary_inner<
K: ArrowDictionaryKeyType,
Expand Down
2 changes: 2 additions & 0 deletions datafusion/common/src/pruning.rs
Original file line number Diff line number Diff line change
Expand Up @@ -121,6 +121,7 @@ pub trait PruningStatistics {
/// container, return `None` (the default).
///
/// Note: the returned array must contain [`Self::num_containers`] rows
#[allow(clippy::allow_attributes, clippy::mutable_key_type)] // ScalarValue has interior mutability but is intentionally used as hash key
fn contained(
&self,
column: &Column,
Expand Down Expand Up @@ -526,6 +527,7 @@ impl PruningStatistics for CompositePruningStatistics {

#[cfg(test)]
#[expect(deprecated)]
#[allow(clippy::allow_attributes, clippy::mutable_key_type)] // ScalarValue has interior mutability but is intentionally used as hash key
mod tests {
use crate::{
ColumnStatistics,
Expand Down
3 changes: 3 additions & 0 deletions datafusion/common/src/scalar/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4601,6 +4601,7 @@ impl ScalarValue {
/// Estimates [size](Self::size) of [`HashSet`] in bytes.
///
/// Includes the size of the [`HashSet`] container itself.
#[allow(clippy::allow_attributes, clippy::mutable_key_type)] // ScalarValue has interior mutability but is intentionally used as hash key
pub fn size_of_hashset<S>(set: &HashSet<Self, S>) -> usize {
size_of_val(set)
+ (size_of::<ScalarValue>() * set.capacity())
Expand Down Expand Up @@ -7263,6 +7264,8 @@ mod tests {
size_of::<Vec<ScalarValue>>() + (9 * size_of::<ScalarValue>()) + sv_size,
);

#[allow(clippy::allow_attributes, clippy::mutable_key_type)]
// ScalarValue has interior mutability but is intentionally used as hash key
let mut s = HashSet::with_capacity(0);
// do NOT clone `sv` here because this may shrink the vector capacity
s.insert(v.pop().unwrap());
Expand Down
Loading
Loading