Skip to content

Commit 29e1d61

Browse files
committed
Move helpers to the top
1 parent 2374aac commit 29e1d61

File tree

1 file changed: +32 additions, −32 deletions

arrow-json/benches/json_reader.rs

Lines changed: 32 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,38 @@ fn decode_and_flush(decoder: &mut Decoder, data: &[u8]) {
6363
}
6464
}
6565

66+
fn bench_decode_schema(c: &mut Criterion, name: &str, data: &[u8], schema: Arc<Schema>) {
67+
let mut group = c.benchmark_group(name);
68+
group.throughput(Throughput::Bytes(data.len() as u64));
69+
group.sample_size(50);
70+
group.measurement_time(std::time::Duration::from_secs(5));
71+
group.warm_up_time(std::time::Duration::from_secs(2));
72+
group.sampling_mode(SamplingMode::Flat);
73+
group.bench_function(BenchmarkId::from_parameter(ROWS), |b| {
74+
b.iter(|| {
75+
let mut decoder = ReaderBuilder::new(schema.clone())
76+
.with_batch_size(BATCH_SIZE)
77+
.build_decoder()
78+
.unwrap();
79+
decode_and_flush(&mut decoder, data);
80+
})
81+
});
82+
group.finish();
83+
}
84+
85+
fn bench_serialize_values(c: &mut Criterion, name: &str, values: &[Value], schema: Arc<Schema>) {
86+
c.bench_function(name, |b| {
87+
b.iter(|| {
88+
let mut decoder = ReaderBuilder::new(schema.clone())
89+
.with_batch_size(BATCH_SIZE)
90+
.build_decoder()
91+
.unwrap();
92+
decoder.serialize(values).unwrap();
93+
while let Some(_batch) = decoder.flush().unwrap() {}
94+
})
95+
});
96+
}
97+
6698
fn build_schema(field_count: usize) -> Arc<Schema> {
6799
// Builds a schema with fields named f0..f{field_count-1}, all Int64 and non-nullable.
68100
let fields: Vec<Field> = (0..field_count)
@@ -177,38 +209,6 @@ fn bench_binary_hex(c: &mut Criterion) {
177209
bench_decode_binary(c, "decode_binary_view_hex_json", &binary_data, view_field);
178210
}
179211

180-
// NOTE(review): this is the "before" (deleted) side of the diff — a
// byte-identical copy of `bench_decode_schema`, which this commit moves
// to the top of the file. Diff line markers are preserved verbatim.
fn bench_decode_schema(c: &mut Criterion, name: &str, data: &[u8], schema: Arc<Schema>) {
181-
let mut group = c.benchmark_group(name);
182-
group.throughput(Throughput::Bytes(data.len() as u64));
183-
group.sample_size(50);
184-
group.measurement_time(std::time::Duration::from_secs(5));
185-
group.warm_up_time(std::time::Duration::from_secs(2));
186-
group.sampling_mode(SamplingMode::Flat);
187-
group.bench_function(BenchmarkId::from_parameter(ROWS), |b| {
188-
b.iter(|| {
189-
let mut decoder = ReaderBuilder::new(schema.clone())
190-
.with_batch_size(BATCH_SIZE)
191-
.build_decoder()
192-
.unwrap();
193-
decode_and_flush(&mut decoder, data);
194-
})
195-
});
196-
group.finish();
197-
}
198-
199-
// NOTE(review): the "before" (deleted) side of the diff — a byte-identical
// copy of `bench_serialize_values`, moved to the top of the file by this
// commit. Diff line markers are preserved verbatim.
fn bench_serialize_values(c: &mut Criterion, name: &str, values: &[Value], schema: Arc<Schema>) {
200-
c.bench_function(name, |b| {
201-
b.iter(|| {
202-
let mut decoder = ReaderBuilder::new(schema.clone())
203-
.with_batch_size(BATCH_SIZE)
204-
.build_decoder()
205-
.unwrap();
206-
decoder.serialize(values).unwrap();
207-
while let Some(_batch) = decoder.flush().unwrap() {}
208-
})
209-
});
210-
}
211-
212212
fn build_wide_projection_json(rows: usize, total_fields: usize) -> Vec<u8> {
213213
// Estimate: each field ~15 bytes ("fXX":VVVVVVV,), total ~15*100 + overhead
214214
let per_row_size = total_fields * 15 + 10;

0 commit comments

Comments
 (0)