File tree Expand file tree Collapse file tree 2 files changed +31
-0
lines changed
Expand file tree Collapse file tree 2 files changed +31
-0
lines changed Original file line number Diff line number Diff line change @@ -171,6 +171,22 @@ async fn row_count_demuxer(
171171 max_rows_per_file
172172 } ;
173173
174+ // Single-file output requires creating at least one file stream up front:
175+ // if the input stream yields no record batches (zero-row scenario), this still
176+ // produces a valid, schema-only output file instead of writing nothing.
176+ if single_file_output {
177+ open_file_streams. push ( create_new_file_stream (
178+ & base_output_path,
179+ & write_id,
180+ part_idx,
181+ & file_extension,
182+ single_file_output,
183+ max_buffered_batches,
184+ & mut tx,
185+ ) ?) ;
186+ row_counts. push ( 0 ) ;
187+ part_idx += 1 ;
188+ }
189+
174190 while let Some ( rb) = input. next ( ) . await . transpose ( ) ? {
175191 // ensure we have at least minimum_parallel_files open
176192 if open_file_streams. len ( ) < minimum_parallel_files {
Original file line number Diff line number Diff line change @@ -426,6 +426,21 @@ select * from validate_parquet_single;
4264261 Foo
4274272 Bar
428428
429+ # copy 0 rows to a single parquet file output
430+ query I
431+ COPY (SELECT 1 AS id WHERE FALSE) TO 'test_files/scratch/copy/table_no_rows.parquet';
432+ ----
433+ 0
434+
435+ statement ok
436+ CREATE EXTERNAL TABLE validate_parquet_single_no_rows STORED AS PARQUET LOCATION 'test_files/scratch/copy/table_no_rows.parquet';
437+
438+ # validate the parquet file contains 0 rows.
439+ query I
440+ SELECT COUNT(*) FROM validate_parquet_single_no_rows;
441+ ----
442+ 0
443+
429444# copy from table to folder of compressed json files
430445query I
431446COPY source_table to 'test_files/scratch/copy/table_json_gz' STORED AS JSON OPTIONS ('format.compression' gzip);
You can’t perform that action at this time.
0 commit comments