Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
78 changes: 69 additions & 9 deletions arrow-json/src/reader/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ use crate::reader::run_end_array::RunEndEncodedArrayDecoder;
use crate::reader::string_array::StringArrayDecoder;
use crate::reader::string_view_array::StringViewArrayDecoder;
use crate::reader::struct_array::StructArrayDecoder;
use crate::reader::tape::{Tape, TapeDecoder};
use crate::reader::tape::{Tape, TapeDecoder, TapeDecoderOptions};
use crate::reader::timestamp_array::TimestampArrayDecoder;

mod binary_array;
Expand All @@ -181,12 +181,14 @@ mod tape;
mod timestamp_array;

/// A builder for [`Reader`] and [`Decoder`]
#[derive(Clone)]
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why does it need to be Clone now where previously it was not?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I clone the ReaderBuilder in do_read_config, which was the most straightforward way of allowing tests to pass in additional config. Otherwise I would've had to add an impl Fn() -> ReaderBuilder, which seemed more convoluted than just having ReaderBuilder implement Clone.

pub struct ReaderBuilder {
batch_size: usize,
coerce_primitive: bool,
strict_mode: bool,
is_field: bool,
struct_mode: StructMode,
flatten_top_level_arrays: bool,

schema: SchemaRef,
}
Expand All @@ -207,6 +209,7 @@ impl ReaderBuilder {
strict_mode: false,
is_field: false,
struct_mode: Default::default(),
flatten_top_level_arrays: false,
schema,
}
}
Expand Down Expand Up @@ -248,6 +251,7 @@ impl ReaderBuilder {
strict_mode: false,
is_field: true,
struct_mode: Default::default(),
flatten_top_level_arrays: false,
schema: Arc::new(Schema::new([field.into()])),
}
}
Expand Down Expand Up @@ -288,6 +292,26 @@ impl ReaderBuilder {
}
}

/// Controls how top-level JSON arrays are interpreted.
///
/// * `true`: every element of a top-level array is treated as its own row.
/// * `false` (the default): a top-level array is treated as a single row.
///
/// For example, given this input file:
/// ```text
/// [{ "a": 1 }, { "a": 2 }, { "b": 3 }]
/// [{ "a": 4 }, { "a": 5 }, { "b": 6 }]
/// ```
///
/// The default behavior parses this as two rows, each an array of three
/// elements. Setting this option to `true` instead parses it as six rows.
pub fn with_flatten(mut self, flatten_top_level_arrays: bool) -> Self {
    // Mutate in place rather than rebuilding via struct-update syntax.
    self.flatten_top_level_arrays = flatten_top_level_arrays;
    self
}

/// Create a [`Reader`] with the provided [`BufRead`]
pub fn build<R: BufRead>(self, reader: R) -> Result<Reader<R>, ArrowError> {
Ok(Reader {
Expand Down Expand Up @@ -319,7 +343,11 @@ impl ReaderBuilder {
Ok(Decoder {
decoder,
is_field: self.is_field,
tape_decoder: TapeDecoder::new(self.batch_size, num_fields),
tape_decoder: TapeDecoder::new(TapeDecoderOptions {
batch_size: self.batch_size,
num_fields,
flatten_top_level_arrays: self.flatten_top_level_arrays,
}),
batch_size: self.batch_size,
schema: self.schema,
})
Expand Down Expand Up @@ -813,7 +841,9 @@ mod tests {
use std::io::{BufReader, Cursor, Seek};

use arrow_array::cast::AsArray;
use arrow_array::{Array, BooleanArray, Float64Array, ListArray, StringArray, StringViewArray};
use arrow_array::{
Array, BooleanArray, Float64Array, Int32Array, ListArray, StringArray, StringViewArray,
};
use arrow_buffer::{ArrowNativeType, Buffer};
use arrow_cast::display::{ArrayFormatter, FormatOptions};
use arrow_data::ArrayDataBuilder;
Expand All @@ -828,13 +858,21 @@ mod tests {
strict_mode: bool,
schema: SchemaRef,
) -> Vec<RecordBatch> {
let config = ReaderBuilder::new(schema)
.with_batch_size(batch_size)
.with_strict_mode(strict_mode)
.with_coerce_primitive(coerce_primitive);
do_read_config(buf, config)
}

fn do_read_config(buf: &str, builder: ReaderBuilder) -> Vec<RecordBatch> {
let mut unbuffered = vec![];

// Test with different batch sizes to test for boundary conditions
for batch_size in [1, 3, 100, batch_size] {
unbuffered = ReaderBuilder::new(schema.clone())
for batch_size in [1, 3, 100, builder.batch_size] {
unbuffered = builder
.clone()
.with_batch_size(batch_size)
.with_coerce_primitive(coerce_primitive)
.build(Cursor::new(buf.as_bytes()))
.unwrap()
.collect::<Result<Vec<_>, _>>()
Expand All @@ -846,10 +884,9 @@ mod tests {

// Test with different buffer sizes to test for boundary conditions
for b in [1, 3, 5] {
let buffered = ReaderBuilder::new(schema.clone())
let buffered = builder
.clone()
.with_batch_size(batch_size)
.with_coerce_primitive(coerce_primitive)
.with_strict_mode(strict_mode)
.build(BufReader::with_capacity(b, Cursor::new(buf.as_bytes())))
.unwrap()
.collect::<Result<Vec<_>, _>>()
Expand Down Expand Up @@ -2975,4 +3012,27 @@ mod tests {
assert_eq!(run_array.len(), 3);
assert_eq!(run_array.run_ends().values(), &[2i16, 3]);
}

#[test]
fn test_flatten_top_level_arrays() {
    // Mixes top-level arrays (multi-line and single-line) with a bare
    // top-level object; with flattening enabled all six objects become rows.
    let buf = r#"
        [
            {"a": 1},
            {"a": 2}
        ]
        {"a": 3}
        [{"a": 4}, {"a": 5}, {"a": 6}]
        "#;

    let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Int32, true)]));
    let batches = do_read_config(buf, ReaderBuilder::new(schema).with_flatten(true));
    assert_eq!(batches.len(), 1);

    let col = batches[0]
        .column(0)
        .as_any()
        .downcast_ref::<Int32Array>()
        .unwrap();
    assert_eq!(col.len(), 6);
    // Rows should appear in input order: 1 through 6.
    for (idx, expected) in (1..=6).enumerate() {
        assert_eq!(col.value(idx), expected);
    }
}
}
Loading
Loading