Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refine: refine writer interface #741

Merged
merged 2 commits into from
Dec 10, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion crates/iceberg/src/arrow/record_batch_projector.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ use crate::error::Result;
use crate::{Error, ErrorKind};

/// Helps to project specific fields from a `RecordBatch` according to the field ids.
#[derive(Clone)]
#[derive(Clone, Debug)]
pub(crate) struct RecordBatchProjector {
// A vector of vectors, where each inner vector represents the index path to access a specific field in a nested structure.
// E.g. [[0], [1, 2]] means the first field is accessed directly from the first column,
Expand Down
34 changes: 10 additions & 24 deletions crates/iceberg/src/writer/base_writer/data_file_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,46 +26,36 @@ use crate::writer::{CurrentFileStatus, IcebergWriter, IcebergWriterBuilder};
use crate::Result;

/// Builder for `DataFileWriter`.
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct DataFileWriterBuilder<B: FileWriterBuilder> {
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we maybe also derive Debug?

inner: B,
partition_value: Option<Struct>,
}

impl<B: FileWriterBuilder> DataFileWriterBuilder<B> {
/// Create a new `DataFileWriterBuilder` from a `FileWriterBuilder` and an optional partition value.
pub fn new(inner: B) -> Self {
Self { inner }
}
}

/// Config for `DataFileWriter`.
pub struct DataFileWriterConfig {
partition_value: Struct,
}

impl DataFileWriterConfig {
/// Create a new `DataFileWriterConfig` with partition value.
pub fn new(partition_value: Option<Struct>) -> Self {
pub fn new(inner: B, partition_value: Option<Struct>) -> Self {
Self {
partition_value: partition_value.unwrap_or(Struct::empty()),
inner,
partition_value,
}
}
}

#[async_trait::async_trait]
impl<B: FileWriterBuilder> IcebergWriterBuilder for DataFileWriterBuilder<B> {
type R = DataFileWriter<B>;
type C = DataFileWriterConfig;

async fn build(self, config: Self::C) -> Result<Self::R> {
async fn build(self) -> Result<Self::R> {
Ok(DataFileWriter {
inner_writer: Some(self.inner.clone().build().await?),
partition_value: config.partition_value,
partition_value: self.partition_value.unwrap_or(Struct::empty()),
})
}
}

/// A writer that writes data within one spec/partition.
#[derive(Debug)]
pub struct DataFileWriter<B: FileWriterBuilder> {
inner_writer: Option<B::R>,
partition_value: Struct,
Expand Down Expand Up @@ -115,9 +105,7 @@ mod test {

use crate::io::FileIOBuilder;
use crate::spec::{DataContentType, DataFileFormat, Schema, Struct};
use crate::writer::base_writer::data_file_writer::{
DataFileWriterBuilder, DataFileWriterConfig,
};
use crate::writer::base_writer::data_file_writer::DataFileWriterBuilder;
use crate::writer::file_writer::location_generator::test::MockLocationGenerator;
use crate::writer::file_writer::location_generator::DefaultFileNameGenerator;
use crate::writer::file_writer::ParquetWriterBuilder;
Expand All @@ -140,9 +128,7 @@ mod test {
location_gen,
file_name_gen,
);
let mut data_file_writer = DataFileWriterBuilder::new(pw)
.build(DataFileWriterConfig::new(None))
.await?;
let mut data_file_writer = DataFileWriterBuilder::new(pw, None).build().await?;

let data_file = data_file_writer.close().await.unwrap();
assert_eq!(data_file.len(), 1);
Expand Down
27 changes: 14 additions & 13 deletions crates/iceberg/src/writer/base_writer/equality_delete_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,19 +32,21 @@ use crate::writer::{IcebergWriter, IcebergWriterBuilder};
use crate::{Error, ErrorKind, Result};

/// Builder for `EqualityDeleteWriter`.
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct EqualityDeleteFileWriterBuilder<B: FileWriterBuilder> {
inner: B,
config: EqualityDeleteWriterConfig,
}

impl<B: FileWriterBuilder> EqualityDeleteFileWriterBuilder<B> {
/// Create a new `EqualityDeleteFileWriterBuilder` from a `FileWriterBuilder` and an `EqualityDeleteWriterConfig`.
pub fn new(inner: B) -> Self {
Self { inner }
pub fn new(inner: B, config: EqualityDeleteWriterConfig) -> Self {
Self { inner, config }
}
}

/// Config for `EqualityDeleteWriter`.
#[derive(Clone, Debug)]
pub struct EqualityDeleteWriterConfig {
// Field ids used to determine row equality in equality delete files.
equality_ids: Vec<i32>,
Expand Down Expand Up @@ -108,19 +110,19 @@ impl EqualityDeleteWriterConfig {
#[async_trait::async_trait]
impl<B: FileWriterBuilder> IcebergWriterBuilder for EqualityDeleteFileWriterBuilder<B> {
type R = EqualityDeleteFileWriter<B>;
type C = EqualityDeleteWriterConfig;

async fn build(self, config: Self::C) -> Result<Self::R> {
async fn build(self) -> Result<Self::R> {
Ok(EqualityDeleteFileWriter {
inner_writer: Some(self.inner.clone().build().await?),
projector: config.projector,
equality_ids: config.equality_ids,
partition_value: config.partition_value,
projector: self.config.projector,
equality_ids: self.config.equality_ids,
partition_value: self.config.partition_value,
})
}
}

/// Writer used to write equality delete files.
#[derive(Debug)]
pub struct EqualityDeleteFileWriter<B: FileWriterBuilder> {
inner_writer: Option<B::R>,
projector: RecordBatchProjector,
Expand Down Expand Up @@ -396,9 +398,8 @@ mod test {
location_gen,
file_name_gen,
);

let mut equality_delete_writer = EqualityDeleteFileWriterBuilder::new(pb)
.build(equality_config)
let mut equality_delete_writer = EqualityDeleteFileWriterBuilder::new(pb, equality_config)
.build()
.await?;

// write
Expand Down Expand Up @@ -561,8 +562,8 @@ mod test {
location_gen,
file_name_gen,
);
let mut equality_delete_writer = EqualityDeleteFileWriterBuilder::new(pb)
.build(config)
let mut equality_delete_writer = EqualityDeleteFileWriterBuilder::new(pb, config)
.build()
.await?;

// prepare data
Expand Down
4 changes: 2 additions & 2 deletions crates/iceberg/src/writer/file_writer/location_generator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ const WRITE_DATA_LOCATION: &str = "write.data.path";
const WRITE_FOLDER_STORAGE_LOCATION: &str = "write.folder-storage.path";
const DEFAULT_DATA_DIR: &str = "/data";

#[derive(Clone)]
#[derive(Clone, Debug)]
/// `DefaultLocationGenerator` used to generate the data dir location of data file.
/// The location is generated based on the table location and the data location in table properties.
pub struct DefaultLocationGenerator {
Expand Down Expand Up @@ -87,7 +87,7 @@ pub trait FileNameGenerator: Clone + Send + 'static {
/// `DefaultFileNameGenerator` used to generate file name for data file. The file name can be
/// passed to `LocationGenerator` to generate the location of the file.
/// The file name format is "{prefix}-{file_count}[-{suffix}].{file_format}".
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct DefaultFileNameGenerator {
prefix: String,
suffix: String,
Expand Down
2 changes: 1 addition & 1 deletion crates/iceberg/src/writer/file_writer/parquet_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ use crate::writer::CurrentFileStatus;
use crate::{Error, ErrorKind, Result};

/// ParquetWriterBuilder is used to build a [`ParquetWriter`]
#[derive(Clone)]
#[derive(Clone, Debug)]
pub struct ParquetWriterBuilder<T: LocationGenerator, F: FileNameGenerator> {
props: WriterProperties,
schema: SchemaRef,
Expand Down
4 changes: 1 addition & 3 deletions crates/iceberg/src/writer/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,10 +63,8 @@ pub trait IcebergWriterBuilder<I = DefaultInput, O = DefaultOutput>:
{
/// The associated writer type.
type R: IcebergWriter<I, O>;
/// The associated writer config type used to build the writer.
type C;
/// Build the iceberg writer.
async fn build(self, config: Self::C) -> Result<Self::R>;
async fn build(self) -> Result<Self::R>;
}

/// The iceberg writer used to write data to iceberg table.
Expand Down
9 changes: 3 additions & 6 deletions crates/integration_tests/tests/append_data_file_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ use arrow_array::{ArrayRef, BooleanArray, Int32Array, RecordBatch, StringArray};
use futures::TryStreamExt;
use iceberg::spec::{NestedField, PrimitiveType, Schema, Type};
use iceberg::transaction::Transaction;
use iceberg::writer::base_writer::data_file_writer::{DataFileWriterBuilder, DataFileWriterConfig};
use iceberg::writer::base_writer::data_file_writer::DataFileWriterBuilder;
use iceberg::writer::file_writer::location_generator::{
DefaultFileNameGenerator, DefaultLocationGenerator,
};
Expand Down Expand Up @@ -97,11 +97,8 @@ async fn test_append_data_file() {
location_generator.clone(),
file_name_generator.clone(),
);
let data_file_writer_builder = DataFileWriterBuilder::new(parquet_writer_builder);
let mut data_file_writer = data_file_writer_builder
.build(DataFileWriterConfig::new(None))
.await
.unwrap();
let data_file_writer_builder = DataFileWriterBuilder::new(parquet_writer_builder, None);
let mut data_file_writer = data_file_writer_builder.build().await.unwrap();
let col1 = StringArray::from(vec![Some("foo"), Some("bar"), None, Some("baz")]);
let col2 = Int32Array::from(vec![Some(1), Some(2), Some(3), Some(4)]);
let col3 = BooleanArray::from(vec![Some(true), Some(false), None, Some(false)]);
Expand Down
9 changes: 3 additions & 6 deletions crates/integration_tests/tests/conflict_commit_test.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ use arrow_array::{ArrayRef, BooleanArray, Int32Array, RecordBatch, StringArray};
use futures::TryStreamExt;
use iceberg::spec::{NestedField, PrimitiveType, Schema, Type};
use iceberg::transaction::Transaction;
use iceberg::writer::base_writer::data_file_writer::{DataFileWriterBuilder, DataFileWriterConfig};
use iceberg::writer::base_writer::data_file_writer::DataFileWriterBuilder;
use iceberg::writer::file_writer::location_generator::{
DefaultFileNameGenerator, DefaultLocationGenerator,
};
Expand Down Expand Up @@ -96,11 +96,8 @@ async fn test_append_data_file_conflict() {
location_generator.clone(),
file_name_generator.clone(),
);
let data_file_writer_builder = DataFileWriterBuilder::new(parquet_writer_builder);
let mut data_file_writer = data_file_writer_builder
.build(DataFileWriterConfig::new(None))
.await
.unwrap();
let data_file_writer_builder = DataFileWriterBuilder::new(parquet_writer_builder, None);
let mut data_file_writer = data_file_writer_builder.build().await.unwrap();
let col1 = StringArray::from(vec![Some("foo"), Some("bar"), None, Some("baz")]);
let col2 = Int32Array::from(vec![Some(1), Some(2), Some(3), Some(4)]);
let col3 = BooleanArray::from(vec![Some(true), Some(false), None, Some(false)]);
Expand Down
Loading