datafusion-testing (2 changes: 1 addition & 1 deletion)

datafusion/datasource-parquet/src/opener.rs (2 changes: 1 addition & 1 deletion)
@@ -174,7 +174,7 @@ impl FileOpener for ParquetOpener {
                 builder.metadata(),
                 reorder_predicates,
                 &file_metrics,
-                schema_adapter_factory.clone(),
+                &schema_adapter_factory,
             );
 
             match row_filter {
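The hunk above changes the call site so that it borrows the schema adapter factory rather than cloning it into the call. A minimal sketch of that calling convention, with hypothetical stand-in types instead of the real DataFusion ones and the other parameters omitted:

```rust
use std::sync::Arc;

// Hypothetical stand-in for DataFusion's SchemaAdapterFactory trait; it exists
// only to illustrate the calling convention adopted in this hunk.
trait SchemaAdapterFactory: Send + Sync {}
struct DefaultSchemaAdapterFactory;
impl SchemaAdapterFactory for DefaultSchemaAdapterFactory {}

// The callee borrows the Arc, so callers no longer write `.clone()` at the call site.
fn build_row_filter(schema_adapter_factory: &Arc<dyn SchemaAdapterFactory>) {
    // Bump the ref count only where an owned handle is actually required,
    // e.g. when handing the factory to a per-predicate builder.
    let owned: Arc<dyn SchemaAdapterFactory> = Arc::clone(schema_adapter_factory);
    let _ = owned;
}

fn main() {
    let factory: Arc<dyn SchemaAdapterFactory> = Arc::new(DefaultSchemaAdapterFactory);
    // Mirrors the opener.rs change: pass `&factory` rather than `factory.clone()`.
    build_row_filter(&factory);
}
```

Taking a reference to the Arc keeps the call site cheap while still letting the callee obtain owned handles wherever it needs them.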
datafusion/datasource-parquet/src/row_filter.rs (15 changes: 6 additions & 9 deletions)
@@ -449,12 +449,12 @@ fn columns_sorted(_columns: &[usize], _metadata: &ParquetMetaData) -> Result<bool>
 /// `a = 1` and `c = 3`.
 pub fn build_row_filter(
     expr: &Arc<dyn PhysicalExpr>,
-    file_schema: &Schema,
-    table_schema: &Schema,
+    file_schema: &SchemaRef,
+    table_schema: &SchemaRef,
     metadata: &ParquetMetaData,
     reorder_predicates: bool,
     file_metrics: &ParquetFileMetrics,
-    schema_adapter_factory: Arc<dyn SchemaAdapterFactory>,
+    schema_adapter_factory: &Arc<dyn SchemaAdapterFactory>,
 ) -> Result<Option<RowFilter>> {
     let rows_pruned = &file_metrics.pushdown_rows_pruned;
     let rows_matched = &file_metrics.pushdown_rows_matched;
@@ -464,18 +464,15 @@ pub fn build_row_filter(
     // `a = 1 AND b = 2 AND c = 3` -> [`a = 1`, `b = 2`, `c = 3`]
     let predicates = split_conjunction(expr);
 
-    let file_schema = Arc::new(file_schema.clone());

Author: These are deep clones (they clone the entire Schema), whereas with this PR only the Arc is cloned, i.e. the ref count is incremented.

Member: Yup, makes total sense. I was just cautious about updating the signature of build_row_filter, since it seems to be a public function.

(A sketch of this difference follows the diff below.)

-    let table_schema = Arc::new(table_schema.clone());
-
     // Determine which conjuncts can be evaluated as ArrowPredicates, if any
     let mut candidates: Vec<FilterCandidate> = predicates
         .into_iter()
         .map(|expr| {
             FilterCandidateBuilder::new(
                 Arc::clone(expr),
-                file_schema.clone(),
-                table_schema.clone(),
-                schema_adapter_factory.clone(),
+                Arc::clone(file_schema),
+                Arc::clone(table_schema),
+                Arc::clone(schema_adapter_factory),
             )
             .build(metadata)
         })
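To illustrate the inline comment above: the old code deep-cloned each Schema into a fresh Arc, while the new code only increments the ref count of an existing SchemaRef. A minimal sketch of that difference, assuming the arrow_schema crate and a made-up two-column schema that is not taken from this PR:

```rust
use std::sync::Arc;
use arrow_schema::{DataType, Field, Schema, SchemaRef};

fn main() {
    // Made-up schema, purely for illustration.
    let schema: SchemaRef = Arc::new(Schema::new(vec![
        Field::new("a", DataType::Int64, false),
        Field::new("b", DataType::Utf8, true),
    ]));

    // Old pattern: a deep clone. `Schema::clone` copies every field, and
    // `Arc::new` allocates a fresh reference-counted box around the copy.
    let deep: SchemaRef = Arc::new(schema.as_ref().clone());

    // New pattern: only the atomic ref count of the existing allocation is
    // incremented; no schema data is copied.
    let shallow: SchemaRef = Arc::clone(&schema);

    assert!(Arc::ptr_eq(&schema, &shallow)); // same allocation
    assert!(!Arc::ptr_eq(&schema, &deep));   // separate copy
}
```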