Skip to content

Commit 72f0600

Browse files
authored
Upgrade to DataFusion 14.0.0 (apache#67)
* Upgrade to DataFusion 14.0.0
* Apply cargo fmt
1 parent 9d2b974 commit 72f0600

8 files changed

Lines changed: 128 additions & 60 deletions

File tree

Cargo.lock

Lines changed: 101 additions & 32 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -34,9 +34,10 @@ default = ["mimalloc"]
3434
tokio = { version = "1.0", features = ["macros", "rt", "rt-multi-thread", "sync"] }
3535
rand = "0.7"
3636
pyo3 = { version = "~0.17.1", features = ["extension-module", "abi3", "abi3-py37"] }
37-
datafusion = { version = "^13.0.0", features = ["pyarrow", "avro"] }
38-
datafusion-expr = { version = "^13.0.0" }
39-
datafusion-common = { version = "^13.0.0", features = ["pyarrow"] }
37+
datafusion = { version = "^14.0.0", features = ["pyarrow", "avro"] }
38+
datafusion-expr = { version = "^14.0.0" }
39+
datafusion-optimizer = { version = "^14.0.0" }
40+
datafusion-common = { version = "^14.0.0", features = ["pyarrow"] }
4041
uuid = { version = "0.8", features = ["v4"] }
4142
mimalloc = { version = "*", optional = true, default-features = false }
4243
async-trait = "0.1"

datafusion/tests/test_catalog.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -33,8 +33,8 @@ def test_basic(ctx, database):
3333
assert table.kind == "physical"
3434
assert table.schema == pa.schema(
3535
[
36-
pa.field("int", pa.int64(), nullable=False),
37-
pa.field("str", pa.string(), nullable=False),
38-
pa.field("float", pa.float64(), nullable=False),
36+
pa.field("int", pa.int64(), nullable=True),
37+
pa.field("str", pa.string(), nullable=True),
38+
pa.field("float", pa.float64(), nullable=True),
3939
]
4040
)

src/dataframe.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -170,8 +170,8 @@ impl PyDataFrame {
170170
"left" => JoinType::Left,
171171
"right" => JoinType::Right,
172172
"full" => JoinType::Full,
173-
"semi" => JoinType::Semi,
174-
"anti" => JoinType::Anti,
173+
"semi" => JoinType::LeftSemi,
174+
"anti" => JoinType::LeftAnti,
175175
how => {
176176
return Err(DataFusionError::Common(format!(
177177
"The join type {} does not exist or is not implemented",

src/dataset_exec.rs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,8 @@ use datafusion::physical_plan::stream::RecordBatchStreamAdapter;
3737
use datafusion::physical_plan::{
3838
DisplayFormatType, ExecutionPlan, Partitioning, SendableRecordBatchStream, Statistics,
3939
};
40-
use datafusion_expr::{combine_filters, Expr};
40+
use datafusion_expr::Expr;
41+
use datafusion_optimizer::utils::conjunction;
4142

4243
use crate::errors::DataFusionError;
4344
use crate::pyarrow_filter_expression::PyArrowFilterExpression;
@@ -93,7 +94,7 @@ impl DatasetExec {
9394
.collect()
9495
});
9596
let columns: Option<Vec<String>> = columns.transpose()?;
96-
let filter_expr: Option<PyObject> = combine_filters(filters)
97+
let filter_expr: Option<PyObject> = conjunction(filters.to_owned())
9798
.map(|filters| {
9899
PyArrowFilterExpression::try_from(&filters)
99100
.map(|filter_expr| filter_expr.inner().clone_ref(py))

0 commit comments

Comments (0)