Commit eaa7131

Adjust visibility of crate private members & Functions (#537)
1 parent d1a7505 commit eaa7131

7 files changed: +63 −51 lines changed

src/catalog.rs

Lines changed: 5 additions & 5 deletions
@@ -30,18 +30,18 @@ use datafusion::{
 };
 
 #[pyclass(name = "Catalog", module = "datafusion", subclass)]
-pub(crate) struct PyCatalog {
-    catalog: Arc<dyn CatalogProvider>,
+pub struct PyCatalog {
+    pub catalog: Arc<dyn CatalogProvider>,
 }
 
 #[pyclass(name = "Database", module = "datafusion", subclass)]
-pub(crate) struct PyDatabase {
-    database: Arc<dyn SchemaProvider>,
+pub struct PyDatabase {
+    pub database: Arc<dyn SchemaProvider>,
 }
 
 #[pyclass(name = "Table", module = "datafusion", subclass)]
 pub struct PyTable {
-    table: Arc<dyn TableProvider>,
+    pub table: Arc<dyn TableProvider>,
 }
 
 impl PyCatalog {
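
For Rust callers, the practical effect is that these wrappers can now be built with plain struct-literal syntax. A minimal sketch, assuming the library is linked under the name datafusion_python and that the catalog module is itself exported from the crate root (neither is shown in this diff; the CatalogProvider import path also varies by DataFusion version):

use std::sync::Arc;

use datafusion::catalog::catalog::CatalogProvider; // path differs in newer DataFusion releases
use datafusion_python::catalog::PyCatalog; // assumed module path

// Because both the struct and its `catalog` field are now `pub`, the wrapper
// can be constructed outside the defining crate without a helper constructor.
fn wrap_catalog(provider: Arc<dyn CatalogProvider>) -> PyCatalog {
    PyCatalog { catalog: provider }
}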

src/common/df_field.rs

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@ use super::data_type::PyDataType;
 #[pyclass(name = "DFField", module = "datafusion.common", subclass)]
 #[derive(Debug, Clone)]
 pub struct PyDFField {
-    field: DFField,
+    pub field: DFField,
 }
 
 impl From<PyDFField> for DFField {

src/context.rs

Lines changed: 40 additions & 37 deletions
@@ -60,8 +60,8 @@ use tokio::task::JoinHandle;
 /// Configuration options for a SessionContext
 #[pyclass(name = "SessionConfig", module = "datafusion", subclass)]
 #[derive(Clone, Default)]
-pub(crate) struct PySessionConfig {
-    pub(crate) config: SessionConfig,
+pub struct PySessionConfig {
+    pub config: SessionConfig,
 }
 
 impl From<SessionConfig> for PySessionConfig {
@@ -153,8 +153,8 @@ impl PySessionConfig {
 /// Runtime options for a SessionContext
 #[pyclass(name = "RuntimeConfig", module = "datafusion", subclass)]
 #[derive(Clone)]
-pub(crate) struct PyRuntimeConfig {
-    pub(crate) config: RuntimeConfig,
+pub struct PyRuntimeConfig {
+    pub config: RuntimeConfig,
 }
 
 #[pymethods]
@@ -215,15 +215,18 @@ impl PyRuntimeConfig {
 /// multi-threaded execution engine to perform the execution.
 #[pyclass(name = "SessionContext", module = "datafusion", subclass)]
 #[derive(Clone)]
-pub(crate) struct PySessionContext {
-    pub(crate) ctx: SessionContext,
+pub struct PySessionContext {
+    pub ctx: SessionContext,
 }
 
 #[pymethods]
 impl PySessionContext {
     #[pyo3(signature = (config=None, runtime=None))]
     #[new]
-    fn new(config: Option<PySessionConfig>, runtime: Option<PyRuntimeConfig>) -> PyResult<Self> {
+    pub fn new(
+        config: Option<PySessionConfig>,
+        runtime: Option<PyRuntimeConfig>,
+    ) -> PyResult<Self> {
         let config = if let Some(c) = config {
             c.config
         } else {
@@ -242,7 +245,7 @@ impl PySessionContext {
     }
 
     /// Register a an object store with the given name
-    fn register_object_store(
+    pub fn register_object_store(
         &mut self,
         scheme: &str,
         store: &PyAny,
@@ -276,13 +279,13 @@ impl PySessionContext {
     }
 
     /// Returns a PyDataFrame whose plan corresponds to the SQL statement.
-    fn sql(&mut self, query: &str, py: Python) -> PyResult<PyDataFrame> {
+    pub fn sql(&mut self, query: &str, py: Python) -> PyResult<PyDataFrame> {
         let result = self.ctx.sql(query);
         let df = wait_for_future(py, result).map_err(DataFusionError::from)?;
         Ok(PyDataFrame::new(df))
     }
 
-    fn create_dataframe(
+    pub fn create_dataframe(
         &mut self,
         partitions: PyArrowType<Vec<Vec<RecordBatch>>>,
         name: Option<&str>,
@@ -314,13 +317,13 @@ impl PySessionContext {
     }
 
     /// Create a DataFrame from an existing logical plan
-    fn create_dataframe_from_logical_plan(&mut self, plan: PyLogicalPlan) -> PyDataFrame {
+    pub fn create_dataframe_from_logical_plan(&mut self, plan: PyLogicalPlan) -> PyDataFrame {
         PyDataFrame::new(DataFrame::new(self.ctx.state(), plan.plan.as_ref().clone()))
     }
 
     /// Construct datafusion dataframe from Python list
     #[allow(clippy::wrong_self_convention)]
-    fn from_pylist(
+    pub fn from_pylist(
         &mut self,
         data: PyObject,
         name: Option<&str>,
@@ -340,7 +343,7 @@ impl PySessionContext {
 
     /// Construct datafusion dataframe from Python dictionary
     #[allow(clippy::wrong_self_convention)]
-    fn from_pydict(
+    pub fn from_pydict(
         &mut self,
         data: PyObject,
         name: Option<&str>,
@@ -360,7 +363,7 @@ impl PySessionContext {
 
     /// Construct datafusion dataframe from Arrow Table
    #[allow(clippy::wrong_self_convention)]
-    fn from_arrow_table(
+    pub fn from_arrow_table(
         &mut self,
         data: PyObject,
         name: Option<&str>,
@@ -381,7 +384,7 @@ impl PySessionContext {
 
     /// Construct datafusion dataframe from pandas
     #[allow(clippy::wrong_self_convention)]
-    fn from_pandas(
+    pub fn from_pandas(
         &mut self,
         data: PyObject,
         name: Option<&str>,
@@ -401,7 +404,7 @@ impl PySessionContext {
 
     /// Construct datafusion dataframe from polars
     #[allow(clippy::wrong_self_convention)]
-    fn from_polars(
+    pub fn from_polars(
         &mut self,
         data: PyObject,
         name: Option<&str>,
@@ -417,21 +420,21 @@ impl PySessionContext {
         })
     }
 
-    fn register_table(&mut self, name: &str, table: &PyTable) -> PyResult<()> {
+    pub fn register_table(&mut self, name: &str, table: &PyTable) -> PyResult<()> {
         self.ctx
             .register_table(name, table.table())
             .map_err(DataFusionError::from)?;
         Ok(())
     }
 
-    fn deregister_table(&mut self, name: &str) -> PyResult<()> {
+    pub fn deregister_table(&mut self, name: &str) -> PyResult<()> {
         self.ctx
             .deregister_table(name)
             .map_err(DataFusionError::from)?;
         Ok(())
     }
 
-    fn register_record_batches(
+    pub fn register_record_batches(
         &mut self,
         name: &str,
         partitions: PyArrowType<Vec<Vec<RecordBatch>>>,
@@ -451,7 +454,7 @@ impl PySessionContext {
         skip_metadata=true,
         schema=None,
         file_sort_order=None))]
-    fn register_parquet(
+    pub fn register_parquet(
         &mut self,
         name: &str,
         path: &str,
@@ -489,7 +492,7 @@ impl PySessionContext {
         schema_infer_max_records=1000,
         file_extension=".csv",
         file_compression_type=None))]
-    fn register_csv(
+    pub fn register_csv(
         &mut self,
         name: &str,
         path: PathBuf,
@@ -533,7 +536,7 @@ impl PySessionContext {
         file_extension=".json",
         table_partition_cols=vec![],
         file_compression_type=None))]
-    fn register_json(
+    pub fn register_json(
         &mut self,
         name: &str,
         path: PathBuf,
@@ -568,7 +571,7 @@ impl PySessionContext {
         file_extension=".avro",
         table_partition_cols=vec![],
         infinite=false))]
-    fn register_avro(
+    pub fn register_avro(
         &mut self,
         name: &str,
         path: PathBuf,
@@ -595,7 +598,7 @@ impl PySessionContext {
     }
 
     // Registers a PyArrow.Dataset
-    fn register_dataset(&self, name: &str, dataset: &PyAny, py: Python) -> PyResult<()> {
+    pub fn register_dataset(&self, name: &str, dataset: &PyAny, py: Python) -> PyResult<()> {
         let table: Arc<dyn TableProvider> = Arc::new(Dataset::new(dataset, py)?);
 
         self.ctx
@@ -605,18 +608,18 @@ impl PySessionContext {
         Ok(())
     }
 
-    fn register_udf(&mut self, udf: PyScalarUDF) -> PyResult<()> {
+    pub fn register_udf(&mut self, udf: PyScalarUDF) -> PyResult<()> {
         self.ctx.register_udf(udf.function);
         Ok(())
     }
 
-    fn register_udaf(&mut self, udaf: PyAggregateUDF) -> PyResult<()> {
+    pub fn register_udaf(&mut self, udaf: PyAggregateUDF) -> PyResult<()> {
         self.ctx.register_udaf(udaf.function);
         Ok(())
     }
 
     #[pyo3(signature = (name="datafusion"))]
-    fn catalog(&self, name: &str) -> PyResult<PyCatalog> {
+    pub fn catalog(&self, name: &str) -> PyResult<PyCatalog> {
         match self.ctx.catalog(name) {
             Some(catalog) => Ok(PyCatalog::new(catalog)),
             None => Err(PyKeyError::new_err(format!(
@@ -626,31 +629,31 @@ impl PySessionContext {
         }
     }
 
-    fn tables(&self) -> HashSet<String> {
+    pub fn tables(&self) -> HashSet<String> {
         #[allow(deprecated)]
         self.ctx.tables().unwrap()
     }
 
-    fn table(&self, name: &str, py: Python) -> PyResult<PyDataFrame> {
+    pub fn table(&self, name: &str, py: Python) -> PyResult<PyDataFrame> {
         let x = wait_for_future(py, self.ctx.table(name)).map_err(DataFusionError::from)?;
         Ok(PyDataFrame::new(x))
     }
 
-    fn table_exist(&self, name: &str) -> PyResult<bool> {
+    pub fn table_exist(&self, name: &str) -> PyResult<bool> {
         Ok(self.ctx.table_exist(name)?)
     }
 
-    fn empty_table(&self) -> PyResult<PyDataFrame> {
+    pub fn empty_table(&self) -> PyResult<PyDataFrame> {
         Ok(PyDataFrame::new(self.ctx.read_empty()?))
     }
 
-    fn session_id(&self) -> String {
+    pub fn session_id(&self) -> String {
         self.ctx.session_id()
     }
 
     #[allow(clippy::too_many_arguments)]
     #[pyo3(signature = (path, schema=None, schema_infer_max_records=1000, file_extension=".json", table_partition_cols=vec![], file_compression_type=None))]
-    fn read_json(
+    pub fn read_json(
         &mut self,
         path: PathBuf,
         schema: Option<PyArrowType<Schema>>,
@@ -689,7 +692,7 @@ impl PySessionContext {
         file_extension=".csv",
         table_partition_cols=vec![],
         file_compression_type=None))]
-    fn read_csv(
+    pub fn read_csv(
         &self,
         path: PathBuf,
         schema: Option<PyArrowType<Schema>>,
@@ -741,7 +744,7 @@ impl PySessionContext {
         skip_metadata=true,
         schema=None,
         file_sort_order=None))]
-    fn read_parquet(
+    pub fn read_parquet(
         &self,
         path: &str,
         table_partition_cols: Vec<(String, String)>,
@@ -771,7 +774,7 @@ impl PySessionContext {
 
     #[allow(clippy::too_many_arguments)]
     #[pyo3(signature = (path, schema=None, table_partition_cols=vec![], file_extension=".avro"))]
-    fn read_avro(
+    pub fn read_avro(
         &self,
         path: &str,
         schema: Option<PyArrowType<Schema>>,
@@ -793,7 +796,7 @@ impl PySessionContext {
         Ok(PyDataFrame::new(df))
     }
 
-    fn read_table(&self, table: &PyTable) -> PyResult<PyDataFrame> {
+    pub fn read_table(&self, table: &PyTable) -> PyResult<PyDataFrame> {
         let df = self
             .ctx
             .read_table(table.table())
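
Combined with the lib.rs change below that makes the context module public, this lets a downstream Rust crate construct and drive the wrapper directly. A minimal sketch, assuming the library is linked under the name datafusion_python and that the caller already depends on pyo3:

use datafusion_python::context::PySessionContext; // assumed crate name
use pyo3::prelude::*;

// `new` is now `pub`, so the wrapper can be built without going through the
// Python-side constructor; None for both arguments uses the defaults.
fn build_context() -> PyResult<PySessionContext> {
    PySessionContext::new(None, None)
}

// `sql` is also `pub`; per the diff it waits on the async plan construction
// internally, so the caller only needs to supply a Python token.
fn run_query(py: Python<'_>) -> PyResult<()> {
    let mut ctx = build_context()?;
    let _df = ctx.sql("SELECT 1", py)?;
    Ok(())
}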

src/expr.rs

Lines changed: 1 addition & 0 deletions
@@ -128,6 +128,7 @@ impl PyExpr {
             Expr::ScalarVariable(data_type, variables) => {
                 Ok(PyScalarVariable::new(data_type, variables).into_py(py))
             }
+            Expr::Like(value) => Ok(PyLike::from(value.clone()).into_py(py)),
             Expr::Literal(value) => Ok(PyLiteral::from(value.clone()).into_py(py)),
             Expr::BinaryExpr(expr) => Ok(PyBinaryExpr::from(expr.clone()).into_py(py)),
             Expr::Not(expr) => Ok(PyNot::new(*expr.clone()).into_py(py)),
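
For context, the new arm handles expressions like the one sketched below; col, lit, and the like builder are standard datafusion_expr helpers, and the conversion into PyLike happens inside the match shown above:

use datafusion_expr::{col, lit, Expr};

// Builds `name LIKE 'foo%'`; before this commit the Expr::Like variant had no
// mapping to a Python wrapper in this match.
fn like_example() -> Expr {
    col("name").like(lit("foo%"))
}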

src/lib.rs

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ pub mod common;
 #[allow(clippy::borrow_deref_ref)]
 mod config;
 #[allow(clippy::borrow_deref_ref)]
-mod context;
+pub mod context;
 #[allow(clippy::borrow_deref_ref)]
 mod dataframe;
 mod dataset;

src/substrait.rs

Lines changed: 6 additions & 6 deletions
@@ -29,8 +29,8 @@ use prost::Message;
 
 #[pyclass(name = "plan", module = "datafusion.substrait", subclass)]
 #[derive(Debug, Clone)]
-pub(crate) struct PyPlan {
-    pub(crate) plan: Plan,
+pub struct PyPlan {
+    pub plan: Plan,
 }
 
 #[pymethods]
@@ -61,7 +61,7 @@ impl From<Plan> for PyPlan {
 /// to a valid `LogicalPlan` instance.
 #[pyclass(name = "serde", module = "datafusion.substrait", subclass)]
 #[derive(Debug, Clone)]
-pub(crate) struct PySubstraitSerializer;
+pub struct PySubstraitSerializer;
 
 #[pymethods]
 impl PySubstraitSerializer {
@@ -107,7 +107,7 @@ impl PySubstraitSerializer {
 
 #[pyclass(name = "producer", module = "datafusion.substrait", subclass)]
 #[derive(Debug, Clone)]
-pub(crate) struct PySubstraitProducer;
+pub struct PySubstraitProducer;
 
 #[pymethods]
 impl PySubstraitProducer {
@@ -123,7 +123,7 @@ impl PySubstraitProducer {
 
 #[pyclass(name = "consumer", module = "datafusion.substrait", subclass)]
 #[derive(Debug, Clone)]
-pub(crate) struct PySubstraitConsumer;
+pub struct PySubstraitConsumer;
 
 #[pymethods]
 impl PySubstraitConsumer {
@@ -140,7 +140,7 @@ impl PySubstraitConsumer {
     }
 }
 
-pub(crate) fn init_module(m: &PyModule) -> PyResult<()> {
+pub fn init_module(m: &PyModule) -> PyResult<()> {
     m.add_class::<PyPlan>()?;
     m.add_class::<PySubstraitConsumer>()?;
     m.add_class::<PySubstraitProducer>()?;
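
Because init_module is now public, another pyo3-based crate could in principle mount these bindings into its own extension module. A rough sketch, assuming the substrait module is reachable from the crate root and the library is linked as datafusion_python (both assumptions, not shown in this diff):

use pyo3::prelude::*;
use pyo3::types::PyModule;

#[pymodule]
fn my_extension(py: Python, m: &PyModule) -> PyResult<()> {
    // Build a child module, let the now-public init_module register the
    // substrait classes on it, then attach it to this extension.
    let substrait = PyModule::new(py, "substrait")?;
    datafusion_python::substrait::init_module(substrait)?;
    m.add_submodule(substrait)?;
    Ok(())
}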
