@@ -42,16 +42,16 @@ use datafusion::datasource::file_format::parquet::ParquetFormat;
 use datafusion::datasource::file_format::{
     file_type_to_format, format_as_file_type, FileFormatFactory,
 };
+use datafusion::datasource::DefaultTableSource;
 use datafusion::{
+    datasource::provider_as_source,
     datasource::{
         file_format::{
             csv::CsvFormat, json::JsonFormat as OtherNdJsonFormat, FileFormat,
         },
         listing::{ListingOptions, ListingTable, ListingTableConfig, ListingTableUrl},
         view::ViewTable,
-        TableProvider,
     },
-    datasource::{provider_as_source, source_as_provider},
     prelude::SessionContext,
 };
 use datafusion_common::file_options::file_type::FileType;
@@ -118,18 +118,18 @@ pub trait LogicalExtensionCodec: Debug + Send + Sync {
 
     fn try_encode(&self, node: &Extension, buf: &mut Vec<u8>) -> Result<()>;
 
-    fn try_decode_table_provider(
+    fn try_decode_table_source(
         &self,
         buf: &[u8],
         table_ref: &TableReference,
         schema: SchemaRef,
         ctx: &SessionContext,
-    ) -> Result<Arc<dyn TableProvider>>;
+    ) -> Result<Arc<dyn TableSource>>;
 
-    fn try_encode_table_provider(
+    fn try_encode_table_source(
         &self,
         table_ref: &TableReference,
-        node: Arc<dyn TableProvider>,
+        node: Arc<dyn TableSource>,
         buf: &mut Vec<u8>,
     ) -> Result<()>;
 
@@ -193,20 +193,20 @@ impl LogicalExtensionCodec for DefaultLogicalExtensionCodec {
         not_impl_err!("LogicalExtensionCodec is not provided")
     }
 
-    fn try_decode_table_provider(
+    fn try_decode_table_source(
         &self,
         _buf: &[u8],
         _table_ref: &TableReference,
         _schema: SchemaRef,
         _ctx: &SessionContext,
-    ) -> Result<Arc<dyn TableProvider>> {
+    ) -> Result<Arc<dyn TableSource>> {
         not_impl_err!("LogicalExtensionCodec is not provided")
     }
 
-    fn try_encode_table_provider(
+    fn try_encode_table_source(
        &self,
        _table_ref: &TableReference,
-        _node: Arc<dyn TableProvider>,
+        _node: Arc<dyn TableSource>,
        _buf: &mut Vec<u8>,
    ) -> Result<()> {
        not_impl_err!("LogicalExtensionCodec is not provided")
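
For downstream crates that already implement the provider-based methods, the rename can be bridged with DataFusion's existing `provider_as_source` / `source_as_provider` helpers. Below is a minimal sketch of that bridging, assuming the crate's decode and encode logic stays `TableProvider`-oriented; the `to_source` / `to_provider` helper names are hypothetical:

```rust
use std::sync::Arc;

use datafusion::datasource::{provider_as_source, source_as_provider, TableProvider};
use datafusion::error::Result;
use datafusion::logical_expr::TableSource;

/// Wrap a freshly decoded provider so it can be returned from the new
/// `try_decode_table_source` signature.
fn to_source(provider: Arc<dyn TableProvider>) -> Arc<dyn TableSource> {
    provider_as_source(provider)
}

/// Recover the provider handed to `try_encode_table_source` so existing
/// provider-based serialization code can be reused unchanged.
fn to_provider(source: &Arc<dyn TableSource>) -> Result<Arc<dyn TableProvider>> {
    source_as_provider(source)
}
```

Note that `source_as_provider` returns an error when the `TableSource` is not a `DefaultTableSource` wrapper, so a custom codec may still need its own fallback for other source types.
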
@@ -440,7 +440,7 @@ impl AsLogicalPlan for LogicalPlanNode {
                 }
                 #[cfg_attr(not(feature = "avro"), allow(unused_variables))]
                 FileFormatType::Avro(..) => {
-                    #[cfg(feature = "avro")]
+                    #[cfg(feature = "avro")]
                     {
                         Arc::new(AvroFormat)
                     }
@@ -520,18 +520,15 @@ impl AsLogicalPlan for LogicalPlanNode {
                 let table_name =
                     from_table_reference(scan.table_name.as_ref(), "CustomScan")?;
 
-                let provider = extension_codec.try_decode_table_provider(
+                let source = extension_codec.try_decode_table_source(
                     &scan.custom_table_data,
                     &table_name,
                     schema,
                     ctx,
                 )?;
 
                 LogicalPlanBuilder::scan_with_filters(
-                    table_name,
-                    provider_as_source(provider),
-                    projection,
-                    filters,
+                    table_name, source, projection, filters,
                 )?
                 .build()
             }
@@ -1029,9 +1026,7 @@ impl AsLogicalPlan for LogicalPlanNode {
                 projection,
                 ..
             }) => {
-                let provider = source_as_provider(source)?;
-                let schema = provider.schema();
-                let source = provider.as_any();
+                let schema = source.schema();
 
                 let projection = match projection {
                     None => None,
@@ -1049,7 +1044,9 @@ impl AsLogicalPlan for LogicalPlanNode {
                 let filters: Vec<protobuf::LogicalExprNode> =
                     serialize_exprs(filters, extension_codec)?;
 
-                if let Some(listing_table) = source.downcast_ref::<ListingTable>() {
+                if let Ok(listing_table) =
+                    DefaultTableSource::unwrap_provider::<ListingTable>(source)
+                {
                     let any = listing_table.options().format.as_any();
                     let file_format_type = {
                         let mut maybe_some_type = None;
@@ -1158,7 +1155,9 @@ impl AsLogicalPlan for LogicalPlanNode {
                         },
                     )),
                 })
-            } else if let Some(view_table) = source.downcast_ref::<ViewTable>() {
+            } else if let Ok(view_table) =
+                DefaultTableSource::unwrap_provider::<ViewTable>(source)
+            {
                 let schema: protobuf::Schema = schema.as_ref().try_into()?;
                 Ok(LogicalPlanNode {
                     logical_plan_type: Some(LogicalPlanType::ViewScan(Box::new(
@@ -1179,7 +1178,8 @@ impl AsLogicalPlan for LogicalPlanNode {
                         },
                     ))),
                 })
-            } else if let Some(cte_work_table) = source.downcast_ref::<CteWorkTable>()
+            } else if let Ok(cte_work_table) =
+                DefaultTableSource::unwrap_provider::<CteWorkTable>(source)
             {
                 let name = cte_work_table.name().to_string();
                 let schema = cte_work_table.schema();
@@ -1197,7 +1197,11 @@ impl AsLogicalPlan for LogicalPlanNode {
                 let schema: protobuf::Schema = schema.as_ref().try_into()?;
                 let mut bytes = vec![];
                 extension_codec
-                    .try_encode_table_provider(table_name, provider, &mut bytes)
+                    .try_encode_table_source(
+                        table_name,
+                        Arc::clone(source),
+                        &mut bytes,
+                    )
                     .map_err(|e| context!("Error serializing custom table", e))?;
                 let scan = CustomScan(CustomTableScanNode {
                     table_name: Some(table_name.clone().into()),
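
For reference, here is a hedged sketch of what unwrapping a concrete provider from a `TableSource` looks like when done by hand with long-standing public APIs (`TableSource::as_any` and the public `table_provider` field on `DefaultTableSource`). The `as_listing_table` helper name is hypothetical, and this only illustrates the pattern the `DefaultTableSource::unwrap_provider` calls above rely on; it is not that helper's actual implementation:

```rust
use std::sync::Arc;

use datafusion::datasource::listing::ListingTable;
use datafusion::datasource::DefaultTableSource;
use datafusion::logical_expr::TableSource;

/// Manually recover a `ListingTable` from a `TableSource`, if that is what it wraps.
/// The planner wraps providers in `DefaultTableSource`, so two downcasts are needed:
/// source -> DefaultTableSource -> concrete provider.
fn as_listing_table(source: &Arc<dyn TableSource>) -> Option<&ListingTable> {
    source
        .as_any()
        .downcast_ref::<DefaultTableSource>()?
        .table_provider
        .as_any()
        .downcast_ref::<ListingTable>()
}
```
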