@@ -42,16 +42,16 @@ use datafusion::datasource::file_format::parquet::ParquetFormat;
 use datafusion::datasource::file_format::{
     file_type_to_format, format_as_file_type, FileFormatFactory,
 };
+use datafusion::datasource::DefaultTableSource;
 use datafusion::{
+    datasource::provider_as_source,
     datasource::{
         file_format::{
             csv::CsvFormat, json::JsonFormat as OtherNdJsonFormat, FileFormat,
         },
         listing::{ListingOptions, ListingTable, ListingTableConfig, ListingTableUrl},
         view::ViewTable,
-        TableProvider,
     },
-    datasource::{provider_as_source, source_as_provider},
     prelude::SessionContext,
 };
 use datafusion_common::file_options::file_type::FileType;
@@ -117,18 +117,18 @@ pub trait LogicalExtensionCodec: Debug + Send + Sync {
 
     fn try_encode(&self, node: &Extension, buf: &mut Vec<u8>) -> Result<()>;
 
-    fn try_decode_table_provider(
+    fn try_decode_table_source(
         &self,
         buf: &[u8],
         table_ref: &TableReference,
         schema: SchemaRef,
         ctx: &SessionContext,
-    ) -> Result<Arc<dyn TableProvider>>;
+    ) -> Result<Arc<dyn TableSource>>;
 
-    fn try_encode_table_provider(
+    fn try_encode_table_source(
         &self,
         table_ref: &TableReference,
-        node: Arc<dyn TableProvider>,
+        node: Arc<dyn TableSource>,
         buf: &mut Vec<u8>,
     ) -> Result<()>;
 
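
Illustrative sketch (not from this diff): with this signature change, downstream implementations of `LogicalExtensionCodec` must hand back an `Arc<dyn TableSource>` instead of an `Arc<dyn TableProvider>`. A minimal decode-side sketch, assuming the codec still rebuilds a concrete provider internally (an empty `MemTable` stands in for the real table):

use std::sync::Arc;

use datafusion::arrow::datatypes::SchemaRef;
use datafusion::datasource::{provider_as_source, MemTable};
use datafusion::error::Result;
use datafusion::logical_expr::TableSource;

// Illustration only: rebuild whatever concrete TableProvider the codec
// serialized (an empty MemTable stands in here), then expose it to the
// planner as the TableSource the renamed method now returns.
fn decode_as_source(_buf: &[u8], schema: SchemaRef) -> Result<Arc<dyn TableSource>> {
    let provider = MemTable::try_new(schema, vec![])?;
    Ok(provider_as_source(Arc::new(provider)))
}
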
@@ -192,20 +192,20 @@ impl LogicalExtensionCodec for DefaultLogicalExtensionCodec {
         not_impl_err!("LogicalExtensionCodec is not provided")
     }
 
-    fn try_decode_table_provider(
+    fn try_decode_table_source(
         &self,
         _buf: &[u8],
         _table_ref: &TableReference,
         _schema: SchemaRef,
         _ctx: &SessionContext,
-    ) -> Result<Arc<dyn TableProvider>> {
+    ) -> Result<Arc<dyn TableSource>> {
         not_impl_err!("LogicalExtensionCodec is not provided")
     }
 
-    fn try_encode_table_provider(
+    fn try_encode_table_source(
         &self,
         _table_ref: &TableReference,
-        _node: Arc<dyn TableProvider>,
+        _node: Arc<dyn TableSource>,
         _buf: &mut Vec<u8>,
     ) -> Result<()> {
         not_impl_err!("LogicalExtensionCodec is not provided")
@@ -439,7 +439,7 @@ impl AsLogicalPlan for LogicalPlanNode {
                 }
                 #[cfg_attr(not(feature = "avro"), allow(unused_variables))]
                 FileFormatType::Avro(..) => {
-                    #[cfg(feature = "avro")]
+                    #[cfg(feature = "avro")]
                     {
                         Arc::new(AvroFormat)
                     }
@@ -519,18 +519,15 @@ impl AsLogicalPlan for LogicalPlanNode {
                 let table_name =
                     from_table_reference(scan.table_name.as_ref(), "CustomScan")?;
 
-                let provider = extension_codec.try_decode_table_provider(
+                let source = extension_codec.try_decode_table_source(
                     &scan.custom_table_data,
                     &table_name,
                     schema,
                     ctx,
                 )?;
 
                 LogicalPlanBuilder::scan_with_filters(
-                    table_name,
-                    provider_as_source(provider),
-                    projection,
-                    filters,
+                    table_name, source, projection, filters,
                 )?
                 .build()
             }
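
Illustrative sketch (not from this diff): because the codec now returns a `TableSource` directly, the decoded value goes into `LogicalPlanBuilder::scan_with_filters` as-is, with no `provider_as_source()` round trip. A minimal, self-contained sketch of that call shape (the function name is illustrative):

use std::sync::Arc;

use datafusion::error::Result;
use datafusion::logical_expr::{LogicalPlan, LogicalPlanBuilder, TableSource};

// Sketch of the decode path above: the Arc<dyn TableSource> returned by the
// extension codec feeds the scan builder directly.
fn scan_from_decoded(name: &str, source: Arc<dyn TableSource>) -> Result<LogicalPlan> {
    LogicalPlanBuilder::scan_with_filters(name, source, None, vec![])?.build()
}
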
@@ -1001,9 +998,7 @@ impl AsLogicalPlan for LogicalPlanNode {
                 projection,
                 ..
             }) => {
-                let provider = source_as_provider(source)?;
-                let schema = provider.schema();
-                let source = provider.as_any();
+                let schema = source.schema();
 
                 let projection = match projection {
                     None => None,
@@ -1021,7 +1016,9 @@ impl AsLogicalPlan for LogicalPlanNode {
                 let filters: Vec<protobuf::LogicalExprNode> =
                     serialize_exprs(filters, extension_codec)?;
 
-                if let Some(listing_table) = source.downcast_ref::<ListingTable>() {
+                if let Ok(listing_table) =
+                    DefaultTableSource::unwrap_provider::<ListingTable>(source)
+                {
                     let any = listing_table.options().format.as_any();
                     let file_format_type = {
                         let mut maybe_some_type = None;
@@ -1130,7 +1127,9 @@ impl AsLogicalPlan for LogicalPlanNode {
                             },
                         )),
                     })
-                } else if let Some(view_table) = source.downcast_ref::<ViewTable>() {
+                } else if let Ok(view_table) =
+                    DefaultTableSource::unwrap_provider::<ViewTable>(source)
+                {
                     let schema: protobuf::Schema = schema.as_ref().try_into()?;
                     Ok(LogicalPlanNode {
                         logical_plan_type: Some(LogicalPlanType::ViewScan(Box::new(
@@ -1151,7 +1150,8 @@ impl AsLogicalPlan for LogicalPlanNode {
                             },
                         ))),
                     })
-                } else if let Some(cte_work_table) = source.downcast_ref::<CteWorkTable>()
+                } else if let Ok(cte_work_table) =
+                    DefaultTableSource::unwrap_provider::<CteWorkTable>(source)
                 {
                     let name = cte_work_table.name().to_string();
                     let schema = cte_work_table.schema();
@@ -1169,7 +1169,11 @@ impl AsLogicalPlan for LogicalPlanNode {
                     let schema: protobuf::Schema = schema.as_ref().try_into()?;
                     let mut bytes = vec![];
                     extension_codec
-                        .try_encode_table_provider(table_name, provider, &mut bytes)
+                        .try_encode_table_source(
+                            table_name,
+                            Arc::clone(source),
+                            &mut bytes,
+                        )
                         .map_err(|e| context!("Error serializing custom table", e))?;
                     let scan = CustomScan(CustomTableScanNode {
                         table_name: Some(table_name.clone().into()),
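
Illustrative sketch (not from this diff): on the encode side, the fallback branch now hands the codec the `Arc<dyn TableSource>` itself. A codec that still thinks in terms of providers can recover one, since sources built by `provider_as_source()` are `DefaultTableSource` wrappers whose `table_provider` field is public. A hedged sketch, with the actual byte encoding omitted and `MemTable` used only as an example provider:

use std::sync::Arc;

use datafusion::datasource::{DefaultTableSource, MemTable};
use datafusion::error::{DataFusionError, Result};
use datafusion::logical_expr::TableSource;

// Illustration only: recover the concrete provider from the TableSource passed
// to try_encode_table_source. Sources built via provider_as_source() are
// DefaultTableSource wrappers, so a plain downcast works; the real byte
// encoding of the table is omitted.
fn encode_source(node: Arc<dyn TableSource>, buf: &mut Vec<u8>) -> Result<()> {
    let default_source = node
        .as_any()
        .downcast_ref::<DefaultTableSource>()
        .ok_or_else(|| DataFusionError::Internal("not a DefaultTableSource".into()))?;
    if let Some(_mem) = default_source.table_provider.as_any().downcast_ref::<MemTable>() {
        // Placeholder for provider-specific serialization.
        buf.extend_from_slice(b"memtable");
    }
    Ok(())
}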