@@ -11,13 +11,16 @@
 
 import org.apache.spark.sql.connector.catalog.SupportsDelete;
 import org.apache.spark.sql.connector.catalog.SupportsRead;
+import org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations;
 import org.apache.spark.sql.connector.catalog.SupportsWrite;
 import org.apache.spark.sql.connector.catalog.Table;
 import org.apache.spark.sql.connector.catalog.TableCapability;
 import org.apache.spark.sql.connector.expressions.Expressions;
 import org.apache.spark.sql.connector.expressions.Transform;
 import org.apache.spark.sql.connector.read.ScanBuilder;
 import org.apache.spark.sql.connector.write.LogicalWriteInfo;
+import org.apache.spark.sql.connector.write.RowLevelOperationBuilder;
+import org.apache.spark.sql.connector.write.RowLevelOperationInfo;
 import org.apache.spark.sql.connector.write.WriteBuilder;
 import org.apache.spark.sql.sources.Filter;
 import org.apache.spark.sql.types.DataType;
@@ -38,7 +41,8 @@
  *
  * @author zinal
  */
-public class YdbTable implements Table, SupportsRead, SupportsWrite, SupportsDelete {
+public class YdbTable implements Table,
+        SupportsRead, SupportsWrite, SupportsDelete, SupportsRowLevelOperations {
 
     private static final org.slf4j.Logger LOG
             = org.slf4j.LoggerFactory.getLogger(YdbTable.class);
@@ -291,6 +295,11 @@ public boolean truncateTable() {
         throw new UnsupportedOperationException("Not supported yet.");
     }
 
+    @Override
+    public RowLevelOperationBuilder newRowLevelOperationBuilder(RowLevelOperationInfo info) {
+        return new YdbRowLevelBuilder();
+    }
+
     final YdbConnector getConnector() {
         return connector;
     }
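The `YdbRowLevelBuilder` class itself is not part of this hunk. For context, here is a minimal sketch of what such a builder could look like against Spark's DataSource V2 row-level contract (`RowLevelOperationBuilder` / `RowLevelOperation`, available since Spark 3.3). The hard-coded `Command.DELETE` and the stubbed scan/write builders are illustrative assumptions, not the connector's actual implementation:

import org.apache.spark.sql.connector.read.ScanBuilder;
import org.apache.spark.sql.connector.write.LogicalWriteInfo;
import org.apache.spark.sql.connector.write.RowLevelOperation;
import org.apache.spark.sql.connector.write.RowLevelOperationBuilder;
import org.apache.spark.sql.connector.write.WriteBuilder;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;

// Hypothetical sketch of the builder returned by YdbTable.newRowLevelOperationBuilder().
// The real YdbRowLevelBuilder in this change may differ; this only illustrates the contract.
public class YdbRowLevelBuilder implements RowLevelOperationBuilder {

    @Override
    public RowLevelOperation build() {
        return new RowLevelOperation() {
            @Override
            public Command command() {
                // DELETE is assumed for illustration; a complete builder would be
                // constructed with the RowLevelOperationInfo passed to
                // newRowLevelOperationBuilder() and return info.command() instead.
                return Command.DELETE;
            }

            @Override
            public ScanBuilder newScanBuilder(CaseInsensitiveStringMap options) {
                // Scan that locates the rows (or row groups) affected by the operation.
                throw new UnsupportedOperationException("Not supported yet.");
            }

            @Override
            public WriteBuilder newWriteBuilder(LogicalWriteInfo info) {
                // Write that replaces the matched rows with their new versions.
                throw new UnsupportedOperationException("Not supported yet.");
            }
        };
    }
}

With `SupportsRowLevelOperations` in place, Spark can plan `DELETE FROM`, `UPDATE`, and `MERGE INTO` statements through this builder, rather than being limited to the coarser filter-based path that `SupportsDelete` alone provides.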