@@ -16,7 +16,7 @@
  */

 package org.apache.spark.sql.execution.datasources.v2.jdbc

-import java.sql.SQLException
+import java.sql.{Connection, SQLException}
 import java.util

 import scala.collection.mutable
@@ -93,19 +93,21 @@ class JDBCTableCatalog extends TableCatalog
   }

   override def tableExists(ident: Identifier): Boolean = {
+    JdbcUtils.withConnection(options)(tableExists(ident, _))
+  }
+
+  private def tableExists(ident: Identifier, conn: Connection): Boolean = {
     checkNamespace(ident.namespace())
     val writeOptions = new JdbcOptionsInWrite(
       options.parameters + (JDBCOptions.JDBC_TABLE_NAME -> getTableName(ident)))
-    JdbcUtils.withConnection(options) {
-      JdbcUtils.classifyException(
-        condition = "FAILED_JDBC.TABLE_EXISTS",
-        messageParameters = Map(
-          "url" -> options.getRedactUrl(),
-          "tableName" -> toSQLId(ident)),
-        dialect,
-        description = s"Failed table existence check: $ident",
-        isRuntime = false)(JdbcUtils.tableExists(_, writeOptions))
-    }
+    JdbcUtils.classifyException(
+      condition = "FAILED_JDBC.TABLE_EXISTS",
+      messageParameters = Map(
+        "url" -> options.getRedactUrl(),
+        "tableName" -> toSQLId(ident)),
+      dialect,
+      description = s"Failed table existence check: $ident",
+      isRuntime = false)(JdbcUtils.tableExists(conn, writeOptions))
   }

   override def dropTable(ident: Identifier): Boolean = {
@@ -138,28 +140,30 @@ class JDBCTableCatalog extends TableCatalog
   }

   override def loadTable(ident: Identifier): Table = {
-    if (!tableExists(ident)) {
-      throw QueryCompilationErrors.noSuchTableError(ident)
-    }
+    JdbcUtils.withConnection(options) { conn =>
+      if (!tableExists(ident, conn)) {
+        throw QueryCompilationErrors.noSuchTableError(ident)
+      }

-    val optionsWithTableName = new JDBCOptions(
-      options.parameters + (JDBCOptions.JDBC_TABLE_NAME -> getTableName(ident)))
-    JdbcUtils.classifyException(
-      condition = "FAILED_JDBC.LOAD_TABLE",
-      messageParameters = Map(
-        "url" -> options.getRedactUrl(),
-        "tableName" -> toSQLId(ident)),
-      dialect,
-      description = s"Failed to load table: $ident",
-      isRuntime = false
-    ) {
-      val remoteSchemaFetchMetric = JdbcUtils
-        .createSchemaFetchMetric(SparkSession.active.sparkContext)
-      val schema = SQLMetrics.withTimingNs(remoteSchemaFetchMetric) {
-        JDBCRDD.resolveTable(optionsWithTableName)
+      val optionsWithTableName = new JDBCOptions(
+        options.parameters + (JDBCOptions.JDBC_TABLE_NAME -> getTableName(ident)))
+      JdbcUtils.classifyException(
+        condition = "FAILED_JDBC.LOAD_TABLE",
+        messageParameters = Map(
+          "url" -> options.getRedactUrl(),
+          "tableName" -> toSQLId(ident)),
+        dialect,
+        description = s"Failed to load table: $ident",
+        isRuntime = false
+      ) {
+        val remoteSchemaFetchMetric = JdbcUtils
+          .createSchemaFetchMetric(SparkSession.active.sparkContext)
+        val schema = SQLMetrics.withTimingNs(remoteSchemaFetchMetric) {
+          JDBCRDD.resolveTable(optionsWithTableName, conn)
+        }
+        JDBCTable(ident, schema, optionsWithTableName,
+          Map(JDBCRelation.schemaFetchKey -> remoteSchemaFetchMetric))
       }
-      JDBCTable(ident, schema, optionsWithTableName,
-        Map(JDBCRelation.schemaFetchKey -> remoteSchemaFetchMetric))
     }
   }
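What this diff changes: loadTable previously called the public tableExists(ident), which opened and closed a JDBC connection of its own, and then JDBCRDD.resolveTable(optionsWithTableName) opened a second connection to fetch the schema. Extracting a private tableExists(ident, conn) overload and wrapping the body of loadTable in JdbcUtils.withConnection lets the existence check and the schema resolution share a single connection.

Below is a minimal, self-contained sketch of the connection "loan pattern" that JdbcUtils.withConnection exposes in the diff above: open one java.sql.Connection, lend it to a function, and close it even if the function throws. The object name, withConnection helper, H2 URL, and PEOPLE table here are illustrative assumptions for this sketch, not Spark's implementation, and an H2 driver is assumed to be on the classpath.

import java.sql.{Connection, DriverManager}

object ConnectionLoanSketch {
  // Loan pattern: open a connection, pass it to `f`, always close it afterwards.
  def withConnection[T](url: String)(f: Connection => T): T = {
    val conn = DriverManager.getConnection(url)
    try f(conn) finally conn.close()
  }

  def main(args: Array[String]): Unit = {
    // One connection serves both steps, mirroring how loadTable now threads
    // `conn` into tableExists and JDBCRDD.resolveTable.
    withConnection("jdbc:h2:mem:demo") { conn =>
      val exists = conn.getMetaData
        .getTables(null, null, "PEOPLE", null)
        .next()                                  // step 1: existence check
      println(s"table exists: $exists")
      // step 2 would resolve the table's schema over the same `conn`
    }
  }
}

The design point is that the public tableExists(ident) still opens its own connection for external callers, while loadTable passes the connection it already owns into the private overload, removing the extra open/close round trip per table load.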