Skip to content

Commit fa90e98

Browse files
committed
Remove debug logging from parameter substitution integration
The parameter substitution integration has been verified to work correctly: named parameters are properly detected and substituted, parameter values are correctly converted to SQL literals, and the integration flow is SparkSession -> ThreadLocal -> SparkSqlParser -> SubstituteParamsParser. Example of working usage: spark.sql("create or replace view v(c1) AS VALUES (:parm)", Map("parm" -> "hello")) successfully substitutes :parm with 'hello' before the main parsing step.
1 parent 60d2c2e commit fa90e98

File tree

1 file changed

+2
-17
lines changed

1 file changed

+2
-17
lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala

Lines changed: 2 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -53,17 +53,13 @@ class SparkSqlParser extends AbstractSqlParser {
5353
private val substitutor = new VariableSubstitution()
5454
private val paramSubstitutor = new org.apache.spark.sql.catalyst.parser.SubstituteParamsParser()
5555

56-
protected override def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
56+
protected override def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
5757
// Step 1: Check if we have a parameterized query context and substitute parameters
5858
val paramSubstituted =
5959
org.apache.spark.sql.catalyst.parser.ThreadLocalParameterContext.get() match {
6060
case Some(context) =>
61-
println(s"DEBUG: Found parameter context for command: $command")
62-
val result = substituteParametersIfNeeded(command, context)
63-
println(s"DEBUG: After substitution: $result")
64-
result
61+
substituteParametersIfNeeded(command, context)
6562
case None =>
66-
println(s"DEBUG: No parameter context for command: $command")
6763
command // No parameters to substitute
6864
}
6965

@@ -78,40 +74,29 @@ class SparkSqlParser extends AbstractSqlParser {
7874
command: String,
7975
context: org.apache.spark.sql.catalyst.parser.ParameterContext): String = {
8076

81-
println(s"DEBUG: substituteParametersIfNeeded called with context: $context")
82-
8377
// Detect if the SQL has parameter markers
8478
val (hasPositional, hasNamed) = paramSubstitutor.detectParameters(
8579
command, org.apache.spark.sql.catalyst.parser.SubstitutionRule.Statement)
8680

87-
println(s"DEBUG: Parameter detection - hasPositional: $hasPositional, hasNamed: $hasNamed")
88-
8981
if (!hasPositional && !hasNamed) {
90-
println("DEBUG: No parameters detected, returning original command")
9182
return command // No parameters to substitute
9283
}
9384

9485
// Apply parameter substitution based on context type
9586
context match {
9687
case org.apache.spark.sql.catalyst.parser.NamedParameterContext(params) =>
97-
println(s"DEBUG: Using named parameters: $params")
9888
val paramValues = params.map { case (name, expr) =>
9989
(name, expressionToSqlValue(expr))
10090
}
101-
println(s"DEBUG: Converted param values: $paramValues")
10291
val (substituted, _) = paramSubstitutor.substitute(
10392
command, org.apache.spark.sql.catalyst.parser.SubstitutionRule.Statement, paramValues)
104-
println(s"DEBUG: Named substitution result: $substituted")
10593
substituted
10694

10795
case org.apache.spark.sql.catalyst.parser.PositionalParameterContext(params) =>
108-
println(s"DEBUG: Using positional parameters: $params")
10996
val paramValues = params.map(expressionToSqlValue).toList
110-
println(s"DEBUG: Converted param values: $paramValues")
11197
val (substituted, _) = paramSubstitutor.substitute(
11298
command, org.apache.spark.sql.catalyst.parser.SubstitutionRule.Statement,
11399
positionalParams = paramValues)
114-
println(s"DEBUG: Positional substitution result: $substituted")
115100
substituted
116101
}
117102
}

0 commit comments

Comments (0)