2 files changed: +5 −2 lines

@@ -322,9 +322,11 @@ if [ "$MAKE_TGZ" == "true" ]; then
   rm -rf "$TARDIR"
   cp -r "$DISTDIR" "$TARDIR"
   # Set the Spark Connect system variable in these scripts to enable it by default.
+  awk 'NR==1{print; print "export SPARK_CONNECT_BEELINE=${SPARK_CONNECT_BEELINE:-1}"; next} {print}' "$TARDIR/bin/beeline" > tmp && cat tmp > "$TARDIR/bin/beeline"
   awk 'NR==1{print; print "export SPARK_CONNECT_MODE=${SPARK_CONNECT_MODE:-1}"; next} {print}' "$TARDIR/bin/pyspark" > tmp && cat tmp > "$TARDIR/bin/pyspark"
   awk 'NR==1{print; print "export SPARK_CONNECT_MODE=${SPARK_CONNECT_MODE:-1}"; next} {print}' "$TARDIR/bin/spark-shell" > tmp && cat tmp > "$TARDIR/bin/spark-shell"
   awk 'NR==1{print; print "export SPARK_CONNECT_MODE=${SPARK_CONNECT_MODE:-1}"; next} {print}' "$TARDIR/bin/spark-submit" > tmp && cat tmp > "$TARDIR/bin/spark-submit"
+  awk 'NR==1{print; print "if [%SPARK_CONNECT_BEELINE%] == [] set SPARK_CONNECT_BEELINE=1"; next} {print}' "$TARDIR/bin/beeline.cmd" > tmp && cat tmp > "$TARDIR/bin/beeline.cmd"
   awk 'NR==1{print; print "if [%SPARK_CONNECT_MODE%] == [] set SPARK_CONNECT_MODE=1"; next} {print}' "$TARDIR/bin/pyspark2.cmd" > tmp && cat tmp > "$TARDIR/bin/pyspark2.cmd"
   awk 'NR==1{print; print "if [%SPARK_CONNECT_MODE%] == [] set SPARK_CONNECT_MODE=1"; next} {print}' "$TARDIR/bin/spark-shell2.cmd" > tmp && cat tmp > "$TARDIR/bin/spark-shell2.cmd"
   awk 'NR==1{print; print "if [%SPARK_CONNECT_MODE%] == [] set SPARK_CONNECT_MODE=1"; next} {print}' "$TARDIR/bin/spark-submit2.cmd" > tmp && cat tmp > "$TARDIR/bin/spark-submit2.cmd"
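For illustration only, a minimal sketch of the $TARDIR/bin/beeline header after the awk rewrite above; the shebang and everything below the inserted line are assumptions about the original script, only the export line comes from this change:

  #!/usr/bin/env bash
  # Inserted by the packaging step: enable Spark Connect for Beeline by default.
  export SPARK_CONNECT_BEELINE=${SPARK_CONNECT_BEELINE:-1}
  # ... original contents of bin/beeline continue unchanged ...

The ${SPARK_CONNECT_BEELINE:-1} default means a value already set in the caller's environment takes precedence over the packaged default.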
launcher/src/main/java/org/apache/spark/launcher:

@@ -197,7 +197,8 @@ List<String> buildClassPath(String appClassPath) throws IOException {
       if (isRemote && "1".equals(getenv("SPARK_SCALA_SHELL")) && project.equals("sql/core")) {
         continue;
       }
-      if (isBeeLine && project.equals("sql/core")) {
+      if (isBeeLine && "1".equals(getenv("SPARK_CONNECT_BEELINE")) &&
+          project.equals("sql/core")) {
         continue;
       }
       // SPARK-49534: The assumption here is that if `spark-hive_xxx.jar` is not in the
@@ -246,7 +247,7 @@ List<String> buildClassPath(String appClassPath) throws IOException {
       }
     }

-    if (isRemote || isBeeLine) {
+    if (isRemote || (isBeeLine && "1".equals(getenv("SPARK_CONNECT_BEELINE")))) {
       for (File f : new File(jarsDir).listFiles()) {
         // Exclude Spark Classic SQL and Spark Connect server jars
         // if we're in Spark Connect Shell. Also exclude Spark SQL API and
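As a hedged usage sketch (the JDBC URL and port are illustrative assumptions, not part of this change): since the packaged bin/beeline now exports SPARK_CONNECT_BEELINE=1 by default, the launcher drops the Spark Classic SQL jars from Beeline's classpath; exporting any other value before invoking Beeline makes the new getenv check fail and restores the previous classpath behaviour:

  # Packaged default: SPARK_CONNECT_BEELINE=1, so the Connect classpath filtering applies.
  ./bin/beeline -u jdbc:hive2://localhost:10000

  # Opt out and keep the pre-change (Spark Classic) classpath:
  SPARK_CONNECT_BEELINE=0 ./bin/beeline -u jdbc:hive2://localhost:10000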