[SPARK-53789][SQL][CONNECT] Canonicalize error condition CANNOT_MODIFY_STATIC_CONFIG
### What changes were proposed in this pull request?
Introduce a new error condition `CANNOT_MODIFY_STATIC_CONFIG` and:
1. Migrate the legacy error condition `_LEGACY_ERROR_TEMP_3050` to `CANNOT_MODIFY_STATIC_CONFIG`.
2. Migrate `cannotModifyValueOfStaticConfigError` from `CANNOT_MODIFY_CONFIG` (whose message directs users to the DDL migration guide) to `CANNOT_MODIFY_STATIC_CONFIG`, because the DDL migration guide does not help in this case: a static config cannot be modified at runtime at all. See the sketch after this list.
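A minimal sketch of the caller-side pattern this introduces (mirroring the `SQLConfHelper` change in the diff below; `trySetConf` is a hypothetical wrapper, and `setConfString` stands in for whatever the real caller does next):

```scala
import org.apache.spark.sql.errors.CompilationErrors
import org.apache.spark.sql.internal.SQLConf

// Reject static configs up front with the new canonical error condition,
// then fall through to the normal set path for modifiable configs.
def trySetConf(conf: SQLConf, k: String, v: String): Unit = {
  if (SQLConf.isStaticConfigKey(k)) {
    throw CompilationErrors.cannotModifyValueOfStaticConfigError(k)
  }
  conf.setConfString(k, v)
}
```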
### Why are the changes needed?
A more consistent error message when users attempt to modify a static Spark config.
### Does this PR introduce _any_ user-facing change?
Yes. Attempts to modify a static config now fail with `CANNOT_MODIFY_STATIC_CONFIG` ("Cannot modify the value of the static Spark config: <key>.") instead of `CANNOT_MODIFY_CONFIG` (which carried a `docroot` parameter) or the legacy `_LEGACY_ERROR_TEMP_3050` ("Cannot modify the value of a static config: <k>"). See the sketch below.
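For illustration, a hedged spark-shell sketch of the new behavior (assumes Spark 4.x's `SparkThrowable.getCondition` accessor; the config key comes from the updated `SQLConfSuite` below):

```scala
import org.apache.spark.sql.AnalysisException

try {
  spark.sql("SET spark.sql.globalTempDatabase=db10")
} catch {
  case e: AnalysisException =>
    // Previously CANNOT_MODIFY_CONFIG with a docroot pointer; now:
    assert(e.getCondition == "CANNOT_MODIFY_STATIC_CONFIG")
}
```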
### How was this patch tested?
Pass GHA, including the updated tests in `SQLConfSuite`, `HiveThriftServer2Suites`, and `SQLQuerySuite`.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #52506 from pan3793/SPARK-53789.
Authored-by: Cheng Pan <chengpan@apache.org>
Signed-off-by: yangjie01 <yangjie01@baidu.com>
diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 5f1d3d1..52d9a0b 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -440,6 +440,12 @@
],
"sqlState" : "46110"
},
+ "CANNOT_MODIFY_STATIC_CONFIG" : {
+ "message" : [
+ "Cannot modify the value of the static Spark config: <key>."
+ ],
+ "sqlState" : "46110"
+ },
"CANNOT_PARSE_DECIMAL" : {
"message" : [
"Cannot parse decimal. Please ensure that the input is a valid number with optional decimal point or comma separators."
@@ -9185,11 +9191,6 @@
"Failed to get block <blockId>, which is not a shuffle block"
]
},
- "_LEGACY_ERROR_TEMP_3050" : {
- "message" : [
- "Cannot modify the value of a static config: <k>"
- ]
- },
"_LEGACY_ERROR_TEMP_3052" : {
"message" : [
"Unexpected resolved action: <other>"
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
index 617cab4..6a275b9 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
@@ -131,6 +131,18 @@
errorClass = "SPECIFY_CLUSTER_BY_WITH_BUCKETING_IS_NOT_ALLOWED",
messageParameters = Map.empty)
}
+
+ def cannotModifyValueOfStaticConfigError(key: String): Throwable = {
+ new AnalysisException(
+ errorClass = "CANNOT_MODIFY_STATIC_CONFIG",
+ messageParameters = Map("key" -> toSQLConf(key)))
+ }
+
+ def cannotModifyValueOfSparkConfigError(key: String, docroot: String): Throwable = {
+ new AnalysisException(
+ errorClass = "CANNOT_MODIFY_CONFIG",
+ messageParameters = Map("key" -> toSQLConf(key), "docroot" -> docroot))
+ }
}
private[sql] object CompilationErrors extends CompilationErrors
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala
index bd0455d..c5c68e9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SQLConfHelper.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.catalyst
-import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.errors.CompilationErrors
import org.apache.spark.sql.internal.SQLConf
/**
@@ -47,9 +47,7 @@
}
keys.lazyZip(values).foreach { (k, v) =>
if (SQLConf.isStaticConfigKey(k)) {
- throw new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_3050",
- messageParameters = Map("k" -> k))
+ throw CompilationErrors.cannotModifyValueOfStaticConfigError(k)
}
conf.setConfString(k, v)
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 091273a..7d79c5d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3475,19 +3475,6 @@
"config" -> SQLConf.DATAFRAME_PIVOT_MAX_VALUES.key))
}
- def cannotModifyValueOfStaticConfigError(key: String): Throwable = {
- new AnalysisException(
- errorClass = "CANNOT_MODIFY_CONFIG",
- messageParameters = Map("key" -> toSQLConf(key), "docroot" -> SPARK_DOC_ROOT)
- )
- }
-
- def cannotModifyValueOfSparkConfigError(key: String, docroot: String): Throwable = {
- new AnalysisException(
- errorClass = "CANNOT_MODIFY_CONFIG",
- messageParameters = Map("key" -> toSQLConf(key), "docroot" -> docroot))
- }
-
def commandExecutionInRunnerUnsupportedError(runner: String): Throwable = {
new AnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_1327",
diff --git a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/SQLHelper.scala b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/SQLHelper.scala
index f23221f..b8d1062 100644
--- a/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/SQLHelper.scala
+++ b/sql/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/test/SQLHelper.scala
@@ -21,9 +21,9 @@
import org.scalatest.Assertions.fail
-import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.connect.{DataFrame, SparkSession, SQLImplicits}
+import org.apache.spark.sql.errors.CompilationErrors
import org.apache.spark.util.{SparkErrorUtils, SparkFileUtils}
trait SQLHelper {
@@ -59,11 +59,8 @@
if (spark.conf.isModifiable(k)) {
spark.conf.set(k, v)
} else {
- throw new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_3050",
- messageParameters = Map("k" -> k))
+ throw CompilationErrors.cannotModifyValueOfStaticConfigError(k)
}
-
}
try f
finally {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index 74a3932..8297747 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -236,8 +236,8 @@
// static sql configs
checkError(
exception = intercept[AnalysisException](sql(s"RESET ${StaticSQLConf.WAREHOUSE_PATH.key}")),
- condition = "CANNOT_MODIFY_CONFIG",
- parameters = Map("key" -> "\"spark.sql.warehouse.dir\"", "docroot" -> SPARK_DOC_ROOT))
+ condition = "CANNOT_MODIFY_STATIC_CONFIG",
+ parameters = Map("key" -> "\"spark.sql.warehouse.dir\""))
}
@@ -348,13 +348,13 @@
test("cannot set/unset static SQL conf") {
checkError(
exception = intercept[AnalysisException](sql(s"SET ${GLOBAL_TEMP_DATABASE.key}=10")),
- condition = "CANNOT_MODIFY_CONFIG",
- parameters = Map("key" -> "\"spark.sql.globalTempDatabase\"", "docroot" -> SPARK_DOC_ROOT)
+ condition = "CANNOT_MODIFY_STATIC_CONFIG",
+ parameters = Map("key" -> "\"spark.sql.globalTempDatabase\"")
)
checkError(
exception = intercept[AnalysisException](spark.conf.unset(GLOBAL_TEMP_DATABASE.key)),
- condition = "CANNOT_MODIFY_CONFIG",
- parameters = Map("key" -> "\"spark.sql.globalTempDatabase\"", "docroot" -> SPARK_DOC_ROOT)
+ condition = "CANNOT_MODIFY_STATIC_CONFIG",
+ parameters = Map("key" -> "\"spark.sql.globalTempDatabase\"")
)
}
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index 4400848..9db2bea 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -1063,7 +1063,7 @@
statement.executeQuery("SET spark.sql.hive.thriftServer.singleSession=false")
}.getMessage
assert(e.contains(
- "CANNOT_MODIFY_CONFIG"))
+ "CANNOT_MODIFY_STATIC_CONFIG"))
}
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index ae53691..e6c0f0c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -25,7 +25,7 @@
import org.apache.hadoop.fs.{FileSystem, Path}
-import org.apache.spark.{SPARK_DOC_ROOT, SparkException, TestUtils}
+import org.apache.spark.{SparkException, TestUtils}
import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent}
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
@@ -2464,9 +2464,8 @@
"spark.sql.hive.metastore.barrierPrefixes").foreach { key =>
checkError(
exception = intercept[AnalysisException](sql(s"set $key=abc")),
- condition = "CANNOT_MODIFY_CONFIG",
- parameters = Map(
- "key" -> toSQLConf(key), "docroot" -> SPARK_DOC_ROOT)
+ condition = "CANNOT_MODIFY_STATIC_CONFIG",
+ parameters = Map("key" -> toSQLConf(key))
)
}
}