[SPARK-56609][SQL][TESTS] Remove redundant SparkFunSuite mixin from QueryTest/PlanTest subclasses
### What changes were proposed in this pull request?
Since `QueryTest` extends `SparkFunSuite with QueryTestBase with PlanTest`, and `PlanTest` extends `SparkFunSuite with PlanTestBase`, mixing `SparkFunSuite` in again alongside `QueryTest` or `PlanTest` in the `extends` list is a no-op.
Linearization:
```
QueryTest -> SparkFunSuite
          \> QueryTestBase
          \> PlanTest -> SparkFunSuite
                      \> PlanTestBase

PlanTest -> SparkFunSuite
         \> PlanTestBase
```
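For illustration, here is a minimal, self-contained sketch (with hypothetical `FunSuiteLike`/`PlanTestLike` stand-ins, not the actual Spark classes) showing that repeating the base class in the `extends` clause does not change the linearization:

```scala
// Hypothetical stand-ins, only to show the principle behind this cleanup.
abstract class FunSuiteLike                  // plays the role of SparkFunSuite
trait PlanTestLike extends FunSuiteLike      // plays the role of PlanTest

// Before this PR: the base class is mixed in explicitly, even though
// PlanTestLike already brings it in.
class RedundantSuite extends FunSuiteLike with PlanTestLike

// After this PR: same linearization, shorter extends clause.
class MinimalSuite extends PlanTestLike

// Both classes linearize to: Suite -> PlanTestLike -> FunSuiteLike -> AnyRef -> Any,
// so dropping the explicit FunSuiteLike mixin is behavior-preserving.
```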
Cleanup across 7 test files in `sql/catalyst`, `sql/core`, and `sql/hive`:
- `extends SparkFunSuite with QueryTest` -> `extends QueryTest` (3 files)
  - `MapStatusEndToEndSuite`
  - `ExecutorSideSQLConfSuite`
  - `ParquetCommitterSuite` (also mixes in `LocalSparkContext`, which is retained)
- `extends SparkFunSuite with PlanTest` -> `extends PlanTest` (3 files)
  - `ConstraintPropagationSuite`
  - `ExtractPredicatesWithinOutputSetSuite`
  - `FiltersSuite`
- `extends SparkFunSuite with SQLHelper with AdaptiveSparkPlanHelper with PlanTest` -> `extends PlanTest with SQLHelper with AdaptiveSparkPlanHelper` (1 file)
  - `SparkSessionExtensionSuite`
Also removed the now-unused `SparkFunSuite` imports.
### Why are the changes needed?
Follow-up to [SPARK-56591](https://issues.apache.org/jira/browse/SPARK-56591), which removed the analogous `QueryTest with SharedSparkSession` redundancy. This keeps the `extends` clauses minimal and removes imports that no longer serve any purpose.
### Does this PR introduce _any_ user-facing change?
No. Test-only change.
### How was this patch tested?
`dev/scalastyle` passes. Existing tests in the affected suites exercise the unchanged test logic.
### Was this patch authored or co-authored using generative AI tooling?
Generated-by: Claude Code (model: claude-opus-4-7)
Closes #55529 from zhengruifeng/remove-redundant-sparkfunsuite-mixin.
Authored-by: Ruifeng Zheng <ruifengz@apache.org>
Signed-off-by: Ruifeng Zheng <ruifengz@apache.org>
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExtractPredicatesWithinOutputSetSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExtractPredicatesWithinOutputSetSuite.scala
index 10f9a88..1a5dcd9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExtractPredicatesWithinOutputSetSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExtractPredicatesWithinOutputSetSuite.scala
@@ -17,12 +17,11 @@
package org.apache.spark.sql.catalyst.expressions
-import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.types.BooleanType
-class ExtractPredicatesWithinOutputSetSuite extends SparkFunSuite with PlanTest {
+class ExtractPredicatesWithinOutputSetSuite extends PlanTest {
private val a = AttributeReference("A", BooleanType)(exprId = ExprId(1))
private val b = AttributeReference("B", BooleanType)(exprId = ExprId(2))
private val c = AttributeReference("C", BooleanType)(exprId = ExprId(3))
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/ConstraintPropagationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/ConstraintPropagationSuite.scala
index fb5ab31..5f60842 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/ConstraintPropagationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/ConstraintPropagationSuite.scala
@@ -19,7 +19,6 @@
import java.util.TimeZone
-import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
@@ -28,7 +27,7 @@
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{DataType, DoubleType, IntegerType, LongType, StringType}
-class ConstraintPropagationSuite extends SparkFunSuite with PlanTest {
+class ConstraintPropagationSuite extends PlanTest {
private def resolveColumn(tr: LocalRelation, columnName: String): Expression =
resolveColumn(tr.analyze, columnName)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/MapStatusEndToEndSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/MapStatusEndToEndSuite.scala
index b9e3356..0708b2c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/MapStatusEndToEndSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/MapStatusEndToEndSuite.scala
@@ -17,11 +17,11 @@
package org.apache.spark.sql
-import org.apache.spark.{MapOutputTrackerMaster, SparkFunSuite}
+import org.apache.spark.MapOutputTrackerMaster
import org.apache.spark.sql.classic.SparkSession
import org.apache.spark.sql.internal.SQLConf
-class MapStatusEndToEndSuite extends SparkFunSuite with QueryTest {
+class MapStatusEndToEndSuite extends QueryTest {
override def spark: SparkSession = SparkSession.builder()
.master("local")
.config(SQLConf.LEAF_NODE_DEFAULT_PARALLELISM.key, value = 5)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
index 5a25e52..bfcf583 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
@@ -21,7 +21,7 @@
import scala.collection.mutable.ListBuffer
import scala.concurrent.Future
-import org.apache.spark.{MapOutputStatistics, SparkFunSuite, TaskContext}
+import org.apache.spark.{MapOutputStatistics, TaskContext}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
@@ -58,8 +58,7 @@
/**
* Test cases for the [[SparkSessionExtensions]].
*/
-class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper with AdaptiveSparkPlanHelper
- with PlanTest {
+class SparkSessionExtensionSuite extends PlanTest with SQLHelper with AdaptiveSparkPlanHelper {
private def create(
builder: SparkSessionExtensionsProvider): Seq[SparkSessionExtensionsProvider] = Seq(builder)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCommitterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCommitterSuite.scala
index 9cc0ec4..d03209c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCommitterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetCommitterSuite.scala
@@ -23,7 +23,7 @@
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
import org.apache.parquet.hadoop.{ParquetOutputCommitter, ParquetOutputFormat}
-import org.apache.spark.{LocalSparkContext, SparkFunSuite}
+import org.apache.spark.LocalSparkContext
import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.classic.SparkSession
import org.apache.spark.sql.internal.SQLConf
@@ -33,8 +33,7 @@
* Test logic related to choice of output committers.
*/
@ExtendedSQLTest
-class ParquetCommitterSuite extends SparkFunSuite with QueryTest
- with LocalSparkContext {
+class ParquetCommitterSuite extends QueryTest with LocalSparkContext {
private val PARQUET_COMMITTER = classOf[ParquetOutputCommitter].getCanonicalName
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/ExecutorSideSQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/ExecutorSideSQLConfSuite.scala
index 4f0f377..9eb774d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/ExecutorSideSQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/ExecutorSideSQLConfSuite.scala
@@ -21,7 +21,7 @@
import org.scalatest.Assertions._
-import org.apache.spark.{SparkFunSuite, SparkNoSuchElementException, TaskContext}
+import org.apache.spark.{SparkNoSuchElementException, TaskContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.catalyst.InternalRow
@@ -35,7 +35,7 @@
import org.apache.spark.tags.ExtendedSQLTest
@ExtendedSQLTest
-class ExecutorSideSQLConfSuite extends SparkFunSuite with QueryTest {
+class ExecutorSideSQLConfSuite extends QueryTest {
import testImplicits._
protected var spark: SparkSession = null
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/FiltersSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/FiltersSuite.scala
index 27b0276..f1a402e7b 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/FiltersSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/FiltersSuite.scala
@@ -23,7 +23,6 @@
import org.apache.hadoop.hive.metastore.api.FieldSchema
import org.apache.hadoop.hive.serde.serdeConstants
-import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -35,7 +34,7 @@
* A set of tests for the filter conversion logic used when pushing partition pruning into the
* metastore
*/
-class FiltersSuite extends SparkFunSuite with PlanTest {
+class FiltersSuite extends PlanTest {
private val shim = new Shim_v2_0
private val testTable = new org.apache.hadoop.hive.ql.metadata.Table("default", "test")