/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import com.typesafe.tools.mima.core._

/**
 * Additional excludes for checking Spark's binary compatibility.
 *
 * This acts as an official audit of cases where we exclude binary-compatibility checks for
 * specific classes or members. Please use the narrowest possible exclude here. MiMa will usually
 * tell you which exclude to use, e.g.:
 *
 * ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.rdd.RDD.take")
 *
 * It is also possible to exclude Spark classes and packages. This should be used sparingly:
 *
 * MimaBuild.excludeSparkClass("graphx.util.collection.GraphXPrimitiveKeyOpenHashMap")
 *
 * For a new Spark version, please update MimaBuild.scala to reflect the previous version.
 */
object MimaExcludes {
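  // A minimal sketch of how these filters are typically consumed: MimaBuild.scala feeds them
  // into sbt-mima-plugin's standard setting. The version string below is illustrative:
  //
  //   import com.typesafe.tools.mima.plugin.MimaKeys.mimaBinaryIssueFilters
  //
  //   mimaBinaryIssueFilters ++= MimaExcludes.excludes("4.0.0")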

  // Exclude rules for checking 4.0.x against 3.5.0
  lazy val v40excludes = defaultExcludes ++ Seq(
    // [SPARK-44863][UI] Add a button to download thread dump as a txt in Spark UI
    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.status.api.v1.ThreadStackTrace.*"),
    ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.status.api.v1.ThreadStackTrace$"),
    // [SPARK-44705][PYTHON] Make PythonRunner single-threaded
    ProblemFilters.exclude[IncompatibleMethTypeProblem]("org.apache.spark.api.python.BasePythonRunner#ReaderIterator.this"),
    // [SPARK-44198][CORE] Support propagation of the log level to the executors
    ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages$SparkAppConfig$"),
    // [SPARK-45427][CORE] Add RPC SSL settings to SSLOptions and SparkTransportConf
    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.network.netty.SparkTransportConf.fromSparkConf"),
    // [SPARK-45136][CONNECT] Enhance ClosureCleaner with Ammonite support
    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.util.MethodIdentifier$")
  )
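
  // When adding an entry above, follow the existing pattern: a comment naming the JIRA ticket,
  // then the narrowest filter MiMa suggests in its failure output. A hypothetical template
  // (the ticket number, class, and method are placeholders):
  //
  //   // [SPARK-XXXXX][CORE] One-line summary of the change
  //   ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.SomeClass.someMethod"),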

  // Default exclude rules
  lazy val defaultExcludes = Seq(
    // Spark Internals
    ProblemFilters.exclude[Problem]("org.apache.spark.rpc.*"),
    ProblemFilters.exclude[Problem]("org.spark-project.jetty.*"),
    ProblemFilters.exclude[Problem]("org.spark_project.jetty.*"),
    ProblemFilters.exclude[Problem]("org.sparkproject.jetty.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.internal.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.unused.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.unsafe.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.memory.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.util.collection.unsafe.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.sql.catalyst.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.sql.execution.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.sql.internal.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.sql.errors.*"),
    // DSv2 catalog and expression APIs are still unstable. We should re-enable these
    // checks once the APIs stabilize.
    ProblemFilters.exclude[Problem]("org.apache.spark.sql.connector.catalog.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.sql.connector.expressions.*"),
    // Avro source implementation is internal.
    ProblemFilters.exclude[Problem]("org.apache.spark.sql.v2.avro.*"),

    // SPARK-43169: shaded and generated protobuf code
    ProblemFilters.exclude[Problem]("org.sparkproject.spark_core.protobuf.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.status.protobuf.StoreTypes*"),

    // SPARK-44104: shaded protobuf code and APIs with relocated parameter types
    ProblemFilters.exclude[Problem]("org.sparkproject.spark_protobuf.protobuf.*"),
    ProblemFilters.exclude[Problem]("org.apache.spark.sql.protobuf.utils.SchemaConverters.*"),

    // SPARK-43299: Convert StreamingQueryException in Scala Client
    ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.streaming.StreamingQueryException"),

    // Suppress MissingClassProblem reports for JPMML and DMG PMML classes shaded into
    // the org.sparkproject namespace; every other problem passes through.
    (problem: Problem) => problem match {
      case MissingClassProblem(cls) => !cls.fullName.startsWith("org.sparkproject.jpmml") &&
          !cls.fullName.startsWith("org.sparkproject.dmg.pmml")
      case _ => true
    }
  )
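
  // A ProblemFilter is just a Problem => Boolean: returning false suppresses the report,
  // returning true lets it through. A minimal sketch of a custom predicate, using an
  // illustrative shaded-Guava prefix that is not an actual Spark exclude:
  //
  //   val ignoreShadedGuava: Problem => Boolean = {
  //     case MissingClassProblem(cls) => !cls.fullName.startsWith("org.sparkproject.guava")
  //     case _ => true
  //   }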

  def excludes(version: String): Seq[Problem => Boolean] = version match {
    case v if v.startsWith("4.0") => v40excludes
    case _ => Seq()
  }
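
  // For the next release line, the convention would be to define a new excludes list and
  // add a branch here, e.g. (illustrative; no such list exists yet):
  //
  //   case v if v.startsWith("4.1") => v41excludes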
}