[#1087] feat(spark)(gluten): Support dynamic allocation for Gluten Uniffle (#1649)

### What changes were proposed in this pull request?
Support dynamic allocation for Uniffle and Gluten when using Spark 3.2, Spark 3.3, or Spark 3.4.

### Why are the changes needed?
The ShuffleManager class name used in `https://github.com/apache/incubator-gluten/tree/main/gluten-uniffle` differs from `RssShuffleManager`, so the dynamic-allocation patches must also recognize `UniffleShuffleManager`.
Fix: https://github.com/apache/incubator-uniffle/issues/1087

### Does this PR introduce _any_ user-facing change?
No.

### How was this patch tested?
Manual integration test.
diff --git a/patch/spark/spark-3.2.1_dynamic_allocation_support.patch b/patch/spark/spark-3.2.1_dynamic_allocation_support.patch
index 81046de..815f7bd 100644
--- a/patch/spark/spark-3.2.1_dynamic_allocation_support.patch
+++ b/patch/spark/spark-3.2.1_dynamic_allocation_support.patch
@@ -34,14 +34,17 @@
 index 5f37a1abb19..af4bee1e1bb 100644
 --- a/core/src/main/scala/org/apache/spark/SparkConf.scala
 +++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
-@@ -580,6 +580,10 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
+@@ -580,6 +580,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
      Utils.redact(this, getAll).sorted.map { case (k, v) => k + "=" + v }.mkString("\n")
    }
  
 +  /**
 +   * Return true if remote shuffle service is enabled.
 +   */
-+  def isRssEnable(): Boolean = get("spark.shuffle.manager", "sort").contains("RssShuffleManager")
++  def isRssEnable(): Boolean = {
++    val shuffleMgr = get("spark.shuffle.manager", "sort")
++    shuffleMgr.contains("RssShuffleManager") || shuffleMgr.contains("UniffleShuffleManager")
++  }
  }
  
  private[spark] object SparkConf extends Logging {
diff --git a/patch/spark/spark-3.3.1_dynamic_allocation_support.patch b/patch/spark/spark-3.3.1_dynamic_allocation_support.patch
index 194e873..517555d 100644
--- a/patch/spark/spark-3.3.1_dynamic_allocation_support.patch
+++ b/patch/spark/spark-3.3.1_dynamic_allocation_support.patch
@@ -34,14 +34,17 @@
 index 5f37a1abb19..af4bee1e1bb 100644
 --- a/core/src/main/scala/org/apache/spark/SparkConf.scala
 +++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
-@@ -580,6 +580,10 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
+@@ -580,6 +580,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
      Utils.redact(this, getAll).sorted.map { case (k, v) => k + "=" + v }.mkString("\n")
    }
- 
+
 +  /**
 +   * Return true if remote shuffle service is enabled.
 +   */
-+  def isRssEnable(): Boolean = get("spark.shuffle.manager", "sort").contains("RssShuffleManager")
++  def isRssEnable(): Boolean = {
++    val shuffleMgr = get("spark.shuffle.manager", "sort")
++    shuffleMgr.contains("RssShuffleManager") || shuffleMgr.contains("UniffleShuffleManager")
++  }
  }
  
  private[spark] object SparkConf extends Logging {
diff --git a/patch/spark/spark-3.4.1_dynamic_allocation_support.patch b/patch/spark/spark-3.4.1_dynamic_allocation_support.patch
index 2f79bba..0ea6512 100644
--- a/patch/spark/spark-3.4.1_dynamic_allocation_support.patch
+++ b/patch/spark/spark-3.4.1_dynamic_allocation_support.patch
@@ -34,14 +34,17 @@
 index 08344d8e547..ff3bab6710d 100644
 --- a/core/src/main/scala/org/apache/spark/SparkConf.scala
 +++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
-@@ -580,6 +580,10 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
+@@ -580,6 +580,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria
      Utils.redact(this, getAll).sorted.map { case (k, v) => k + "=" + v }.mkString("\n")
    }
  
 +  /**
 +   * Return true if remote shuffle service is enabled.
 +   */
-+  def isRssEnable(): Boolean = get("spark.shuffle.manager", "sort").contains("RssShuffleManager")
++  def isRssEnable(): Boolean = {
++    val shuffleMgr = get("spark.shuffle.manager", "sort")
++    shuffleMgr.contains("RssShuffleManager") || shuffleMgr.contains("UniffleShuffleManager")
++  }
  }
  
  private[spark] object SparkConf extends Logging {