[MINOR] Polish Maven plugins for root pom (#4512)

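Drop the `-DskipRat` escape hatch from all CI invocations: the `rat` profile
is no longer activated by default, so the Apache RAT license check now runs
only when the profile is requested explicitly. Derive `maven.compiler.source`
and `maven.compiler.target` from `java.version` instead of hard-coding the
target, and move `git-commit-id-plugin` (with its `useNativeGit` setting
inlined) from the root pom into zeppelin-interpreter, the only module that
consumes the generated git.properties file.

To run the license check on demand, something like the following should work
(a sketch; it assumes the `rat` profile binds the RAT check to a phase within
the default lifecycle):

    ./mvnw verify -Prat
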
diff --git a/.github/workflows/core.yml b/.github/workflows/core.yml
index 66eb1fb..d44ee02 100644
--- a/.github/workflows/core.yml
+++ b/.github/workflows/core.yml
@@ -59,9 +59,9 @@
           restore-keys: |
             ${{ runner.os }}-zeppelin-
       - name: install application with some interpreter
-        run: ./mvnw install -Pbuild-distr -DskipRat -DskipTests -pl zeppelin-server,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown,angular,shell -am -Phelium-dev -Pexamples -P${{ matrix.hadoop }} -B
+        run: ./mvnw install -Pbuild-distr -DskipTests -pl zeppelin-server,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown,angular,shell -am -Phelium-dev -Pexamples -P${{ matrix.hadoop }} -B
       - name: install and test plugins
-        run: ./mvnw package -DskipRat -pl zeppelin-plugins -amd -B
+        run: ./mvnw package -pl zeppelin-plugins -amd -B
       - name: Setup conda environment with python 3.7 and R
         uses: conda-incubator/setup-miniconda@v2
         with:
@@ -79,7 +79,7 @@
           conda list
           conda info
       - name: run tests with ${{ matrix.hadoop }} # skip spark tests because they run in other CI jobs
-        run: ./mvnw verify -Pusing-packaged-distr -DskipRat -pl zeppelin-server,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown,angular,shell -am -Phelium-dev -Pexamples -P${{ matrix.hadoop }} -Dtests.to.exclude=**/org/apache/zeppelin/spark/* -DfailIfNoTests=false
+        run: ./mvnw verify -Pusing-packaged-distr -pl zeppelin-server,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown,angular,shell -am -Phelium-dev -Pexamples -P${{ matrix.hadoop }} -Dtests.to.exclude=**/org/apache/zeppelin/spark/* -DfailIfNoTests=false
 
   # test interpreter modules except spark, flink, python, rlang, jupyter
   interpreter-test-non-core:
@@ -108,7 +108,7 @@
           restore-keys: |
             ${{ runner.os }}-zeppelin-
       - name: install environment
-        run: ./mvnw install -DskipTests -DskipRat -am -pl .,zeppelin-interpreter,zeppelin-interpreter-shaded,${INTERPRETERS} -Pscala-2.11 -B
+        run: ./mvnw install -DskipTests -am -pl .,zeppelin-interpreter,zeppelin-interpreter-shaded,${INTERPRETERS} -Pscala-2.11 -B
       - name: Setup conda environment with python 3.7 and R
         uses: conda-incubator/setup-miniconda@v2
         with:
@@ -121,7 +121,7 @@
           auto-activate-base: false
           use-mamba: true
       - name: verify interpreter
-        run: ./mvnw verify -DskipRat -pl ${INTERPRETERS} -Pscala-2.11 -B
+        run: ./mvnw verify -pl ${INTERPRETERS} -Pscala-2.11 -B
 
   # test interpreter modules for jupyter, python, rlang
   interpreter-test-jupyter-python-rlang:
@@ -167,10 +167,10 @@
           R -e "IRkernel::installspec()"
       - name: install environment
         run: |
-          ./mvnw install -DskipTests -DskipRat -pl python,rlang,zeppelin-jupyter-interpreter -am -Phadoop2 -B
+          ./mvnw install -DskipTests -pl python,rlang,zeppelin-jupyter-interpreter -am -Phadoop2 -B
       - name: run tests with ${{ matrix.python }}
         run: |
-          ./mvnw test -DskipRat -pl python,rlang,zeppelin-jupyter-interpreter -DfailIfNoTests=false -B
+          ./mvnw test -pl python,rlang,zeppelin-jupyter-interpreter -DfailIfNoTests=false -B
 
   # zeppelin integration test except Spark & Flink
   zeppelin-integration-test:
@@ -201,8 +201,8 @@
             ${{ runner.os }}-zeppelin-
       - name: install environment
         run: |
-          ./mvnw install -DskipTests -DskipRat -Phadoop2 -Pintegration -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown,flink-cmd,flink/flink-scala-2.11,flink/flink-scala-2.12,jdbc,shell -am -Pflink-114
-          ./mvnw package -DskipRat -pl zeppelin-plugins -amd -DskipTests -B
+          ./mvnw install -DskipTests -Phadoop2 -Pintegration -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown,flink-cmd,flink/flink-scala-2.11,flink/flink-scala-2.12,jdbc,shell -am -Pflink-114
+          ./mvnw package -pl zeppelin-plugins -amd -DskipTests -B
       - name: Setup conda environment with python 3.7 and R
         uses: conda-incubator/setup-miniconda@v2
         with:
@@ -218,7 +218,7 @@
         run: |
           R -e "IRkernel::installspec()"
       - name: run tests
-        run: ./mvnw test -DskipRat -pl zeppelin-interpreter-integration -Phadoop2 -Pintegration -DfailIfNoTests=false -Dtest=ZeppelinClientIntegrationTest,ZeppelinClientWithAuthIntegrationTest,ZSessionIntegrationTest,ShellIntegrationTest,JdbcIntegrationTest
+        run: ./mvnw test -pl zeppelin-interpreter-integration -Phadoop2 -Pintegration -DfailIfNoTests=false -Dtest=ZeppelinClientIntegrationTest,ZeppelinClientWithAuthIntegrationTest,ZSessionIntegrationTest,ShellIntegrationTest,JdbcIntegrationTest
       - name: Print zeppelin logs
         if: always()
         run: if [ -d "logs" ]; then cat logs/*; fi
@@ -253,12 +253,12 @@
       - name: install environment for flink before 1.15 (exclusive)
         if: matrix.flink != '115'
         run: |
-          ./mvnw install -DskipTests -DskipRat -am -pl flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -B
+          ./mvnw install -DskipTests -am -pl flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -B
           ./mvnw clean package -pl zeppelin-plugins -amd -DskipTests -B
       - name: install environment for flink after 1.15 (inclusive)
         if: matrix.flink == '115'
         run: |
-          ./mvnw install -DskipTests -DskipRat -am -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -B
+          ./mvnw install -DskipTests -am -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -B
           ./mvnw clean package -pl zeppelin-plugins -amd -DskipTests -B
       - name: Setup conda environment with python 3.7 and R
         uses: conda-incubator/setup-miniconda@v2
@@ -273,10 +273,10 @@
           use-mamba: true
       - name: run tests for flink before 1.15 (exclusive)
         if: matrix.flink != '115'
-        run: ./mvnw test -DskipRat -pl flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -DfailIfNoTests=false -B -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }}
+        run: ./mvnw test -pl flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -DfailIfNoTests=false -B -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }}
       - name: run tests for flink after 1.15 (inclusive)
         if: matrix.flink == '115'
-        run: ./mvnw test -DskipRat -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -DfailIfNoTests=false -B -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }}
+        run: ./mvnw test -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -DfailIfNoTests=false -B -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }}
       - name: Print zeppelin logs
         if: always()
         run: if [ -d "logs" ]; then cat logs/*; fi
@@ -311,7 +311,7 @@
             ${{ runner.os }}-zeppelin-
       - name: install environment
         run: |
-          ./mvnw install -DskipTests -DskipRat -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown -am -Phadoop2 -Pintegration -B
+          ./mvnw install -DskipTests -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown -am -Phadoop2 -Pintegration -B
           ./mvnw clean package -pl zeppelin-plugins -amd -DskipTests -B
       - name: Setup conda environment with python 3.7 and R
         uses: conda-incubator/setup-miniconda@v2
@@ -328,7 +328,7 @@
         run: |
           R -e "IRkernel::installspec()"
       - name: run tests on hadoop${{ matrix.hadoop }}
-        run: ./mvnw test -DskipRat -pl zeppelin-interpreter-integration -Phadoop${{ matrix.hadoop }} -Pintegration -B -Dtest=SparkSubmitIntegrationTest,ZeppelinSparkClusterTest24,SparkIntegrationTest24,ZeppelinSparkClusterTest30,SparkIntegrationTest30,ZeppelinSparkClusterTest31,SparkIntegrationTest31,ZeppelinSparkClusterTest32,SparkIntegrationTest32,ZeppelinSparkClusterTest33,SparkIntegrationTest33 -DfailIfNoTests=false
+        run: ./mvnw test -pl zeppelin-interpreter-integration -Phadoop${{ matrix.hadoop }} -Pintegration -B -Dtest=SparkSubmitIntegrationTest,ZeppelinSparkClusterTest24,SparkIntegrationTest24,ZeppelinSparkClusterTest30,SparkIntegrationTest30,ZeppelinSparkClusterTest31,SparkIntegrationTest31,ZeppelinSparkClusterTest32,SparkIntegrationTest32,ZeppelinSparkClusterTest33,SparkIntegrationTest33 -DfailIfNoTests=false
 
   # test on spark for each spark version & scala version
   spark-test:
@@ -359,7 +359,7 @@
           restore-keys: |
             ${{ runner.os }}-zeppelin-
       - name: install environment
-        run: ./mvnw install -DskipTests -DskipRat -pl spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13 -am -Phadoop2 -B
+        run: ./mvnw install -DskipTests -pl spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13 -am -Phadoop2 -B
       - name: Setup conda environment with python ${{ matrix.python }} and R
         uses: conda-incubator/setup-miniconda@v2
         with:
@@ -376,36 +376,36 @@
           R -e "IRkernel::installspec()"
       - name: run spark-2.4 tests with scala-2.11 and python-${{ matrix.python }}
         if: matrix.python == '3.7'  # Spark 2.4 doesn't support python 3.8
-        run: ./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-2.4 -Pspark-scala-2.11 -DfailIfNoTests=false -B
+        run: ./mvnw test -pl spark-submit,spark/interpreter -Pspark-2.4 -Pspark-scala-2.11 -DfailIfNoTests=false -B
       - name: run spark-2.4 tests with scala-2.12 and python-${{ matrix.python }}
         if: matrix.python == '3.7'  # Spark 2.4 doesn't support python 3.8
         run: |
           rm -rf spark/interpreter/metastore_db
-          ./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-2.4 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
+          ./mvnw test -pl spark-submit,spark/interpreter -Pspark-2.4 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
       - name: run spark-3.0 tests with scala-2.12 and python-${{ matrix.python }}
         run: |
           rm -rf spark/interpreter/metastore_db
-          ./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.0 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
+          ./mvnw test -pl spark-submit,spark/interpreter -Pspark-3.0 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
       - name: run spark-3.1 tests with scala-2.12 and python-${{ matrix.python }}
         run: |
           rm -rf spark/interpreter/metastore_db
-          ./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.1 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
+          ./mvnw test -pl spark-submit,spark/interpreter -Pspark-3.1 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
       - name: run spark-3.2 tests with scala-2.12 and python-${{ matrix.python }}
         run: |
           rm -rf spark/interpreter/metastore_db
-          ./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.2 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
+          ./mvnw test -pl spark-submit,spark/interpreter -Pspark-3.2 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
       - name: run spark-3.2 tests with scala-2.13 and python-${{ matrix.python }}
         run: |
           rm -rf spark/interpreter/metastore_db
-          ./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.2 -Pspark-scala-2.13 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
+          ./mvnw test -pl spark-submit,spark/interpreter -Pspark-3.2 -Pspark-scala-2.13 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
       - name: run spark-3.3 tests with scala-2.12 and python-${{ matrix.python }}
         run: |
           rm -rf spark/interpreter/metastore_db
-          ./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.3 -Pspark-scala-2.12 -Phadoop3 -Pintegration -B -DfailIfNoTests=false
+          ./mvnw test -pl spark-submit,spark/interpreter -Pspark-3.3 -Pspark-scala-2.12 -Phadoop3 -Pintegration -B -DfailIfNoTests=false
       - name: run spark-3.3 tests with scala-2.13 and python-${{ matrix.python }}
         run: |
           rm -rf spark/interpreter/metastore_db
-          ./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.3 -Pspark-scala-2.13 -Phadoop3 -Pintegration -B -DfailIfNoTests=false
+          ./mvnw test -pl spark-submit,spark/interpreter -Pspark-3.3 -Pspark-scala-2.13 -Phadoop3 -Pintegration -B -DfailIfNoTests=false
 
   livy-0-7-with-spark-2-2-0-under-python3:
     runs-on: ubuntu-20.04
@@ -432,7 +432,7 @@
             ${{ runner.os }}-zeppelin-
       - name: install environment
         run: |
-          ./mvnw install -DskipTests -DskipRat -pl livy -am  -B
+          ./mvnw install -DskipTests -pl livy -am -B
           ./testing/downloadSpark.sh "2.2.0" "2.6"
           ./testing/downloadLivy.sh "0.7.1-incubating"
       - name: Setup conda environment with python 3.7 and R
@@ -450,7 +450,7 @@
         run: |
           R -e "IRkernel::installspec()"
       - name: run tests
-        run: ./mvnw verify -DskipRat -pl livy -am  -B
+        run: ./mvnw verify -pl livy -am -B
 
   default-build:
     runs-on: ubuntu-20.04
diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml
index 28750fc..5679efb 100644
--- a/.github/workflows/frontend.yml
+++ b/.github/workflows/frontend.yml
@@ -49,9 +49,9 @@
           restore-keys: |
             ${{ runner.os }}-zeppelin-
       - name: Install application
-        run: ./mvnw clean install -DskipTests -DskipRat -am -pl zeppelin-web -Pscala-2.11 -Pspark-scala-2.11 -Pspark-2.4 -Phadoop2 -Pweb-dist -B
+        run: ./mvnw clean install -DskipTests -am -pl zeppelin-web -Pscala-2.11 -Pspark-scala-2.11 -Pspark-2.4 -Phadoop2 -Pweb-dist -B
       - name: Run headless test
-        run: xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" ./mvnw verify -DskipRat -pl zeppelin-web -Pscala-2.11 -Pspark-scala-2.11 -Pspark-2.4 -Phadoop2 -Pweb-dist -Pweb-e2e -B
+        run: xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" ./mvnw verify -pl zeppelin-web -Pscala-2.11 -Pspark-scala-2.11 -Pspark-2.4 -Phadoop2 -Pweb-dist -Pweb-e2e -B
       - name: Print zeppelin logs
         if: always()
         run: if [ -d "logs" ]; then cat logs/*; fi
@@ -80,7 +80,7 @@
           restore-keys: |
             ${{ runner.os }}-zeppelin-
       - name: Run headless test
-        run: xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" ./mvnw package -DskipRat -pl zeppelin-web-angular -Pweb-angular -B
+        run: xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" ./mvnw package -pl zeppelin-web-angular -Pweb-angular -B
 
   test-selenium-with-spark-module-for-spark-2-4:
     runs-on: ubuntu-20.04
@@ -124,10 +124,10 @@
           R -e "IRkernel::installspec()"
       - name: Install Environment
         run: |
-          ./mvnw clean install -DskipTests -DskipRat -am -pl zeppelin-integration -Pintegration -Pspark-scala-2.11 -Pspark-2.4 -Phadoop2 -Pweb-dist -B
+          ./mvnw clean install -DskipTests -am -pl zeppelin-integration -Pintegration -Pspark-scala-2.11 -Pspark-2.4 -Phadoop2 -Pweb-dist -B
       - name: run tests
         run: |
-          source ./testing/downloadSpark.sh "2.4.7" "2.7" && echo "SPARK_HOME: ${SPARK_HOME}" && xvfb-run --auto-servernum --server-args="-screen 0 1600x1024x16" ./mvnw verify -DskipRat -DfailIfNoTests=false -pl zeppelin-integration -Pintegration -Pspark-scala-2.11 -Pspark-2.4 -Phadoop2 -Pweb-dist -Pusing-source-tree -B
+          source ./testing/downloadSpark.sh "2.4.7" "2.7" && echo "SPARK_HOME: ${SPARK_HOME}" && xvfb-run --auto-servernum --server-args="-screen 0 1600x1024x16" ./mvnw verify -DfailIfNoTests=false -pl zeppelin-integration -Pintegration -Pspark-scala-2.11 -Pspark-2.4 -Phadoop2 -Pweb-dist -Pusing-source-tree -B
       - name: Print zeppelin logs
         if: always()
         run: if [ -d "logs" ]; then cat logs/*; fi
diff --git a/.github/workflows/quick.yml b/.github/workflows/quick.yml
index 98bd9ff..826c83e 100644
--- a/.github/workflows/quick.yml
+++ b/.github/workflows/quick.yml
@@ -54,4 +54,4 @@
           distribution: 'temurin'
           java-version: 8
       - name: Run Maven Validate
-        run: ./mvnw validate -DskipRat -P${{ matrix.hadoop }} -Pinclude-hadoop -B
+        run: ./mvnw validate -P${{ matrix.hadoop }} -Pinclude-hadoop -B
diff --git a/livy/README.md b/livy/README.md
index 7bb0241..54311d9 100644
--- a/livy/README.md
+++ b/livy/README.md
@@ -11,5 +11,5 @@
 #!/usr/bin/env bash
 export LIVY_HOME=<path_of_livy_0.2.0>
 export SPARK_HOME=<path_of_spark-1.5.2>
-./mvnw clean verify -pl livy -DfailIfNoTests=false -DskipRat
+./mvnw clean verify -pl livy -DfailIfNoTests=false
 ```
diff --git a/pom.xml b/pom.xml
index aa56ad9..5e7bb0b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -99,8 +99,10 @@
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 
     <!-- language versions -->
-    <maven.compiler.target>1.8</maven.compiler.target>
     <java.version>1.8</java.version>
+    <!-- These two properties can be replaced by a single `maven.compiler.release` property once the build moves to JDK 11 -->
+    <maven.compiler.source>${java.version}</maven.compiler.source>
+    <maven.compiler.target>${java.version}</maven.compiler.target>
     <scala.version>${scala.2.11.version}</scala.version>
     <scala.binary.version>2.11</scala.binary.version>
     <scala.2.11.version>2.11.12</scala.2.11.version>
@@ -200,8 +202,6 @@
 
     <testcontainers.version>1.15.1</testcontainers.version>
 
-    <plugin.gitcommitid.useNativeGit>false</plugin.gitcommitid.useNativeGit>
-
     <MaxMetaspace>512m</MaxMetaspace>
 
     <!-- to be able to exclude some tests using command line -->
@@ -1478,27 +1478,6 @@
         <artifactId>maven-deploy-plugin</artifactId>
       </plugin>
 
-      <plugin>
-        <groupId>pl.project13.maven</groupId>
-        <artifactId>git-commit-id-plugin</artifactId>
-        <executions>
-          <execution>
-            <goals>
-              <goal>revision</goal>
-            </goals>
-          </execution>
-        </executions>
-        <configuration>
-          <skipPoms>false</skipPoms>
-          <dotGitDirectory>${project.basedir}/.git</dotGitDirectory>
-          <useNativeGit>${plugin.gitcommitid.useNativeGit}</useNativeGit>
-          <generateGitPropertiesFile>true</generateGitPropertiesFile>
-          <generateGitPropertiesFilename>${project.build.outputDirectory}/git.properties</generateGitPropertiesFilename>
-          <failOnNoGitDirectory>false</failOnNoGitDirectory>
-          <dateFormat>yyyy-MM-dd HH:mm:ss</dateFormat>
-        </configuration>
-      </plugin>
-
     <!--TODO(alex): make part of the build and reconcile conflicts
     <plugin>
       <groupId>com.ning.maven.plugins</groupId>
@@ -1945,9 +1924,6 @@
 
     <profile>
       <id>rat</id>
-      <activation>
-        <property><name>!skipRat</name></property>
-      </activation>
       <build>
         <plugins>
         <plugin>
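
As the new comment on the compiler properties notes, once the build requires
JDK 11+ the `source`/`target` pair above could collapse into a single
property, roughly (hypothetical, not part of this change):

    <maven.compiler.release>11</maven.compiler.release>

Unlike `source`/`target`, `--release` also makes javac reject APIs that do
not exist in the target JDK.
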
diff --git a/zeppelin-interpreter/pom.xml b/zeppelin-interpreter/pom.xml
index fe91306..c5ce536 100644
--- a/zeppelin-interpreter/pom.xml
+++ b/zeppelin-interpreter/pom.xml
@@ -221,6 +221,29 @@
   </dependencies>
 
   <build>
+    <plugins>
+      <plugin>
+        <groupId>pl.project13.maven</groupId>
+        <artifactId>git-commit-id-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>revision</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <skipPoms>false</skipPoms>
+          <dotGitDirectory>${project.basedir}/.git</dotGitDirectory>
+          <useNativeGit>false</useNativeGit>
+          <generateGitPropertiesFile>true</generateGitPropertiesFile>
+          <generateGitPropertiesFilename>${project.build.outputDirectory}/git.properties</generateGitPropertiesFilename>
+          <failOnNoGitDirectory>false</failOnNoGitDirectory>
+          <dateFormat>yyyy-MM-dd HH:mm:ss</dateFormat>
+        </configuration>
+      </plugin>
+    </plugins>
+
     <resources>
       <resource>
         <directory>src/main/resources</directory>
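
A quick sanity check that the relocated plugin still emits git.properties
(a sketch; it assumes a git checkout and the default `target/classes` output
directory that `${project.build.outputDirectory}` resolves to above):

    ./mvnw package -pl zeppelin-interpreter -am -DskipTests -B
    cat zeppelin-interpreter/target/classes/git.properties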