[CARBONDATA-3962]Remove unwanted empty fact directory in case of flat_folder table

Why is this PR needed?
In case of a flat_folder table, data files are written directly at the table path,
so the Fact directory is not required; it is left behind as an unwanted empty directory.

What changes were proposed in this PR?
Remove empty fact dirs

This closes #3904
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java b/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
index d6dc89e..52939eb 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/SegmentFileStore.java
@@ -466,14 +466,22 @@
   /**
    * Move the loaded data from source folder to destination folder.
    */
-  private static void moveFromTempFolder(String source, String dest) {
+  private static void moveFromTempFolder(String source, String dest) throws IOException {
 
     CarbonFile oldFolder = FileFactory.getCarbonFile(source);
     CarbonFile[] oldFiles = oldFolder.listFiles();
     for (CarbonFile file : oldFiles) {
       file.renameForce(dest + CarbonCommonConstants.FILE_SEPARATOR + file.getName());
     }
+    // delete the segment temp folder first; we don't want to delete the Fact directory yet,
+    // since it may still be needed in multiple-load or update/delete scenarios
     oldFolder.delete();
+    CarbonFile partDir = FileFactory.getCarbonFile(CarbonTablePath.getPartitionDir(dest));
+    // once the last segment is processed (in case of update/delete), delete the main Fact directory
+    if (partDir.listFiles(false).size() == 0) {
+      CarbonFile oldFactDirPath = FileFactory.getCarbonFile(CarbonTablePath.getFactDir(dest));
+      oldFactDirPath.delete();
+    }
   }
 
   /**
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/flatfolder/FlatFolderTableLoadingTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/flatfolder/FlatFolderTableLoadingTestCase.scala
index 8a6cb47..7147138 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/flatfolder/FlatFolderTableLoadingTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/flatfolder/FlatFolderTableLoadingTestCase.scala
@@ -44,10 +44,12 @@
 
   }
 
-  def validateDataFiles(tableUniqueName: String, segmentId: String): Unit = {
+  def validateDataFiles(tableUniqueName: String): Unit = {
     val carbonTable = CarbonMetadata.getInstance().getCarbonTable(tableUniqueName)
     val files = FileFactory.getCarbonFile(carbonTable.getTablePath).listFiles()
+    val factPath = FileFactory.getCarbonFile(CarbonTablePath.getFactDir(carbonTable.getTablePath))
     assert(files.exists(_.getName.endsWith(CarbonTablePath.CARBON_DATA_EXT)))
+    assert(!factPath.exists())
   }
 
   test("data loading for flat folder with global sort") {
@@ -61,7 +63,7 @@
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE flatfolder_gs OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
 
-    validateDataFiles("default_flatfolder_gs", "0")
+    validateDataFiles("default_flatfolder_gs")
 
     checkAnswer(sql("select empno, empname, designation, doj, workgroupcategory, workgroupcategoryname, deptno, deptname, projectcode, projectjoindate, projectenddate, attendance, utilization, salary from flatfolder_gs order by empno"),
       sql("select  empno, empname, designation, doj, workgroupcategory, workgroupcategoryname, deptno, deptname, projectcode, projectjoindate, projectenddate, attendance, utilization, salary from originTable order by empno"))
@@ -79,7 +81,7 @@
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE flatfolder OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
 
-    validateDataFiles("default_flatfolder", "0")
+    validateDataFiles("default_flatfolder")
 
     checkAnswer(sql("select empno, empname, designation, doj, workgroupcategory, workgroupcategoryname, deptno, deptname, projectcode, projectjoindate, projectenddate, attendance, utilization, salary from flatfolder order by empno"),
       sql("select  empno, empname, designation, doj, workgroupcategory, workgroupcategoryname, deptno, deptname, projectcode, projectjoindate, projectenddate, attendance, utilization, salary from originTable order by empno"))