PIG-5316: Initialize mapred.task.id property for PoS jobs (nkollar via szita)

git-svn-id: https://svn.apache.org/repos/asf/pig/trunk@1816542 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/CHANGES.txt b/CHANGES.txt
index 7340b2c..8c037f3 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -26,6 +26,8 @@
  
 IMPROVEMENTS
 
+PIG-5316: Initialize mapred.task.id property for PoS jobs (nkollar via szita)
+
 PIG-5302: Remove HttpClient dependency (nkollar via szita)
 
 PIG-5305: Enable yarn-client mode execution of tests in Spark (1) mode (szita)
diff --git a/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java b/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java
index 3143987..6f579e9 100644
--- a/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java
+++ b/src/org/apache/pig/backend/hadoop/executionengine/spark/SparkLauncher.java
@@ -42,6 +42,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.pig.PigConfiguration;
 import org.apache.pig.PigException;
 import org.apache.pig.PigWarning;
@@ -182,6 +183,7 @@
         jobGroupID = String.format("%s-%s",sparkContext.getConf().getAppId(),
                 UUID.randomUUID().toString());
         jobConf.set(MRConfiguration.JOB_ID,jobGroupID);
+        jobConf.set(MRConfiguration.TASK_ID, new TaskAttemptID().toString());
 
         sparkContext.setJobGroup(jobGroupID, "Pig query to Spark cluster",
                 false);
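
Note (illustrative, not part of the patch): the one-line change above seeds the Spark-side JobConf with a synthetic task attempt id so that Hadoop-side code handed this conf (output committers, store funcs) never sees the property unset. The sketch below is a minimal, standalone Java illustration of that pattern; it uses the literal property name "mapred.task.id" from the commit title, and a fully specified TaskAttemptID plus a forName round-trip purely for readability -- the patch itself sets MRConfiguration.TASK_ID from a default-constructed TaskAttemptID.

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.TaskType;

    public class TaskIdSketch {
        public static void main(String[] args) {
            JobConf jobConf = new JobConf();

            // What the patch accomplishes: make sure "mapred.task.id" is always
            // present in the conf before it is handed to Hadoop-side consumers.
            // (The patch uses new TaskAttemptID(); a fully specified id is used
            // here so the resulting string is easy to read.)
            TaskAttemptID attempt = new TaskAttemptID("local", 1, TaskType.MAP, 0, 0);
            jobConf.set("mapred.task.id", attempt.toString());

            // Typical consumer pattern: recover the attempt id from the conf.
            // If the property were unset, jobConf.get(...) would return null and
            // TaskAttemptID.forName(null) would also return null.
            TaskAttemptID recovered =
                    TaskAttemptID.forName(jobConf.get("mapred.task.id"));
            System.out.println("Recovered task attempt id: " + recovered);
        }
    }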