PIG-5221: More fs.default.name deprecation warnings

git-svn-id: https://svn.apache.org/repos/asf/pig/trunk@1791459 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/CHANGES.txt b/CHANGES.txt
index db3a5b9..c701b2c 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -97,6 +97,8 @@
  
 BUG FIXES
 
+PIG-5221: More fs.default.name deprecation warnings (wattsinabox via daijy)
+
 PIG-5222: Fix Junit Deprecations (wattsinabox via daijy)
 
 PIG-5223: TestLimitVariable.testNestedLimitVariable1 and TestSecondarySortMR.testNestedLimitedSort failing (jins via daijy)
diff --git a/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestIndexedStorage.java b/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestIndexedStorage.java
index 0203cda..c8c1421 100644
--- a/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestIndexedStorage.java
+++ b/contrib/piggybank/java/src/test/java/org/apache/pig/piggybank/test/storage/TestIndexedStorage.java
@@ -119,7 +119,7 @@
     public void testGetNext() throws IOException, InterruptedException {
         IndexedStorage storage = new IndexedStorage("\t","0,1");
         Configuration conf = new Configuration();
-        conf.set("fs.default.name", "file:///");
+        conf.set("fs.defaultFS", "file:///");
         LocalFileSystem fs = FileSystem.getLocal(conf);
 
         TaskAttemptID taskId = HadoopShims.createTaskAttemptID("jt", 1, true, 1, 1);
@@ -151,7 +151,7 @@
     public void testSeek() throws IOException, InterruptedException {
         IndexedStorage storage = new IndexedStorage("\t","0,1");
         Configuration conf = new Configuration();
-        conf.set("fs.default.name", "file:///");
+        conf.set("fs.defaultFS", "file:///");
         LocalFileSystem fs = FileSystem.getLocal(conf);
 
         TaskAttemptID taskId =  HadoopShims.createTaskAttemptID("jt", 2, true, 2, 2);
diff --git a/src/docs/src/documentation/content/xdocs/perf.xml b/src/docs/src/documentation/content/xdocs/perf.xml
index de9b694..2b70365 100644
--- a/src/docs/src/documentation/content/xdocs/perf.xml
+++ b/src/docs/src/documentation/content/xdocs/perf.xml
@@ -32,7 +32,7 @@
 <source>
   &lt;property&gt;
     &lt;name&gt;tez.lib.uris&lt;/name&gt;
-    &lt;value&gt;${fs.default.name}/apps/tez/tez-0.5.2.tar.gz&lt;/value&gt;
+    &lt;value&gt;${fs.defaultFS}/apps/tez/tez-0.5.2.tar.gz&lt;/value&gt;
   &lt;/property&gt;
 </source>
   </section>
diff --git a/src/org/apache/pig/parser/QueryParserUtils.java b/src/org/apache/pig/parser/QueryParserUtils.java
index 9cd28b7..8ab5ff6 100644
--- a/src/org/apache/pig/parser/QueryParserUtils.java
+++ b/src/org/apache/pig/parser/QueryParserUtils.java
@@ -95,10 +95,12 @@
     }
 
     static void setHdfsServers(String absolutePath, PigContext pigContext) throws URISyntaxException {
-        // Get native host
-        String defaultFS = (String)pigContext.getProperties().get("fs.default.name");
-        if (defaultFS==null)
-            defaultFS = (String)pigContext.getProperties().get("fs.defaultFS");
+        // First check for the file system via the new property
+        String defaultFS = (String)pigContext.getProperties().get("fs.defaultFS");
+        if (defaultFS==null) {
+            // check the deprecated property if we must
+            defaultFS = (String)pigContext.getProperties().get("fs.default.name");
+        }
 
         URI defaultFSURI = new URI(defaultFS);
 
diff --git a/test/org/apache/pig/data/TestSchemaTuple.java b/test/org/apache/pig/data/TestSchemaTuple.java
index 1411748..dac0f08 100644
--- a/test/org/apache/pig/data/TestSchemaTuple.java
+++ b/test/org/apache/pig/data/TestSchemaTuple.java
@@ -534,7 +534,7 @@
         writer.close(null);
 
         Configuration conf = new Configuration();
-        conf.set("fs.default.name", "file:///");
+        conf.set("fs.defaultFS", "file:///");
 
         TaskAttemptID taskId = HadoopShims.createTaskAttemptID("jt", 1, true, 1, 1);
         conf.set(MRConfiguration.TASK_ID, taskId.toString());
diff --git a/test/org/apache/pig/parser/TestQueryParserUtils.java b/test/org/apache/pig/parser/TestQueryParserUtils.java
index 5cb7d20..1c217e3 100644
--- a/test/org/apache/pig/parser/TestQueryParserUtils.java
+++ b/test/org/apache/pig/parser/TestQueryParserUtils.java
@@ -42,7 +42,7 @@
     @Test
     public void testSetHDFSServers() throws Exception {
         Properties props = new Properties();
-        props.setProperty("fs.default.name", "hdfs://nn1:8020/tmp");
+        props.setProperty("fs.defaultFS", "hdfs://nn1:8020/tmp");
         PigContext pc = new PigContext(ExecType.LOCAL, props);
 
         //No scheme/host
diff --git a/test/org/apache/pig/test/TestParser.java b/test/org/apache/pig/test/TestParser.java
index 119e498..74d1960 100644
--- a/test/org/apache/pig/test/TestParser.java
+++ b/test/org/apache/pig/test/TestParser.java
@@ -85,7 +85,7 @@
         for (ExecType execType : execTypes) {
             setUp(execType);
             Properties pigProperties = pigServer.getPigContext().getProperties();
-            pigProperties.setProperty("fs.default.name", "hdfs://a.com:8020");
+            pigProperties.setProperty("fs.defaultFS", "hdfs://a.com:8020");
             Configuration conf;
 
             Data data = Storage.resetData(pigServer.getPigContext());
@@ -94,19 +94,16 @@
             pigServer.registerQuery("a = load '/user/pig/1.txt' using mock.Storage;");
             conf = ConfigurationUtil.toConfiguration(pigProperties);
             assertTrue(conf.get(MRConfiguration.JOB_HDFS_SERVERS) == null ||
-                    conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.default.name"))||
-                    conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.defaultFS")));
+                    conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.defaultFS")));
 
             pigServer.registerQuery("a = load 'hdfs://a.com/user/pig/1.txt' using mock.Storage;");
             conf = ConfigurationUtil.toConfiguration(pigProperties);
             assertTrue(pigProperties.getProperty(MRConfiguration.JOB_HDFS_SERVERS) == null ||
-                    conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.default.name"))||
-                    conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.defaultFS")));
+                    conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.defaultFS")));
 
             pigServer.registerQuery("a = load 'har:///1.txt' using mock.Storage;");
             conf = ConfigurationUtil.toConfiguration(pigProperties);
             assertTrue(pigProperties.getProperty(MRConfiguration.JOB_HDFS_SERVERS) == null ||
-                    conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.default.name"))||
-                    conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.defaultFS")));
+                    conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.defaultFS")));
 
             pigServer.registerQuery("a = load 'hdfs://b.com/user/pig/1.txt' using mock.Storage;");
@@ -130,7 +130,7 @@
     public void testRemoteServerList2() throws Exception {
         pigServer = new PigServer(Util.getLocalTestMode());
         Properties pigProperties = pigServer.getPigContext().getProperties();
-        pigProperties.setProperty("fs.default.name", "hdfs://a.com:8020");
+        pigProperties.setProperty("fs.defaultFS", "hdfs://a.com:8020");
         Configuration conf;
 
         pigServer.setBatchOn();
@@ -145,7 +145,6 @@
                 + pigProperties.getProperty(MRConfiguration.JOB_HDFS_SERVERS));
         conf = ConfigurationUtil.toConfiguration(pigProperties);
         assertTrue(conf.get(MRConfiguration.JOB_HDFS_SERVERS) == null ||
-                conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.default.name"))||
-                conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.defaultFS")));
+                conf.get(MRConfiguration.JOB_HDFS_SERVERS).equals(pigProperties.get("fs.defaultFS")));
 
         pigServer.registerQuery("store a into 'hdfs://b.com/user/pig/1.txt' using mock.Storage;");
diff --git a/test/org/apache/pig/test/TestPigContext.java b/test/org/apache/pig/test/TestPigContext.java
index eef7366..0cff397 100644
--- a/test/org/apache/pig/test/TestPigContext.java
+++ b/test/org/apache/pig/test/TestPigContext.java
@@ -241,7 +241,7 @@
     private static Properties getProperties() {
         Properties props = new Properties();
         props.put(MRConfiguration.JOB_TRACKER, JOB_TRACKER);
-        props.put("fs.default.name", FS_NAME);
+        props.put("fs.defaultFS", FS_NAME);
         props.put("hadoop.tmp.dir", TMP_DIR_PROP);
         return props;
     }
diff --git a/test/org/apache/pig/test/TestTmpFileCompression.java b/test/org/apache/pig/test/TestTmpFileCompression.java
index 29ae031..52fb43c 100644
--- a/test/org/apache/pig/test/TestTmpFileCompression.java
+++ b/test/org/apache/pig/test/TestTmpFileCompression.java
@@ -380,7 +380,7 @@
         tFile.delete();
         Configuration conf = new Configuration();
         conf.set("tfile.io.chunk.size","100");
-        conf.set("fs.default.name", "file:///");
+        conf.set("fs.defaultFS", "file:///");
 
         for (String codec: new String [] {"none", "gz"} ) {
             System.err.println("Testing RecordWriter/Reader with codec: "