Update Accumulo-Examples pom
* update pom.xml to refer to Accumulo-2.1.0-SNAPSHOT
* update pom.xml to use JDK 11
* update Viewer.java to compile under JDK 11 (DefaultMutableTreeNode.children() now returns Enumeration<TreeNode>)
* add info message to BulkIngestExample class
diff --git a/pom.xml b/pom.xml
index b95fd82..f2b43e5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -24,17 +24,17 @@
</parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-examples</artifactId>
- <version>2.0.0-SNAPSHOT</version>
+ <version>2.1.0-SNAPSHOT</version>
<name>Apache Accumulo Examples</name>
<description>Example code and corresponding documentation for using Apache Accumulo</description>
<properties>
- <accumulo.version>2.0.0</accumulo.version>
+ <accumulo.version>2.1.0-SNAPSHOT</accumulo.version>
<eclipseFormatterStyle>contrib/Eclipse-Accumulo-Codestyle.xml</eclipseFormatterStyle>
- <hadoop.version>3.2.1</hadoop.version>
+ <hadoop.version>3.3.0</hadoop.version>
<log4j.version>2.14.0</log4j.version>
- <maven.compiler.release>8</maven.compiler.release>
- <maven.compiler.source>1.8</maven.compiler.source>
- <maven.compiler.target>1.8</maven.compiler.target>
+ <maven.compiler.release>11</maven.compiler.release>
+ <maven.compiler.source>11</maven.compiler.source>
+ <maven.compiler.target>11</maven.compiler.target>
<zookeeper.version>3.4.14</zookeeper.version>
</properties>
<dependencyManagement>
@@ -92,11 +92,6 @@
<version>3.1.0-incubating</version>
</dependency>
<dependency>
- <groupId>org.apache.zookeeper</groupId>
- <artifactId>zookeeper</artifactId>
- <version>${zookeeper.version}</version>
- </dependency>
- <dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>${log4j.version}</version>
@@ -111,6 +106,11 @@
<artifactId>log4j-slf4j-impl</artifactId>
<version>${log4j.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.zookeeper</groupId>
+ <artifactId>zookeeper</artifactId>
+ <version>${zookeeper.version}</version>
+ </dependency>
<!-- Test dependencies -->
<dependency>
<groupId>junit</groupId>
diff --git a/src/main/java/org/apache/accumulo/examples/dirlist/Viewer.java b/src/main/java/org/apache/accumulo/examples/dirlist/Viewer.java
index 1783efd..ea28179 100644
--- a/src/main/java/org/apache/accumulo/examples/dirlist/Viewer.java
+++ b/src/main/java/org/apache/accumulo/examples/dirlist/Viewer.java
@@ -33,6 +33,7 @@
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
+import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import org.apache.accumulo.core.client.AccumuloClient;
@@ -120,9 +121,9 @@
public void populateChildren(DefaultMutableTreeNode node) throws TableNotFoundException {
@SuppressWarnings("unchecked")
- Enumeration<DefaultMutableTreeNode> children = node.children();
+ Enumeration<TreeNode> children = node.children();
while (children.hasMoreElements()) {
- populate(children.nextElement());
+ populate((DefaultMutableTreeNode) children.nextElement());
}
}
@@ -176,9 +177,9 @@
public void treeCollapsed(TreeExpansionEvent event) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode) event.getPath().getLastPathComponent();
@SuppressWarnings("unchecked")
- Enumeration<DefaultMutableTreeNode> children = node.children();
+ Enumeration<TreeNode> children = node.children();
while (children.hasMoreElements()) {
- DefaultMutableTreeNode child = children.nextElement();
+ DefaultMutableTreeNode child = (DefaultMutableTreeNode) children.nextElement();
log.debug("removing children of " + ((NodeInfo) child.getUserObject()).getFullName());
child.removeAllChildren();
}
diff --git a/src/main/java/org/apache/accumulo/examples/mapreduce/bulk/BulkIngestExample.java b/src/main/java/org/apache/accumulo/examples/mapreduce/bulk/BulkIngestExample.java
index 0791265..d5b3aa1 100644
--- a/src/main/java/org/apache/accumulo/examples/mapreduce/bulk/BulkIngestExample.java
+++ b/src/main/java/org/apache/accumulo/examples/mapreduce/bulk/BulkIngestExample.java
@@ -157,6 +157,8 @@
// With HDFS permissions on, we need to make sure the Accumulo user can read/move the rfiles
FsShell fsShell = new FsShell(opts.getHadoopConfig());
fsShell.run(new String[] {"-chmod", "-R", "777", workDir});
+ System.err.println("Importing Directory '" + workDir + SLASH_FILES + "' to table '"
+ + SetupTable.tableName + "'");
client.tableOperations().importDirectory(workDir + SLASH_FILES).to(SetupTable.tableName)
.load();
}