Port the DFS navigator content extension from org.apache.hdt.ui into a new org.apache.hdt.dfs.core plugin
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/cluster/ServerRegistry.java b/org.apache.hdt.core/src/org/apache/hdt/core/cluster/ServerRegistry.java
similarity index 96%
rename from org.apache.hdt.ui/src/org/apache/hdt/ui/cluster/ServerRegistry.java
rename to org.apache.hdt.core/src/org/apache/hdt/core/cluster/ServerRegistry.java
index b1e172b..7529500 100644
--- a/org.apache.hdt.ui/src/org/apache/hdt/ui/cluster/ServerRegistry.java
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/cluster/ServerRegistry.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hdt.ui.cluster;
+package org.apache.hdt.core.cluster;
import java.io.File;
import java.io.FilenameFilter;
@@ -29,9 +29,7 @@
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hdt.core.cluster.HadoopCluster;
-import org.apache.hdt.core.cluster.IHadoopClusterListener;
-import org.apache.hdt.ui.Activator;
+import org.apache.hdt.core.Activator;
import org.eclipse.jface.dialogs.MessageDialog;
/**
diff --git a/org.apache.hdt.dfs.core/.classpath b/org.apache.hdt.dfs.core/.classpath
new file mode 100644
index 0000000..ad32c83
--- /dev/null
+++ b/org.apache.hdt.dfs.core/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
+ <classpathentry kind="src" path="src"/>
+ <classpathentry kind="output" path="bin"/>
+</classpath>
diff --git a/org.apache.hdt.dfs.core/.project b/org.apache.hdt.dfs.core/.project
new file mode 100644
index 0000000..e4cccbd
--- /dev/null
+++ b/org.apache.hdt.dfs.core/.project
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>org.apache.hdt.dfs.core</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.ManifestBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.SchemaBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.pde.PluginNature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+</projectDescription>
diff --git a/org.apache.hdt.dfs.core/.settings/org.eclipse.jdt.core.prefs b/org.apache.hdt.dfs.core/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..c537b63
--- /dev/null
+++ b/org.apache.hdt.dfs.core/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,7 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6
diff --git a/org.apache.hdt.dfs.core/META-INF/MANIFEST.MF b/org.apache.hdt.dfs.core/META-INF/MANIFEST.MF
new file mode 100644
index 0000000..639bafd
--- /dev/null
+++ b/org.apache.hdt.dfs.core/META-INF/MANIFEST.MF
@@ -0,0 +1,15 @@
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: Hadoop Development Tools DFS Core
+Bundle-SymbolicName: org.apache.hdt.dfs.core
+Bundle-Version: 0.0.0
+Bundle-Activator: org.apache.hdt.dfs.core.Activator
+Bundle-Vendor: Apache Software Foundation
+Require-Bundle: org.eclipse.ui,
+ org.eclipse.core.runtime,
+ org.apache.hdt.core,
+ org.apache.hadoop.eclipse,
+ org.apache.hdt.ui,
+ org.eclipse.core.resources
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6
+Bundle-ActivationPolicy: lazy
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/Activator.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/Activator.class
new file mode 100644
index 0000000..27ca6d2
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/Activator.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContent.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContent.class
new file mode 100644
index 0000000..8abf44f
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContent.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$1.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$1.class
new file mode 100644
index 0000000..ca51c50
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$1.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$2.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$2.class
new file mode 100644
index 0000000..7851672
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$2.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider.class
new file mode 100644
index 0000000..50113d1
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$1.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$1.class
new file mode 100644
index 0000000..b4e4fdd
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$1.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$IStorageAdapter.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$IStorageAdapter.class
new file mode 100644
index 0000000..a69abbb
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$IStorageAdapter.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile.class
new file mode 100644
index 0000000..1fa979b
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder$1.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder$1.class
new file mode 100644
index 0000000..1a62e05
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder$1.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder.class
new file mode 100644
index 0000000..f5a9625
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation$1.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation$1.class
new file mode 100644
index 0000000..2f1697b
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation$1.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation.class
new file mode 100644
index 0000000..6f73f90
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot$1.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot$1.class
new file mode 100644
index 0000000..87fbd0b
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot$1.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot.class
new file mode 100644
index 0000000..e342c1c
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSMessage.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSMessage.class
new file mode 100644
index 0000000..252d9b7
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSMessage.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSPath.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSPath.class
new file mode 100644
index 0000000..1b9086b
--- /dev/null
+++ b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSPath.class
Binary files differ
diff --git a/org.apache.hdt.dfs.core/build.properties b/org.apache.hdt.dfs.core/build.properties
new file mode 100644
index 0000000..34d2e4d
--- /dev/null
+++ b/org.apache.hdt.dfs.core/build.properties
@@ -0,0 +1,4 @@
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+ .
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/Activator.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/Activator.java
new file mode 100644
index 0000000..c9b8c4e
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/Activator.java
@@ -0,0 +1,50 @@
+package org.apache.hdt.dfs.core;
+
+import org.eclipse.ui.plugin.AbstractUIPlugin;
+import org.osgi.framework.BundleContext;
+
+/**
+ * The activator class controls the plug-in life cycle
+ */
+public class Activator extends AbstractUIPlugin {
+
+ // The plug-in ID
+ public static final String PLUGIN_ID = "org.apache.hdt.dfs.core"; //$NON-NLS-1$
+
+ // The shared instance
+ private static Activator plugin;
+
+ /**
+ * The constructor
+ */
+ public Activator() {
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
+ */
+ public void start(BundleContext context) throws Exception {
+ super.start(context);
+ plugin = this;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
+ */
+ public void stop(BundleContext context) throws Exception {
+ plugin = null;
+ super.stop(context);
+ }
+
+ /**
+ * Returns the shared instance
+ *
+ * @return the shared instance
+ */
+ public static Activator getDefault() {
+ return plugin;
+ }
+
+}
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContent.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContent.java
new file mode 100644
index 0000000..245b62c
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContent.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+/**
+ * Interface to define content entities in the DFS browser
+ */
+public interface DFSContent {
+
+ boolean hasChildren();
+
+ DFSContent[] getChildren();
+
+ void refresh();
+
+}
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContentProvider.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContentProvider.java
new file mode 100644
index 0000000..1d59d61
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContentProvider.java
@@ -0,0 +1,244 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hdt.ui.ImageLibrary;
+import org.apache.hdt.core.cluster.HadoopCluster;
+import org.apache.hdt.core.cluster.ServerRegistry;
+import org.eclipse.jface.viewers.ILabelProvider;
+import org.eclipse.jface.viewers.ILabelProviderListener;
+import org.eclipse.jface.viewers.ITreeContentProvider;
+import org.eclipse.jface.viewers.StructuredViewer;
+import org.eclipse.jface.viewers.Viewer;
+import org.eclipse.swt.graphics.Image;
+import org.eclipse.swt.widgets.Display;
+
+/**
+ * Handles viewing of DFS locations
+ * <p>
+ *
+ * The content handled by this provider is a tree:
+ *
+ * <tt>
+ * <br>DFSLocationsRoot
+ * <br>\_HadoopCluster
+ * <br>| \_DfsFolder
+ * <br>| | \_DfsFile
+ * <br>| \_DfsFolder
+ * <br>| ...
+ * <br>\_HadoopCluster...
+ * </tt>
+ *
+ * The code should not block here: blocking operations need to be done
+ * asynchronously so as not to freeze the UI!
+ */
+public class DFSContentProvider implements ITreeContentProvider,
+ ILabelProvider {
+
+ /**
+ * The viewer that displays this Tree content
+ */
+ private Viewer viewer;
+
+ private StructuredViewer sviewer;
+
+ private Map<HadoopCluster, DFSContent> rootFolders =
+ new HashMap<HadoopCluster, DFSContent>();
+
+ /**
+ * Constructor: load resources (icons).
+ */
+ public DFSContentProvider() {
+ }
+
+ private final DFSLocationsRoot locationsRoot = new DFSLocationsRoot(this);
+
+ /*
+ * ITreeContentProvider implementation
+ */
+
+ /* @inheritDoc */
+ public Object[] getChildren(Object parent) {
+
+ if (!(parent instanceof DFSContent))
+ return null;
+ DFSContent content = (DFSContent) parent;
+ return content.getChildren();
+ }
+
+ public Object[] test(Object parentElement) {
+ if (parentElement instanceof DFSLocationsRoot) {
+ return ServerRegistry.getInstance().getServers().toArray();
+
+ } else if (parentElement instanceof HadoopCluster) {
+ final HadoopCluster location = (HadoopCluster) parentElement;
+ Object root = rootFolders.get(location);
+ if (root != null)
+ return new Object[] { root };
+
+ return new Object[] { "Connecting to DFS..." };
+
+ } else if (parentElement instanceof DFSFolder) {
+ DFSFolder folder = (DFSFolder) parentElement;
+ return folder.getChildren();
+ }
+
+ return new Object[] { "<Unknown DFSContent>" };
+ }
+
+ /* @inheritDoc */
+ public Object getParent(Object element) {
+
+ if (element instanceof DFSPath) {
+ return ((DFSPath) element).getParent();
+
+ } else if (element instanceof HadoopCluster) {
+ return locationsRoot;
+ }
+
+ return null;
+ }
+
+ /* @inheritDoc */
+ public boolean hasChildren(Object element) {
+ if (element instanceof DFSContent) {
+ DFSContent content = (DFSContent) element;
+ return content.hasChildren();
+ }
+ return false;
+ }
+
+ /*
+ * IStructureContentProvider implementation
+ */
+
+ /* @inheritDoc */
+ public Object[] getElements(final Object inputElement) {
+ return new Object[] { locationsRoot };
+ // return ServerRegistry.getInstance().getServers().toArray();
+ }
+
+ /*
+ * ILabelProvider implementation
+ */
+
+ /* @inheritDoc */
+ public Image getImage(Object element) {
+ if (element instanceof DFSLocationsRoot)
+ return ImageLibrary.getImage("dfs.browser.root.entry");
+
+ else if (element instanceof DFSLocation)
+ return ImageLibrary.getImage("dfs.browser.location.entry");
+
+ else if (element instanceof DFSFolder)
+ return ImageLibrary.getImage("dfs.browser.folder.entry");
+
+ else if (element instanceof DFSFile)
+ return ImageLibrary.getImage("dfs.browser.file.entry");
+
+ return null;
+ }
+
+ /* @inheritDoc */
+ public String getText(Object element) {
+ if (element instanceof DFSFile)
+ return ((DFSFile) element).toDetailedString();
+
+ return element.toString();
+ }
+
+ /*
+ * IBaseLabelProvider implementation
+ */
+
+ /* @inheritDoc */
+ public void addListener(ILabelProviderListener listener) {
+ }
+
+ /* @inheritDoc */
+ public void removeListener(ILabelProviderListener listener) {
+ }
+
+ /* @inheritDoc */
+ public boolean isLabelProperty(Object element, String property) {
+ return false;
+ }
+
+ /*
+ * IContentProvider implementation
+ */
+
+ /* @inheritDoc */
+ public void dispose() {
+ }
+
+ /* @inheritDoc */
+ public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
+ this.viewer = viewer;
+ if ((viewer != null) && (viewer instanceof StructuredViewer))
+ this.sviewer = (StructuredViewer) viewer;
+ else
+ this.sviewer = null;
+ }
+
+ /*
+ * Miscellaneous
+ */
+
+ /**
+ * Ask the viewer for this content to refresh
+ */
+ void refresh() {
+ // no display, nothing to update
+ if (this.viewer == null)
+ return;
+
+ Display.getDefault().asyncExec(new Runnable() {
+ public void run() {
+ DFSContentProvider.this.viewer.refresh();
+ }
+ });
+ }
+
+ /**
+ * Ask the viewer to refresh a single element
+ *
+ * @param content what to refresh
+ */
+ void refresh(final DFSContent content) {
+ if (this.sviewer != null) {
+ Display.getDefault().asyncExec(new Runnable() {
+ public void run() {
+ DFSContentProvider.this.sviewer.refresh(content);
+ }
+ });
+
+ } else {
+ refresh();
+ }
+ }
+
+ Viewer getViewer() {
+ return this.viewer;
+ }
+
+}
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFile.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFile.java
new file mode 100644
index 0000000..f739607
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFile.java
@@ -0,0 +1,350 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.InvocationTargetException;
+
+import org.apache.hdt.dfs.core.Activator;
+import org.apache.hdt.core.dialogs.ErrorMessageDialog;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.eclipse.core.resources.IStorage;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IPath;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.PlatformObject;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.jface.dialogs.MessageDialog;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.ui.PlatformUI;
+
+/**
+ * File handling methods for the DFS
+ */
+public class DFSFile extends DFSPath implements DFSContent {
+
+ protected long length;
+
+ protected short replication;
+
+ /**
+ * Constructor to upload a file on the distributed file system
+ *
+ * @param parent
+ * @param path
+ * @param file
+ * @param monitor
+ */
+ public DFSFile(DFSPath parent, Path path, File file,
+ IProgressMonitor monitor) {
+
+ super(parent, path);
+ this.upload(monitor, file);
+ }
+
+ public DFSFile(DFSPath parent, Path path) {
+ super(parent, path);
+
+ try {
+ FileStatus fs = getDFS().getFileStatus(path);
+ this.length = fs.getLen();
+ this.replication = fs.getReplication();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Download and view contents of a file
+ *
+ * @return a InputStream for the file
+ */
+ public InputStream open() throws IOException {
+
+ return getDFS().open(this.path);
+ }
+
+ /**
+ * Download this file to the local file system. This creates a download
+ * status monitor.
+ *
+ * @param file
+ * @throws JSchException
+ * @throws IOException
+ * @throws InvocationTargetException
+ * @throws InterruptedException
+ *
+ * @deprecated
+ */
+ public void downloadToLocalFile(final File file)
+ throws InvocationTargetException, InterruptedException {
+
+ PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
+ new IRunnableWithProgress() {
+ public void run(IProgressMonitor monitor)
+ throws InvocationTargetException {
+
+ DFSFile.this.downloadToLocalFile(monitor, file);
+ }
+ });
+ }
+
+ /* @inheritDoc */
+ @Override
+ public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
+
+ File dfsPath = new File(this.getPath().toString());
+ File destination = new File(dir, dfsPath.getName());
+
+ if (destination.exists()) {
+ boolean answer =
+ MessageDialog.openQuestion(null, "Overwrite existing local file?",
+ "The file you are attempting to download from the DFS "
+ + this.getPath()
+ + ", already exists in your local directory as "
+ + destination + ".\n" + "Overwrite the existing file?");
+ if (!answer)
+ return;
+ }
+
+ try {
+ this.downloadToLocalFile(monitor, destination);
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ MessageDialog.openWarning(null, "Download to local file system",
+ "Downloading of file \"" + this.path + "\" to local directory \""
+ + dir + "\" has failed.\n" + e);
+ }
+ }
+
+ /**
+ * Provides a detailed string for this file
+ *
+ * @return the string formatted as
+ * <tt><filename> (<size>, r<replication>)</tt>
+ */
+ public String toDetailedString() {
+ final String[] units = { "b", "Kb", "Mb", "Gb", "Tb" };
+ int unit = 0;
+ double l = this.length;
+    while ((l >= 1024.0) && (unit < units.length - 1)) {
+ unit += 1;
+ l /= 1024.0;
+ }
+
+ return String.format("%s (%.1f %s, r%d)", super.toString(), l,
+ units[unit], this.replication);
+ }
+
+ /* @inheritDoc */
+ @Override
+ public String toString() {
+ return this.path.toString();
+ }
+
+ /*
+ *
+ */
+
+ /**
+ * Download the DfsFile to a local file. Use the given monitor to report
+ * status of operation.
+ *
+ * @param monitor the status monitor
+ * @param file the local file where to put the downloaded file
+ * @throws InvocationTargetException
+ */
+ public void downloadToLocalFile(IProgressMonitor monitor, File file)
+ throws InvocationTargetException {
+
+ final int taskSize = 1024;
+
+ monitor.setTaskName("Download file " + this.path);
+
+ BufferedOutputStream ostream = null;
+ DataInputStream istream = null;
+
+ try {
+ istream = getDFS().open(this.path);
+ ostream = new BufferedOutputStream(new FileOutputStream(file));
+
+ int bytes;
+ byte[] buffer = new byte[taskSize];
+
+ while ((bytes = istream.read(buffer)) >= 0) {
+ if (monitor.isCanceled())
+ return;
+ ostream.write(buffer, 0, bytes);
+ monitor.worked(1);
+ }
+
+ } catch (Exception e) {
+ throw new InvocationTargetException(e);
+
+ } finally {
+ // Clean all opened resources
+ if (istream != null) {
+ try {
+ istream.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ // nothing we can do here
+ }
+ }
+ try {
+        if (ostream != null) ostream.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ // nothing we can do here
+ }
+ }
+ }
+
+ /**
+ * Upload a local file to this file on the distributed file system
+ *
+ * @param monitor
+ * @param file
+ */
+ public void upload(IProgressMonitor monitor, File file) {
+
+ final int taskSize = 1024;
+
+ monitor.setTaskName("Upload file " + this.path);
+
+ BufferedInputStream istream = null;
+ DataOutputStream ostream = null;
+
+ try {
+ istream = new BufferedInputStream(new FileInputStream(file));
+ ostream = getDFS().create(this.path);
+
+ int bytes;
+ byte[] buffer = new byte[taskSize];
+
+ while ((bytes = istream.read(buffer)) >= 0) {
+ if (monitor.isCanceled())
+ return;
+ ostream.write(buffer, 0, bytes);
+ monitor.worked(1);
+ }
+
+ } catch (Exception e) {
+ ErrorMessageDialog.display(String.format(
+          "Unable to upload file %s to %s", file, this.path), e
+ .getLocalizedMessage());
+
+ } finally {
+ try {
+ if (istream != null)
+ istream.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ // nothing we can do here
+ }
+ try {
+ if (ostream != null)
+ ostream.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ // nothing we can do here
+ }
+ }
+ }
+
+ /* @inheritDoc */
+ @Override
+ public void refresh() {
+ getParent().refresh();
+ }
+
+ /* @inheritDoc */
+ @Override
+ public int computeDownloadWork() {
+ return 1 + (int) (this.length / 1024);
+ }
+
+ /**
+ * Creates an adapter for the file to open it in the Editor
+ *
+ * @return the IStorage
+ */
+ public IStorage getIStorage() {
+ return new IStorageAdapter();
+ }
+
+ /**
+ * IStorage adapter to open the file in the Editor
+ */
+ private class IStorageAdapter extends PlatformObject implements IStorage {
+
+ /* @inheritDoc */
+ public InputStream getContents() throws CoreException {
+ try {
+ return DFSFile.this.open();
+
+ } catch (IOException ioe) {
+ throw new CoreException(new Status(Status.ERROR,
+ Activator.PLUGIN_ID, 0, "Unable to open file \""
+ + DFSFile.this.path + "\"", ioe));
+ }
+ }
+
+ /* @inheritDoc */
+ public IPath getFullPath() {
+ return new org.eclipse.core.runtime.Path(DFSFile.this.path.toString());
+ }
+
+ /* @inheritDoc */
+ public String getName() {
+ return DFSFile.this.path.getName();
+ }
+
+ /* @inheritDoc */
+ public boolean isReadOnly() {
+ return true;
+ }
+
+ }
+
+ /*
+ * Implementation of DFSContent
+ */
+
+ /* @inheritDoc */
+ public DFSContent[] getChildren() {
+ return null;
+ }
+
+ /* @inheritDoc */
+ public boolean hasChildren() {
+ return false;
+ }
+
+}
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFolder.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFolder.java
new file mode 100644
index 0000000..55fc8be
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFolder.java
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hdt.core.cluster.HadoopCluster;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.core.runtime.jobs.Job;
+import org.eclipse.jface.dialogs.MessageDialog;
+
+/**
+ * Local representation of a folder in the DFS.
+ *
+ * The constructor creates an empty representation of the folder and spawn a
+ * thread that will fill
+ */
+public class DFSFolder extends DFSPath implements DFSContent {
+
+ static Logger log = Logger.getLogger(DFSFolder.class.getName());
+
+ private DFSContent[] children;
+
+ protected DFSFolder(DFSContentProvider provider, HadoopCluster location)
+ throws IOException {
+
+ super(provider, location);
+ }
+
+ private DFSFolder(DFSPath parent, Path path) {
+ super(parent, path);
+ }
+
+ protected void loadDFSFolderChildren() throws IOException {
+ List<DFSPath> list = new ArrayList<DFSPath>();
+
+ for (FileStatus status : getDFS().listStatus(this.getPath())) {
+ if (status.isDir()) {
+ list.add(new DFSFolder(this, status.getPath()));
+ } else {
+ list.add(new DFSFile(this, status.getPath()));
+ }
+ }
+
+ this.children = list.toArray(new DFSContent[list.size()]);
+ }
+
+ /**
+ * Upload the given file or directory into this DfsFolder
+ *
+ * @param file
+ * @throws IOException
+ */
+ public void upload(IProgressMonitor monitor, final File file)
+ throws IOException {
+
+ if (file.isDirectory()) {
+ Path filePath = new Path(this.path, file.getName());
+ getDFS().mkdirs(filePath);
+ DFSFolder newFolder = new DFSFolder(this, filePath);
+ monitor.worked(1);
+ for (File child : file.listFiles()) {
+ if (monitor.isCanceled())
+ return;
+ newFolder.upload(monitor, child);
+ }
+
+ } else if (file.isFile()) {
+ Path filePath = new Path(this.path, file.getName());
+ DFSFile newFile = new DFSFile(this, filePath, file, monitor);
+
+ } else {
+ // XXX don't know what the file is?
+ }
+ }
+
+ /* @inheritDoc */
+ @Override
+ public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
+ if (!dir.exists())
+ dir.mkdirs();
+
+ if (!dir.isDirectory()) {
+ MessageDialog.openError(null, "Download to local file system",
+ "Invalid directory location: \"" + dir + "\"");
+ return;
+ }
+
+ File dfsPath = new File(this.getPath().toString());
+ File destination = new File(dir, dfsPath.getName());
+
+ if (!destination.exists()) {
+ if (!destination.mkdir()) {
+ MessageDialog.openError(null, "Download to local directory",
+ "Unable to create directory " + destination.getAbsolutePath());
+ return;
+ }
+ }
+
+ // Download all DfsPath children
+ for (Object childObj : getChildren()) {
+ if (childObj instanceof DFSPath) {
+ ((DFSPath) childObj).downloadToLocalDirectory(monitor, destination);
+ monitor.worked(1);
+ }
+ }
+ }
+
+ /* @inheritDoc */
+ @Override
+ public int computeDownloadWork() {
+ int work = 1;
+ for (DFSContent child : getChildren()) {
+ if (child instanceof DFSPath)
+ work += ((DFSPath) child).computeDownloadWork();
+ }
+
+ return work;
+ }
+
+ /**
+ * Create a new sub directory into this directory
+ *
+ * @param folderName
+ */
+ public void mkdir(String folderName) {
+ try {
+ getDFS().mkdirs(new Path(this.path, folderName));
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ }
+ doRefresh();
+ }
+
+ /*
+ * Implementation of DFSContent
+ */
+
+ /* @inheritDoc */
+ public boolean hasChildren() {
+ if (this.children == null)
+ return true;
+ else
+ return (this.children.length > 0);
+ }
+
+ /* @inheritDoc */
+ public DFSContent[] getChildren() {
+ if (children == null) {
+ new Job("Connecting to DFS " + location) {
+ @Override
+ protected IStatus run(IProgressMonitor monitor) {
+ try {
+ loadDFSFolderChildren();
+ return Status.OK_STATUS;
+
+ } catch (IOException ioe) {
+ children =
+ new DFSContent[] { new DFSMessage("Error: "
+ + ioe.getLocalizedMessage()) };
+ return Status.CANCEL_STATUS;
+
+ } finally {
+ // Under all circumstances, update the UI
+ provider.refresh(DFSFolder.this);
+ }
+ }
+ }.schedule();
+
+ return new DFSContent[] { new DFSMessage("Listing folder content...") };
+ }
+ return this.children;
+ }
+
+ /* @inheritDoc */
+ @Override
+ public void refresh() {
+ this.children = null;
+ this.doRefresh();
+ }
+
+ /* @inheritDoc */
+ @Override
+ public String toString() {
+ return String.format("%s (%s)", super.toString(),
+ this.getChildren().length);
+ }
+
+}
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocation.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocation.java
new file mode 100644
index 0000000..6b6aff5
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocation.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.io.IOException;
+
+import org.apache.hdt.core.cluster.HadoopCluster;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.core.runtime.jobs.Job;
+
/**
 * DFS Content representation of a HDFS location
 *
 * <p>Wraps one {@link HadoopCluster} in the DFS navigator tree. The root
 * folder of the file system is created lazily and asynchronously the first
 * time children are requested.
 */
public class DFSLocation implements DFSContent {

  // Content provider used to push UI refreshes for this node.
  private final DFSContentProvider provider;

  // The Hadoop cluster this tree node represents.
  private final HadoopCluster location;

  // Lazily created root folder; null until the first getChildren() call
  // completes, or a DFSMessage if the connection failed.
  private DFSContent rootFolder = null;

  DFSLocation(DFSContentProvider provider, HadoopCluster server) {
    this.provider = provider;
    this.location = server;
  }

  /* @inheritDoc */
  @Override
  public String toString() {
    return this.location.getLocationName();
  }

  /*
   * Implementation of DFSContent
   */

  /* @inheritDoc */
  public DFSContent[] getChildren() {
    if (this.rootFolder == null) {
      /*
       * DfsFolder constructor might block as it contacts the NameNode: work
       * asynchronously here or this will potentially freeze the UI
       */
      new Job("Connecting to DFS " + location) {
        @Override
        protected IStatus run(IProgressMonitor monitor) {
          try {
            rootFolder = new DFSFolder(provider, location);
            return Status.OK_STATUS;

          } catch (IOException ioe) {
            // Show the failure as a message node instead of throwing.
            rootFolder =
                new DFSMessage("Error: " + ioe.getLocalizedMessage());
            return Status.CANCEL_STATUS;

          } finally {
            // Under all circumstances, update the UI
            provider.refresh(DFSLocation.this);
          }
        }
      }.schedule();

      // Placeholder shown until the background connection job completes.
      return new DFSContent[] { new DFSMessage("Connecting to DFS "
          + toString()) };
    }
    return new DFSContent[] { this.rootFolder };
  }

  /* @inheritDoc */
  public boolean hasChildren() {
    // Always true: even before connecting there is at least a placeholder.
    return true;
  }

  /* @inheritDoc */
  public void refresh() {
    // Drop the cached root so the next expansion reconnects to the DFS.
    this.rootFolder = null;
    this.provider.refresh(this);
  }

  /*
   * Actions
   */

  /**
   * Refresh the location using a new connection
   */
  public void reconnect() {
    this.refresh();
  }
}
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocationsRoot.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocationsRoot.java
new file mode 100644
index 0000000..1edec58
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocationsRoot.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hdt.core.cluster.IHadoopClusterListener;
+import org.apache.hdt.core.cluster.ServerRegistry;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hdt.core.cluster.HadoopCluster;
+
/**
 * Representation of the root element containing all DFS servers. This
 * content registers an observer on Hadoop servers so as to update itself
 * when servers are updated.
 */
public class DFSLocationsRoot implements DFSContent, IHadoopClusterListener {

  /**
   * Content provider this root belongs to; used to push UI refreshes.
   */
  private final DFSContentProvider provider;

  // One DFSLocation node per registered Hadoop cluster. Access is guarded
  // by synchronizing on this instance (see serverChanged/reloadLocations).
  private Map<HadoopCluster, DFSLocation> map =
      new HashMap<HadoopCluster, DFSLocation>();

  /**
   * Register a listeners to track DFS locations updates
   *
   * @param provider the content provider this content is the root of
   */
  DFSLocationsRoot(DFSContentProvider provider) {
    this.provider = provider;
    ServerRegistry.getInstance().addListener(this);
    this.refresh();
  }

  /*
   * Implementation of IHadoopServerListener
   */

  /* @inheritDoc */
  public synchronized void serverChanged(final HadoopCluster location,
      final int type) {

    switch (type) {
      case ServerRegistry.SERVER_STATE_CHANGED: {
        // NOTE(review): if the location is not in the map this passes null
        // to provider.refresh() — confirm the provider treats null as a
        // full-viewer refresh.
        this.provider.refresh(map.get(location));
        break;
      }

      case ServerRegistry.SERVER_ADDED: {
        DFSLocation dfsLoc = new DFSLocation(provider, location);
        map.put(location, dfsLoc);
        this.provider.refresh(this);
        break;
      }

      case ServerRegistry.SERVER_REMOVED: {
        map.remove(location);
        this.provider.refresh(this);
        break;
      }
    }
  }

  /**
   * Recompute the map of Hadoop locations
   */
  private synchronized void reloadLocations() {
    map.clear();
    for (HadoopCluster location : ServerRegistry.getInstance().getServers())
      map.put(location, new DFSLocation(provider, location));
  }

  /* @inheritDoc */
  @Override
  public String toString() {
    return "DFS Locations";
  }

  /*
   * Implementation of DFSContent
   */

  /* @inheritDoc */
  public synchronized DFSContent[] getChildren() {
    return this.map.values().toArray(new DFSContent[this.map.size()]);
  }

  /* @inheritDoc */
  public boolean hasChildren() {
    return (this.map.size() > 0);
  }

  /* @inheritDoc */
  public void refresh() {
    reloadLocations();
    this.provider.refresh(this);
  }

  /*
   * Actions
   */

  /**
   * Closes all opened Hadoop file systems, waiting at most 5 seconds for
   * the close to complete before giving up.
   */
  public void disconnect() {
    Thread closeThread = new Thread() {
      /* @inheritDoc */
      @Override
      public void run() {
        try {
          System.out.printf("Closing all opened File Systems...\n");
          FileSystem.closeAll();
          System.out.printf("File Systems closed\n");

        } catch (IOException ioe) {
          ioe.printStackTrace();
        }
      }
    };

    // Wait 5 seconds for the connections to be closed
    closeThread.start();
    try {
      closeThread.join(5000);

    } catch (InterruptedException ie) {
      // Ignore
    }
  }

}
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSMessage.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSMessage.java
new file mode 100644
index 0000000..0d25d45
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSMessage.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+/**
+ * DFS Content that displays a message.
+ */
+class DFSMessage implements DFSContent {
+
+ private String message;
+
+ DFSMessage(String message) {
+ this.message = message;
+ }
+
+ /* @inheritDoc */
+ @Override
+ public String toString() {
+ return this.message;
+ }
+
+ /*
+ * Implementation of DFSContent
+ */
+
+ /* @inheritDoc */
+ public DFSContent[] getChildren() {
+ return null;
+ }
+
+ /* @inheritDoc */
+ public boolean hasChildren() {
+ return false;
+ }
+
+ /* @inheritDoc */
+ public void refresh() {
+ // Nothing to do
+ }
+
+}
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSPath.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSPath.java
new file mode 100644
index 0000000..1cff452
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSPath.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hdt.core.dialogs.ErrorMessageDialog;
+import org.apache.hdt.core.cluster.ConfProp;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hdt.core.cluster.HadoopCluster;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.jface.dialogs.MessageDialog;
+
+/**
+ * DFS Path handling for DFS
+ */
+public abstract class DFSPath implements DFSContent {
+
+ protected final DFSContentProvider provider;
+
+ protected HadoopCluster location;
+
+ private DistributedFileSystem dfs = null;
+
+ protected final Path path;
+
+ protected final DFSPath parent;
+
+ /**
+ * For debugging purpose
+ */
+ static Logger log = Logger.getLogger(DFSPath.class.getName());
+
+ /**
+ * Create a path representation for the given location in the given viewer
+ *
+ * @param location
+ * @param path
+ * @param viewer
+ */
+ public DFSPath(DFSContentProvider provider, HadoopCluster location)
+ throws IOException {
+
+ this.provider = provider;
+ this.location = location;
+ this.path = new Path("/");
+ this.parent = null;
+ }
+
+ /**
+ * Create a sub-path representation for the given parent path
+ *
+ * @param parent
+ * @param path
+ */
+ protected DFSPath(DFSPath parent, Path path) {
+ this.provider = parent.provider;
+ this.location = parent.location;
+ this.dfs = parent.dfs;
+ this.parent = parent;
+ this.path = path;
+ }
+
+ protected void dispose() {
+ // Free the DFS connection
+ }
+
+ /* @inheritDoc */
+ @Override
+ public String toString() {
+ if (path.equals("/")) {
+ return location.getConfProp(ConfProp.FS_DEFAULT_URI);
+
+ } else {
+ return this.path.getName();
+ }
+ }
+
+ /**
+ * Does a recursive delete of the remote directory tree at this node.
+ */
+ public void delete() {
+ try {
+ getDFS().delete(this.path, true);
+
+ } catch (IOException e) {
+ e.printStackTrace();
+ MessageDialog.openWarning(null, "Delete file",
+ "Unable to delete file \"" + this.path + "\"\n" + e);
+ }
+ }
+
+ public DFSPath getParent() {
+ return parent;
+ }
+
+ public abstract void refresh();
+
+ /**
+ * Refresh the UI element for this content
+ */
+ public void doRefresh() {
+ provider.refresh(this);
+ }
+
+ /**
+ * Copy the DfsPath to the given local directory
+ *
+ * @param directory the local directory
+ */
+ public abstract void downloadToLocalDirectory(IProgressMonitor monitor,
+ File dir);
+
+ public Path getPath() {
+ return this.path;
+ }
+
+ /**
+ * Gets a connection to the DFS
+ *
+ * @return a connection to the DFS
+ * @throws IOException
+ */
+ DistributedFileSystem getDFS() throws IOException {
+ if (this.dfs == null) {
+ FileSystem fs = location.getDFS();
+ if (!(fs instanceof DistributedFileSystem)) {
+ ErrorMessageDialog.display("DFS Browser",
+ "The DFS Browser cannot browse anything else "
+ + "but a Distributed File System!");
+ throw new IOException("DFS Browser expects a DistributedFileSystem!");
+ }
+ this.dfs = (DistributedFileSystem) fs;
+ }
+ return this.dfs;
+ }
+
+ public abstract int computeDownloadWork();
+
+}
diff --git a/org.apache.hdt.dfs.ui/.classpath b/org.apache.hdt.dfs.ui/.classpath
new file mode 100644
index 0000000..ad32c83
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
+ <classpathentry kind="src" path="src"/>
+ <classpathentry kind="output" path="bin"/>
+</classpath>
diff --git a/org.apache.hdt.dfs.ui/.project b/org.apache.hdt.dfs.ui/.project
new file mode 100644
index 0000000..efb117d
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/.project
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>org.apache.hdt.dfs.ui</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.ManifestBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.SchemaBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.pde.PluginNature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+</projectDescription>
diff --git a/org.apache.hdt.dfs.ui/.settings/org.eclipse.jdt.core.prefs b/org.apache.hdt.dfs.ui/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..c537b63
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,7 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6
diff --git a/org.apache.hdt.dfs.ui/META-INF/MANIFEST.MF b/org.apache.hdt.dfs.ui/META-INF/MANIFEST.MF
new file mode 100644
index 0000000..b231b7f
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/META-INF/MANIFEST.MF
@@ -0,0 +1,14 @@
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: Hadoop Development Tools HDFS UI
+Bundle-SymbolicName: org.apache.hdt.dfs.ui;singleton:=true
+Bundle-Version: 0.0.0
+Bundle-Activator: org.apache.hdt.dfs.ui.Activator
+Bundle-Vendor: Apache Software Foundation
+Require-Bundle: org.eclipse.ui,
+ org.eclipse.core.runtime,
+ org.eclipse.wst.server.core,
+ org.apache.hadoop.eclipse,
+ org.apache.hdt.dfs.core
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6
+Bundle-ActivationPolicy: lazy
diff --git a/org.apache.hdt.dfs.ui/bin/org/apache/hdt/dfs/ui/Activator.class b/org.apache.hdt.dfs.ui/bin/org/apache/hdt/dfs/ui/Activator.class
new file mode 100644
index 0000000..cacc268
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/bin/org/apache/hdt/dfs/ui/Activator.class
Binary files differ
diff --git a/org.apache.hdt.dfs.ui/build.properties b/org.apache.hdt.dfs.ui/build.properties
new file mode 100644
index 0000000..8de5aee
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/build.properties
@@ -0,0 +1,6 @@
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+ .,\
+ plugin.xml,\
+ resources/
diff --git a/org.apache.hdt.dfs.ui/plugin.xml b/org.apache.hdt.dfs.ui/plugin.xml
new file mode 100644
index 0000000..53f60a5
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/plugin.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?eclipse version="3.4"?>
+<plugin>
+ <extension
+ point="org.eclipse.ui.navigator.navigatorContent">
+ <navigatorContent
+ activeByDefault="true"
+          contentProvider="org.apache.hdt.dfs.core.DFSContentProvider"
+          icon="resources/elephantblue16x16.gif"
+          id="org.apache.hadoop.eclipse.views.dfscontent"
+          labelProvider="org.apache.hdt.dfs.core.DFSContentProvider"
+ name="Hadoop Distributed File Systems"
+ priority="normal"
+ providesSaveables="false">
+ <triggerPoints>
+ <or>
+ <instanceof
+                  value="org.apache.hdt.dfs.core.DFSPath">
+ </instanceof>
+ <adapt
+ type="org.eclipse.core.resources.IResource">
+ <test
+ forcePluginActivation="true"
+ property="mapreduce.deployable">
+ </test>
+ </adapt>
+ </or>
+ </triggerPoints>
+ <actionProvider
+            class="org.apache.hdt.dfs.core.ActionProvider">
+ </actionProvider>
+ <possibleChildren>
+ <or>
+ <instanceof
+ value="org.eclipse.wst.server.core.IServer">
+ </instanceof>
+ <instanceof
+ value="org.apache.hdt.dfs.core.DFSLocationsRoot">
+ </instanceof>
+ <instanceof
+ value="org.apache.hdt.dfs.core.DFSLocation">
+ </instanceof>
+ <instanceof
+ value="org.apache.hdt.dfs.core.DFSPath">
+ </instanceof>
+ </or>
+ </possibleChildren>
+ </navigatorContent>
+ </extension>
+</plugin>
diff --git a/org.apache.hdt.dfs.ui/resources/elephantblue16x16.gif b/org.apache.hdt.dfs.ui/resources/elephantblue16x16.gif
new file mode 100644
index 0000000..0927b13
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/resources/elephantblue16x16.gif
Binary files differ
diff --git a/org.apache.hdt.dfs.ui/src/org/apache/hdt/dfs/ui/Activator.java b/org.apache.hdt.dfs.ui/src/org/apache/hdt/dfs/ui/Activator.java
new file mode 100644
index 0000000..4501d3e
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/src/org/apache/hdt/dfs/ui/Activator.java
@@ -0,0 +1,50 @@
+package org.apache.hdt.dfs.ui;
+
+import org.eclipse.ui.plugin.AbstractUIPlugin;
+import org.osgi.framework.BundleContext;
+
+/**
+ * The activator class controls the plug-in life cycle
+ */
+public class Activator extends AbstractUIPlugin {
+
+ // The plug-in ID
+ public static final String PLUGIN_ID = "org.apache.hdt.dfs.ui"; //$NON-NLS-1$
+
+ // The shared instance
+ private static Activator plugin;
+
+ /**
+ * The constructor
+ */
+ public Activator() {
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
+ */
+ public void start(BundleContext context) throws Exception {
+ super.start(context);
+ plugin = this;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
+ */
+ public void stop(BundleContext context) throws Exception {
+ plugin = null;
+ super.stop(context);
+ }
+
+ /**
+ * Returns the shared instance
+ *
+ * @return the shared instance
+ */
+ public static Activator getDefault() {
+ return plugin;
+ }
+
+}
diff --git a/org.apache.hdt.ui/META-INF/MANIFEST.MF b/org.apache.hdt.ui/META-INF/MANIFEST.MF
index 4195c59..be1cd74 100644
--- a/org.apache.hdt.ui/META-INF/MANIFEST.MF
+++ b/org.apache.hdt.ui/META-INF/MANIFEST.MF
@@ -16,3 +16,4 @@
org.eclipse.ui.console
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Bundle-ActivationPolicy: lazy
+Export-Package: org.apache.hdt.ui
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/views/ClusterView.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/views/ClusterView.java
index 711ccf3..40c43f7 100644
--- a/org.apache.hdt.ui/src/org/apache/hdt/ui/views/ClusterView.java
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/views/ClusterView.java
@@ -24,11 +24,11 @@
import org.apache.hdt.core.cluster.HadoopJob;
import org.apache.hdt.core.cluster.IHadoopClusterListener;
import org.apache.hdt.core.cluster.IJobListener;
+import org.apache.hdt.core.cluster.ServerRegistry;
import org.apache.hdt.core.cluster.utils.JarModule;
import org.apache.hdt.ui.ImageLibrary;
import org.apache.hdt.ui.actions.EditLocationAction;
import org.apache.hdt.ui.actions.NewLocationAction;
-import org.apache.hdt.ui.cluster.ServerRegistry;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.action.IMenuListener;
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/wizards/HadoopLocationWizard.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/wizards/HadoopLocationWizard.java
index 8fe9d19..335d32c 100644
--- a/org.apache.hdt.ui/src/org/apache/hdt/ui/wizards/HadoopLocationWizard.java
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/wizards/HadoopLocationWizard.java
@@ -31,7 +31,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hdt.core.cluster.ConfProp;
import org.apache.hdt.core.cluster.HadoopCluster;
-import org.apache.hdt.ui.cluster.ServerRegistry;
+import org.apache.hdt.core.cluster.ServerRegistry;
import org.eclipse.jface.dialogs.IMessageProvider;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;