Add debug core plug-in (org.apache.hdt.debug.core) containing the launch code
diff --git a/org.apache.hdt.debug.core/.classpath b/org.apache.hdt.debug.core/.classpath
new file mode 100644
index 0000000..ad32c83
--- /dev/null
+++ b/org.apache.hdt.debug.core/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
+ <classpathentry kind="src" path="src"/>
+ <classpathentry kind="output" path="bin"/>
+</classpath>
diff --git a/org.apache.hdt.debug.core/.project b/org.apache.hdt.debug.core/.project
new file mode 100644
index 0000000..ced2058
--- /dev/null
+++ b/org.apache.hdt.debug.core/.project
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>org.apache.hdt.debug.core</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.ManifestBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.SchemaBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.pde.PluginNature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+</projectDescription>
diff --git a/org.apache.hdt.debug.core/.settings/org.eclipse.jdt.core.prefs b/org.apache.hdt.debug.core/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..c537b63
--- /dev/null
+++ b/org.apache.hdt.debug.core/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,7 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6
diff --git a/org.apache.hdt.debug.core/META-INF/MANIFEST.MF b/org.apache.hdt.debug.core/META-INF/MANIFEST.MF
new file mode 100644
index 0000000..9a0e060
--- /dev/null
+++ b/org.apache.hdt.debug.core/META-INF/MANIFEST.MF
@@ -0,0 +1,22 @@
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: Hadoop Development Tools Debug Core
+Bundle-SymbolicName: org.apache.hdt.debug.core
+Bundle-Version: 0.0.0
+Bundle-Activator: org.apache.hdt.debug.core.Activator
+Bundle-Vendor: Apache Software Foundation
+Require-Bundle: org.eclipse.ui,
+ org.eclipse.core.runtime,
+ org.apache.hdt.core,
+ org.apache.hadoop.eclipse,
+ org.eclipse.core.resources,
+ org.eclipse.debug.core,
+ org.eclipse.jdt.core,
+ org.eclipse.jdt.debug.ui,
+ org.eclipse.jdt.launching,
+ org.eclipse.debug.ui
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6
+Bundle-ActivationPolicy: lazy
+Export-Package: org.apache.hdt.debug.core,
+ org.apache.hdt.debug.core.cluster,
+ org.apache.hdt.debug.core.launch
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/Activator.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/Activator.class
new file mode 100644
index 0000000..19a4b01
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/Activator.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$1.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$1.class
new file mode 100644
index 0000000..228ecf3
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$1.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$2.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$2.class
new file mode 100644
index 0000000..11c1985
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$2.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$3.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$3.class
new file mode 100644
index 0000000..0704372
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$3.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$4.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$4.class
new file mode 100644
index 0000000..0bd744e
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$4.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabAdvanced$1.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabAdvanced$1.class
new file mode 100644
index 0000000..6e96e6b
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabAdvanced$1.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabAdvanced.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabAdvanced.class
new file mode 100644
index 0000000..154e48f
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabAdvanced.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabListener.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabListener.class
new file mode 100644
index 0000000..986eef8
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabListener.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMain$1.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMain$1.class
new file mode 100644
index 0000000..1175186
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMain$1.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMain$2.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMain$2.class
new file mode 100644
index 0000000..61a2c20
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMain$2.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMain.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMain.class
new file mode 100644
index 0000000..59891bd
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMain.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMediator$1.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMediator$1.class
new file mode 100644
index 0000000..74b3d59
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMediator$1.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMediator$2.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMediator$2.class
new file mode 100644
index 0000000..b495438
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMediator$2.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMediator.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMediator.class
new file mode 100644
index 0000000..c7af413
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard$TabMediator.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard.class
new file mode 100644
index 0000000..b8a8053
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopLocationWizard.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopServerSelectionListContentProvider.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopServerSelectionListContentProvider.class
new file mode 100644
index 0000000..6af5fee
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/HadoopServerSelectionListContentProvider.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/IHadoopServerListener.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/IHadoopServerListener.class
new file mode 100644
index 0000000..0d6a4aa
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/IHadoopServerListener.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage$1.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage$1.class
new file mode 100644
index 0000000..e12efd2
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage$1.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage$2.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage$2.class
new file mode 100644
index 0000000..fb85698
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage$2.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage$3.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage$3.class
new file mode 100644
index 0000000..b9e4587
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage$3.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage.class
new file mode 100644
index 0000000..7a979d6
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard$MainWizardPage.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard.class
new file mode 100644
index 0000000..d4e8547
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/launch/HadoopApplicationLaunchShortcut$Dialog.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/launch/HadoopApplicationLaunchShortcut$Dialog.class
new file mode 100644
index 0000000..190e342
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/launch/HadoopApplicationLaunchShortcut$Dialog.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/launch/HadoopApplicationLaunchShortcut.class b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/launch/HadoopApplicationLaunchShortcut.class
new file mode 100644
index 0000000..73acb76
--- /dev/null
+++ b/org.apache.hdt.debug.core/bin/org/apache/hdt/debug/core/launch/HadoopApplicationLaunchShortcut.class
Binary files differ
diff --git a/org.apache.hdt.debug.core/build.properties b/org.apache.hdt.debug.core/build.properties
new file mode 100644
index 0000000..34d2e4d
--- /dev/null
+++ b/org.apache.hdt.debug.core/build.properties
@@ -0,0 +1,4 @@
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+ .
diff --git a/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/Activator.java b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/Activator.java
new file mode 100644
index 0000000..7de10ca
--- /dev/null
+++ b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/Activator.java
@@ -0,0 +1,50 @@
+package org.apache.hdt.debug.core;
+
+import org.eclipse.ui.plugin.AbstractUIPlugin;
+import org.osgi.framework.BundleContext;
+
+/**
+ * The activator class controls the plug-in life cycle
+ */
+public class Activator extends AbstractUIPlugin {
+
+ // The plug-in ID
+ public static final String PLUGIN_ID = "org.apache.hdt.debug.core"; //$NON-NLS-1$
+
+ // The shared instance
+ private static Activator plugin;
+
+ /**
+ * The constructor
+ */
+ public Activator() {
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
+ */
+ public void start(BundleContext context) throws Exception {
+ super.start(context);
+ plugin = this;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
+ */
+ public void stop(BundleContext context) throws Exception {
+ plugin = null;
+ super.stop(context);
+ }
+
+ /**
+ * Returns the shared instance
+ *
+ * @return the shared instance
+ */
+ public static Activator getDefault() {
+ return plugin;
+ }
+
+}
diff --git a/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/HadoopLocationWizard.java b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/HadoopLocationWizard.java
new file mode 100644
index 0000000..5da3da6
--- /dev/null
+++ b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/HadoopLocationWizard.java
@@ -0,0 +1,973 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.debug.core.cluster;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.Map.Entry;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hdt.core.cluster.ConfProp;
+import org.apache.hdt.core.cluster.HadoopCluster;
+import org.apache.hdt.core.cluster.ServerRegistry;
+import org.eclipse.jface.dialogs.IMessageProvider;
+import org.eclipse.jface.wizard.WizardPage;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.custom.ScrolledComposite;
+import org.eclipse.swt.events.ModifyEvent;
+import org.eclipse.swt.events.ModifyListener;
+import org.eclipse.swt.events.SelectionEvent;
+import org.eclipse.swt.events.SelectionListener;
+import org.eclipse.swt.graphics.Image;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Control;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.Event;
+import org.eclipse.swt.widgets.Group;
+import org.eclipse.swt.widgets.Label;
+import org.eclipse.swt.widgets.Listener;
+import org.eclipse.swt.widgets.TabFolder;
+import org.eclipse.swt.widgets.TabItem;
+import org.eclipse.swt.widgets.Text;
+
+/**
+ * Wizard for editing the settings of a Hadoop location
+ *
+ * The wizard contains 3 tabs: General, Tunneling and Advanced. It edits
+ * parameters of the location member which either a new location or a copy of
+ * an existing registered location.
+ */
+
+public class HadoopLocationWizard extends WizardPage {
+
+ Image circle;
+
+ /**
+ * The location effectively edited by the wizard. This location is a copy
+ * or a new one.
+ */
+ private HadoopCluster location;
+
+ /**
+ * The original location being edited by the wizard (null if we create a
+ * new instance).
+ */
+ private HadoopCluster original;
+
+ /**
+ * New Hadoop location wizard
+ */
+ public HadoopLocationWizard() {
+ super("Hadoop Server", "New Hadoop Location", null);
+
+ this.original = null;
+ this.location = new HadoopCluster();
+ this.location.setLocationName("");
+ }
+
+ /**
+ * Constructor to edit the parameters of an existing Hadoop server
+ *
+ * @param server
+ */
+ public HadoopLocationWizard(HadoopCluster server) {
+ super("Create a new Hadoop location", "Edit Hadoop Location", null);
+
+ this.original = server;
+ this.location = new HadoopCluster(server);
+ }
+
+ /**
+ * Performs any actions appropriate in response to the user having pressed
+ * the Finish button, or refuse if finishing now is not permitted.
+ *
+ * @return the created or updated Hadoop location
+ */
+
+ public HadoopCluster performFinish() {
+ try {
+ if (this.original == null) {
+ // New location
+ Display.getDefault().syncExec(new Runnable() {
+ public void run() {
+ ServerRegistry.getInstance().addServer(
+ HadoopLocationWizard.this.location);
+ }
+ });
+ return this.location;
+
+ } else {
+ // Update location
+ final String originalName = this.original.getLocationName();
+ this.original.load(this.location);
+
+ Display.getDefault().syncExec(new Runnable() {
+ public void run() {
+ ServerRegistry.getInstance().updateServer(originalName,
+ HadoopLocationWizard.this.location);
+ }
+ });
+ return this.original;
+
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ setMessage("Invalid server location values", IMessageProvider.ERROR);
+ return null;
+ }
+ }
+
+ /**
+ * Validates the current Hadoop location settings (look for Hadoop
+ * installation directory).
+ *
+ */
+ private void testLocation() {
+ setMessage("Not implemented yet", IMessageProvider.WARNING);
+ }
+
+ /**
+ * Location is not complete (and finish button not available) until a host
+ * name is specified.
+ *
+ * @inheritDoc
+ */
+ @Override
+ public boolean isPageComplete() {
+
+ {
+ String locName = location.getConfProp(ConfProp.PI_LOCATION_NAME);
+ if ((locName == null) || (locName.length() == 0)
+ || locName.contains("/")) {
+
+ setMessage("Bad location name: "
+ + "the location name should not contain "
+ + "any character prohibited in a file name.", WARNING);
+
+ return false;
+ }
+ }
+
+ {
+ String master = location.getConfProp(ConfProp.PI_JOB_TRACKER_HOST);
+ if ((master == null) || (master.length() == 0)) {
+
+ setMessage("Bad master host name: "
+ + "the master host name refers to the machine "
+ + "that runs the Job tracker.", WARNING);
+
+ return false;
+ }
+ }
+
+ {
+ String jobTracker = location.getConfProp(ConfProp.JOB_TRACKER_URI);
+ String[] strs = jobTracker.split(":");
+ boolean ok = (strs.length == 2);
+ if (ok) {
+ try {
+ int port = Integer.parseInt(strs[1]);
+ ok = (port >= 0) && (port < 65536);
+ } catch (NumberFormatException nfe) {
+ ok = false;
+ }
+ }
+ if (!ok) {
+ setMessage("The job tracker information ("
+ + ConfProp.JOB_TRACKER_URI.name + ") is invalid. "
+ + "This usually looks like \"host:port\"", WARNING);
+ return false;
+ }
+ }
+
+ {
+ String fsDefaultURI = location.getConfProp(ConfProp.FS_DEFAULT_URI);
+ try {
+ URI uri = new URI(fsDefaultURI);
+ } catch (URISyntaxException e) {
+
+ setMessage("The default file system URI is invalid. "
+ + "This usually looks like \"hdfs://host:port/\" "
+ + "or \"file:///dir/\"", WARNING);
+ }
+ }
+
+ setMessage("Define the location of a Hadoop infrastructure "
+ + "for running MapReduce applications.");
+ return true;
+ }
+
+ /**
+ * Create the wizard
+ */
+ /* @inheritDoc */
+ public void createControl(Composite parent) {
+ setTitle("Define Hadoop location");
+ setDescription("Define the location of a Hadoop infrastructure "
+ + "for running MapReduce applications.");
+
+ Composite panel = new Composite(parent, SWT.FILL);
+ GridLayout glayout = new GridLayout(2, false);
+ panel.setLayout(glayout);
+
+ TabMediator mediator = new TabMediator(panel);
+ {
+ GridData gdata = new GridData(GridData.FILL_BOTH);
+ gdata.horizontalSpan = 2;
+ mediator.folder.setLayoutData(gdata);
+ }
+ this.setControl(panel /* mediator.folder */);
+ {
+ final Button btn = new Button(panel, SWT.NONE);
+ btn.setText("&Load from file");
+ btn.setEnabled(false);
+ btn.setToolTipText("Not yet implemented");
+ btn.addListener(SWT.Selection, new Listener() {
+ public void handleEvent(Event e) {
+ // TODO
+ }
+ });
+ }
+ {
+ final Button validate = new Button(panel, SWT.NONE);
+ validate.setText("&Validate location");
+ validate.setEnabled(false);
+ validate.setToolTipText("Not yet implemented");
+ validate.addListener(SWT.Selection, new Listener() {
+ public void handleEvent(Event e) {
+ testLocation();
+ }
+ });
+ }
+ }
+
+ private interface TabListener {
+ void notifyChange(ConfProp prop, String propValue);
+ }
+
+ /*
+ * Mediator pattern to keep tabs synchronized with each other and with the
+ * location state.
+ */
+
+ private class TabMediator {
+ TabFolder folder;
+
+ private Set<TabListener> tabs = new HashSet<TabListener>();
+
+ TabMediator(Composite parent) {
+ folder = new TabFolder(parent, SWT.NONE);
+ tabs.add(new TabMain(this));
+ tabs.add(new TabAdvanced(this));
+ }
+
+ /**
+ * Access to current configuration settings
+ *
+ * @param propName the property name
+ * @return the current property value
+ */
+ String get(String propName) {
+ return location.getConfProp(propName);
+ }
+
+ String get(ConfProp prop) {
+ return location.getConfProp(prop);
+ }
+
+ /**
+ * Implements change notifications from any tab: update the location
+ * state and other tabs
+ *
+ * @param source origin of the notification (one of the tree tabs)
+ * @param propName modified property
+ * @param propValue new value
+ */
+ void notifyChange(TabListener source, final ConfProp prop,
+ final String propValue) {
+ // Ignore notification when no change
+ String oldValue = location.getConfProp(prop);
+ if ((oldValue != null) && oldValue.equals(propValue))
+ return;
+
+ location.setConfProp(prop, propValue);
+ Display.getDefault().syncExec(new Runnable() {
+ public void run() {
+ getContainer().updateButtons();
+ }
+ });
+
+ this.fireChange(source, prop, propValue);
+
+ /*
+ * Now we deal with dependencies between settings
+ */
+ final String jobTrackerHost =
+ location.getConfProp(ConfProp.PI_JOB_TRACKER_HOST);
+ final String jobTrackerPort =
+ location.getConfProp(ConfProp.PI_JOB_TRACKER_PORT);
+ final String nameNodeHost =
+ location.getConfProp(ConfProp.PI_NAME_NODE_HOST);
+ final String nameNodePort =
+ location.getConfProp(ConfProp.PI_NAME_NODE_PORT);
+ final boolean colocate =
+ location.getConfProp(ConfProp.PI_COLOCATE_MASTERS)
+ .equalsIgnoreCase("yes");
+ final String jobTrackerURI =
+ location.getConfProp(ConfProp.JOB_TRACKER_URI);
+ final String fsDefaultURI =
+ location.getConfProp(ConfProp.FS_DEFAULT_URI);
+ final String socksServerURI =
+ location.getConfProp(ConfProp.SOCKS_SERVER);
+ final boolean socksProxyEnable =
+ location.getConfProp(ConfProp.PI_SOCKS_PROXY_ENABLE)
+ .equalsIgnoreCase("yes");
+ final String socksProxyHost =
+ location.getConfProp(ConfProp.PI_SOCKS_PROXY_HOST);
+ final String socksProxyPort =
+ location.getConfProp(ConfProp.PI_SOCKS_PROXY_PORT);
+
+ Display.getDefault().syncExec(new Runnable() {
+ public void run() {
+ switch (prop) {
+ case PI_JOB_TRACKER_HOST: {
+ if (colocate)
+ notifyChange(null, ConfProp.PI_NAME_NODE_HOST,
+ jobTrackerHost);
+ String newJobTrackerURI =
+ String.format("%s:%s", jobTrackerHost, jobTrackerPort);
+ notifyChange(null, ConfProp.JOB_TRACKER_URI, newJobTrackerURI);
+ break;
+ }
+ case PI_JOB_TRACKER_PORT: {
+ String newJobTrackerURI =
+ String.format("%s:%s", jobTrackerHost, jobTrackerPort);
+ notifyChange(null, ConfProp.JOB_TRACKER_URI, newJobTrackerURI);
+ break;
+ }
+ case PI_NAME_NODE_HOST: {
+ String newHDFSURI =
+ String.format("hdfs://%s:%s/", nameNodeHost, nameNodePort);
+ notifyChange(null, ConfProp.FS_DEFAULT_URI, newHDFSURI);
+
+ // Break colocation if someone force the DFS Master
+ if (!colocate && !nameNodeHost.equals(jobTrackerHost))
+ notifyChange(null, ConfProp.PI_COLOCATE_MASTERS, "no");
+ break;
+ }
+ case PI_NAME_NODE_PORT: {
+ String newHDFSURI =
+ String.format("hdfs://%s:%s/", nameNodeHost, nameNodePort);
+ notifyChange(null, ConfProp.FS_DEFAULT_URI, newHDFSURI);
+ break;
+ }
+ case PI_SOCKS_PROXY_HOST: {
+ String newSocksProxyURI =
+ String.format("%s:%s", socksProxyHost, socksProxyPort);
+ notifyChange(null, ConfProp.SOCKS_SERVER, newSocksProxyURI);
+ break;
+ }
+ case PI_SOCKS_PROXY_PORT: {
+ String newSocksProxyURI =
+ String.format("%s:%s", socksProxyHost, socksProxyPort);
+ notifyChange(null, ConfProp.SOCKS_SERVER, newSocksProxyURI);
+ break;
+ }
+ case JOB_TRACKER_URI: {
+ String[] strs = jobTrackerURI.split(":", 2);
+ String host = strs[0];
+ String port = (strs.length == 2) ? strs[1] : "";
+ notifyChange(null, ConfProp.PI_JOB_TRACKER_HOST, host);
+ notifyChange(null, ConfProp.PI_JOB_TRACKER_PORT, port);
+ break;
+ }
+ case FS_DEFAULT_URI: {
+ try {
+ URI uri = new URI(fsDefaultURI);
+ if (uri.getScheme().equals("hdfs")) {
+ String host = uri.getHost();
+ String port = Integer.toString(uri.getPort());
+ notifyChange(null, ConfProp.PI_NAME_NODE_HOST, host);
+ notifyChange(null, ConfProp.PI_NAME_NODE_PORT, port);
+ }
+ } catch (URISyntaxException use) {
+ // Ignore the update!
+ }
+ break;
+ }
+ case SOCKS_SERVER: {
+ String[] strs = socksServerURI.split(":", 2);
+ String host = strs[0];
+ String port = (strs.length == 2) ? strs[1] : "";
+ notifyChange(null, ConfProp.PI_SOCKS_PROXY_HOST, host);
+ notifyChange(null, ConfProp.PI_SOCKS_PROXY_PORT, port);
+ break;
+ }
+ case PI_COLOCATE_MASTERS: {
+ if (colocate)
+ notifyChange(null, ConfProp.PI_NAME_NODE_HOST,
+ jobTrackerHost);
+ break;
+ }
+ case PI_SOCKS_PROXY_ENABLE: {
+ if (socksProxyEnable) {
+ notifyChange(null, ConfProp.SOCKET_FACTORY_DEFAULT,
+ "org.apache.hadoop.net.SocksSocketFactory");
+ } else {
+ notifyChange(null, ConfProp.SOCKET_FACTORY_DEFAULT,
+ "org.apache.hadoop.net.StandardSocketFactory");
+ }
+ break;
+ }
+ }
+ }
+ });
+
+ }
+
+ /**
+ * Change notifications on properties (by name). A property might not be
+ * reflected as a ConfProp enum. If it is, the notification is forwarded
+ * to the ConfProp notifyChange method. If not, it is processed here.
+ *
+ * @param source
+ * @param propName
+ * @param propValue
+ */
+ void notifyChange(TabListener source, String propName, String propValue) {
+
+ ConfProp prop = ConfProp.getByName(propName);
+ if (prop != null)
+ notifyChange(source, prop, propValue);
+
+ location.setConfProp(propName, propValue);
+ }
+
+ /**
+ * Broadcast a property change to all registered tabs. If a tab is
+ * identified as the source of the change, this tab will not be notified.
+ *
+ * @param source TODO
+ * @param prop
+ * @param value
+ */
+ private void fireChange(TabListener source, ConfProp prop, String value) {
+ for (TabListener tab : tabs) {
+ if (tab != source)
+ tab.notifyChange(prop, value);
+ }
+ }
+
+ }
+
+ /**
+ * Create a SWT Text component for the given {@link ConfProp} text
+ * configuration property.
+ *
+ * @param listener
+ * @param parent
+ * @param prop
+ * @return
+ */
+ private Text createConfText(ModifyListener listener, Composite parent,
+ ConfProp prop) {
+
+ Text text = new Text(parent, SWT.SINGLE | SWT.BORDER);
+ GridData data = new GridData(GridData.FILL_HORIZONTAL);
+ text.setLayoutData(data);
+ text.setData("hProp", prop);
+ text.setText(location.getConfProp(prop));
+ text.addModifyListener(listener);
+
+ return text;
+ }
+
+ /**
+ * Create a SWT Checked Button component for the given {@link ConfProp}
+ * boolean configuration property.
+ *
+ * @param listener
+ * @param parent
+ * @param prop
+ * @return
+ */
+ private Button createConfCheckButton(SelectionListener listener,
+ Composite parent, ConfProp prop, String text) {
+
+ Button button = new Button(parent, SWT.CHECK);
+ button.setText(text);
+ button.setData("hProp", prop);
+ button.setSelection(location.getConfProp(prop).equalsIgnoreCase("yes"));
+ button.addSelectionListener(listener);
+
+ return button;
+ }
+
+ /**
+ * Create editor entry for the given configuration property. The editor is
+ * a couple (Label, Text).
+ *
+ * @param listener the listener to trigger on property change
+ * @param parent the SWT parent container
+ * @param prop the property to create an editor for
+ * @param labelText a label (null will defaults to the property name)
+ *
+ * @return a SWT Text field
+ */
+ private Text createConfLabelText(ModifyListener listener,
+ Composite parent, ConfProp prop, String labelText) {
+
+ Label label = new Label(parent, SWT.NONE);
+ if (labelText == null)
+ labelText = prop.name;
+ label.setText(labelText);
+
+ return createConfText(listener, parent, prop);
+ }
+
+ /**
+ * Create an editor entry for the given configuration name
+ *
+ * @param listener the listener to trigger on property change
+ * @param parent the SWT parent container
+ * @param propName the name of the property to create an editor for
+ * @param labelText a label (null will defaults to the property name)
+ *
+ * @return a SWT Text field
+ */
+ private Text createConfNameEditor(ModifyListener listener,
+ Composite parent, String propName, String labelText) {
+
+ {
+ ConfProp prop = ConfProp.getByName(propName);
+ if (prop != null)
+ return createConfLabelText(listener, parent, prop, labelText);
+ }
+
+ Label label = new Label(parent, SWT.NONE);
+ if (labelText == null)
+ labelText = propName;
+ label.setText(labelText);
+
+ Text text = new Text(parent, SWT.SINGLE | SWT.BORDER);
+ GridData data = new GridData(GridData.FILL_HORIZONTAL);
+ text.setLayoutData(data);
+ text.setData("hPropName", propName);
+ text.setText(location.getConfProp(propName));
+ text.addModifyListener(listener);
+
+ return text;
+ }
+
+ /**
+ * Main parameters of the Hadoop location:
+ * <li> host and port of the Map/Reduce master (Job tracker)
+ * <li> host and port of the DFS master (Name node)
+ * <li> SOCKS proxy
+ */
+ private class TabMain implements TabListener, ModifyListener,
+ SelectionListener {
+
+ // Dispatches property changes between this tab and the other tabs
+ TabMediator mediator;
+
+ // Editor for the display name of this Hadoop location
+ Text locationName;
+
+ // Host of the Map/Reduce master (Job Tracker)
+ Text textJTHost;
+
+ // Host of the DFS master (Name Node)
+ Text textNNHost;
+
+ // When checked, the Name Node host field is disabled (NN colocated
+ // with the Job Tracker)
+ Button colocateMasters;
+
+ Text textJTPort;
+
+ Text textNNPort;
+
+ Text userName;
+
+ // Enables/disables the SOCKS proxy host/port fields
+ Button useSocksProxy;
+
+ Text socksProxyHost;
+
+ Text socksProxyPort;
+
+ /**
+ * Build the "General" tab and attach it to the mediator's tab folder.
+ *
+ * @param mediator the mediator that dispatches property changes
+ */
+ TabMain(TabMediator mediator) {
+ this.mediator = mediator;
+ TabItem tab = new TabItem(mediator.folder, SWT.NONE);
+ tab.setText("General");
+ tab.setToolTipText("General location parameters");
+ tab.setImage(circle);
+ tab.setControl(createControl(mediator.folder));
+ }
+
+ /**
+ * Create the widgets of the "General" tab: location name, M/R master,
+ * DFS master, user name and SOCKS proxy groups.
+ *
+ * @param parent the SWT parent container
+ * @return the top-level control of this tab
+ */
+ private Control createControl(Composite parent) {
+
+ Composite panel = new Composite(parent, SWT.FILL);
+ panel.setLayout(new GridLayout(2, false));
+
+ GridData data;
+
+ /*
+ * Location name
+ */
+ {
+ Composite subpanel = new Composite(panel, SWT.FILL);
+ subpanel.setLayout(new GridLayout(2, false));
+ data = new GridData();
+ data.horizontalSpan = 2;
+ data.horizontalAlignment = SWT.FILL;
+ subpanel.setLayoutData(data);
+
+ locationName =
+ createConfLabelText(this, subpanel, ConfProp.PI_LOCATION_NAME,
+ "&Location name:");
+ }
+
+ /*
+ * Map/Reduce group
+ */
+ {
+ Group groupMR = new Group(panel, SWT.SHADOW_NONE);
+ groupMR.setText("Map/Reduce Master");
+ groupMR.setToolTipText("Address of the Map/Reduce master node "
+ + "(the Job Tracker).");
+ GridLayout layout = new GridLayout(2, false);
+ groupMR.setLayout(layout);
+ data = new GridData();
+ data.verticalAlignment = SWT.FILL;
+ data.horizontalAlignment = SWT.CENTER;
+ data.widthHint = 250;
+ groupMR.setLayoutData(data);
+
+ // Job Tracker host
+ Label label = new Label(groupMR, SWT.NONE);
+ label.setText("Host:");
+ data =
+ new GridData(GridData.BEGINNING, GridData.CENTER, false, true);
+ label.setLayoutData(data);
+
+ textJTHost =
+ createConfText(this, groupMR, ConfProp.PI_JOB_TRACKER_HOST);
+ data = new GridData(GridData.FILL, GridData.CENTER, true, true);
+ textJTHost.setLayoutData(data);
+
+ // Job Tracker port
+ label = new Label(groupMR, SWT.NONE);
+ label.setText("Port:");
+ data =
+ new GridData(GridData.BEGINNING, GridData.CENTER, false, true);
+ label.setLayoutData(data);
+
+ textJTPort =
+ createConfText(this, groupMR, ConfProp.PI_JOB_TRACKER_PORT);
+ data = new GridData(GridData.FILL, GridData.CENTER, true, true);
+ textJTPort.setLayoutData(data);
+ }
+
+ /*
+ * DFS group
+ */
+ {
+ Group groupDFS = new Group(panel, SWT.SHADOW_NONE);
+ groupDFS.setText("DFS Master");
+ groupDFS.setToolTipText("Address of the Distributed FileSystem "
+ + "master node (the Name Node).");
+ GridLayout layout = new GridLayout(2, false);
+ groupDFS.setLayout(layout);
+ data = new GridData();
+ data.horizontalAlignment = SWT.CENTER;
+ data.widthHint = 250;
+ groupDFS.setLayoutData(data);
+
+ colocateMasters =
+ createConfCheckButton(this, groupDFS,
+ ConfProp.PI_COLOCATE_MASTERS, "Use M/R Master host");
+ data = new GridData();
+ data.horizontalSpan = 2;
+ colocateMasters.setLayoutData(data);
+
+ // Name Node host
+ Label label = new Label(groupDFS, SWT.NONE);
+ data = new GridData();
+ label.setText("Host:");
+ label.setLayoutData(data);
+
+ textNNHost =
+ createConfText(this, groupDFS, ConfProp.PI_NAME_NODE_HOST);
+
+ // Name Node port
+ label = new Label(groupDFS, SWT.NONE);
+ data = new GridData();
+ label.setText("Port:");
+ label.setLayoutData(data);
+
+ textNNPort =
+ createConfText(this, groupDFS, ConfProp.PI_NAME_NODE_PORT);
+ }
+
+ {
+ Composite subpanel = new Composite(panel, SWT.FILL);
+ subpanel.setLayout(new GridLayout(2, false));
+ data = new GridData();
+ data.horizontalSpan = 2;
+ data.horizontalAlignment = SWT.FILL;
+ subpanel.setLayoutData(data);
+
+ userName =
+ createConfLabelText(this, subpanel, ConfProp.PI_USER_NAME,
+ "&User name:");
+ }
+
+ // SOCKS proxy group
+ {
+ Group groupSOCKS = new Group(panel, SWT.SHADOW_NONE);
+ groupSOCKS.setText("SOCKS proxy");
+ groupSOCKS.setToolTipText("Address of the SOCKS proxy to use "
+ + "to connect to the infrastructure.");
+ GridLayout layout = new GridLayout(2, false);
+ groupSOCKS.setLayout(layout);
+ data = new GridData();
+ data.horizontalAlignment = SWT.CENTER;
+ data.horizontalSpan = 2;
+ data.widthHint = 250;
+ groupSOCKS.setLayoutData(data);
+
+ useSocksProxy =
+ createConfCheckButton(this, groupSOCKS,
+ ConfProp.PI_SOCKS_PROXY_ENABLE, "Enable SOCKS proxy");
+ data = new GridData();
+ data.horizontalSpan = 2;
+ useSocksProxy.setLayoutData(data);
+
+ // SOCKS proxy host
+ Label label = new Label(groupSOCKS, SWT.NONE);
+ data = new GridData();
+ label.setText("Host:");
+ label.setLayoutData(data);
+
+ socksProxyHost =
+ createConfText(this, groupSOCKS, ConfProp.PI_SOCKS_PROXY_HOST);
+
+ // SOCKS proxy port
+ label = new Label(groupSOCKS, SWT.NONE);
+ data = new GridData();
+ label.setText("Port:");
+ label.setLayoutData(data);
+
+ socksProxyPort =
+ createConfText(this, groupSOCKS, ConfProp.PI_SOCKS_PROXY_PORT);
+ }
+
+ // Update the state of all widgets according to the current values!
+ reloadConfProp(ConfProp.PI_COLOCATE_MASTERS);
+ reloadConfProp(ConfProp.PI_SOCKS_PROXY_ENABLE);
+ reloadConfProp(ConfProp.PI_JOB_TRACKER_HOST);
+
+ return panel;
+ }
+
+ /**
+ * Reload the given configuration property value
+ *
+ * @param prop
+ */
+ private void reloadConfProp(ConfProp prop) {
+ this.notifyChange(prop, location.getConfProp(prop));
+ }
+
+ /**
+ * Reflect a property change into the widgets of this tab. Properties
+ * not displayed here fall through the switch and are ignored.
+ * NOTE(review): propValue is passed straight to Text#setText(), which
+ * throws on null — assumes every displayed property has a value in the
+ * location; confirm.
+ */
+ public void notifyChange(ConfProp prop, String propValue) {
+ switch (prop) {
+ case PI_JOB_TRACKER_HOST: {
+ textJTHost.setText(propValue);
+ break;
+ }
+ case PI_JOB_TRACKER_PORT: {
+ textJTPort.setText(propValue);
+ break;
+ }
+ case PI_LOCATION_NAME: {
+ locationName.setText(propValue);
+ break;
+ }
+ case PI_USER_NAME: {
+ userName.setText(propValue);
+ break;
+ }
+ case PI_COLOCATE_MASTERS: {
+ // Null checks: this can fire before all widgets are created
+ if (colocateMasters != null) {
+ boolean colocate = propValue.equalsIgnoreCase("yes");
+ colocateMasters.setSelection(colocate);
+ if (textNNHost != null) {
+ textNNHost.setEnabled(!colocate);
+ }
+ }
+ break;
+ }
+ case PI_NAME_NODE_HOST: {
+ textNNHost.setText(propValue);
+ break;
+ }
+ case PI_NAME_NODE_PORT: {
+ textNNPort.setText(propValue);
+ break;
+ }
+ case PI_SOCKS_PROXY_ENABLE: {
+ // Null checks: this can fire before all widgets are created
+ if (useSocksProxy != null) {
+ boolean useProxy = propValue.equalsIgnoreCase("yes");
+ useSocksProxy.setSelection(useProxy);
+ if (socksProxyHost != null)
+ socksProxyHost.setEnabled(useProxy);
+ if (socksProxyPort != null)
+ socksProxyPort.setEnabled(useProxy);
+ }
+ break;
+ }
+ case PI_SOCKS_PROXY_HOST: {
+ socksProxyHost.setText(propValue);
+ break;
+ }
+ case PI_SOCKS_PROXY_PORT: {
+ socksProxyPort.setText(propValue);
+ break;
+ }
+ }
+ }
+
+ /* @inheritDoc */
+ public void modifyText(ModifyEvent e) {
+ final Text text = (Text) e.widget;
+ final ConfProp prop = (ConfProp) text.getData("hProp");
+ // Forward the edit to the mediator on the UI thread
+ Display.getDefault().syncExec(new Runnable() {
+ public void run() {
+ mediator.notifyChange(TabMain.this, prop, text.getText());
+ }
+ });
+ }
+
+ /* @inheritDoc */
+ public void widgetDefaultSelected(SelectionEvent e) {
+ this.widgetSelected(e);
+ }
+
+ /* @inheritDoc */
+ public void widgetSelected(SelectionEvent e) {
+ final Button button = (Button) e.widget;
+ final ConfProp prop = (ConfProp) button.getData("hProp");
+
+ Display.getDefault().syncExec(new Runnable() {
+ public void run() {
+ // We want to receive the update also!
+ mediator.notifyChange(null, prop, button.getSelection() ? "yes"
+ : "no");
+ }
+ });
+ }
+
+ }
+
+ /**
+ * Tab showing an editor for every raw Hadoop configuration property,
+ * sorted by property name.
+ */
+ private class TabAdvanced implements TabListener, ModifyListener {
+ TabMediator mediator;
+
+ private Composite panel;
+
+ // Maps property names to their editor widget, sorted by name
+ private Map<String, Text> textMap = new TreeMap<String, Text>();
+
+ /**
+ * Build the "Advanced parameters" tab and attach it to the mediator's
+ * tab folder.
+ *
+ * @param mediator the mediator that dispatches property changes
+ */
+ TabAdvanced(TabMediator mediator) {
+ this.mediator = mediator;
+ TabItem tab = new TabItem(mediator.folder, SWT.NONE);
+ tab.setText("Advanced parameters");
+ tab.setToolTipText("Access to advanced Hadoop parameters");
+ tab.setImage(circle);
+ tab.setControl(createControl(mediator.folder));
+
+ }
+
+ /**
+ * Create a scrollable panel with one editor per configuration entry.
+ *
+ * @param parent the SWT parent container
+ * @return the top-level control of this tab
+ */
+ private Control createControl(Composite parent) {
+ ScrolledComposite sc =
+ new ScrolledComposite(parent, SWT.BORDER | SWT.H_SCROLL
+ | SWT.V_SCROLL);
+
+ panel = new Composite(sc, SWT.NONE);
+ sc.setContent(panel);
+
+ sc.setExpandHorizontal(true);
+ sc.setExpandVertical(true);
+
+ sc.setMinSize(640, 480);
+
+ GridLayout layout = new GridLayout();
+ layout.numColumns = 2;
+ layout.makeColumnsEqualWidth = false;
+ panel.setLayout(layout);
+ panel.setLayoutData(new GridData(GridData.FILL, GridData.FILL, true,
+ true, 1, 1));
+
+ // Sort by property name
+ Configuration config = location.getConfiguration();
+ SortedMap<String, String> map = new TreeMap<String, String>();
+ Iterator<Entry<String, String>> it = config.iterator();
+ while (it.hasNext()) {
+ Entry<String, String> entry = it.next();
+ map.put(entry.getKey(), entry.getValue());
+ }
+
+ for (Entry<String, String> entry : map.entrySet()) {
+ Text text = createConfNameEditor(this, panel, entry.getKey(), null);
+ textMap.put(entry.getKey(), text);
+ }
+
+ sc.setMinSize(panel.computeSize(SWT.DEFAULT, SWT.DEFAULT));
+
+ return sc;
+ }
+
+ /**
+ * Reflect a property change into the matching editor, if any.
+ */
+ public void notifyChange(ConfProp prop, final String propValue) {
+ Text text = textMap.get(prop.name);
+ // Guard: a property absent from the configuration has no editor here
+ if (text != null)
+ text.setText(propValue);
+ }
+
+ /**
+ * Forward an edit to the mediator, either as a typed property (hProp)
+ * or as a free-form property name (hPropName).
+ */
+ public void modifyText(ModifyEvent e) {
+ final Text text = (Text) e.widget;
+ Object hProp = text.getData("hProp");
+ final ConfProp prop = (hProp != null) ? (ConfProp) hProp : null;
+ Object hPropName = text.getData("hPropName");
+ final String propName =
+ (hPropName != null) ? (String) hPropName : null;
+
+ Display.getDefault().syncExec(new Runnable() {
+ public void run() {
+ if (prop != null)
+ mediator.notifyChange(TabAdvanced.this, prop, text.getText());
+ else
+ mediator
+ .notifyChange(TabAdvanced.this, propName, text.getText());
+ }
+ });
+ }
+ }
+
+}
diff --git a/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/HadoopServerSelectionListContentProvider.java b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/HadoopServerSelectionListContentProvider.java
new file mode 100644
index 0000000..73da03f
--- /dev/null
+++ b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/HadoopServerSelectionListContentProvider.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.debug.core.cluster;
+
+import org.apache.hdt.core.cluster.HadoopCluster;
+import org.apache.hdt.core.cluster.ServerRegistry;
+import org.eclipse.jface.viewers.IContentProvider;
+import org.eclipse.jface.viewers.ILabelProviderListener;
+import org.eclipse.jface.viewers.IStructuredContentProvider;
+import org.eclipse.jface.viewers.ITableLabelProvider;
+import org.eclipse.jface.viewers.Viewer;
+import org.eclipse.swt.graphics.Image;
+
+/**
+ * Provider that enables selection of a predefined Hadoop server.
+ * Acts both as the content provider (rows come from the singleton
+ * {@link ServerRegistry}) and as the table label provider (two columns:
+ * location name and master host name).
+ */
+
+public class HadoopServerSelectionListContentProvider implements
+    IContentProvider, ITableLabelProvider, IStructuredContentProvider {
+  public void dispose() {
+
+  }
+
+  public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
+
+  }
+
+  public Image getColumnImage(Object element, int columnIndex) {
+    // No per-column images
+    return null;
+  }
+
+  /**
+   * Column 0: location name; column 1: master host name; anything else
+   * falls back to toString().
+   */
+  public String getColumnText(Object element, int columnIndex) {
+    if (element instanceof HadoopCluster) {
+      HadoopCluster location = (HadoopCluster) element;
+      if (columnIndex == 0) {
+        return location.getLocationName();
+
+      } else if (columnIndex == 1) {
+        return location.getMasterHostName();
+      }
+    }
+
+    return element.toString();
+  }
+
+  public void addListener(ILabelProviderListener listener) {
+
+  }
+
+  public boolean isLabelProperty(Object element, String property) {
+    return false;
+  }
+
+  public void removeListener(ILabelProviderListener listener) {
+
+  }
+
+  public Object[] getElements(Object inputElement) {
+    // Input object is ignored: rows always come from the server registry
+    return ServerRegistry.getInstance().getServers().toArray();
+  }
+}
diff --git a/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/IHadoopServerListener.java b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/IHadoopServerListener.java
new file mode 100644
index 0000000..dc80300
--- /dev/null
+++ b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/IHadoopServerListener.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.debug.core.cluster;
+
+import org.apache.hdt.core.cluster.HadoopCluster;
+
+/**
+ * Interface for monitoring server changes
+ */
+public interface IHadoopServerListener {
+  /**
+   * Called when a Hadoop server location changes.
+   *
+   * @param location the location that changed
+   * @param type the kind of change event
+   */
+  void serverChanged(HadoopCluster location, int type);
+}
diff --git a/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard.java b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard.java
new file mode 100644
index 0000000..b3c7250
--- /dev/null
+++ b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/cluster/RunOnHadoopWizard.java
@@ -0,0 +1,383 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.debug.core.cluster;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hdt.debug.core.Activator;
+import org.apache.hdt.core.dialogs.ErrorMessageDialog;
+import org.apache.hdt.core.cluster.HadoopCluster;
+import org.apache.hdt.core.cluster.utils.JarModule;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.mapred.JobConf;
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IPath;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.Path;
+import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
+import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
+import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
+import org.eclipse.jdt.launching.JavaRuntime;
+import org.eclipse.jface.viewers.TableViewer;
+import org.eclipse.jface.wizard.Wizard;
+import org.eclipse.jface.wizard.WizardPage;
+import org.eclipse.swt.SWT;
+import org.eclipse.swt.events.SelectionEvent;
+import org.eclipse.swt.events.SelectionListener;
+import org.eclipse.swt.layout.FillLayout;
+import org.eclipse.swt.layout.GridData;
+import org.eclipse.swt.layout.GridLayout;
+import org.eclipse.swt.widgets.Button;
+import org.eclipse.swt.widgets.Composite;
+import org.eclipse.swt.widgets.Label;
+import org.eclipse.swt.widgets.Table;
+import org.eclipse.swt.widgets.TableColumn;
+import org.eclipse.swt.widgets.Text;
+
+/**
+ * Wizard for publishing a job to a Hadoop server.
+ */
+
+public class RunOnHadoopWizard extends Wizard {
+
+ private MainWizardPage mainPage;
+
+ private HadoopLocationWizard createNewPage;
+
+ /**
+ * The file resource (containing a main()) to run on the Hadoop location
+ */
+ private IFile resource;
+
+ /**
+ * The launch configuration to update
+ */
+ private ILaunchConfigurationWorkingCopy iConf;
+
+ private IProgressMonitor progressMonitor;
+
+ public RunOnHadoopWizard(IFile resource,
+ ILaunchConfigurationWorkingCopy iConf) {
+ this.resource = resource;
+ this.iConf = iConf;
+ setForcePreviousAndNextButtons(true);
+ setNeedsProgressMonitor(true);
+ setWindowTitle("Run on Hadoop");
+ }
+
+ /**
+ * This wizard contains 2 pages:
+ * <li> the first one lets the user choose an already existing location
+ * <li> the second one allows the user to create a new location, in case it
+ * does not already exist
+ */
+ /* @inheritDoc */
+ @Override
+ public void addPages() {
+ addPage(this.mainPage = new MainWizardPage());
+ addPage(this.createNewPage = new HadoopLocationWizard());
+ }
+
+ /**
+ * Performs any actions appropriate in response to the user having pressed
+ * the Finish button, or refuse if finishing now is not permitted.
+ */
+ /* @inheritDoc */
+ @Override
+ public boolean performFinish() {
+
+ /*
+ * Create a new location or get an existing one
+ */
+ HadoopCluster location = null;
+ if (mainPage.createNew.getSelection()) {
+ location = createNewPage.performFinish();
+
+ } else if (mainPage.table.getSelection().length == 1) {
+ location = (HadoopCluster) mainPage.table.getSelection()[0].getData();
+ }
+
+ if (location == null)
+ return false;
+
+ /*
+ * Get the base directory of the plug-in for storing configurations and
+ * JARs
+ */
+ File baseDir = Activator.getDefault().getStateLocation().toFile();
+
+ // Package the Job into a JAR
+ File jarFile = JarModule.createJarPackage(resource);
+ if (jarFile == null) {
+ ErrorMessageDialog.display("Run on Hadoop",
+ "Unable to create or locate the JAR file for the Job");
+ return false;
+ }
+
+ /*
+ * Generate a temporary Hadoop configuration directory and add it to the
+ * classpath of the launch configuration
+ */
+
+ File confDir;
+ try {
+ confDir = File.createTempFile("hadoop-conf-", "", baseDir);
+ confDir.delete();
+ confDir.mkdirs();
+ if (!confDir.isDirectory()) {
+ ErrorMessageDialog.display("Run on Hadoop",
+ "Cannot create temporary directory: " + confDir);
+ return false;
+ }
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ return false;
+ }
+
+ // Prepare the Hadoop configuration
+ JobConf conf = new JobConf(location.getConfiguration());
+ conf.setJar(jarFile.getAbsolutePath());
+
+ // Write it to the disk file
+ try {
+ // File confFile = File.createTempFile("core-site-", ".xml",
+ // confDir);
+ File confFile = new File(confDir, "core-site.xml");
+ FileOutputStream fos = new FileOutputStream(confFile);
+ try {
+ conf.writeXml(fos);
+ fos.close();
+ fos = null;
+ } finally {
+ IOUtils.closeStream(fos);
+ }
+
+ } catch (IOException ioe) {
+ ioe.printStackTrace();
+ return false;
+ }
+
+ // Setup the Launch class path
+ List<String> classPath;
+ try {
+ classPath =
+ iConf.getAttribute(
+ IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
+ new ArrayList());
+ IPath confIPath = new Path(confDir.getAbsolutePath());
+ IRuntimeClasspathEntry cpEntry =
+ JavaRuntime.newArchiveRuntimeClasspathEntry(confIPath);
+ classPath.add(0, cpEntry.getMemento());
+ iConf.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
+ classPath);
+ iConf.setAttribute(IJavaLaunchConfigurationConstants.ATTR_PROGRAM_ARGUMENTS, mainPage.argumentsText.getText());
+
+ } catch (CoreException e) {
+ e.printStackTrace();
+ return false;
+ }
+
+ // location.runResource(resource, progressMonitor);
+ return true;
+ }
+
+ private void refreshButtons() {
+ getContainer().updateButtons();
+ }
+
+ /**
+ * Allows finish when an existing server is selected or when a new server
+ * location is defined
+ */
+ /* @inheritDoc */
+ @Override
+ public boolean canFinish() {
+ if (mainPage != null)
+ return mainPage.canFinish();
+ return false;
+ }
+
+ /**
+ * This is the main page of the wizard. It allows the user either to choose
+ * an already existing location or to indicate he wants to create a new
+ * location.
+ */
+ public class MainWizardPage extends WizardPage {
+
+ private Button createNew;
+
+ private Table table;
+ private Text argumentsText;
+
+ private Button chooseExisting;
+
+ public MainWizardPage() {
+ super("Select or define server to run on");
+ setTitle("Select Hadoop location");
+ setDescription("Select a Hadoop location to run on.");
+ }
+
+ /* @inheritDoc */
+ @Override
+ public boolean canFlipToNextPage() {
+ return createNew.getSelection();
+ }
+
+ /* @inheritDoc */
+ public void createControl(Composite parent) {
+ Composite panel = new Composite(parent, SWT.NONE);
+ panel.setLayout(new GridLayout(1, false));
+
+ // Label
+ Label label = new Label(panel, SWT.NONE);
+ label.setText("Select a Hadoop Server to run on.");
+ GridData gData = new GridData(GridData.FILL_BOTH);
+ gData.grabExcessVerticalSpace = false;
+ label.setLayoutData(gData);
+
+ // Create location button
+ createNew = new Button(panel, SWT.RADIO);
+ createNew.setText("Define a new Hadoop server location");
+ createNew.setLayoutData(gData);
+ createNew.addSelectionListener(new SelectionListener() {
+ public void widgetDefaultSelected(SelectionEvent e) {
+ }
+
+ public void widgetSelected(SelectionEvent e) {
+ setPageComplete(true);
+ RunOnHadoopWizard.this.refreshButtons();
+ }
+ });
+ createNew.setSelection(true);
+
+ // Select existing location button
+ chooseExisting = new Button(panel, SWT.RADIO);
+ chooseExisting
+ .setText("Choose an existing server from the list below");
+ chooseExisting.setLayoutData(gData);
+ chooseExisting.addSelectionListener(new SelectionListener() {
+ public void widgetDefaultSelected(SelectionEvent e) {
+ }
+
+ public void widgetSelected(SelectionEvent e) {
+ if (chooseExisting.getSelection()
+ && (table.getSelectionCount() == 0)) {
+ if (table.getItems().length > 0) {
+ table.setSelection(0);
+ }
+ }
+ RunOnHadoopWizard.this.refreshButtons();
+ }
+ });
+
+ // Table of existing locations
+ Composite serverListPanel = new Composite(panel, SWT.FILL);
+ gData = new GridData(GridData.FILL_BOTH);
+ gData.horizontalSpan = 1;
+ serverListPanel.setLayoutData(gData);
+
+ FillLayout layout = new FillLayout();
+ layout.marginHeight = layout.marginWidth = 12;
+ serverListPanel.setLayout(layout);
+
+ table =
+ new Table(serverListPanel, SWT.BORDER | SWT.H_SCROLL
+ | SWT.V_SCROLL | SWT.FULL_SELECTION);
+ table.setHeaderVisible(true);
+ table.setLinesVisible(true);
+
+ TableColumn nameColumn = new TableColumn(table, SWT.LEFT);
+ nameColumn.setText("Location");
+ nameColumn.setWidth(450);
+
+ TableColumn hostColumn = new TableColumn(table, SWT.LEFT);
+ hostColumn.setText("Master host name");
+ hostColumn.setWidth(250);
+
+ // If the user select one entry, switch to "chooseExisting"
+ table.addSelectionListener(new SelectionListener() {
+ public void widgetDefaultSelected(SelectionEvent e) {
+ }
+
+ public void widgetSelected(SelectionEvent e) {
+ chooseExisting.setSelection(true);
+ createNew.setSelection(false);
+ setPageComplete(table.getSelectionCount() == 1);
+ RunOnHadoopWizard.this.refreshButtons();
+ }
+ });
+
+ // Label
+ Label argumentsLabel = new Label(panel, SWT.NONE);
+ argumentsLabel.setText("Arguments:");
+ GridData gDataArgumentsLabel = new GridData(GridData.FILL_BOTH);
+ gDataArgumentsLabel.grabExcessVerticalSpace = false;
+ argumentsLabel.setLayoutData(gDataArgumentsLabel);
+
+ // Textbox
+ argumentsText = new Text(panel, SWT.NONE);
+ try {
+ argumentsText.setText(iConf.getAttribute(IJavaLaunchConfigurationConstants.ATTR_PROGRAM_ARGUMENTS, ""));
+ } catch (CoreException e1) {
+ e1.printStackTrace();
+ }
+ GridData gDataArgumentsText = new GridData(GridData.FILL_BOTH);
+ gDataArgumentsText.grabExcessVerticalSpace = false;
+ argumentsText.setLayoutData(gDataArgumentsText);
+
+
+ TableViewer viewer = new TableViewer(table);
+ HadoopServerSelectionListContentProvider provider =
+ new HadoopServerSelectionListContentProvider();
+ viewer.setContentProvider(provider);
+ viewer.setLabelProvider(provider);
+ viewer.setInput(new Object());
+ // don't care, get from singleton server registry
+
+ this.setControl(panel);
+ }
+
+ /**
+ * Returns whether this page state allows the Wizard to finish or not
+ *
+ * @return can the wizard finish or not?
+ */
+ public boolean canFinish() {
+ if (!isControlCreated())
+ return false;
+
+ if (this.createNew.getSelection())
+ return getNextPage().isPageComplete();
+
+ return this.chooseExisting.getSelection();
+ }
+ }
+
+ /**
+ * @param progressMonitor
+ */
+ public void setProgressMonitor(IProgressMonitor progressMonitor) {
+ this.progressMonitor = progressMonitor;
+ }
+}
diff --git a/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/launch/HadoopApplicationLaunchShortcut.java b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/launch/HadoopApplicationLaunchShortcut.java
new file mode 100644
index 0000000..0377571
--- /dev/null
+++ b/org.apache.hdt.debug.core/src/org/apache/hdt/debug/core/launch/HadoopApplicationLaunchShortcut.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.debug.core.launch;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.hdt.debug.core.cluster.RunOnHadoopWizard;
+import org.eclipse.core.resources.IFile;
+import org.eclipse.core.resources.IResource;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.debug.core.ILaunchConfiguration;
+import org.eclipse.debug.core.ILaunchConfigurationType;
+import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
+import org.eclipse.jdt.core.IJavaProject;
+import org.eclipse.jdt.core.IType;
+import org.eclipse.jdt.core.JavaCore;
+import org.eclipse.jdt.debug.ui.launchConfigurations.JavaApplicationLaunchShortcut;
+import org.eclipse.jdt.launching.IJavaLaunchConfigurationConstants;
+import org.eclipse.jdt.launching.IRuntimeClasspathEntry;
+import org.eclipse.jdt.launching.JavaRuntime;
+import org.eclipse.jface.wizard.IWizard;
+import org.eclipse.jface.wizard.WizardDialog;
+import org.eclipse.swt.widgets.Display;
+import org.eclipse.swt.widgets.Shell;
+
+/**
+ * Add a shortcut "Run on Hadoop" to the Run menu
+ */
+
+public class HadoopApplicationLaunchShortcut extends
+ JavaApplicationLaunchShortcut {
+
+ static Logger log =
+ Logger.getLogger(HadoopApplicationLaunchShortcut.class.getName());
+
+ // private ActionDelegate delegate = new RunOnHadoopActionDelegate();
+
+ public HadoopApplicationLaunchShortcut() {
+ }
+
+ /**
+ * Find or create a launch configuration for the given type, force a
+ * manually-built runtime classpath on it, then let the user pick or
+ * define a Hadoop location through the RunOnHadoopWizard. Returns null
+ * when the user cancels or when configuration fails.
+ */
+ /* @inheritDoc */
+ @Override
+ protected ILaunchConfiguration findLaunchConfiguration(IType type,
+ ILaunchConfigurationType configType) {
+
+ // Find an existing or create a launch configuration (Standard way)
+ ILaunchConfiguration iConf =
+ super.findLaunchConfiguration(type, configType);
+ if (iConf == null) iConf = super.createConfiguration(type);
+ ILaunchConfigurationWorkingCopy iConfWC;
+ try {
+ /*
+ * Tune the default launch configuration: setup run-time classpath
+ * manually
+ */
+ iConfWC = iConf.getWorkingCopy();
+
+ iConfWC.setAttribute(
+ IJavaLaunchConfigurationConstants.ATTR_DEFAULT_CLASSPATH, false);
+
+ List<String> classPath = new ArrayList<String>();
+ IResource resource = type.getResource();
+ IJavaProject project =
+ (IJavaProject) resource.getProject().getNature(JavaCore.NATURE_ID);
+ IRuntimeClasspathEntry cpEntry =
+ JavaRuntime.newDefaultProjectClasspathEntry(project);
+ classPath.add(0, cpEntry.getMemento());
+
+ iConfWC.setAttribute(IJavaLaunchConfigurationConstants.ATTR_CLASSPATH,
+ classPath);
+
+ } catch (CoreException e) {
+ e.printStackTrace();
+ // FIXME Error dialog
+ return null;
+ }
+
+ /*
+ * Update the selected configuration with a specific Hadoop location
+ * target
+ */
+ IResource resource = type.getResource();
+ if (!(resource instanceof IFile))
+ return null;
+ RunOnHadoopWizard wizard =
+ new RunOnHadoopWizard((IFile) resource, iConfWC);
+ WizardDialog dialog =
+ new WizardDialog(Display.getDefault().getActiveShell(), wizard);
+
+ dialog.create();
+ // Block until the user finishes or cancels the wizard
+ dialog.setBlockOnOpen(true);
+ if (dialog.open() != WizardDialog.OK)
+ return null;
+
+ try {
+
+ // Only save if some configuration is different.
+ if(!iConfWC.contentsEqual(iConf))
+ iConfWC.doSave();
+
+ } catch (CoreException e) {
+ e.printStackTrace();
+ // FIXME Error dialog
+ return null;
+ }
+
+ return iConfWC;
+ }
+
+ /**
+ * Was used to run the RunOnHadoopWizard inside and provide it a
+ * ProgressMonitor
+ */
+ static class Dialog extends WizardDialog {
+ public Dialog(Shell parentShell, IWizard newWizard) {
+ super(parentShell, newWizard);
+ }
+
+ @Override
+ public void create() {
+ super.create();
+
+ ((RunOnHadoopWizard) getWizard())
+ .setProgressMonitor(getProgressMonitor());
+ }
+ }
+}
diff --git a/org.apache.hdt.debug.ui/.classpath b/org.apache.hdt.debug.ui/.classpath
new file mode 100644
index 0000000..ad32c83
--- /dev/null
+++ b/org.apache.hdt.debug.ui/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
+ <classpathentry kind="src" path="src"/>
+ <classpathentry kind="output" path="bin"/>
+</classpath>
diff --git a/org.apache.hdt.debug.ui/.project b/org.apache.hdt.debug.ui/.project
new file mode 100644
index 0000000..4b6b054
--- /dev/null
+++ b/org.apache.hdt.debug.ui/.project
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>org.apache.hdt.debug.ui</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.ManifestBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.SchemaBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.pde.PluginNature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+</projectDescription>
diff --git a/org.apache.hdt.debug.ui/.settings/org.eclipse.jdt.core.prefs b/org.apache.hdt.debug.ui/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..c537b63
--- /dev/null
+++ b/org.apache.hdt.debug.ui/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,7 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6
diff --git a/org.apache.hdt.debug.ui/META-INF/MANIFEST.MF b/org.apache.hdt.debug.ui/META-INF/MANIFEST.MF
new file mode 100644
index 0000000..cbf2088
--- /dev/null
+++ b/org.apache.hdt.debug.ui/META-INF/MANIFEST.MF
@@ -0,0 +1,13 @@
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: Hadoop Development Tools Debug UI
+Bundle-SymbolicName: org.apache.hdt.debug.ui;singleton:=true
+Bundle-Version: 0.0.0
+Bundle-Activator: org.apache.hdt.debug.ui.Activator
+Bundle-Vendor: Apache Software Foundation
+Require-Bundle: org.eclipse.ui,
+ org.eclipse.core.runtime,
+ org.apache.hdt.debug.core,
+ org.eclipse.debug.ui;bundle-version="3.8.1"
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6
+Bundle-ActivationPolicy: lazy
diff --git a/org.apache.hdt.debug.ui/bin/org/apache/hdt/debug/ui/Activator.class b/org.apache.hdt.debug.ui/bin/org/apache/hdt/debug/ui/Activator.class
new file mode 100644
index 0000000..0615c4b
--- /dev/null
+++ b/org.apache.hdt.debug.ui/bin/org/apache/hdt/debug/ui/Activator.class
Binary files differ
diff --git a/org.apache.hdt.debug.ui/build.properties b/org.apache.hdt.debug.ui/build.properties
new file mode 100644
index 0000000..e9863e2
--- /dev/null
+++ b/org.apache.hdt.debug.ui/build.properties
@@ -0,0 +1,5 @@
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+ .,\
+ plugin.xml
diff --git a/org.apache.hdt.debug.ui/plugin.xml b/org.apache.hdt.debug.ui/plugin.xml
new file mode 100644
index 0000000..9f7feb6
--- /dev/null
+++ b/org.apache.hdt.debug.ui/plugin.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?eclipse version="3.4"?>
+<plugin>
+ <extension
+ point="org.eclipse.debug.ui.launchShortcuts">
+ <shortcut
+ class="org.apache.hdt.debug.core.launch.HadoopApplicationLaunchShortcut"
+ icon="resources/elephantblue16x16.gif"
+ id="org.apache.hadoop.eclipse.launch.shortcut"
+ label="Run on Hadoop"
+ modes="run">
+ <contextualLaunch>
+
+ <enablement>
+ <with variable="selection">
+ <count value="1"/>
+ <iterate>
+ <or>
+ <test property="org.eclipse.jdt.launching.hasMain"/>
+ <and>
+ <test property="org.eclipse.jdt.launching.isContainer"/>
+ <test property="org.eclipse.jdt.launching.hasProjectNature" args="org.eclipse.jdt.core.javanature"/>
+ <test property="org.eclipse.jdt.launching.hasProjectNature" args="org.apache.hadoop.eclipse.Nature"/>
+ </and>
+ </or>
+ </iterate>
+ </with>
+ </enablement>
+ </contextualLaunch>
+ </shortcut>
+ </extension>
+</plugin>
diff --git a/org.apache.hdt.debug.ui/resources/elephantblue16x16.gif b/org.apache.hdt.debug.ui/resources/elephantblue16x16.gif
new file mode 100644
index 0000000..0927b13
--- /dev/null
+++ b/org.apache.hdt.debug.ui/resources/elephantblue16x16.gif
Binary files differ
diff --git a/org.apache.hdt.debug.ui/src/org/apache/hdt/debug/ui/Activator.java b/org.apache.hdt.debug.ui/src/org/apache/hdt/debug/ui/Activator.java
new file mode 100644
index 0000000..f37503f
--- /dev/null
+++ b/org.apache.hdt.debug.ui/src/org/apache/hdt/debug/ui/Activator.java
@@ -0,0 +1,50 @@
+package org.apache.hdt.debug.ui;
+
+import org.eclipse.ui.plugin.AbstractUIPlugin;
+import org.osgi.framework.BundleContext;
+
+/**
+ * The activator class controls the plug-in life cycle
+ */
+public class Activator extends AbstractUIPlugin {
+
+ // The plug-in ID
+ public static final String PLUGIN_ID = "org.apache.hdt.debug.ui"; //$NON-NLS-1$
+
+ // The shared instance
+ private static Activator plugin;
+
+ /**
+ * The constructor
+ */
+ public Activator() {
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
+ */
+ public void start(BundleContext context) throws Exception {
+ super.start(context);
+ plugin = this;
+ }
+
+ /*
+ * (non-Javadoc)
+ * @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
+ */
+ public void stop(BundleContext context) throws Exception {
+ plugin = null;
+ super.stop(context);
+ }
+
+ /**
+ * Returns the shared instance
+ *
+ * @return the shared instance
+ */
+ public static Activator getDefault() {
+ return plugin;
+ }
+
+}