KNOX-509 KnoxSSO with WebSSO support and picketlink provider for SAML WebSSO
diff --git a/CHANGES b/CHANGES
index 9f2677a..34aeee7 100644
--- a/CHANGES
+++ b/CHANGES
@@ -12,6 +12,10 @@
* [KNOX-462] - Proper error message when root tag of topology file incorrect
* [KNOX-501] - Avoid NPE in case of passing invalid argument to KnoxCli.
+** Bug
+ * [KNOX-378] - Knox rewrites numbers in JSON to engineering notation
+
+
------------------------------------------------------------------------------
Release Notes - Apache Knox - Version 0.5.1
------------------------------------------------------------------------------
@@ -27,6 +31,7 @@
* [KNOX-459] - fixed LDAP connection leaks in KnoxLdapRealm
* [KNOX-464] - Location headers have wrong hostname when used behind load balancer
* [KNOX-468] - update group lookup topologies to configure cache manager
+
------------------------------------------------------------------------------
Release Notes - Apache Knox - Version 0.5.0
------------------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServletContextListener.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServletContextListener.java
index 62dd684..b84e649 100644
--- a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServletContextListener.java
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/HaServletContextListener.java
@@ -36,7 +36,7 @@
public class HaServletContextListener implements ServletContextListener {
- public static final String PROVIDER_ATTRIBUTE_NAME = HaProvider.class.getName();
+ public static final String PROVIDER_ATTRIBUTE_NAME = "haProvider";
public static final String DESCRIPTOR_LOCATION_INIT_PARAM_NAME = "haDescriptorLocation";
diff --git a/gateway-service-oozie/pom.xml b/gateway-provider-identity-assertion-common/pom.xml
similarity index 75%
rename from gateway-service-oozie/pom.xml
rename to gateway-provider-identity-assertion-common/pom.xml
index 00f8cc3..86b7ddb 100644
--- a/gateway-service-oozie/pom.xml
+++ b/gateway-provider-identity-assertion-common/pom.xml
@@ -1,3 +1,5 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@@ -14,19 +16,19 @@
See the License for the specific language governing permissions and
limitations under the License.
-->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <artifactId>gateway</artifactId>
<groupId>org.apache.knox</groupId>
+ <artifactId>gateway</artifactId>
<version>0.6.0-SNAPSHOT</version>
</parent>
- <artifactId>gateway-service-oozie</artifactId>
+ <artifactId>gateway-provider-identity-assertion-common</artifactId>
- <name>gateway-service-oozie</name>
- <description>The extensions to the gateway for supporting Oozie.</description>
+ <name>gateway-provider-identity-assertion-common</name>
+  <description>An extension to the gateway that provides an easy integration point for asserting identity to Hadoop clusters using some custom mapping facility.</description>
<licenses>
<license>
@@ -37,43 +39,40 @@
</licenses>
<dependencies>
+
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ </dependency>
+
<dependency>
<groupId>${gateway-group}</groupId>
<artifactId>gateway-spi</artifactId>
</dependency>
+
<dependency>
<groupId>${gateway-group}</groupId>
<artifactId>gateway-provider-rewrite</artifactId>
</dependency>
<dependency>
- <groupId>org.jboss.shrinkwrap</groupId>
- <artifactId>shrinkwrap-api</artifactId>
- <version>1.0.1</version>
- </dependency>
- <dependency>
- <groupId>org.jboss.shrinkwrap</groupId>
- <artifactId>shrinkwrap-impl-base</artifactId>
- </dependency>
- <dependency>
- <groupId>org.jboss.shrinkwrap.descriptors</groupId>
- <artifactId>shrinkwrap-descriptors-api-javaee</artifactId>
- </dependency>
- <dependency>
- <groupId>org.jboss.shrinkwrap.descriptors</groupId>
- <artifactId>shrinkwrap-descriptors-impl-javaee</artifactId>
- </dependency>
-
- <dependency>
<groupId>${gateway-group}</groupId>
<artifactId>gateway-test-utils</artifactId>
<scope>test</scope>
</dependency>
+
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>test-jetty-servlet</artifactId>
+ <scope>test</scope>
+ </dependency>
+
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
@@ -85,11 +84,16 @@
<scope>test</scope>
</dependency>
<dependency>
+ <groupId>org.xmlmatchers</groupId>
+ <artifactId>xml-matchers</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
-
</project>
\ No newline at end of file
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/IdentityAsserterMessages.java
similarity index 61%
copy from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
copy to gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/IdentityAsserterMessages.java
index 7a88a26..c4ada6b 100644
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/IdentityAsserterMessages.java
@@ -15,10 +15,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.picketlink;
+package org.apache.hadoop.gateway;
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+@Messages(logger="org.apache.hadoop.gateway")
+public interface IdentityAsserterMessages {
+
+ @Message( level = MessageLevel.WARN, text = "Skipping unencodable parameter {0}={1}, {2}: {3}" )
+ void skippingUnencodableParameter( String name, String value, String encoding, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
}
diff --git a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAsserterDeploymentContributor.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAsserterDeploymentContributor.java
new file mode 100644
index 0000000..15ae296
--- /dev/null
+++ b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAsserterDeploymentContributor.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.identityasserter.common.filter;
+
+import org.apache.hadoop.gateway.deploy.DeploymentContext;
+import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
+import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
+import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
+import org.apache.hadoop.gateway.topology.Provider;
+import org.apache.hadoop.gateway.topology.Service;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+public abstract class AbstractIdentityAsserterDeploymentContributor extends ProviderDeploymentContributorBase {
+
+ @Override
+ public String getRole() {
+ return "identity-assertion";
+ }
+
+ @Override
+ public void contributeFilter( DeploymentContext context, Provider provider, Service service,
+ ResourceDescriptor resource, List<FilterParamDescriptor> params ) {
+ params = buildFilterInitParms(provider, resource, params);
+ resource.addFilter().name(getName()).role(getRole()).impl(getFilterClassname()).params(params);
+ }
+
+ public List<FilterParamDescriptor> buildFilterInitParms(Provider provider,
+ ResourceDescriptor resource, List<FilterParamDescriptor> params) {
+ // blindly add all the provider params as filter init params
+ if (params == null) {
+ params = new ArrayList<FilterParamDescriptor>();
+ }
+ Map<String, String> providerParams = provider.getParams();
+ for(Entry<String, String> entry : providerParams.entrySet()) {
+ params.add( resource.createFilterParam().name(entry.getKey().toLowerCase()).value(entry.getValue()));
+ }
+ return params;
+ }
+
+ protected abstract String getFilterClassname();
+}
diff --git a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAssertionFilter.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAssertionFilter.java
new file mode 100644
index 0000000..12a349a
--- /dev/null
+++ b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/AbstractIdentityAssertionFilter.java
@@ -0,0 +1,198 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.identityasserter.common.filter;
+
+import java.io.IOException;
+import java.security.AccessController;
+import java.security.Principal;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
+import java.util.Set;
+
+import javax.security.auth.Subject;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+
+import org.apache.hadoop.gateway.audit.api.Action;
+import org.apache.hadoop.gateway.audit.api.ActionOutcome;
+import org.apache.hadoop.gateway.audit.api.AuditService;
+import org.apache.hadoop.gateway.audit.api.AuditServiceFactory;
+import org.apache.hadoop.gateway.audit.api.Auditor;
+import org.apache.hadoop.gateway.audit.api.ResourceType;
+import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
+import org.apache.hadoop.gateway.filter.security.AbstractIdentityAssertionBase;
+import org.apache.hadoop.gateway.i18n.GatewaySpiResources;
+import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
+import org.apache.hadoop.gateway.security.GroupPrincipal;
+import org.apache.hadoop.gateway.security.ImpersonatedPrincipal;
+import org.apache.hadoop.gateway.security.PrimaryPrincipal;
+
+/**
+ *
+ */
+public abstract class AbstractIdentityAssertionFilter extends
+ AbstractIdentityAssertionBase implements Filter {
+
+ private static final GatewaySpiResources RES = ResourcesFactory.get( GatewaySpiResources.class );
+ private static AuditService auditService = AuditServiceFactory.getAuditService();
+ private static Auditor auditor = auditService.getAuditor(
+ AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
+ AuditConstants.KNOX_COMPONENT_NAME );
+
+ /**
+ *
+ */
+ public AbstractIdentityAssertionFilter() {
+ super();
+ }
+
+ /**
+   * This method returns a String[] of new group principal names to use
+ * based on implementation specific mapping or lookup mechanisms.
+ * Returning null means that whatever set of GroupPrincipals is in the
+ * provided Subject is sufficient to use and no additional mapping is required.
+ * @param mappedPrincipalName username for the authenticated identity - post mapUserPrincipal mapping.
+ * @param subject the existing Subject from the authentication event which may or may not contain GroupPrincipals.
+ * @return String[] of new principal names to use as GroupPrincipals or null.
+ */
+ public abstract String[] mapGroupPrincipals(String mappedPrincipalName, Subject subject);
+
+ /**
+ * This method is used to map the username of the authenticated identity to some other
+ * principal name based on an implementation specific mechanism. It will either return
+ * a new principal name or the provided principal name if there is no mapping required.
+ * @param principalName
+ * @return new username or the provided principalName
+ */
+ public abstract String mapUserPrincipal(String principalName);
+
+ /**
+   * @param request
+ * @param response
+ * @param chain
+ * @param mappedPrincipalName
+ * @param groups
+ */
+ protected void continueChainAsPrincipal(HttpServletRequestWrapper request, ServletResponse response,
+ FilterChain chain, String mappedPrincipalName, String[] groups) throws IOException,
+ ServletException {
+ Subject subject = null;
+ Principal impersonationPrincipal = null;
+ Principal primaryPrincipal = null;
+
+ // get the current subject and determine whether we need another doAs with
+ // an impersonatedPrincipal and/or mapped group principals
+ boolean impersonationNeeded = false;
+ boolean groupsMapped = false;
+
+    // look up the current Java Subject and associated group principals
+ Subject currentSubject = Subject.getSubject(AccessController.getContext());
+ Set<?> currentGroups = currentSubject.getPrincipals(GroupPrincipal.class);
+
+ primaryPrincipal = (PrimaryPrincipal) currentSubject.getPrincipals(PrimaryPrincipal.class).toArray()[0];
+ if (primaryPrincipal != null) {
+ if (!primaryPrincipal.getName().equals(mappedPrincipalName)) {
+ impersonationNeeded = true;
+ auditService.getContext().setProxyUsername( mappedPrincipalName );
+ auditor.audit( Action.IDENTITY_MAPPING, primaryPrincipal.getName(), ResourceType.PRINCIPAL, ActionOutcome.SUCCESS );
+ }
+ }
+ else {
+ // something is amiss - authentication/federation providers should have run
+ // before identity assertion and should have ensured that the appropriate
+ // principals were added to the current subject
+ // TODO: log as appropriate
+ primaryPrincipal = new PrimaryPrincipal(((HttpServletRequest) request).getUserPrincipal().getName());
+ }
+
+ groupsMapped = groups != null || !currentGroups.isEmpty();
+
+ if (impersonationNeeded || groupsMapped) {
+ // gonna need a new subject and doAs
+ subject = new Subject();
+ Set<Principal> principals = subject.getPrincipals();
+ principals.add(primaryPrincipal);
+
+ // map group principals from current Subject into newly created Subject
+ for (Object obj : currentGroups) {
+ principals.add((Principal)obj);
+ }
+
+ if (impersonationNeeded) {
+ impersonationPrincipal = new ImpersonatedPrincipal(mappedPrincipalName);
+ subject.getPrincipals().add(impersonationPrincipal);
+ }
+ if (groupsMapped) {
+ addMappedGroupsToSubject(mappedPrincipalName, groups, subject);
+ addMappedGroupsToSubject("*", groups, subject);
+ }
+ doAs(request, response, chain, subject);
+ }
+ else {
+ doFilterInternal(request, response, chain);
+ }
+ }
+
+ private void doAs(final ServletRequest request, final ServletResponse response, final FilterChain chain, Subject subject)
+ throws IOException, ServletException {
+ try {
+ Subject.doAs(
+ subject,
+ new PrivilegedExceptionAction<Object>() {
+ public Object run() throws Exception {
+ doFilterInternal(request, response, chain);
+ return null;
+ }
+ });
+ }
+ catch (PrivilegedActionException e) {
+ Throwable t = e.getCause();
+ if (t instanceof IOException) {
+ throw (IOException) t;
+ }
+ else if (t instanceof ServletException) {
+ throw (ServletException) t;
+ }
+ else {
+ throw new ServletException(t);
+ }
+ }
+ }
+
+ private void addMappedGroupsToSubject(String mappedPrincipalName, String[] groups, Subject subject) {
+ if (groups != null) {
+ auditor.audit( Action.IDENTITY_MAPPING, mappedPrincipalName, ResourceType.PRINCIPAL,
+ ActionOutcome.SUCCESS, RES.groupsList( Arrays.toString( groups ) ) );
+
+ for (int i = 0; i < groups.length; i++) {
+ subject.getPrincipals().add(new GroupPrincipal(groups[i]));
+ }
+ }
+ }
+
+ private void doFilterInternal(ServletRequest request, ServletResponse response, FilterChain chain)
+ throws IOException, ServletException {
+ chain.doFilter(request, response);
+ }
+}
\ No newline at end of file
diff --git a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/CommonIdentityAssertionFilter.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/CommonIdentityAssertionFilter.java
new file mode 100644
index 0000000..1a20c39
--- /dev/null
+++ b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/CommonIdentityAssertionFilter.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.identityasserter.common.filter;
+
+import javax.security.auth.Subject;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+
+import org.apache.hadoop.gateway.identityasserter.common.filter.AbstractIdentityAssertionFilter;
+
+import java.io.IOException;
+import java.security.AccessController;
+
+public class CommonIdentityAssertionFilter extends AbstractIdentityAssertionFilter {
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#init(javax.servlet.FilterConfig)
+ */
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#destroy()
+ */
+ @Override
+ public void destroy() {
+ }
+
+ /**
+ * Obtain the standard javax.security.auth.Subject, retrieve the caller principal, map
+ * to the identity to be asserted as appropriate and create the provider specific
+ * assertion token. Add the assertion token to the request.
+ */
+ public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
+ throws IOException, ServletException {
+ Subject subject = Subject.getSubject(AccessController.getContext());
+
+ String principalName = getPrincipalName(subject);
+
+ String mappedPrincipalName = mapUserPrincipal(principalName);
+ String[] groups = mapGroupPrincipals(mappedPrincipalName, subject);
+
+ HttpServletRequestWrapper wrapper = wrapHttpServletRequest(
+ request, mappedPrincipalName);
+
+ continueChainAsPrincipal(wrapper, response, chain, mappedPrincipalName, groups);
+ }
+
+ public HttpServletRequestWrapper wrapHttpServletRequest(
+ ServletRequest request, String mappedPrincipalName) {
+ // wrap the request so that the proper principal is returned
+ // from request methods
+ IdentityAsserterHttpServletRequestWrapper wrapper =
+ new IdentityAsserterHttpServletRequestWrapper(
+ (HttpServletRequest)request,
+ mappedPrincipalName);
+ return wrapper;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hadoop.gateway.identityasserter.common.filter.AbstractIdentityAssertionFilter#mapGroupPrincipals(java.lang.String, javax.security.auth.Subject)
+ */
+ @Override
+ public String[] mapGroupPrincipals(String mappedPrincipalName, Subject subject) {
+ // NOP
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hadoop.gateway.identityasserter.common.filter.AbstractIdentityAssertionFilter#mapUserPrincipal(java.lang.String)
+ */
+ @Override
+ public String mapUserPrincipal(String principalName) {
+ // NOP
+ return principalName;
+ }
+}
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAsserterHttpServletRequestWrapper.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
similarity index 95%
rename from gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAsserterHttpServletRequestWrapper.java
rename to gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
index 7e3b92a..50e9e60 100644
--- a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAsserterHttpServletRequestWrapper.java
+++ b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
@@ -15,10 +15,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.identityasserter.filter;
+package org.apache.hadoop.gateway.identityasserter.common.filter;
import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.gateway.IdentityAsserterMessages;
+import org.apache.hadoop.gateway.SpiGatewayMessages;
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
@@ -40,7 +40,7 @@
public class IdentityAsserterHttpServletRequestWrapper extends HttpServletRequestWrapper {
- private static IdentityAsserterMessages log = MessagesFactory.get( IdentityAsserterMessages.class );
+ private static SpiGatewayMessages log = MessagesFactory.get( SpiGatewayMessages.class );
private static final String PRINCIPAL_PARAM = "user.name";
private static final String DOAS_PRINCIPAL_PARAM = "doAs";
@@ -183,7 +183,7 @@
}
}
- static String urlEncode( Map<String, String[]> map, String encoding ) {
+ public static String urlEncode( Map<String, String[]> map, String encoding ) {
StringBuilder sb = new StringBuilder();
for( Map.Entry<String,String[]> entry : map.entrySet() ) {
String name = entry.getKey();
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/function/UsernameFunctionDescriptor.java
similarity index 93%
rename from gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
rename to gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/function/UsernameFunctionDescriptor.java
index d2aa441..f997de8 100644
--- a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
+++ b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/function/UsernameFunctionDescriptor.java
@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.identityasserter.function;
+package org.apache.hadoop.gateway.identityasserter.common.function;
import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor;
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessor.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/function/UsernameFunctionProcessor.java
similarity index 97%
rename from gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessor.java
rename to gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/function/UsernameFunctionProcessor.java
index 1e65f89..cc5d39d 100644
--- a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessor.java
+++ b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/function/UsernameFunctionProcessor.java
@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.identityasserter.function;
+package org.apache.hadoop.gateway.identityasserter.common.function;
import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteEnvironment;
import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteContext;
diff --git a/gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor b/gateway-provider-identity-assertion-common/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
similarity index 93%
rename from gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
rename to gateway-provider-identity-assertion-common/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
index 3762427..2545fb6 100644
--- a/gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
+++ b/gateway-provider-identity-assertion-common/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
@@ -16,4 +16,3 @@
# limitations under the License.
##########################################################################
-org.apache.hadoop.gateway.hive.HiveDispatchDeploymentContributor
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor b/gateway-provider-identity-assertion-common/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor
similarity index 91%
copy from gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor
copy to gateway-provider-identity-assertion-common/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor
index b42eb32..9671caf 100644
--- a/gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor
+++ b/gateway-provider-identity-assertion-common/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor
@@ -16,4 +16,4 @@
# limitations under the License.
##########################################################################
-org.apache.hadoop.gateway.identityasserter.function.UsernameFunctionDescriptor
\ No newline at end of file
+org.apache.hadoop.gateway.identityasserter.common.function.UsernameFunctionDescriptor
\ No newline at end of file
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor b/gateway-provider-identity-assertion-common/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor
similarity index 91%
rename from gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor
rename to gateway-provider-identity-assertion-common/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor
index b42eb32..5038d11 100644
--- a/gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor
+++ b/gateway-provider-identity-assertion-common/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor
@@ -16,4 +16,4 @@
# limitations under the License.
##########################################################################
-org.apache.hadoop.gateway.identityasserter.function.UsernameFunctionDescriptor
\ No newline at end of file
+org.apache.hadoop.gateway.identityasserter.common.function.UsernameFunctionProcessor
\ No newline at end of file
diff --git a/gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/CommonIdentityAssertionFilterTest.java b/gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/CommonIdentityAssertionFilterTest.java
new file mode 100644
index 0000000..4d53dbb
--- /dev/null
+++ b/gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/CommonIdentityAssertionFilterTest.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.identityasserter.filter;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+
+import javax.security.auth.Subject;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.gateway.identityasserter.common.filter.CommonIdentityAssertionFilter;
+import org.apache.hadoop.gateway.security.GroupPrincipal;
+import org.apache.hadoop.gateway.security.PrimaryPrincipal;
+import org.easymock.EasyMock;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * @author larry
+ *
+ */
+public class CommonIdentityAssertionFilterTest {
+
+ private String username = null;
+ private String[] mappedGroups = null;
+ private Filter filter = null;
+
+ @Before
+ public void setup() {
+ filter = new CommonIdentityAssertionFilter() {
+ @Override
+ public String mapUserPrincipal(String principalName) {
+ username = principalName.toUpperCase();
+ return principalName;
+ }
+
+ @Override
+ public String[] mapGroupPrincipals(String principalName, Subject subject) {
+ String[] groups = new String[2];
+ int i = 0;
+ for(GroupPrincipal p : subject.getPrincipals(GroupPrincipal.class)) {
+ groups[i] = p.getName().toUpperCase();
+ i++;
+ }
+ mappedGroups = groups;
+ return groups;
+ }
+ };
+ }
+
+ @Test
+ public void testSimpleFilter() throws ServletException, IOException,
+ URISyntaxException {
+
+ FilterConfig config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.replay( config );
+
+ final HttpServletRequest request = EasyMock.createNiceMock( HttpServletRequest.class );
+ EasyMock.replay( request );
+
+ final HttpServletResponse response = EasyMock.createNiceMock( HttpServletResponse.class );
+ EasyMock.replay( response );
+
+ final FilterChain chain = new FilterChain() {
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response)
+ throws IOException, ServletException {
+ }
+ };
+
+ Subject subject = new Subject();
+ subject.getPrincipals().add(new PrimaryPrincipal("larry"));
+ subject.getPrincipals().add(new GroupPrincipal("users"));
+ subject.getPrincipals().add(new GroupPrincipal("admin"));
+ try {
+ Subject.doAs(
+ subject,
+ new PrivilegedExceptionAction<Object>() {
+ public Object run() throws Exception {
+ filter.doFilter(request, response, chain);
+ return null;
+ }
+ });
+ }
+ catch (PrivilegedActionException e) {
+ Throwable t = e.getCause();
+ if (t instanceof IOException) {
+ throw (IOException) t;
+ }
+ else if (t instanceof ServletException) {
+ throw (ServletException) t;
+ }
+ else {
+ throw new ServletException(t);
+ }
+ }
+ assertEquals("LARRY", username);
+ assertEquals(mappedGroups.length, 2);
+ assertTrue(mappedGroups[0].equals("USERS") || mappedGroups[0].equals("ADMIN"));
+ assertTrue(mappedGroups[1], mappedGroups[1].equals("USERS") || mappedGroups[1].equals("ADMIN"));
+ }
+
+}
diff --git a/gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAssertionHttpServletRequestWrapperTest.java b/gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAssertionHttpServletRequestWrapperTest.java
similarity index 98%
rename from gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAssertionHttpServletRequestWrapperTest.java
rename to gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAssertionHttpServletRequestWrapperTest.java
index 079540e..8bb0420 100644
--- a/gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAssertionHttpServletRequestWrapperTest.java
+++ b/gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAssertionHttpServletRequestWrapperTest.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.gateway.identityasserter.filter;
import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.gateway.identityasserter.common.filter.IdentityAsserterHttpServletRequestWrapper;
import org.apache.hadoop.test.category.FastTests;
import org.apache.hadoop.test.category.UnitTests;
import org.apache.hadoop.test.mock.MockHttpServletRequest;
diff --git a/gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptorTest.java b/gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptorTest.java
similarity index 94%
rename from gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptorTest.java
rename to gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptorTest.java
index 3d835bf..51f9d57 100644
--- a/gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptorTest.java
+++ b/gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptorTest.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.gateway.identityasserter.function;
import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor;
+import org.apache.hadoop.gateway.identityasserter.common.function.UsernameFunctionDescriptor;
import org.junit.Test;
import java.util.Iterator;
diff --git a/gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessorTest.java b/gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessorTest.java
similarity index 98%
rename from gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessorTest.java
rename to gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessorTest.java
index dffa4b0..ebbe6ec 100644
--- a/gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessorTest.java
+++ b/gateway-provider-identity-assertion-common/src/test/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessorTest.java
@@ -21,6 +21,7 @@
import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletContextListener;
import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletFilter;
import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
+import org.apache.hadoop.gateway.identityasserter.common.function.UsernameFunctionProcessor;
import org.apache.hadoop.gateway.security.PrimaryPrincipal;
import org.apache.hadoop.gateway.util.urltemplate.Parser;
import org.apache.hadoop.test.log.NoOpLogger;
diff --git a/gateway-provider-identity-assertion-common/src/test/resources/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessorTest/rewrite.xml b/gateway-provider-identity-assertion-common/src/test/resources/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessorTest/rewrite.xml
new file mode 100644
index 0000000..cf28b92
--- /dev/null
+++ b/gateway-provider-identity-assertion-common/src/test/resources/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionProcessorTest/rewrite.xml
@@ -0,0 +1,24 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules>
+
+ <rule name="test-rule-username">
+ <match pattern="*://{host}:{port}/{path=**}?{**}"/>
+ <rewrite template="test-output-scheme://{host}:{port}/test-output-path/{path=**}?user.name={$username}?{**}?test-query-output-name=test-query-output-value"/>
+ </rule>
+
+</rules>
\ No newline at end of file
diff --git a/gateway-service-oozie/pom.xml b/gateway-provider-identity-assertion-concat/pom.xml
similarity index 74%
copy from gateway-service-oozie/pom.xml
copy to gateway-provider-identity-assertion-concat/pom.xml
index 00f8cc3..13b64ba 100644
--- a/gateway-service-oozie/pom.xml
+++ b/gateway-provider-identity-assertion-concat/pom.xml
@@ -1,3 +1,5 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
@@ -14,19 +16,19 @@
See the License for the specific language governing permissions and
limitations under the License.
-->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <artifactId>gateway</artifactId>
<groupId>org.apache.knox</groupId>
+ <artifactId>gateway</artifactId>
<version>0.6.0-SNAPSHOT</version>
</parent>
- <artifactId>gateway-service-oozie</artifactId>
+ <artifactId>gateway-provider-identity-assertion-concat</artifactId>
- <name>gateway-service-oozie</name>
- <description>The extensions to the gateway for supporting Oozie.</description>
+ <name>gateway-provider-identity-assertion-concat</name>
+ <description>An extension to the gateway that provides an easy integration point for asserting identity to Hadoop clusters using some custom mapping facility.</description>
<licenses>
<license>
@@ -37,31 +39,20 @@
</licenses>
<dependencies>
+
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ </dependency>
+
<dependency>
<groupId>${gateway-group}</groupId>
<artifactId>gateway-spi</artifactId>
</dependency>
- <dependency>
- <groupId>${gateway-group}</groupId>
- <artifactId>gateway-provider-rewrite</artifactId>
- </dependency>
<dependency>
- <groupId>org.jboss.shrinkwrap</groupId>
- <artifactId>shrinkwrap-api</artifactId>
- <version>1.0.1</version>
- </dependency>
- <dependency>
- <groupId>org.jboss.shrinkwrap</groupId>
- <artifactId>shrinkwrap-impl-base</artifactId>
- </dependency>
- <dependency>
- <groupId>org.jboss.shrinkwrap.descriptors</groupId>
- <artifactId>shrinkwrap-descriptors-api-javaee</artifactId>
- </dependency>
- <dependency>
- <groupId>org.jboss.shrinkwrap.descriptors</groupId>
- <artifactId>shrinkwrap-descriptors-impl-javaee</artifactId>
+ <groupId>${gateway-group}</groupId>
+ <artifactId>gateway-provider-identity-assertion-common</artifactId>
</dependency>
<dependency>
@@ -69,11 +60,19 @@
<artifactId>gateway-test-utils</artifactId>
<scope>test</scope>
</dependency>
+
+ <dependency>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>test-jetty-servlet</artifactId>
+ <scope>test</scope>
+ </dependency>
+
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
@@ -85,11 +84,16 @@
<scope>test</scope>
</dependency>
<dependency>
+ <groupId>org.xmlmatchers</groupId>
+ <artifactId>xml-matchers</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
-
</project>
\ No newline at end of file
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-provider-identity-assertion-concat/src/main/java/org/apache/hadoop/gateway/ConcatIdentityAsserterMessages.java
similarity index 60%
copy from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
copy to gateway-provider-identity-assertion-concat/src/main/java/org/apache/hadoop/gateway/ConcatIdentityAsserterMessages.java
index 7a88a26..548f3cc 100644
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-provider-identity-assertion-concat/src/main/java/org/apache/hadoop/gateway/ConcatIdentityAsserterMessages.java
@@ -15,10 +15,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.picketlink;
+package org.apache.hadoop.gateway;
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+@Messages(logger="org.apache.hadoop.gateway")
+public interface ConcatIdentityAsserterMessages {
+
+ @Message( level = MessageLevel.WARN, text = "Skipping unencodable parameter {0}={1}, {2}: {3}" )
+ void skippingUnencodableParameter( String name, String value, String encoding, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
}
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java b/gateway-provider-identity-assertion-concat/src/main/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAsserterDeploymentContributor.java
similarity index 64%
copy from gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
copy to gateway-provider-identity-assertion-concat/src/main/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAsserterDeploymentContributor.java
index d2aa441..5eb2ca1 100644
--- a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
+++ b/gateway-provider-identity-assertion-concat/src/main/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAsserterDeploymentContributor.java
@@ -15,17 +15,18 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.identityasserter.function;
+package org.apache.hadoop.gateway.identityasserter.concat.filter;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor;
+import org.apache.hadoop.gateway.identityasserter.common.filter.AbstractIdentityAsserterDeploymentContributor;
-public class UsernameFunctionDescriptor implements UrlRewriteFunctionDescriptor<UsernameFunctionDescriptor> {
-
- public static final String FUNCTION_NAME = "username";
+public class ConcatIdentityAsserterDeploymentContributor extends AbstractIdentityAsserterDeploymentContributor {
@Override
- public String name() {
- return FUNCTION_NAME;
+ public String getName() {
+ return "Concat";
}
+ protected String getFilterClassname() {
+ return ConcatIdentityAssertionFilter.class.getName();
+ }
}
diff --git a/gateway-provider-identity-assertion-concat/src/main/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAssertionFilter.java b/gateway-provider-identity-assertion-concat/src/main/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAssertionFilter.java
new file mode 100644
index 0000000..f02eb13
--- /dev/null
+++ b/gateway-provider-identity-assertion-concat/src/main/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAssertionFilter.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.identityasserter.concat.filter;
+
+import javax.security.auth.Subject;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import org.apache.hadoop.gateway.identityasserter.common.filter.CommonIdentityAssertionFilter;
+
+public class ConcatIdentityAssertionFilter extends CommonIdentityAssertionFilter {
+ private String prefix = null;
+ private String suffix = null;
+
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#init(javax.servlet.FilterConfig)
+ */
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ prefix = filterConfig.getInitParameter("concat.prefix");
+ suffix = filterConfig.getInitParameter("concat.suffix");
+ if (prefix == null) {
+ prefix = "";
+ }
+ if (suffix == null) {
+ suffix = "";
+ }
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hadoop.gateway.identityasserter.common.filter.AbstractIdentityAssertionFilter#mapGroupPrincipals(java.lang.String, javax.security.auth.Subject)
+ */
+ @Override
+ public String[] mapGroupPrincipals(String mappedPrincipalName, Subject subject) {
+ // NOP - returning null will allow existing Subject group principals to remain the same
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hadoop.gateway.identityasserter.common.filter.AbstractIdentityAssertionFilter#mapUserPrincipal(java.lang.String)
+ */
+ @Override
+ public String mapUserPrincipal(String principalName) {
+ return prefix + principalName + suffix;
+ }
+}
diff --git a/gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor b/gateway-provider-identity-assertion-concat/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
similarity index 90%
copy from gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
copy to gateway-provider-identity-assertion-concat/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
index 3762427..bbfd4ae 100644
--- a/gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
+++ b/gateway-provider-identity-assertion-concat/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
@@ -16,4 +16,4 @@
# limitations under the License.
##########################################################################
-org.apache.hadoop.gateway.hive.HiveDispatchDeploymentContributor
+org.apache.hadoop.gateway.identityasserter.concat.filter.ConcatIdentityAsserterDeploymentContributor
\ No newline at end of file
diff --git a/gateway-service-hive/src/test/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributorTest.java b/gateway-provider-identity-assertion-concat/src/test/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAsserterDeploymentContributorTest.java
similarity index 67%
copy from gateway-service-hive/src/test/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributorTest.java
copy to gateway-provider-identity-assertion-concat/src/test/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAsserterDeploymentContributorTest.java
index d748ea4..b046ef6 100644
--- a/gateway-service-hive/src/test/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributorTest.java
+++ b/gateway-provider-identity-assertion-concat/src/test/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAsserterDeploymentContributorTest.java
@@ -15,9 +15,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.hive;
+package org.apache.hadoop.gateway.identityasserter.concat.filter;
import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor;
+import org.apache.hadoop.gateway.identityasserter.concat.filter.ConcatIdentityAsserterDeploymentContributor;
import org.junit.Test;
import java.util.Iterator;
@@ -26,19 +27,19 @@
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.fail;
-public class HiveDispatchDeploymentContributorTest {
+public class ConcatIdentityAsserterDeploymentContributorTest {
@Test
public void testServiceLoader() throws Exception {
- ServiceLoader loader = ServiceLoader.load( ProviderDeploymentContributor.class );
- Iterator iterator = loader.iterator();
+ ServiceLoader<ProviderDeploymentContributor> loader = ServiceLoader.load( ProviderDeploymentContributor.class );
+ Iterator<ProviderDeploymentContributor> iterator = loader.iterator();
assertThat( "Service iterator empty.", iterator.hasNext() );
while( iterator.hasNext() ) {
Object object = iterator.next();
- if( object instanceof HiveDispatchDeploymentContributor ) {
+ if( object instanceof ConcatIdentityAsserterDeploymentContributor ) {
return;
}
}
- fail( "Failed to find " + HiveDispatchDeploymentContributor.class.getName() + " via service loader." );
+ fail( "Failed to find " + ConcatIdentityAsserterDeploymentContributor.class.getName() + " via service loader." );
}
}
diff --git a/gateway-provider-identity-assertion-concat/src/test/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAssertionFilterTest.java b/gateway-provider-identity-assertion-concat/src/test/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAssertionFilterTest.java
new file mode 100644
index 0000000..924f9d3
--- /dev/null
+++ b/gateway-provider-identity-assertion-concat/src/test/java/org/apache/hadoop/gateway/identityasserter/concat/filter/ConcatIdentityAssertionFilterTest.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.identityasserter.concat.filter;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+import java.security.Principal;
+
+import javax.security.auth.Subject;
+import javax.servlet.FilterConfig;
+
+import org.apache.hadoop.gateway.security.GroupPrincipal;
+import org.apache.hadoop.gateway.security.PrimaryPrincipal;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+/**
+ *
+ */
+public class ConcatIdentityAssertionFilterTest {
+
+ @Test
+ public void testPrefixAndSuffix() throws Exception {
+ FilterConfig config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.replay( config );
+
+ ConcatIdentityAssertionFilter filter = new ConcatIdentityAssertionFilter();
+ Subject subject = new Subject();
+
+ subject.getPrincipals().add(new PrimaryPrincipal("larry"));
+ subject.getPrincipals().add(new GroupPrincipal("users"));
+ subject.getPrincipals().add(new GroupPrincipal("admin"));
+
+ filter.init(config);
+ String username = filter.mapUserPrincipal(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName());
+ String[] groups = filter.mapGroupPrincipals(username, subject);
+ assertEquals(username, "larry");
+ assertNull(groups); // means for the caller to use the existing subject groups
+
+ config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.expect(config.getInitParameter("concat.prefix") ).andReturn( "sir-" ).anyTimes();
+ EasyMock.replay( config );
+ filter.init(config);
+ username = filter.mapUserPrincipal(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName());
+ assertEquals(username, "sir-larry");
+
+ config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.expect(config.getInitParameter("concat.suffix") ).andReturn( "-tenant-1" ).anyTimes();
+ EasyMock.replay( config );
+ filter.init(config);
+ username = filter.mapUserPrincipal(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName());
+ assertEquals(username, "larry-tenant-1");
+
+ config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.expect(config.getInitParameter("concat.prefix") ).andReturn( "sir-" ).anyTimes();
+ EasyMock.expect(config.getInitParameter("concat.suffix") ).andReturn( "-tenant-1" ).anyTimes();
+ EasyMock.replay( config );
+ filter.init(config);
+ username = filter.mapUserPrincipal(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName());
+ assertEquals(username, "sir-larry-tenant-1");
+ }
+}
diff --git a/gateway-provider-identity-assertion-pseudo/pom.xml b/gateway-provider-identity-assertion-pseudo/pom.xml
index 191c602..8fd7269 100644
--- a/gateway-provider-identity-assertion-pseudo/pom.xml
+++ b/gateway-provider-identity-assertion-pseudo/pom.xml
@@ -47,7 +47,7 @@
<dependency>
<groupId>${gateway-group}</groupId>
- <artifactId>gateway-spi</artifactId>
+ <artifactId>gateway-provider-identity-assertion-common</artifactId>
</dependency>
<dependency>
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAsserterDeploymentContributor.java b/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAsserterDeploymentContributor.java
index bb5cd99..b261138 100644
--- a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAsserterDeploymentContributor.java
+++ b/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAsserterDeploymentContributor.java
@@ -21,43 +21,38 @@
import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
+import org.apache.hadoop.gateway.identityasserter.common.filter.AbstractIdentityAsserterDeploymentContributor;
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Service;
import java.util.List;
-public class IdentityAsserterDeploymentContributor extends ProviderDeploymentContributorBase {
+public class IdentityAsserterDeploymentContributor extends AbstractIdentityAsserterDeploymentContributor {
private static final String FILTER_CLASSNAME = IdentityAsserterFilter.class.getName();
private static final String PRINCIPAL_MAPPING_PARAM_NAME = "principal.mapping";
private static final String GROUP_PRINCIPAL_MAPPING_PARAM_NAME = "group.principal.mapping";
@Override
- public String getRole() {
- return "identity-assertion";
- }
-
- @Override
public String getName() {
return "Pseudo";
}
@Override
public void contributeProvider( DeploymentContext context, Provider provider ) {
+ super.contributeProvider(context, provider);
String mappings = provider.getParams().get(PRINCIPAL_MAPPING_PARAM_NAME);
String groupMappings = provider.getParams().get(GROUP_PRINCIPAL_MAPPING_PARAM_NAME);
-// ServletType<WebAppDescriptor> servlet = findServlet( context, context.getTopology().getName() );
-// servlet.createInitParam()
-// .paramName( PRINCIPAL_MAPPING_PARAM_NAME )
-// .paramValue( mappings );
-
context.getWebAppDescriptor().createContextParam().paramName(PRINCIPAL_MAPPING_PARAM_NAME).paramValue(mappings);
context.getWebAppDescriptor().createContextParam().paramName(GROUP_PRINCIPAL_MAPPING_PARAM_NAME).paramValue(groupMappings);
}
+ /* (non-Javadoc)
+ * @see org.apache.hadoop.gateway.identityasserter.common.filter.AbstractIdentityAsserterDeploymentContributor#getFilterClassname()
+ */
@Override
- public void contributeFilter( DeploymentContext context, Provider provider, Service service, ResourceDescriptor resource, List<FilterParamDescriptor> params ) {
- resource.addFilter().name( getName() ).role( getRole() ).impl( FILTER_CLASSNAME ).params( params );
+ protected String getFilterClassname() {
+ return FILTER_CLASSNAME;
}
}
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAsserterFilter.java b/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAsserterFilter.java
index b39fd90..c3fffba 100644
--- a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAsserterFilter.java
+++ b/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/filter/IdentityAsserterFilter.java
@@ -19,39 +19,43 @@
import javax.security.auth.Subject;
-import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
+import org.apache.hadoop.gateway.identityasserter.common.filter.CommonIdentityAssertionFilter;
+import org.apache.hadoop.gateway.security.principal.PrincipalMappingException;
+import org.apache.hadoop.gateway.security.principal.SimplePrincipalMapper;
-import org.apache.hadoop.gateway.filter.security.AbstractIdentityAssertionFilter;
+public class IdentityAsserterFilter extends CommonIdentityAssertionFilter {
+ private static final String GROUP_PRINCIPAL_MAPPING = "group.principal.mapping";
+ private static final String PRINCIPAL_MAPPING = "principal.mapping";
+ private SimplePrincipalMapper mapper = new SimplePrincipalMapper();
-import java.io.IOException;
-import java.security.AccessController;
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ String principalMapping = filterConfig.getInitParameter(PRINCIPAL_MAPPING);
+ if (principalMapping == null || principalMapping.isEmpty()) {
+ principalMapping = filterConfig.getServletContext().getInitParameter(PRINCIPAL_MAPPING);
+ }
+ String groupPrincipalMapping = filterConfig.getInitParameter(GROUP_PRINCIPAL_MAPPING);
+ if (groupPrincipalMapping == null || groupPrincipalMapping.isEmpty()) {
+ groupPrincipalMapping = filterConfig.getServletContext().getInitParameter(GROUP_PRINCIPAL_MAPPING);
+ }
+ if (principalMapping != null && !principalMapping.isEmpty() || groupPrincipalMapping != null && !groupPrincipalMapping.isEmpty()) {
+ try {
+ mapper.loadMappingTable(principalMapping, groupPrincipalMapping);
+ } catch (PrincipalMappingException e) {
+ throw new ServletException("Unable to load principal mapping table.", e);
+ }
+ }
+ }
-public class IdentityAsserterFilter extends AbstractIdentityAssertionFilter {
+ @Override
+ public String[] mapGroupPrincipals(String mappedPrincipalName, Subject subject) {
+ return mapper.mapGroupPrincipal(mappedPrincipalName);
+ }
- /**
- * Obtain the standard javax.security.auth.Subject, retrieve the caller principal, map
- * to the identity to be asserted as appropriate and create the provider specific
- * assertion token. Add the assertion token to the request.
- */
- public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
- throws IOException, ServletException {
-// System.out.println("+++++++++++++ Identity Assertion Filtering");
- Subject subject = Subject.getSubject(AccessController.getContext());
-
- String principalName = getPrincipalName(subject);
- String mappedPrincipalName = mapper.mapUserPrincipal(principalName);
-
- // wrap the request so that the proper principal is returned
- // from request methods
- IdentityAsserterHttpServletRequestWrapper wrapper =
- new IdentityAsserterHttpServletRequestWrapper(
- (HttpServletRequest)request,
- mappedPrincipalName);
-
- continueChainAsPrincipal(wrapper, response, chain, mappedPrincipalName);
+ @Override
+ public String mapUserPrincipal(String principalName) {
+ return mapper.mapUserPrincipal(principalName);
}
}
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor b/gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor
deleted file mode 100644
index 74e0a96..0000000
--- a/gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.identityasserter.function.UsernameFunctionProcessor
\ No newline at end of file
diff --git a/gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/DefaultIdentityAssertionFilterTest.java b/gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/DefaultIdentityAssertionFilterTest.java
new file mode 100644
index 0000000..9795a99
--- /dev/null
+++ b/gateway-provider-identity-assertion-pseudo/src/test/java/org/apache/hadoop/gateway/identityasserter/filter/DefaultIdentityAssertionFilterTest.java
@@ -0,0 +1,173 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.identityasserter.filter;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.security.Principal;
+
+import javax.security.auth.Subject;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+
+import org.apache.hadoop.gateway.security.GroupPrincipal;
+import org.apache.hadoop.gateway.security.PrimaryPrincipal;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+/**
+ *
+ */
+public class DefaultIdentityAssertionFilterTest {
+
+ @Test
+ public void testInitParameters() throws Exception {
+ FilterConfig config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.expect(config.getInitParameter("principal.mapping") ).andReturn( "" ).anyTimes();
+ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(config.getServletContext() ).andReturn( context ).anyTimes();
+ EasyMock.expect(context.getInitParameter("principal.mapping") ).andReturn( "" ).anyTimes();
+ EasyMock.replay( config );
+ EasyMock.replay( context );
+
+ IdentityAsserterFilter filter = new IdentityAsserterFilter();
+ Subject subject = new Subject();
+
+ subject.getPrincipals().add(new PrimaryPrincipal("lmccay"));
+ subject.getPrincipals().add(new GroupPrincipal("users"));
+ subject.getPrincipals().add(new GroupPrincipal("admin"));
+
+ filter.init(config);
+ String username = filter.mapUserPrincipal(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName());
+ String[] groups = filter.mapGroupPrincipals(username, subject);
+ assertEquals("lmccay", username);
+ assertNull(groups); // means for the caller to use the existing subject groups
+
+ config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.expect(config.getInitParameter("principal.mapping") ).andReturn( "lmccay,kminder=hdfs;newuser=mapred" ).anyTimes();
+ EasyMock.expect(config.getInitParameter("group.principal.mapping") ).andReturn( "kminder=group1;lmccay=mrgroup,mrducks" ).anyTimes();
+ context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(config.getServletContext() ).andReturn( context ).anyTimes();
+ EasyMock.replay( config );
+ filter.init(config);
+ username = filter.mapUserPrincipal(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName());
+ String[] mappedGroups = filter.mapGroupPrincipals(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName(), subject);
+ assertEquals("hdfs", username);
+ assertTrue("mrgroup not found in groups: " + mappedGroups, groupFoundIn("mrgroup", mappedGroups));
+ assertTrue("mrducks not found in groups: " + mappedGroups, groupFoundIn("mrducks", mappedGroups));
+ assertFalse("group1 WAS found in groups: " + mappedGroups, groupFoundIn("group1", mappedGroups));
+
+ subject = new Subject();
+
+ subject.getPrincipals().add(new PrimaryPrincipal("kminder"));
+ subject.getPrincipals().add(new GroupPrincipal("users"));
+ subject.getPrincipals().add(new GroupPrincipal("admin"));
+
+ config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.expect(config.getInitParameter("principal.mapping") ).andReturn( "lmccay,kminder=hdfs;newuser=mapred" ).anyTimes();
+ EasyMock.expect(config.getInitParameter("group.principal.mapping") ).andReturn( "kminder=group1;lmccay=mrgroup,mrducks" ).anyTimes();
+ context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(config.getServletContext() ).andReturn( context ).anyTimes();
+ EasyMock.replay( config );
+ filter.init(config);
+ username = filter.mapUserPrincipal(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName());
+ mappedGroups = filter.mapGroupPrincipals(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName(), subject);
+ assertEquals("hdfs", username);
+ assertTrue("group1 not found in groups: " + mappedGroups, groupFoundIn("group1", mappedGroups));
+ }
+
+  /**
+   * @param expected the group name to look for
+   * @return true if expected occurs in mappedGroups, false otherwise (also false when mappedGroups is null)
+   */
+ private boolean groupFoundIn(String expected, String[] mappedGroups) {
+ if (mappedGroups == null) return false;
+ for(int i = 0; i < mappedGroups.length; i++) {
+ if (mappedGroups[i].equals(expected)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Test
+ public void testContextParameters() throws Exception {
+ // for backward compatibility of old deployment contributor's method
+ // of adding init params to the servlet context instead of to the filter.
+ // There is the possibility that previously deployed topologies will have
+ // init params in web.xml at the context level instead of the filter level.
+ FilterConfig config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.expect(config.getInitParameter("principal.mapping") ).andReturn( "" ).anyTimes();
+ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(config.getServletContext() ).andReturn( context ).anyTimes();
+ EasyMock.replay( config );
+ EasyMock.replay( context );
+
+ IdentityAsserterFilter filter = new IdentityAsserterFilter();
+ Subject subject = new Subject();
+
+ subject.getPrincipals().add(new PrimaryPrincipal("lmccay"));
+ subject.getPrincipals().add(new GroupPrincipal("users"));
+ subject.getPrincipals().add(new GroupPrincipal("admin"));
+
+ filter.init(config);
+ String username = filter.mapUserPrincipal(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName());
+ String[] groups = filter.mapGroupPrincipals(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName(), subject);
+// String[] groups = filter.mapGroupPrincipals(username, subject);
+ assertEquals("lmccay", username);
+ assertNull(groups); // means for the caller to use the existing subject groups
+
+ config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.expect(config.getInitParameter("principal.mapping") ).andReturn( "" ).anyTimes();
+ context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(config.getServletContext() ).andReturn( context ).anyTimes();
+ EasyMock.expect(context.getInitParameter("principal.mapping") ).andReturn( "lmccay,kminder=hdfs;newuser=mapred" ).anyTimes();
+ EasyMock.expect(context.getInitParameter("group.principal.mapping") ).andReturn( "kminder=group1;lmccay=mrgroup,mrducks" ).anyTimes();
+ EasyMock.replay( config );
+ EasyMock.replay( context );
+ filter.init(config);
+ username = filter.mapUserPrincipal(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName());
+ groups = filter.mapGroupPrincipals(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName(), subject);
+ assertEquals("hdfs", username);
+ assertTrue("mrgroup not found in groups: " + groups, groupFoundIn("mrgroup", groups));
+ assertTrue("mrducks not found in groups: " + groups, groupFoundIn("mrducks", groups));
+ assertFalse("group1 WAS found in groups: " + groups, groupFoundIn("group1", groups));
+
+ subject = new Subject();
+
+ subject.getPrincipals().add(new PrimaryPrincipal("kminder"));
+ subject.getPrincipals().add(new GroupPrincipal("users"));
+ subject.getPrincipals().add(new GroupPrincipal("admin"));
+
+ config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.expect(config.getInitParameter("principal.mapping") ).andReturn( "" ).anyTimes();
+ context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(config.getServletContext() ).andReturn( context ).anyTimes();
+ EasyMock.expect(context.getInitParameter("principal.mapping") ).andReturn( "lmccay,kminder=hdfs;newuser=mapred" ).anyTimes();
+ EasyMock.expect(context.getInitParameter("group.principal.mapping") ).andReturn( "kminder=group1;lmccay=mrgroup,mrducks" ).anyTimes();
+ EasyMock.replay( config );
+ EasyMock.replay( context );
+ filter.init(config);
+ username = filter.mapUserPrincipal(((Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0]).getName());
+ assertEquals("hdfs", username);
+ }
+
+}
diff --git a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReader.java b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReader.java
index 5ed3d81..10fc9b8 100644
--- a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReader.java
+++ b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReader.java
@@ -341,37 +341,89 @@
if( parent.isArray() ) {
if( bufferingLevel != null ) {
ArrayNode array = (ArrayNode)parent.node;
- array.add( parser.getDecimalValue() );
+ processBufferedArrayValueNumber( array );
}
} else {
child = stack.pop();
if( bufferingLevel != null ) {
parent = stack.peek();
ObjectNode object = (ObjectNode)parent.node;
- object.put( child.field, parser.getDecimalValue() );
+ processBufferedFieldValueNumber( child, object );
}
}
if( bufferingLevel == null ) {
- switch( parser.getNumberType() ) {
- case INT:
- generator.writeNumber( parser.getIntValue() );
- break;
- case LONG:
- generator.writeNumber( parser.getLongValue() );
- break;
- case BIG_INTEGER:
- generator.writeNumber( parser.getBigIntegerValue() );
- break;
- case FLOAT:
- generator.writeNumber( parser.getFloatValue() );
- break;
- case DOUBLE:
- generator.writeNumber( parser.getDoubleValue() );
- break;
- case BIG_DECIMAL:
- generator.writeNumber( parser.getDecimalValue() );
- break;
- }
+ processedUnbufferedValueNumber();
+ }
+ }
+
+ private void processedUnbufferedValueNumber() throws IOException {
+ switch( parser.getNumberType() ) {
+ case INT:
+ generator.writeNumber( parser.getIntValue() );
+ break;
+ case LONG:
+ generator.writeNumber( parser.getLongValue() );
+ break;
+ case BIG_INTEGER:
+ generator.writeNumber( parser.getBigIntegerValue() );
+ break;
+ case FLOAT:
+ generator.writeNumber( parser.getFloatValue() );
+ break;
+ case DOUBLE:
+ generator.writeNumber( parser.getDoubleValue() );
+ break;
+ case BIG_DECIMAL:
+ generator.writeNumber( parser.getDecimalValue() );
+ break;
+ }
+ }
+
+ private void processBufferedFieldValueNumber( Level child, ObjectNode object ) throws IOException {
+    // Type-specific writes fix KNOX-378 (numbers rewritten in engineering notation); BIG_INTEGER still uses getDecimalValue() — TODO confirm this Jackson version lacks a BigInteger put overload
+ switch( parser.getNumberType() ) {
+ case INT:
+ object.put( child.field, parser.getIntValue() );
+ break;
+ case LONG:
+ object.put( child.field, parser.getLongValue() );
+ break;
+ case BIG_INTEGER:
+ object.put( child.field, parser.getDecimalValue() );
+ break;
+ case FLOAT:
+ object.put( child.field, parser.getFloatValue() );
+ break;
+ case DOUBLE:
+ object.put( child.field, parser.getDoubleValue() );
+ break;
+ case BIG_DECIMAL:
+ object.put( child.field, parser.getDecimalValue() );
+ break;
+ }
+ }
+
+ private void processBufferedArrayValueNumber( ArrayNode array ) throws IOException {
+    // Type-specific adds fix KNOX-378 (numbers rewritten in engineering notation); BIG_INTEGER still uses getDecimalValue() — TODO confirm this Jackson version lacks a BigInteger add overload
+ switch( parser.getNumberType() ) {
+ case INT:
+ array.add( parser.getIntValue() );
+ break;
+ case LONG:
+ array.add( parser.getLongValue() );
+ break;
+ case BIG_INTEGER:
+ array.add( parser.getDecimalValue() );
+ break;
+ case FLOAT:
+ array.add( parser.getFloatValue() );
+ break;
+ case DOUBLE:
+ array.add( parser.getDoubleValue() );
+ break;
+ case BIG_DECIMAL:
+ array.add( parser.getDecimalValue() );
+ break;
}
}
diff --git a/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReaderTest.java b/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReaderTest.java
index 6f4eda2..f88f092 100644
--- a/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReaderTest.java
+++ b/gateway-provider-rewrite/src/test/java/org/apache/hadoop/gateway/filter/rewrite/impl/json/JsonFilterReaderTest.java
@@ -32,16 +32,34 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
+import java.math.BigInteger;
import java.nio.charset.Charset;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.fail;
public class JsonFilterReaderTest {
@Test
+ public void testValueNumberWithBuffering() throws Exception {
+ String input = "{ \"apps\" : {\"app\":[{\"id\":\"one\", \"progress\":100.0, \"startedTime\":1399975176760}]} }";
+
+ UrlRewriteRulesDescriptor rulesConfig = UrlRewriteRulesDescriptorFactory.create();
+ UrlRewriteFilterDescriptor filterConfig = rulesConfig.addFilter( "filter-1" );
+ UrlRewriteFilterContentDescriptor contentConfig = filterConfig.addContent( "text/json" );
+ UrlRewriteFilterBufferDescriptor bufferConfig = contentConfig.addBuffer( "$.apps.app[*]" );
+ UrlRewriteFilterApplyDescriptor applyConfig = bufferConfig.addApply( "$.id", "test-rule" );
+
+ JsonFilterReader filter = new JsonFilterReader( new StringReader( input ), contentConfig );
+ String output = IOUtils.toString( filter );
+ assertThat( output, containsString( "\"startedTime\":1399975176760}" ) );
+ }
+
+
+ @Test
public void testSimple() throws IOException {
String inputJson = "{ \"test-name\" : \"test-value\" }";
StringReader inputReader = new StringReader( inputJson );
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkMessages.java
similarity index 69%
copy from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
copy to gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkMessages.java
index 7a88a26..0272ba6 100644
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkMessages.java
@@ -17,8 +17,17 @@
*/
package org.apache.hadoop.gateway.picketlink;
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
import org.apache.hadoop.gateway.i18n.messages.Messages;
@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+public interface PicketlinkMessages {
+
+  @Message( level = MessageLevel.DEBUG, text = "Found Original URL in request: {0}")
+ public void foundOriginalURLInRequest(String url);
+
+ @Message( level = MessageLevel.DEBUG, text = "setting cookie for original-url")
+ public void settingCookieForOriginalURL();
+
}
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java
index 247b520..89d1ade 100644
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java
+++ b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java
@@ -26,24 +26,34 @@
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.picketlink.PicketlinkMessages;
+
import java.io.IOException;
public class CaptureOriginalURLFilter implements Filter {
-
+ private static PicketlinkMessages log = MessagesFactory.get( PicketlinkMessages.class );
+ private static final String COOKIE_PATH = "cookie.path";
+ private String cookiePath = null;
+
@Override
public void init( FilterConfig filterConfig ) throws ServletException {
- // TODO: get the cookie path from filterConfig
+ cookiePath = filterConfig.getInitParameter(COOKIE_PATH);
+ if (cookiePath == null) {
+ cookiePath = "/gateway/idp/knoxsso/websso";
+ }
}
@Override
public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain ) throws IOException, ServletException {
String original = null;
HttpServletRequest request = (HttpServletRequest)servletRequest;
- System.out.println( "CaptureOriginalURLFilter" );
- if (request.getParameter("originalUrl") != null) {
- System.out.println( "original url found in request" );
+ String url = request.getParameter("originalUrl");
+ if (url != null) {
+ log.foundOriginalURLInRequest(url);
original = request.getParameter("originalUrl");
- System.out.println( "setting cookie for original-url" );
+ log.settingCookieForOriginalURL();
addCookie(servletResponse, original);
}
filterChain.doFilter(request, servletResponse);
@@ -56,8 +66,7 @@
private void addCookie(ServletResponse servletResponse, String original) {
Cookie c = new Cookie("original-url", original);
- // TODO: get the cookie path from filterConfig
- c.setPath("/gateway/idp/knoxsso/websso");
+ c.setPath(cookiePath);
c.setMaxAge(60);
((HttpServletResponse)servletResponse).addCookie(c);
}
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java b/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/picketlink/PicketlinkTest.java
similarity index 86%
rename from gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
rename to gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/picketlink/PicketlinkTest.java
index ec57043..4ef3088 100644
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
+++ b/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/picketlink/PicketlinkTest.java
@@ -16,16 +16,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.provider.federation;
+package org.apache.hadoop.gateway.picketlink;
import junit.framework.TestCase;
import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
import org.junit.Test;
-public class PreAuthSSOTest extends TestCase {
+public class PicketlinkTest extends TestCase {
@Test
- public void testPreAuth() throws Exception {
+ public void testPicketlink() throws Exception {
assertTrue(true);
}
}
diff --git a/gateway-provider-security-shiro/pom.xml b/gateway-provider-security-shiro/pom.xml
index a930a58..b3ba17c 100644
--- a/gateway-provider-security-shiro/pom.xml
+++ b/gateway-provider-security-shiro/pom.xml
@@ -66,6 +66,11 @@
</dependency>
<dependency>
+ <groupId>org.apache.shiro</groupId>
+ <artifactId>shiro-ehcache</artifactId>
+ </dependency>
+
+ <dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
diff --git a/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/filter/ShiroSubjectIdentityAdapter.java b/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/filter/ShiroSubjectIdentityAdapter.java
index 2f0de73..11a0780 100644
--- a/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/filter/ShiroSubjectIdentityAdapter.java
+++ b/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/filter/ShiroSubjectIdentityAdapter.java
@@ -103,17 +103,20 @@
auditService.getContext().setUsername( principal ); //KM: Audit Fix
String sourceUri = (String)request.getAttribute( AbstractGatewayFilter.SOURCE_REQUEST_CONTEXT_URL_ATTRIBUTE_NAME );
auditor.audit( Action.AUTHENTICATION , sourceUri, ResourceType.URI, ActionOutcome.SUCCESS );
-
+
+ Set<String> userGroups = null;
// map ldap groups saved in session to Java Subject GroupPrincipal(s)
if (SecurityUtils.getSubject().getSession().getAttribute(SUBJECT_USER_GROUPS) != null) {
- Set<String> userRoles = (Set<String>)SecurityUtils.getSubject().getSession().getAttribute(SUBJECT_USER_GROUPS);
- for (String userRole : userRoles) {
- Principal gp = new GroupPrincipal(userRole);
- principals.add(gp);
- }
- auditor.audit( Action.AUTHENTICATION , sourceUri, ResourceType.URI, ActionOutcome.SUCCESS, "Groups: " + userRoles );
+ userGroups = (Set<String>)SecurityUtils.getSubject().getSession().getAttribute(SUBJECT_USER_GROUPS);
+ } else {
+ userGroups = new HashSet<String>(shiroSubject.getPrincipals().asSet());
+ userGroups.remove(principal);
}
-
+ for (String userGroup : userGroups) {
+ Principal gp = new GroupPrincipal(userGroup);
+ principals.add(gp);
+ }
+ auditor.audit( Action.AUTHENTICATION , sourceUri, ResourceType.URI, ActionOutcome.SUCCESS, "Groups: " + userGroups );
// The newly constructed Sets check whether this Subject has been set read-only
// before permitting subsequent modifications. The newly created Sets also prevent
diff --git a/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java b/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
index 00c0d68..c797c06 100644
--- a/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
+++ b/gateway-provider-security-shiro/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
@@ -42,11 +42,20 @@
import org.apache.hadoop.gateway.GatewayMessages;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
import org.apache.shiro.SecurityUtils;
+import org.apache.shiro.authc.AuthenticationInfo;
+import org.apache.shiro.authc.AuthenticationToken;
+import org.apache.shiro.authc.SimpleAuthenticationInfo;
+import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
+import org.apache.shiro.crypto.hash.DefaultHashService;
+import org.apache.shiro.crypto.hash.Hash;
+import org.apache.shiro.crypto.hash.HashRequest;
+import org.apache.shiro.crypto.hash.HashService;
import org.apache.shiro.realm.ldap.JndiLdapRealm;
import org.apache.shiro.realm.ldap.LdapContextFactory;
import org.apache.shiro.realm.ldap.LdapUtils;
+import org.apache.shiro.subject.MutablePrincipalCollection;
import org.apache.shiro.subject.PrincipalCollection;
import org.apache.shiro.util.StringUtils;
@@ -110,11 +119,13 @@
private final static String SUBJECT_USER_GROUPS = "subject.userGroups";
private final static String MEMBER_URL = "memberUrl";
-
+
+ private static final String HASHING_ALGORITHM = "SHA-1";
+
static {
- SUBTREE_SCOPE.setSearchScope(SearchControls.SUBTREE_SCOPE);
- ONELEVEL_SCOPE.setSearchScope(SearchControls.ONELEVEL_SCOPE);
- }
+ SUBTREE_SCOPE.setSearchScope(SearchControls.SUBTREE_SCOPE);
+ ONELEVEL_SCOPE.setSearchScope(SearchControls.ONELEVEL_SCOPE);
+ }
private String searchBase;
@@ -139,10 +150,13 @@
private String userSearchAttributeName;
private String userObjectClass = "person";
+ private HashService hashService = new DefaultHashService();
public KnoxLdapRealm() {
+ HashedCredentialsMatcher credentialsMatcher = new HashedCredentialsMatcher(HASHING_ALGORITHM);
+ setCredentialsMatcher(credentialsMatcher);
}
-
+
/**
* Get groups from LDAP.
*
@@ -169,14 +183,14 @@
return simpleAuthorizationInfo;
}
- private Set<String> getRoles(final PrincipalCollection principals,
+ private Set<String> getRoles(PrincipalCollection principals,
final LdapContextFactory ldapContextFactory) throws NamingException {
final String username = (String) getAvailablePrincipal(principals);
LdapContext systemLdapCtx = null;
try {
systemLdapCtx = ldapContextFactory.getSystemLdapContext();
- return rolesFor(username, systemLdapCtx, ldapContextFactory);
+ return rolesFor(principals, username, systemLdapCtx, ldapContextFactory);
} catch (AuthenticationException e) {
LOG.failedToGetSystemLdapConnection(e);
return Collections.emptySet();
@@ -185,7 +199,7 @@
}
}
- private Set<String> rolesFor(final String userName, final LdapContext ldapCtx,
+ private Set<String> rolesFor(PrincipalCollection principals, final String userName, final LdapContext ldapCtx,
final LdapContextFactory ldapContextFactory) throws NamingException {
final Set<String> roleNames = new HashSet();
final Set<String> groupNames = new HashSet();
@@ -213,6 +227,9 @@
// save role names and group names in session so that they can be easily looked up outside of this object
SecurityUtils.getSubject().getSession().setAttribute(SUBJECT_USER_ROLES, roleNames);
SecurityUtils.getSubject().getSession().setAttribute(SUBJECT_USER_GROUPS, groupNames);
+ if (!groupNames.isEmpty() && (principals instanceof MutablePrincipalCollection)) {
+ ((MutablePrincipalCollection)principals).addAll(groupNames, getName());
+ }
LOG.lookedUpUserRoles(roleNames, userName);
}
finally {
@@ -548,4 +565,11 @@
}
}
}
+
+ @Override
+ protected AuthenticationInfo createAuthenticationInfo(AuthenticationToken token, Object ldapPrincipal, Object ldapCredentials, LdapContext ldapContext) throws NamingException {
+ HashRequest.Builder builder = new HashRequest.Builder();
+ Hash credentialsHash = hashService.computeHash(builder.setSource(token.getCredentials()).setAlgorithmName(HASHING_ALGORITHM).build());
+ return new SimpleAuthenticationInfo(token.getPrincipal(), credentialsHash.toHex(), credentialsHash.getSalt(), getName());
+ }
}
diff --git a/gateway-release/home/templates/sandbox.knoxrealm.ehcache.xml b/gateway-release/home/templates/sandbox.knoxrealm.ehcache.xml
new file mode 100644
index 0000000..d861700
--- /dev/null
+++ b/gateway-release/home/templates/sandbox.knoxrealm.ehcache.xml
@@ -0,0 +1,217 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+
+ <gateway>
+
+ <provider>
+ <role>authentication</role>
+ <name>ShiroProvider</name>
+ <enabled>true</enabled>
+ <!--
+ session timeout in minutes, this is really idle timeout,
+ defaults to 30mins, if the property value is not defined,
+ current client authentication would expire if client idles continuously for more than this value
+ -->
+ <!-- defaults to: 30 minutes
+ <param>
+ <name>sessionTimeout</name>
+ <value>30</value>
+ </param>
+ -->
+
+ <!--
+ Use single KnoxLdapRealm to do authentication and ldap group look up
+ -->
+ <param>
+ <name>main.ldapRealm</name>
+ <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+ </param>
+ <param>
+ <name>main.ldapGroupContextFactory</name>
+ <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory</name>
+ <value>$ldapGroupContextFactory</value>
+ </param>
+ <!-- defaults to: simple
+ <param>
+ <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+ <value>simple</value>
+ </param>
+ -->
+ <param>
+ <name>main.ldapRealm.contextFactory.url</name>
+ <value>ldap://localhost:33389</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.userDnTemplate</name>
+ <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+
+ <param>
+ <name>main.ldapRealm.authorizationEnabled</name>
+ <!-- defaults to: false -->
+ <value>true</value>
+ </param>
+ <!-- defaults to: simple
+ <param>
+ <name>main.ldapRealm.contextFactory.systemAuthenticationMechanism</name>
+ <value>simple</value>
+ </param>
+ -->
+ <param>
+ <name>main.ldapRealm.searchBase</name>
+ <value>ou=groups,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+ <!-- defaults to: groupOfNames
+ <param>
+ <name>main.ldapRealm.groupObjectClass</name>
+ <value>groupOfNames</value>
+ </param>
+ -->
+ <!-- defaults to: member
+ <param>
+ <name>main.ldapRealm.memberAttribute</name>
+ <value>member</value>
+ </param>
+ -->
+ <!--
+ The ehcache can be configured further by placing a ehcache.xml file
+ in an appropriate location in classpath.
+ For example: cacheManager.cacheManagerConfigFile = classpath:ehcache.xml
+ <param>
+ <name>main.cacheManager.cacheManagerConfigFile</name>
+ <value>classpath:ehcache.xml</value>
+ </param>
+
+ -->
+ <param>
+ <name>main.cacheManager</name>
+ <value>org.apache.shiro.cache.ehcache.EhCacheManager</value>
+ </param>
+ <param>
+ <name>main.securityManager.cacheManager</name>
+ <value>$cacheManager</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.authenticationCachingEnabled</name>
+ <value>true</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.memberAttributeValueTemplate</name>
+ <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.systemUsername</name>
+ <value>uid=guest,ou=people,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.systemPassword</name>
+ <value>guest-password</value>
+ </param>
+
+ <param>
+ <name>urls./**</name>
+ <value>authcBasic</value>
+ </param>
+
+ </provider>
+
+ <provider>
+ <role>identity-assertion</role>
+ <name>Default</name>
+ <enabled>true</enabled>
+ <param>
+ <name>group.principal.mapping</name>
+ <value>*=users</value>
+ </param>
+ </provider>
+
+ <provider>
+ <role>authorization</role>
+ <name>AclsAuthz</name>
+ <enabled>true</enabled>
+ <param>
+ <name>webhdfs.acl</name>
+ <value>*;analyst;*</value>
+ </param>
+ </provider>
+
+ <!--
+ Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
+ For example, a hadoop service running in AWS may return a response that includes URLs containing the
+ some AWS internal host name. If the client needs to make a subsequent request to the host identified
+ in those URLs they need to be mapped to external host names that the client Knox can use to connect.
+
+ If the external host names and internal host names are the same, turn off this provider by setting the value of
+ the enabled parameter to false.
+
+ The name parameter specifies the external host names in a comma separated list.
+ The value parameter specifies corresponding internal host names in a comma separated list.
+
+ Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in the out
+ of the box sandbox.xml. This is because Sandbox uses port mapping to allow clients to connect to the
+ Hadoop services using localhost. In real clusters, external host names would almost never be localhost.
+ -->
+ <provider>
+ <role>hostmap</role>
+ <name>static</name>
+ <enabled>true</enabled>
+ <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+ </provider>
+
+ </gateway>
+
+ <service>
+ <role>NAMENODE</role>
+ <url>hdfs://localhost:8020</url>
+ </service>
+
+ <service>
+ <role>JOBTRACKER</role>
+ <url>rpc://localhost:8050</url>
+ </service>
+
+ <service>
+ <role>WEBHDFS</role>
+ <url>http://localhost:50070/webhdfs</url>
+ </service>
+
+ <service>
+ <role>WEBHCAT</role>
+ <url>http://localhost:50111/templeton</url>
+ </service>
+
+ <service>
+ <role>OOZIE</role>
+ <url>http://localhost:11000/oozie</url>
+ </service>
+
+ <service>
+ <role>WEBHBASE</role>
+ <url>http://localhost:60080</url>
+ </service>
+
+ <service>
+ <role>HIVE</role>
+ <url>http://localhost:10000</url>
+ </service>
+
+</topology>
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index 9294f23..60e808c 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -37,6 +37,7 @@
</license>
</licenses>
+
<profiles>
<profile>
<id>release</id>
@@ -94,6 +95,38 @@
</dependency>
</dependencies>
</plugin>
+ <plugin>
+ <!-- Using the dependency plugin to grab a dependency jar (gateway-service-definitions) and
+ unpack some resources into the build directory so that it can be included in the assembly.
+ The phase 'generate-resources' is chosen since it is not only the closest fit to the description
+ of the action, but more importantly, it is a phase that is guaranteed to occur before the
+ assembly which is tied to 'package'.
+ -->
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.8</version>
+ <executions>
+ <execution>
+ <id>unpack</id>
+ <phase>generate-resources</phase>
+ <goals>
+ <goal>unpack</goal>
+ </goals>
+ <configuration>
+ <artifactItems>
+ <artifactItem>
+ <groupId>${gateway-group}</groupId>
+ <artifactId>gateway-service-definitions</artifactId>
+ <type>jar</type>
+ <overWrite>true</overWrite>
+ <outputDirectory>${project.build.directory}</outputDirectory>
+ <includes>services/**/*</includes>
+ </artifactItem>
+ </artifactItems>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
</build>
</profile>
@@ -135,11 +168,7 @@
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
- <artifactId>gateway-service-oozie</artifactId>
- </dependency>
- <dependency>
- <groupId>${gateway-group}</groupId>
- <artifactId>gateway-service-webhcat</artifactId>
+ <artifactId>gateway-service-storm</artifactId>
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
@@ -151,10 +180,6 @@
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
- <artifactId>gateway-service-yarn-rm</artifactId>
- </dependency>
- <dependency>
- <groupId>${gateway-group}</groupId>
<artifactId>gateway-provider-rewrite</artifactId>
</dependency>
<dependency>
@@ -203,6 +228,14 @@
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
+ <artifactId>gateway-provider-identity-assertion-common</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>${gateway-group}</groupId>
+ <artifactId>gateway-provider-identity-assertion-concat</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>${gateway-group}</groupId>
<artifactId>gateway-provider-identity-assertion-pseudo</artifactId>
</dependency>
<dependency>
diff --git a/gateway-release/src/assembly.xml b/gateway-release/src/assembly.xml
index 9a46f39..709d1f3 100644
--- a/gateway-release/src/assembly.xml
+++ b/gateway-release/src/assembly.xml
@@ -58,6 +58,13 @@
</includes>
<fileMode>0444</fileMode>
</fileSet>
+ <fileSet>
+ <directory>${project.build.directory}/services</directory>
+ <outputDirectory>data/services</outputDirectory>
+ <includes>
+ <include>**/*</include>
+ </includes>
+ </fileSet>
</fileSets>
<dependencySets>
<dependencySet>
diff --git a/gateway-server/pom.xml b/gateway-server/pom.xml
index 3f736f3..1304cd0 100644
--- a/gateway-server/pom.xml
+++ b/gateway-server/pom.xml
@@ -178,6 +178,14 @@
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.knox</groupId>
+ <artifactId>gateway-service-definitions</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.knox</groupId>
+ <artifactId>gateway-provider-rewrite</artifactId>
+ </dependency>
<!-- ********** ********** ********** ********** ********** ********** -->
<!-- ********** Test Dependencies ********** -->
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
index 363fe7f..a1c4828 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
@@ -127,6 +127,9 @@
@Message( level = MessageLevel.WARN, text = "Ignoring service deployment contributor with invalid null role: {0}" )
void ignoringServiceContributorWithMissingRole( String className );
+ @Message( level = MessageLevel.WARN, text = "Ignoring service deployment contributor with invalid null version: {0}" )
+ void ignoringServiceContributorWithMissingVersion( String className );
+
@Message( level = MessageLevel.WARN, text = "Ignoring provider deployment contributor with invalid null name: {0}" )
void ignoringProviderContributorWithMissingName( String className );
@@ -327,4 +330,26 @@
@Message( level = MessageLevel.INFO, text = "Computed roles/groups: {0} for principal: {1}" )
void lookedUpUserRoles(Set<String> roleNames, String userName);
+
+ @Message( level = MessageLevel.INFO, text = "Configured services directory is {0}" )
+ void usingServicesDirectory(String path);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to unmarshall service definition file {0} file : {1}" )
+ void failedToLoadServiceDefinition(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to find service definition file {0} file : {1}" )
+ void failedToFindServiceDefinitionFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to find rewrite file {0} file : {1}" )
+ void failedToFindRewriteFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to unmarshall rewrite file {0} file : {1}" )
+ void failedToLoadRewriteFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.DEBUG, text = "No rewrite file found in service directory {0}" )
+ void noRewriteFileFound(String path);
+
+ @Message( level = MessageLevel.DEBUG, text = "Added Service definition name: {0}, role : {1}, version : {2}" )
+ void addedServiceDefinition(String serviceName, String serviceRole, String version);
+
}
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
index b1f8d51..336d52e 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
@@ -75,6 +75,8 @@
private static final String GATEWAY_CONFIG_FILE_PREFIX = "gateway";
+ private static final String DEFAULT_STACKS_SERVICES_DIR = "services";
+
public static final String[] GATEWAY_CONFIG_FILENAMES = {
GATEWAY_CONFIG_DIR_PREFIX + "/" + GATEWAY_CONFIG_FILE_PREFIX + "-default.xml",
GATEWAY_CONFIG_DIR_PREFIX + "/" + GATEWAY_CONFIG_FILE_PREFIX + "-site.xml"
@@ -100,6 +102,7 @@
public static final String DEPLOYMENT_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.dir";
public static final String SECURITY_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".security.dir";
public static final String DATA_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".data.dir";
+ public static final String STACKS_SERVICES_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".services.dir";
public static final String HADOOP_CONF_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".hadoop.conf.dir";
public static final String FRONTEND_URL = GATEWAY_CONFIG_FILE_PREFIX + ".frontend.url";
private static final String TRUST_ALL_CERTS = GATEWAY_CONFIG_FILE_PREFIX + ".trust.all.certs";
@@ -174,6 +177,11 @@
}
@Override
+ public String getGatewayServicesDir() {
+ return get(STACKS_SERVICES_DIR, getGatewayDataDir() + File.separator + DEFAULT_STACKS_SERVICES_DIR);
+ }
+
+ @Override
public String getHadoopConfDir() {
return get( HADOOP_CONF_DIR );
}
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
index 44ba37a..026aa53 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
@@ -17,20 +17,20 @@
*/
package org.apache.hadoop.gateway.deploy;
-import org.apache.hadoop.gateway.GatewayMessages;
import org.apache.hadoop.gateway.GatewayForwardingServlet;
-import org.apache.hadoop.gateway.GatewayResources;
+import org.apache.hadoop.gateway.GatewayMessages;
import org.apache.hadoop.gateway.GatewayServlet;
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.descriptor.GatewayDescriptor;
import org.apache.hadoop.gateway.descriptor.GatewayDescriptorFactory;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
import org.apache.hadoop.gateway.services.GatewayServices;
import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Service;
import org.apache.hadoop.gateway.topology.Topology;
+import org.apache.hadoop.gateway.topology.Version;
+import org.apache.hadoop.gateway.util.ServiceDefinitionsLoader;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.Asset;
import org.jboss.shrinkwrap.api.asset.StringAsset;
@@ -40,27 +40,27 @@
import org.jboss.shrinkwrap.descriptor.api.webcommon30.ServletType;
import java.beans.Statement;
+import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
-import java.util.LinkedHashMap;
+import java.util.TreeMap;
public abstract class DeploymentFactory {
private static final String DEFAULT_APP_REDIRECT_CONTEXT_PATH = "redirectTo";
- private static GatewayResources res = ResourcesFactory.get( GatewayResources.class );
private static GatewayMessages log = MessagesFactory.get( GatewayMessages.class );
private static GatewayServices gatewayServices = null;
- //private static Set<ServiceDeploymentContributor> SERVICE_CONTRIBUTORS;
- private static Map<String,Map<String,ServiceDeploymentContributor>> SERVICE_CONTRIBUTOR_MAP;
+ private static Map<String,Map<String,Map<Version, ServiceDeploymentContributor>>> SERVICE_CONTRIBUTOR_MAP;
static {
loadServiceContributors();
}
@@ -70,13 +70,20 @@
static {
loadProviderContributors();
}
-
+
public static void setGatewayServices(GatewayServices services) {
DeploymentFactory.gatewayServices = services;
}
public static WebArchive createDeployment( GatewayConfig config, Topology topology ) {
DeploymentContext context = null;
+ //TODO move the loading of service defs
+ String stacks = config.getGatewayServicesDir();
+ log.usingServicesDirectory(stacks);
+ File stacksDir = new File(stacks);
+ Set<ServiceDeploymentContributor> deploymentContributors = ServiceDefinitionsLoader.loadServiceDefinitions(stacksDir);
+ addServiceDeploymentContributors(deploymentContributors.iterator());
+
Map<String,List<ProviderDeploymentContributor>> providers = selectContextProviders( topology );
Map<String,List<ServiceDeploymentContributor>> services = selectContextServices( topology );
context = createDeploymentContext( config, topology.getName(), topology, providers, services );
@@ -187,7 +194,7 @@
= new HashMap<String,List<ServiceDeploymentContributor>>();
for( Service service : topology.getServices() ) {
String role = service.getRole();
- ServiceDeploymentContributor contributor = getServiceContributor( role, service.getName() );
+ ServiceDeploymentContributor contributor = getServiceContributor( role, service.getName(), service.getVersion() );
if( contributor != null ) {
List<ServiceDeploymentContributor> list = defaults.get( role );
if( list == null ) {
@@ -239,12 +246,12 @@
}
}
}
-
+
private static void injectServices(Object contributor) {
if (gatewayServices != null) {
Statement stmt = null;
for(String serviceName : gatewayServices.getServiceNames()) {
-
+
try {
// TODO: this is just a temporary injection solution
// TODO: test for the existence of the setter before attempting it
@@ -280,12 +287,12 @@
}
}
for( Service service : topology.getServices() ) {
- ServiceDeploymentContributor contributor = getServiceContributor( service.getRole(), null );
+ ServiceDeploymentContributor contributor = getServiceContributor( service.getRole(), service.getName(), service.getVersion() );
if( contributor != null ) {
try {
contributor.contributeService( context, service );
if (gatewayServices != null) {
- ServiceRegistry sr = (ServiceRegistry) gatewayServices.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
+ ServiceRegistry sr = gatewayServices.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
if (sr != null) {
String regCode = sr.getRegistrationCode(topology.getName());
sr.registerService(regCode, topology.getName(), service.getRole(), service.getUrls() );
@@ -313,14 +320,22 @@
return contributor;
}
- public static ServiceDeploymentContributor getServiceContributor( String role, String name ) {
+ public static ServiceDeploymentContributor getServiceContributor( String role, String name, Version version ) {
ServiceDeploymentContributor contributor = null;
- Map<String,ServiceDeploymentContributor> nameMap = SERVICE_CONTRIBUTOR_MAP.get( role );
- if( nameMap != null ) {
- if( name == null ) {
- contributor = nameMap.values().iterator().next();
- } else if ( !nameMap.isEmpty() ) {
- contributor = nameMap.get( name );
+ Map<String,Map<Version, ServiceDeploymentContributor>> nameMap = SERVICE_CONTRIBUTOR_MAP.get( role );
+ if( nameMap != null && !nameMap.isEmpty()) {
+ Map<Version, ServiceDeploymentContributor> versionMap = null;
+ if ( name == null ) {
+ versionMap = nameMap.values().iterator().next();
+ } else {
+ versionMap = nameMap.get( name );
+ }
+ if ( versionMap != null && !versionMap.isEmpty()) {
+ if( version == null ) {
+ contributor = ((TreeMap<Version, ServiceDeploymentContributor>) versionMap).firstEntry().getValue();
+ } else {
+ contributor = versionMap.get( version );
+ }
}
}
return contributor;
@@ -391,38 +406,45 @@
}
}
return null;
- }
-
- private static void loadServiceContributors() {
- Set<ServiceDeploymentContributor> set = new HashSet<ServiceDeploymentContributor>();
- Map<String,Map<String,ServiceDeploymentContributor>> roleMap
- = new HashMap<String,Map<String,ServiceDeploymentContributor>>();
-
- ServiceLoader<ServiceDeploymentContributor> loader = ServiceLoader.load( ServiceDeploymentContributor.class );
- Iterator<ServiceDeploymentContributor> contributors = loader.iterator();
- while( contributors.hasNext() ) {
- ServiceDeploymentContributor contributor = contributors.next();
- if( contributor.getName() == null ) {
- log.ignoringServiceContributorWithMissingName( contributor.getClass().getName() );
- continue;
- }
- if( contributor.getRole() == null ) {
- log.ignoringServiceContributorWithMissingRole( contributor.getClass().getName() );
- continue;
- }
- set.add( contributor );
- Map nameMap = roleMap.get( contributor.getRole() );
- if( nameMap == null ) {
- nameMap = new HashMap<String,ServiceDeploymentContributor>();
- roleMap.put( contributor.getRole(), nameMap );
- }
- nameMap.put( contributor.getName(), contributor );
- }
- //SERVICE_CONTRIBUTORS = set;
- SERVICE_CONTRIBUTOR_MAP = roleMap;
}
- private static void loadProviderContributors() {
+ private static void loadServiceContributors() {
+ SERVICE_CONTRIBUTOR_MAP = new HashMap<String, Map<String, Map<Version, ServiceDeploymentContributor>>>();
+ ServiceLoader<ServiceDeploymentContributor> loader = ServiceLoader.load( ServiceDeploymentContributor.class );
+ Iterator<ServiceDeploymentContributor> contributors = loader.iterator();
+ addServiceDeploymentContributors(contributors);
+ }
+
+ private static void addServiceDeploymentContributors(Iterator<ServiceDeploymentContributor> contributors) {
+ while( contributors.hasNext() ) {
+ ServiceDeploymentContributor contributor = contributors.next();
+ if( contributor.getName() == null ) {
+ log.ignoringServiceContributorWithMissingName( contributor.getClass().getName() );
+ continue;
+ }
+ if( contributor.getRole() == null ) {
+ log.ignoringServiceContributorWithMissingRole( contributor.getClass().getName() );
+ continue;
+ }
+ if( contributor.getVersion() == null ) {
+ log.ignoringServiceContributorWithMissingVersion(contributor.getClass().getName());
+ continue;
+ }
+ Map<String,Map<Version, ServiceDeploymentContributor>> nameMap = SERVICE_CONTRIBUTOR_MAP.get( contributor.getRole() );
+ if( nameMap == null ) {
+ nameMap = new HashMap<String,Map<Version, ServiceDeploymentContributor>>();
+ SERVICE_CONTRIBUTOR_MAP.put( contributor.getRole(), nameMap );
+ }
+ Map<Version, ServiceDeploymentContributor> versionMap = nameMap.get(contributor.getName());
+ if (versionMap == null) {
+ versionMap = new TreeMap<Version, ServiceDeploymentContributor>();
+ nameMap.put(contributor.getName(), versionMap);
+ }
+ versionMap.put( contributor.getVersion(), contributor );
+ }
+ }
+
+ private static void loadProviderContributors() {
Set<ProviderDeploymentContributor> set = new HashSet<ProviderDeploymentContributor>();
Map<String,Map<String,ProviderDeploymentContributor>> roleMap
= new HashMap<String,Map<String,ProviderDeploymentContributor>>();
@@ -464,19 +486,4 @@
}
return contributor;
}
-
- static ServiceDeploymentContributor getServiceContributor(
- Map<String,List<ServiceDeploymentContributor>> services, String role, String name ) {
- ServiceDeploymentContributor contributor = null;
- if( name == null ) {
- List<ServiceDeploymentContributor> list = services.get( role );
- if( !list.isEmpty() ) {
- contributor = list.get( 0 );
- }
- } else {
- contributor = getServiceContributor( role, name );
- }
- return contributor;
- }
-
}
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/impl/DispatchDeploymentContributor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/impl/DispatchDeploymentContributor.java
index ab3448b..3e1c336 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/impl/DispatchDeploymentContributor.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/impl/DispatchDeploymentContributor.java
@@ -22,7 +22,8 @@
import org.apache.hadoop.gateway.descriptor.FilterDescriptor;
import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.dispatch.HttpClientDispatch;
+import org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter;
+import org.apache.hadoop.gateway.dispatch.DefaultDispatch;
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Service;
@@ -32,7 +33,9 @@
public class DispatchDeploymentContributor extends ProviderDeploymentContributorBase {
private static final String REPLAY_BUFFER_SIZE_PARAM = "replayBufferSize";
-
+
+ private static final String DISPATCH_IMPL_PARAM = "dispatch-impl";
+
// Default global replay buffer size in KB
public static final String DEFAULT_REPLAY_BUFFER_SIZE = "8";
@@ -48,8 +51,8 @@
@Override
public void contributeFilter( DeploymentContext context, Provider provider, Service service, ResourceDescriptor resource, List<FilterParamDescriptor> params ) {
- FilterDescriptor filter = resource.addFilter().name( getName() ).role( getRole() ).impl( HttpClientDispatch.class );
-
+ FilterDescriptor filter = resource.addFilter().name( getName() ).role( getRole() ).impl( GatewayDispatchFilter.class );
+ filter.param().name(DISPATCH_IMPL_PARAM).value(DefaultDispatch.class.getName());
FilterParamDescriptor filterParam = filter.param().name( REPLAY_BUFFER_SIZE_PARAM ).value( DEFAULT_REPLAY_BUFFER_SIZE );
for ( Map.Entry<String,String> serviceParam : service.getParams().entrySet() ) {
if ( REPLAY_BUFFER_SIZE_PARAM.equals( serviceParam.getKey() ) ) {
@@ -63,7 +66,6 @@
}
}
}
-
if( context.getGatewayConfig().isHadoopKerberosSecured() ) {
filter.param().name("kerberos").value("true");
}
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/impl/ServiceDefinitionDeploymentContributor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/impl/ServiceDefinitionDeploymentContributor.java
new file mode 100644
index 0000000..1daa72b
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/impl/ServiceDefinitionDeploymentContributor.java
@@ -0,0 +1,217 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.deploy.impl;
+
+import org.apache.hadoop.gateway.deploy.DeploymentContext;
+import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
+import org.apache.hadoop.gateway.descriptor.FilterDescriptor;
+import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
+import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
+import org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter;
+import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
+import org.apache.hadoop.gateway.service.definition.*;
+import org.apache.hadoop.gateway.topology.Provider;
+import org.apache.hadoop.gateway.topology.Service;
+import org.apache.hadoop.gateway.topology.Version;
+
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class ServiceDefinitionDeploymentContributor extends ServiceDeploymentContributorBase {
+
+ private static final String DISPATCH_ROLE = "dispatch";
+
+ private static final String DISPATCH_IMPL_PARAM = "dispatch-impl";
+
+ private static final String REPLAY_BUFFER_SIZE_PARAM = "replayBufferSize";
+
+ public static final String DEFAULT_REPLAY_BUFFER_SIZE = "8";
+
+ private ServiceDefinition serviceDefinition;
+
+ private UrlRewriteRulesDescriptor serviceRules;
+
+ public ServiceDefinitionDeploymentContributor(ServiceDefinition serviceDefinition, UrlRewriteRulesDescriptor serviceRules) {
+ this.serviceDefinition = serviceDefinition;
+ this.serviceRules = serviceRules;
+ }
+
+ @Override
+ public String getRole() {
+ return serviceDefinition.getRole();
+ }
+
+ @Override
+ public String getName() {
+ return serviceDefinition.getName();
+ }
+
+ @Override
+ public Version getVersion() {
+ return new Version(serviceDefinition.getVersion());
+ }
+
+ @Override
+ public void contributeService(DeploymentContext context, Service service) throws Exception {
+ contributeRewriteRules(context, service);
+ contributeResources(context, service);
+ }
+
+ private void contributeRewriteRules(DeploymentContext context, Service service) {
+ if ( serviceRules != null ) {
+ UrlRewriteRulesDescriptor clusterRules = context.getDescriptor("rewrite");
+ clusterRules.addRules(serviceRules);
+ }
+ }
+
+ private void contributeResources(DeploymentContext context, Service service) {
+ Map<String, String> filterParams = new HashMap<String, String>();
+ List<Route> bindings = serviceDefinition.getRoutes();
+ for ( Route binding : bindings ) {
+ List<Rewrite> filters = binding.getRewrites();
+ if ( filters != null && !filters.isEmpty() ) {
+ filterParams.clear();
+ for ( Rewrite filter : filters ) {
+ filterParams.put(filter.getTo(), filter.getApply());
+ }
+ }
+ try {
+ contributeResource(context, service, binding, filterParams);
+ } catch ( URISyntaxException e ) {
+ e.printStackTrace();
+ }
+ }
+
+ }
+
+ private void contributeResource(DeploymentContext context, Service service, Route binding, Map<String, String> filterParams) throws URISyntaxException {
+ List<FilterParamDescriptor> params = new ArrayList<FilterParamDescriptor>();
+ ResourceDescriptor resource = context.getGatewayDescriptor().addResource();
+ resource.role(service.getRole());
+ resource.pattern(binding.getPath());
+ List<Policy> policyBindings = binding.getPolicies();
+ if ( policyBindings == null ) {
+ policyBindings = serviceDefinition.getPolicies();
+ }
+ if ( policyBindings == null ) {
+ //add default set
+ addDefaultPolicies(context, service, filterParams, params, resource);
+ } else {
+ addPolicies(context, service, filterParams, params, resource, policyBindings);
+ }
+ addDispatchFilter(context, service, resource, binding);
+ }
+
+ //Contributes each declared policy to the resource's filter chain, in order.
+ //A role of "rewrite" maps to the rewrite filter; any other role is
+ //contributed only when a provider of that role exists in the topology
+ //(roles without a matching provider are silently skipped).
+ private void addPolicies(DeploymentContext context, Service service, Map<String, String> filterParams, List<FilterParamDescriptor> params, ResourceDescriptor resource, List<Policy> policyBindings) throws URISyntaxException {
+ for ( Policy policyBinding : policyBindings ) {
+ String role = policyBinding.getRole();
+ if ( role == null ) {
+ throw new IllegalArgumentException("Policy defined has no role for service " + service.getName());
+ }
+ //roles are matched case-insensitively against provider types
+ role = role.trim().toLowerCase();
+ if ( role.equals("rewrite") ) {
+ addRewriteFilter(context, service, filterParams, params, resource);
+ } else if ( topologyContainsProviderType(context, role) ) {
+ context.contributeFilter(service, resource, role, policyBinding.getName(), null);
+ }
+ }
+ }
+
+ //Applies the gateway's default provider chain when a route/service declares
+ //no explicit policies. Order matters: web-app security, authentication,
+ //rewrite, identity assertion, then authorization.
+ private void addDefaultPolicies(DeploymentContext context, Service service, Map<String, String> filterParams, List<FilterParamDescriptor> params, ResourceDescriptor resource) throws URISyntaxException {
+ addWebAppSecFilters(context, service, resource);
+ addAuthenticationFilter(context, service, resource);
+ addRewriteFilter(context, service, filterParams, params, resource);
+ addIdentityAssertionFilter(context, service, resource);
+ addAuthorizationFilter(context, service, resource);
+ }
+
+  /**
+   * Converts the collected rewrite-rule mappings into filter params on the
+   * resource and registers the rewrite filter.
+   */
+  private void addRewriteFilter(DeploymentContext context, Service service, Map<String, String> filterParams, List<FilterParamDescriptor> params, ResourceDescriptor resource) throws URISyntaxException {
+    //an empty map simply contributes no params; no guard needed
+    for ( Map.Entry<String, String> entry : filterParams.entrySet() ) {
+      params.add(resource.createFilterParam().name(entry.getKey()).value(entry.getValue()));
+    }
+    addRewriteFilter(context, service, resource, params);
+  }
+
+ //Chooses and registers the outbound dispatch filter for the route.
+ //Resolution order: route-level custom dispatch, then service-level custom
+ //dispatch, then the default "http-client" contributor. When a custom
+ //dispatch exists and HA is enabled for this role, the HA contributor
+ //name/class takes precedence over the plain contributor name/class.
+ private void addDispatchFilter(DeploymentContext context, Service service, ResourceDescriptor resource, Route binding) {
+ CustomDispatch customDispatch = binding.getDispatch();
+ if ( customDispatch == null ) {
+ customDispatch = serviceDefinition.getDispatch();
+ }
+ if ( customDispatch != null ) {
+ boolean isHaEnabled = isHaEnabled(context);
+ String haContributorName = customDispatch.getHaContributorName();
+ String haClassName = customDispatch.getHaClassName();
+ //in each branch a named contributor wins over a bare class name
+ if ( isHaEnabled && (haContributorName != null || haClassName != null)) {
+ if (haContributorName != null) {
+ addDispatchFilter(context, service, resource, DISPATCH_ROLE, haContributorName);
+ } else {
+ addDispatchFilterForClass(context, service, resource, haClassName);
+ }
+ } else {
+ String contributorName = customDispatch.getContributorName();
+ if ( contributorName != null ) {
+ addDispatchFilter(context, service, resource, DISPATCH_ROLE, contributorName);
+ } else {
+ String className = customDispatch.getClassName();
+ //NOTE(review): if a custom dispatch declares neither a contributor
+ //name nor a class name, no dispatch filter is added at all — confirm
+ //this is intended
+ if ( className != null ) {
+ addDispatchFilterForClass(context, service, resource, className);
+ }
+ }
+ }
+ } else {
+ addDispatchFilter(context, service, resource, DISPATCH_ROLE, "http-client");
+ }
+ }
+
+  /**
+   * Registers a dispatch filter backed by the given dispatch implementation
+   * class, honoring a service-level override of the replay buffer size and
+   * flagging kerberos vs. preemptive basic-auth behavior.
+   */
+  private void addDispatchFilterForClass(DeploymentContext context, Service service, ResourceDescriptor resource, String className) {
+    FilterDescriptor filter = resource.addFilter().name(getName()).role(DISPATCH_ROLE).impl(GatewayDispatchFilter.class);
+    filter.param().name(DISPATCH_IMPL_PARAM).value(className);
+    FilterParamDescriptor filterParam = filter.param().name(REPLAY_BUFFER_SIZE_PARAM).value(DEFAULT_REPLAY_BUFFER_SIZE);
+    //direct map lookup instead of scanning every service param for the key
+    String replayBufferSize = service.getParams().get(REPLAY_BUFFER_SIZE_PARAM);
+    if ( replayBufferSize != null ) {
+      filterParam.value(replayBufferSize);
+    }
+    if ( context.getGatewayConfig().isHadoopKerberosSecured() ) {
+      filter.param().name("kerberos").value("true");
+    } else {
+      //TODO: [sumit] Get rid of special case. Add config/param capabilities to service definitions?
+      //special case for hive
+      filter.param().name("basicAuthPreemptive").value("true");
+    }
+  }
+
+  /**
+   * A service participates in HA when the topology declares an enabled "ha"
+   * provider whose params contain an entry for this contributor's role.
+   */
+  private boolean isHaEnabled(DeploymentContext context) {
+    Provider haProvider = getProviderByRole(context, "ha");
+    if ( haProvider == null || !haProvider.isEnabled() ) {
+      return false;
+    }
+    Map<String, String> haParams = haProvider.getParams();
+    return haParams != null && haParams.containsKey(getRole());
+  }
+
+}
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/UrlConnectionDispatch.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/UrlConnectionDispatch.java
index 5b02221..c0347cf 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/UrlConnectionDispatch.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/dispatch/UrlConnectionDispatch.java
@@ -26,6 +26,7 @@
import org.apache.hadoop.gateway.audit.api.Auditor;
import org.apache.hadoop.gateway.audit.api.ResourceType;
import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
+import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
import org.apache.hadoop.gateway.util.urltemplate.Parser;
@@ -36,6 +37,8 @@
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
+import javax.servlet.FilterChain;
+import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@@ -53,7 +56,7 @@
/**
*
*/
-public class UrlConnectionDispatch extends AbstractGatewayDispatch {
+public class UrlConnectionDispatch extends AbstractGatewayFilter {
private static final GatewayMessages LOG = MessagesFactory.get( GatewayMessages.class );
private static final GatewayResources RES = ResourcesFactory.get( GatewayResources.class );
@@ -61,6 +64,30 @@
AuditConstants.KNOX_SERVICE_NAME, AuditConstants.KNOX_COMPONENT_NAME );
@Override
+ //Dispatches the request straight from the filter chain: only GET is
+ //supported; any other HTTP method is rejected with 405 Method Not Allowed.
+ //The chain parameter is intentionally unused — this filter terminates the
+ //chain by dispatching.
+ protected void doFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain) throws IOException, ServletException {
+ String method = request.getMethod().toUpperCase();
+ if (method.equals("GET")) {
+ try {
+ doGet(getDispatchUrl(request), request, response);
+ } catch ( URISyntaxException e ) {
+ //a malformed dispatch URL is a server-side failure; surface it
+ throw new ServletException(e);
+ }
+ } else {
+ response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
+ }
+ }
+
+  /**
+   * Reconstructs the fully qualified request URL, including any query
+   * string, as the URI to dispatch to.
+   */
+  protected static URI getDispatchUrl(HttpServletRequest request) {
+    StringBuffer buffer = request.getRequestURL();
+    String queryString = request.getQueryString();
+    if ( queryString != null ) {
+      buffer.append('?').append(queryString);
+    }
+    return URI.create(buffer.toString());
+  }
+
public void doGet( URI url, HttpServletRequest request, HttpServletResponse response ) throws IOException, URISyntaxException {
String sourcePathInfo = request.getPathInfo();
String sourcePattern = getConfig().getInitParameter( "pattern" );
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
index 12d22ad..b446b3a 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
@@ -185,7 +185,10 @@
if (registryFile.exists()) {
try {
String json = FileUtils.readFileToString(registryFile);
- registry = (Registry) getMapFromJsonString(json);
+ Registry reg = (Registry) getMapFromJsonString(json);
+ if (reg != null) {
+ registry = reg;
+ }
} catch (Exception e) {
throw new ServiceLifecycleException("Unable to load the persisted registry.", e);
}
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/RegEntry.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/RegEntry.java
index 6d5c269..832821c 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/RegEntry.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/RegEntry.java
@@ -17,12 +17,14 @@
*/
package org.apache.hadoop.gateway.services.registry.impl;
+import java.util.ArrayList;
import java.util.List;
public class RegEntry {
public String clusterName;
public String serviceName;
public List<String> urls;
+ public String url;
public RegEntry() {
}
@@ -44,11 +46,14 @@
}
public List<String> getUrls() {
+ if (urls == null && url != null && !"null".equals(url)) {
+ urls = new ArrayList<String>();
+ urls.add(url);
+ }
return urls;
}
public void setUrls( List<String> urls) {
this.urls = urls;
}
-
}
\ No newline at end of file
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java
index 985fd6b..d297178 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/property/interpreter/ServicePropertyInterpreter.java
@@ -59,7 +59,8 @@
}
nextToken = nextToken.substring(dotPosition + 1);
- Service service = topology.getService(serviceRole, serviceName);
+ //TODO: sumit - version needs to be parsed and passed in if we want to continue to support the 'ambari' format
+ Service service = topology.getService(serviceRole, serviceName, null);
if (service == null) {
service = new Service();
service.setName(serviceName);
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
index 745c661..b32f0c9 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
@@ -22,6 +22,7 @@
import org.apache.hadoop.gateway.topology.Param;
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Service;
+import org.apache.hadoop.gateway.topology.Version;
import org.apache.hadoop.gateway.topology.builder.BeanPropertyTopologyBuilder;
import org.xml.sax.Attributes;
@@ -39,6 +40,7 @@
private static final String VALUE_TAG = "value";
private static final Rule paramRule = new ParamRule();
+
@Override
protected void configure() {
forPattern( ROOT_TAG ).createObject().ofType( BeanPropertyTopologyBuilder.class );
@@ -48,6 +50,7 @@
forPattern( ROOT_TAG + "/" + SERVICE_TAG ).createObject().ofType( Service.class ).then().setNext( "addService" );
forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + ROLE_TAG ).setBeanProperty();
forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + NAME_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + VERSION_TAG ).createObject().ofType(Version.class).then().setBeanProperty().then().setNext("setVersion");
forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + URL_TAG ).callMethod( "addUrl" ).usingElementBodyAsArgument();
forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + PARAM_TAG ).createObject().ofType( Param.class ).then().addRule( paramRule ).then().setNext( "addParam" );
forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + PARAM_TAG + "/" + NAME_TAG ).setBeanProperty();
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/util/ServiceDefinitionsLoader.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/util/ServiceDefinitionsLoader.java
new file mode 100644
index 0000000..2ff07fd
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/util/ServiceDefinitionsLoader.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.util;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.filefilter.IOFileFilter;
+import org.apache.commons.io.filefilter.TrueFileFilter;
+import org.apache.hadoop.gateway.GatewayMessages;
+import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor;
+import org.apache.hadoop.gateway.deploy.impl.ServiceDefinitionDeploymentContributor;
+import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
+import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptorFactory;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.service.definition.ServiceDefinition;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Unmarshaller;
+import java.io.Closeable;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Discovers service definition files under a services directory and turns
+ * each one into a {@link ServiceDeploymentContributor}, pairing it with any
+ * rewrite rules found beside it.
+ */
+public class ServiceDefinitionsLoader {
+
+  private static final GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
+
+  //definition files are matched by file name containing this token
+  private static final String SERVICE_FILE_NAME = "service";
+
+  private static final String REWRITE_FILE = "rewrite.xml";
+
+  /**
+   * Walks the services directory recursively, unmarshalling every service
+   * definition file found.
+   *
+   * @param servicesDir root directory containing the service definitions
+   * @return contributors for the discovered services; empty when the
+   *         directory is missing or contains no definitions
+   */
+  public static Set<ServiceDeploymentContributor> loadServiceDefinitions(File servicesDir) {
+    Set<ServiceDeploymentContributor> contributors = new HashSet<ServiceDeploymentContributor>();
+    if ( servicesDir.exists() && servicesDir.isDirectory() ) {
+      try {
+        JAXBContext context = JAXBContext.newInstance(ServiceDefinition.class);
+        Unmarshaller unmarshaller = context.createUnmarshaller();
+        Collection<File> files = FileUtils.listFiles(servicesDir, new IOFileFilter() {
+          @Override
+          public boolean accept(File file) {
+            return file.getName().contains(SERVICE_FILE_NAME);
+          }
+
+          @Override
+          public boolean accept(File dir, String name) {
+            return name.contains(SERVICE_FILE_NAME);
+          }
+        }, TrueFileFilter.INSTANCE);
+        for ( File file : files ) {
+          InputStream inputStream = null;
+          try {
+            inputStream = new FileInputStream(file);
+            ServiceDefinition definition = (ServiceDefinition) unmarshaller.unmarshal(inputStream);
+            //look for rewrite rules as a sibling (for now)
+            UrlRewriteRulesDescriptor rewriteRulesDescriptor = loadRewriteRules(file.getParentFile());
+            contributors.add(new ServiceDefinitionDeploymentContributor(definition, rewriteRulesDescriptor));
+            log.addedServiceDefinition(definition.getName(), definition.getRole(), definition.getVersion());
+          } catch ( FileNotFoundException e ) {
+            log.failedToFindServiceDefinitionFile(file.getAbsolutePath(), e);
+          } finally {
+            //previously the stream was never closed, leaking a file handle
+            //per definition file
+            closeQuietly(inputStream);
+          }
+        }
+      } catch ( JAXBException e ) {
+        log.failedToLoadServiceDefinition(SERVICE_FILE_NAME, e);
+      }
+    }
+    return contributors;
+  }
+
+  /**
+   * Loads the rewrite rules descriptor that sits beside a service definition.
+   *
+   * @return the parsed rules, or null when no rewrite file exists or it
+   *         cannot be read
+   */
+  private static UrlRewriteRulesDescriptor loadRewriteRules(File servicesDir) {
+    File rewriteFile = new File(servicesDir, REWRITE_FILE);
+    if ( rewriteFile.exists() ) {
+      InputStream stream = null;
+      Reader reader = null;
+      try {
+        stream = new FileInputStream(rewriteFile);
+        reader = new InputStreamReader(stream);
+        return UrlRewriteRulesDescriptorFactory.load("xml", reader);
+      } catch ( FileNotFoundException e ) {
+        log.failedToFindRewriteFile(rewriteFile.getAbsolutePath(), e);
+      } catch ( IOException e ) {
+        log.failedToLoadRewriteFile(rewriteFile.getAbsolutePath(), e);
+      } finally {
+        //close even when parsing throws; previously the streams leaked on the
+        //error paths
+        closeQuietly(reader);
+        closeQuietly(stream);
+      }
+    }
+    log.noRewriteFileFound(servicesDir.getAbsolutePath());
+    return null;
+  }
+
+  //best-effort close; a failed close is not worth failing deployment over
+  private static void closeQuietly(Closeable closeable) {
+    if ( closeable != null ) {
+      try {
+        closeable.close();
+      } catch ( IOException e ) {
+        //ignored: nothing useful to do on close failure
+      }
+    }
+  }
+}
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/AuditLoggingTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/AuditLoggingTest.java
index b15c56b..7618253 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/AuditLoggingTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/AuditLoggingTest.java
@@ -46,9 +46,10 @@
import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
import org.apache.hadoop.gateway.audit.log4j.audit.Log4jAuditService;
import org.apache.hadoop.gateway.audit.log4j.correlation.Log4jCorrelationService;
-import org.apache.hadoop.gateway.dispatch.HttpClientDispatch;
+import org.apache.hadoop.gateway.dispatch.DefaultDispatch;
import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
import org.apache.hadoop.test.log.CollectAppender;
+import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.log4j.spi.LoggingEvent;
import org.easymock.EasyMock;
import org.junit.After;
@@ -170,7 +171,8 @@
HttpServletResponse outboundResponse = EasyMock.createNiceMock( HttpServletResponse.class );
EasyMock.replay( outboundResponse );
- HttpClientDispatch dispatch = new HttpClientDispatch();
+ DefaultDispatch dispatch = new DefaultDispatch();
+ dispatch.setHttpClient(new DefaultHttpClient());
try {
dispatch.doGet( new URI( uri ), inboundRequest, outboundResponse );
fail( "Expected exception while accessing to unreachable host" );
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayGlobalConfigTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayGlobalConfigTest.java
index d9957bd..0e38405 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayGlobalConfigTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayGlobalConfigTest.java
@@ -19,15 +19,14 @@
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.config.impl.GatewayConfigImpl;
+import org.hamcrest.Matchers;
import org.junit.Test;
import java.io.File;
import java.net.URL;
import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
public class GatewayGlobalConfigTest {
@@ -169,5 +168,15 @@
.getGatewayDataDir()));
}
+ @Test
+ public void testStacksServicesDir() {
+ System.clearProperty(GatewayConfigImpl.GATEWAY_HOME_VAR);
+ GatewayConfig config = new GatewayConfigImpl();
+ assertThat(config.getGatewayServicesDir(), Matchers.endsWith("data/services"));
+ String homeDirName = getHomeDirName("conf-demo/conf/gateway-site.xml");
+ System.setProperty(GatewayConfigImpl.GATEWAY_HOME_VAR, homeDirName);
+ config = new GatewayConfigImpl();
+ assertEquals("target/test", config.getGatewayServicesDir());
+ }
}
\ No newline at end of file
diff --git a/gateway-service-hive/src/test/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributorTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/deploy/impl/ServiceDefinitionDeploymentContributorTest.java
similarity index 81%
rename from gateway-service-hive/src/test/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributorTest.java
rename to gateway-server/src/test/java/org/apache/hadoop/gateway/deploy/impl/ServiceDefinitionDeploymentContributorTest.java
index d748ea4..c18f977 100644
--- a/gateway-service-hive/src/test/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributorTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/deploy/impl/ServiceDefinitionDeploymentContributorTest.java
@@ -15,7 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.hive;
+package org.apache.hadoop.gateway.deploy.impl;
import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor;
import org.junit.Test;
@@ -26,7 +26,7 @@
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.fail;
-public class HiveDispatchDeploymentContributorTest {
+public class ServiceDefinitionDeploymentContributorTest {
@Test
public void testServiceLoader() throws Exception {
@@ -35,10 +35,9 @@
assertThat( "Service iterator empty.", iterator.hasNext() );
while( iterator.hasNext() ) {
Object object = iterator.next();
- if( object instanceof HiveDispatchDeploymentContributor ) {
- return;
+ if( object instanceof ServiceDefinitionDeploymentContributor ) {
+ fail("The ServiceDefinition deployment contributor is not meant to be loaded using the service loader mechanism");
}
}
- fail( "Failed to find " + HiveDispatchDeploymentContributor.class.getName() + " via service loader." );
}
}
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
index 06b5b2a..5981479 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
@@ -22,6 +22,7 @@
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Service;
import org.apache.hadoop.gateway.topology.Topology;
+import org.apache.hadoop.gateway.topology.Version;
import org.apache.hadoop.gateway.topology.builder.TopologyBuilder;
import org.junit.After;
import org.junit.Before;
@@ -39,6 +40,7 @@
import static org.hamcrest.core.IsCollectionContaining.hasItem;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
+import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
public class TopologyRulesModuleTest {
@@ -68,11 +70,12 @@
assertThat( topology.getName(), is( "topology" ) );
assertThat( topology.getTimestamp(), is( file.lastModified() ) );
- assertThat( topology.getServices().size(), is( 1 ) );
+ assertThat( topology.getServices().size(), is( 3 ) );
Service comp = topology.getServices().iterator().next();
assertThat( comp, notNullValue() );
- assertThat( comp.getRole(), is( "WEBHDFS" ) );
+ assertThat( comp.getRole(), is("WEBHDFS") );
+ assertThat( comp.getVersion().toString(), is( "2.4.0" ) );
assertThat( comp.getUrls().size(), is( 2 ) );
assertThat( comp.getUrls(), hasItem( "http://host1:80/webhdfs" ) );
assertThat( comp.getUrls(), hasItem( "http://host2:80/webhdfs" ) );
@@ -82,6 +85,21 @@
assertThat( provider.isEnabled(), is(true) );
assertThat( provider.getRole(), is( "authentication" ) );
assertThat( provider.getParams().size(), is(5));
+
+ Service service = topology.getService("WEBHDFS", "webhdfs", new Version(2,4,0));
+ assertEquals(comp, service);
+
+ comp = topology.getService("RESOURCEMANAGER", null, new Version("2.5.0"));
+ assertThat( comp, notNullValue() );
+ assertThat( comp.getRole(), is("RESOURCEMANAGER") );
+ assertThat( comp.getVersion().toString(), is("2.5.0") );
+ assertThat(comp.getUrl(), is("http://host1:8088/ws") );
+
+ comp = topology.getService("HIVE", "hive", null);
+ assertThat( comp, notNullValue() );
+ assertThat( comp.getRole(), is("HIVE") );
+ assertThat( comp.getName(), is("hive") );
+ assertThat( comp.getUrl(), is("http://host2:10001/cliservice" ) );
}
@Test
@@ -116,6 +134,7 @@
assertThat( service.getName(), is( "test-service-name" ) );
assertThat( service.getParams(), hasEntry( is( "test-service-param-name-1" ), is( "test-service-param-value-1" ) ) );
assertThat( service.getParams(), hasEntry( is( "test-service-param-name-2" ), is( "test-service-param-value-2" ) ) );
+ assertThat( service.getVersion(), is( new Version(1,0,0)));
}
@@ -136,7 +155,7 @@
assertThat( topology.getServices().size(), is( 4 ) );
assertThat( topology.getProviders().size(), is( 2 ) );
- Service webhdfsService = topology.getService( "WEBHDFS", null );
+ Service webhdfsService = topology.getService( "WEBHDFS", null, null);
assertThat( webhdfsService, notNullValue() );
assertThat( webhdfsService.getRole(), is( "WEBHDFS" ) );
assertThat( webhdfsService.getName(), nullValue() );
@@ -144,21 +163,21 @@
assertThat( webhdfsService.getUrls(), hasItem( "http://host1:50070/webhdfs" ) );
assertThat( webhdfsService.getUrls(), hasItem( "http://host2:50070/webhdfs" ) );
- Service webhcatService = topology.getService( "WEBHCAT", null );
+ Service webhcatService = topology.getService( "WEBHCAT", null, null);
assertThat( webhcatService, notNullValue() );
assertThat( webhcatService.getRole(), is( "WEBHCAT" ) );
assertThat( webhcatService.getName(), nullValue() );
assertThat( webhcatService.getUrls().size(), is( 1 ) );
assertThat( webhcatService.getUrls(), hasItem( "http://host:50111/templeton" ) );
- Service oozieService = topology.getService( "OOZIE", null );
+ Service oozieService = topology.getService( "OOZIE", null, null);
assertThat( oozieService, notNullValue() );
assertThat( oozieService.getRole(), is( "OOZIE" ) );
assertThat( oozieService.getName(), nullValue() );
assertThat( webhcatService.getUrls().size(), is( 1 ) );
assertThat( oozieService.getUrls(), hasItem( "http://host:11000/oozie" ) );
- Service hiveService = topology.getService( "HIVE", null );
+ Service hiveService = topology.getService( "HIVE", null, null);
assertThat( hiveService, notNullValue() );
assertThat( hiveService.getRole(), is( "HIVE" ) );
assertThat( hiveService.getName(), nullValue() );
@@ -217,5 +236,4 @@
assertThat( service.getParams(), hasEntry( is( "test-service-param-name-1" ), is( "test-service-param-value-1" ) ) );
assertThat( service.getParams(), hasEntry( is( "test-service-param-name-2" ), is( "test-service-param-value-2" ) ) );
}
-
}
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/util/ServiceDefinitionsLoaderTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/util/ServiceDefinitionsLoaderTest.java
new file mode 100644
index 0000000..d602813
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/util/ServiceDefinitionsLoaderTest.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.util;
+
+import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor;
+import org.junit.Test;
+
+import java.io.File;
+import java.net.URL;
+import java.util.Set;
+
+import static org.junit.Assert.*;
+
+public class ServiceDefinitionsLoaderTest {
+
+  /**
+   * Loads the service definitions bundled under test resources and verifies
+   * that exactly the "foo" and "bar" services are discovered with the
+   * expected roles and versions.
+   */
+  @Test
+  public void testServiceDefinitionLoading() {
+    URL servicesUrl = ClassLoader.getSystemResource("services");
+    Set<ServiceDeploymentContributor> contributors = ServiceDefinitionsLoader.loadServiceDefinitions(new File(servicesUrl.getFile()));
+    assertNotNull(contributors);
+    assertEquals(2, contributors.size());
+    for ( ServiceDeploymentContributor contributor : contributors ) {
+      String name = contributor.getName();
+      if ( name.equals("foo") ) {
+        assertEquals("FOO", contributor.getRole());
+        assertEquals("1.0.0", contributor.getVersion().toString());
+      } else if ( name.equals("bar") ) {
+        assertEquals("BAR", contributor.getRole());
+        assertEquals("2.0.0", contributor.getVersion().toString());
+      } else {
+        fail("the loaded services don't match the test input");
+      }
+    }
+  }
+}
diff --git a/gateway-server/src/test/resources/conf-demo/conf/gateway-site.xml b/gateway-server/src/test/resources/conf-demo/conf/gateway-site.xml
index aaee777..3225adc 100644
--- a/gateway-server/src/test/resources/conf-demo/conf/gateway-site.xml
+++ b/gateway-server/src/test/resources/conf-demo/conf/gateway-site.xml
@@ -69,6 +69,9 @@
<value>target/testDataDir</value>
<description>The path that contains Gateway Data (Security and deployments)</description>
</property>
-
-
+ <property>
+ <name>gateway.services.dir</name>
+ <value>target/test</value>
+ <description>The path that contains the service definitions</description>
+ </property>
</configuration>
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-knox-format.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-knox-format.xml
index a7c476e..870015a 100644
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-knox-format.xml
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/service-param-topology-knox-format.xml
@@ -31,6 +31,7 @@
<service>
<role>test-service-role</role>
<name>test-service-name</name>
+ <version>1.0.0</version>
<url>test-service-scheme://test-service-host1:42/test-service-path</url>
<url>test-service-scheme://test-service-host2:42/test-service-path</url>
<param>
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml
index 46175d1..0a208fc 100644
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/simple-topology-knox-format.xml
@@ -63,7 +63,19 @@
<service>
<role>WEBHDFS</role>
+ <name>webhdfs</name>
+ <version>2.4.0</version>
<url>http://host1:80/webhdfs</url>
<url>http://host2:80/webhdfs</url>
</service>
+ <service>
+ <role>RESOURCEMANAGER</role>
+ <version>2.5.0</version>
+ <url>http://host1:8088/ws</url>
+ </service>
+ <service>
+ <role>HIVE</role>
+ <name>hive</name>
+ <url>http://host2:10001/cliservice</url>
+ </service>
</topology>
diff --git a/gateway-server/src/test/resources/services/bar/2.0.0/service.xml b/gateway-server/src/test/resources/services/bar/2.0.0/service.xml
new file mode 100644
index 0000000..1cd6cc3
--- /dev/null
+++ b/gateway-server/src/test/resources/services/bar/2.0.0/service.xml
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="BAR" name="bar" version="2.0.0">
+ <routes>
+ <route path="/bar/?**"/>
+ </routes>
+</service>
\ No newline at end of file
diff --git a/gateway-server/src/test/resources/services/foo/1.0.0/rewrite.xml b/gateway-server/src/test/resources/services/foo/1.0.0/rewrite.xml
new file mode 100644
index 0000000..761bd4b
--- /dev/null
+++ b/gateway-server/src/test/resources/services/foo/1.0.0/rewrite.xml
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules>
+ <rule dir="IN" name="FOO/foo/inbound" pattern="*://*:*/**/foo">
+ <rewrite template="{$serviceUrl[FOO]}"/>
+ </rule>
+</rules>
\ No newline at end of file
diff --git a/gateway-server/src/test/resources/services/foo/1.0.0/service.xml b/gateway-server/src/test/resources/services/foo/1.0.0/service.xml
new file mode 100644
index 0000000..f739b0b
--- /dev/null
+++ b/gateway-server/src/test/resources/services/foo/1.0.0/service.xml
@@ -0,0 +1,23 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="FOO" name="foo" version="1.0.0">
+ <routes>
+ <route path="/foo/?**">
+ <rewrite apply="FOO/foo/inbound" to="request.url"/>
+ </route>
+ </routes>
+</service>
\ No newline at end of file
diff --git a/gateway-service-definitions/pom.xml b/gateway-service-definitions/pom.xml
new file mode 100644
index 0000000..004aed8
--- /dev/null
+++ b/gateway-service-definitions/pom.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0"?>
+<project
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+ xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.apache.knox</groupId>
+ <artifactId>gateway</artifactId>
+ <version>0.6.0-SNAPSHOT</version>
+ </parent>
+
+ <artifactId>gateway-service-definitions</artifactId>
+ <name>gateway-service-definitions</name>
+ <description>The service definitions aka stacks.</description>
+ <licenses>
+ <license>
+ <name>The Apache Software License, Version 2.0</name>
+ <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.eclipse.persistence</groupId>
+ <artifactId>eclipselink</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.knox</groupId>
+ <artifactId>gateway-test-utils</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/CustomDispatch.java b/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/CustomDispatch.java
new file mode 100644
index 0000000..7270a2b
--- /dev/null
+++ b/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/CustomDispatch.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.service.definition;
+
+import javax.xml.bind.annotation.XmlAttribute;
+import javax.xml.bind.annotation.XmlType;
+
+@XmlType(name = "dispatch")
+public class CustomDispatch {
+
+ private String contributorName;
+
+ private String haContributorName;
+
+ private String className;
+
+ private String haClassName;
+
+ @XmlAttribute(name = "contributor-name")
+ public String getContributorName() {
+ return contributorName;
+ }
+
+ public void setContributorName(String contributorName) {
+ this.contributorName = contributorName;
+ }
+
+ @XmlAttribute(name = "ha-contributor-name")
+ public String getHaContributorName() {
+ return haContributorName;
+ }
+
+ public void setHaContributorName(String haContributorName) {
+ this.haContributorName = haContributorName;
+ }
+
+ @XmlAttribute(name = "classname")
+ public String getClassName() {
+ return className;
+ }
+
+ public void setClassName(String className) {
+ this.className = className;
+ }
+
+ @XmlAttribute(name = "ha-classname")
+ public String getHaClassName() {
+ return haClassName;
+ }
+
+ public void setHaClassName(String haContributorClassName) {
+ this.haClassName = haContributorClassName;
+ }
+}
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java b/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/Policy.java
similarity index 65%
copy from gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
copy to gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/Policy.java
index ec57043..471aae5 100644
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
+++ b/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/Policy.java
@@ -1,4 +1,3 @@
-
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -16,16 +15,31 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.provider.federation;
+package org.apache.hadoop.gateway.service.definition;
-import junit.framework.TestCase;
+import javax.xml.bind.annotation.XmlAttribute;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
-import org.junit.Test;
+public class Policy {
-public class PreAuthSSOTest extends TestCase {
- @Test
- public void testPreAuth() throws Exception {
- assertTrue(true);
+ private String name;
+
+ private String role;
+
+ @XmlAttribute
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ @XmlAttribute
+ public String getRole() {
+ return role;
+ }
+
+ public void setRole(String role) {
+ this.role = role;
}
}
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java b/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/Rewrite.java
similarity index 62%
copy from gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
copy to gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/Rewrite.java
index ec57043..a7a3e3c 100644
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
+++ b/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/Rewrite.java
@@ -1,4 +1,3 @@
-
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -16,16 +15,33 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.provider.federation;
+package org.apache.hadoop.gateway.service.definition;
-import junit.framework.TestCase;
+import javax.xml.bind.annotation.XmlAttribute;
+import javax.xml.bind.annotation.XmlType;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
-import org.junit.Test;
+@XmlType(name = "rewrite")
+public class Rewrite {
-public class PreAuthSSOTest extends TestCase {
- @Test
- public void testPreAuth() throws Exception {
- assertTrue(true);
+ private String apply;
+
+ private String to;
+
+ @XmlAttribute
+ public String getApply() {
+ return apply;
+ }
+
+ public void setApply(String apply) {
+ this.apply = apply;
+ }
+
+ @XmlAttribute
+ public String getTo() {
+ return to;
+ }
+
+ public void setTo(String to) {
+ this.to = to;
}
}
diff --git a/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/Route.java b/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/Route.java
new file mode 100644
index 0000000..7112c2e
--- /dev/null
+++ b/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/Route.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.service.definition;
+
+import javax.xml.bind.annotation.XmlAttribute;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlElementWrapper;
+import javax.xml.bind.annotation.XmlType;
+import java.util.List;
+
+@XmlType(name = "route")
+public class Route {
+
+ private String path;
+
+ private List<Rewrite> rewrites;
+
+ private List<Policy> policies;
+
+ private CustomDispatch dispatch;
+
+ @XmlAttribute
+ public String getPath() {
+ return path;
+ }
+
+ public void setPath(String path) {
+ this.path = path;
+ }
+
+ @XmlElement(name = "rewrite")
+ public List<Rewrite> getRewrites() {
+ return rewrites;
+ }
+
+ public void setRewrites(List<Rewrite> rewrites) {
+ this.rewrites = rewrites;
+ }
+
+ @XmlElement(name = "policy")
+ @XmlElementWrapper(name = "policies")
+ public List<Policy> getPolicies() {
+ return policies;
+ }
+
+ public void setPolicies(List<Policy> policies) {
+ this.policies = policies;
+ }
+
+ @XmlElement(name = "dispatch")
+ public CustomDispatch getDispatch() {
+ return dispatch;
+ }
+
+ public void setDispatch(CustomDispatch dispatch) {
+ this.dispatch = dispatch;
+ }
+}
diff --git a/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/ServiceDefinition.java b/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/ServiceDefinition.java
new file mode 100644
index 0000000..cbb9309
--- /dev/null
+++ b/gateway-service-definitions/src/main/java/org/apache/hadoop/gateway/service/definition/ServiceDefinition.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.service.definition;
+
+import javax.xml.bind.annotation.XmlAttribute;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlElementWrapper;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.util.List;
+
+@XmlRootElement(name = "service")
+public class ServiceDefinition {
+
+ private String name;
+
+ private String role;
+
+ private String version;
+
+ private List<Route> routes;
+
+ private List<Policy> policies;
+
+ private CustomDispatch dispatch;
+
+ @XmlAttribute
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ @XmlAttribute
+ public String getRole() {
+ return role;
+ }
+
+ public void setRole(String role) {
+ this.role = role;
+ }
+
+ @XmlAttribute
+ public String getVersion() {
+ return version;
+ }
+
+ public void setVersion(String version) {
+ this.version = version;
+ }
+
+ @XmlElement(name = "route")
+ @XmlElementWrapper(name = "routes")
+ public List<Route> getRoutes() {
+ return routes;
+ }
+
+ public void setRoutes(List<Route> routes) {
+ this.routes = routes;
+ }
+
+ @XmlElement(name = "policy")
+ @XmlElementWrapper(name = "policies")
+ public List<Policy> getPolicies() {
+ return policies;
+ }
+
+ public void setPolicies(List<Policy> policies) {
+ this.policies = policies;
+ }
+
+ @XmlElement(name = "dispatch")
+ public CustomDispatch getDispatch() {
+ return dispatch;
+ }
+
+ public void setDispatch(CustomDispatch dispatch) {
+ this.dispatch = dispatch;
+ }
+}
diff --git a/gateway-service-definitions/src/main/resources/services/falcon/0.6.0/rewrite.xml b/gateway-service-definitions/src/main/resources/services/falcon/0.6.0/rewrite.xml
new file mode 100644
index 0000000..bf31856
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/falcon/0.6.0/rewrite.xml
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules>
+ <rule dir="IN" name="FALCON/falcon/inbound" pattern="*://*:*/**/falcon/api/{path=**}?{**}">
+ <rewrite template="{$serviceUrl[FALCON]}/api/{path=**}?{**}"/>
+ </rule>
+</rules>
\ No newline at end of file
diff --git a/gateway-service-definitions/src/main/resources/services/falcon/0.6.0/service.xml b/gateway-service-definitions/src/main/resources/services/falcon/0.6.0/service.xml
new file mode 100644
index 0000000..716f165
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/falcon/0.6.0/service.xml
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="FALCON" name="falcon" version="0.6.0">
+ <routes>
+ <route path="/falcon/api/**"/>
+ </routes>
+</service>
diff --git a/gateway-service-definitions/src/main/resources/services/hbase/0.98.0/rewrite.xml b/gateway-service-definitions/src/main/resources/services/hbase/0.98.0/rewrite.xml
new file mode 100644
index 0000000..64ca750
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/hbase/0.98.0/rewrite.xml
@@ -0,0 +1,62 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules>
+
+ <rule dir="IN" name="WEBHBASE/webhbase/root/inbound" pattern="*://*:*/**/hbase/?{**}">
+ <rewrite template="{$serviceUrl[WEBHBASE]}/?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHBASE/webhbase/path/inbound" pattern="*://*:*/**/hbase/{path=**}?{**}">
+ <rewrite template="{$serviceUrl[WEBHBASE]}/{path=**}?{**}"/>
+ </rule>
+
+ <rule name="WEBHBASE/webhbase/location/outbound">
+ <match pattern="*://*:*/{path=**}?{**}"/>
+ <rewrite template="{$frontend[url]}/hbase/{path=**}?{**}"/>
+ </rule>
+
+ <rule name="WEBHBASE/webhbase/address/outbound">
+ <match pattern="{host}:{port}"/>
+ <rewrite template="{$frontend[url]}/hbase-region?host={host}?port={port}"/>
+ <encrypt-query/>
+ </rule>
+
+ <filter name="WEBHBASE/webhbase/headers/outbound">
+ <content type="application/x-http-headers">
+ <apply path="Location" rule="WEBHBASE/webhbase/location/outbound"/>
+ </content>
+ </filter>
+
+ <filter name="WEBHBASE/webhbase/status/outbound">
+ <content type="*/json">
+ <apply path="$[LiveNodes][*][name]" rule="WEBHBASE/webhbase/address/outbound"/>
+ </content>
+ <content type="*/xml">
+ <apply path="/ClusterStatus/LiveNodes/Node/@name" rule="WEBHBASE/webhbase/address/outbound"/>
+ </content>
+ </filter>
+
+ <filter name="WEBHBASE/webhbase/regions/outbound">
+ <content type="*/json">
+ <apply path="$[Region][*][location]" rule="WEBHBASE/webhbase/address/outbound"/>
+ </content>
+ <content type="*/xml">
+ <apply path="/TableInfo/Region/@location" rule="WEBHBASE/webhbase/address/outbound"/>
+ </content>
+ </filter>
+
+</rules>
\ No newline at end of file
diff --git a/gateway-service-definitions/src/main/resources/services/hbase/0.98.0/service.xml b/gateway-service-definitions/src/main/resources/services/hbase/0.98.0/service.xml
new file mode 100644
index 0000000..37d49bb
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/hbase/0.98.0/service.xml
@@ -0,0 +1,33 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="WEBHBASE" name="webhbase" version="0.98.0">
+ <routes>
+ <route path="/hbase/?**">
+ <rewrite apply="WEBHBASE/webhbase/headers/outbound" to="response.headers"/>
+ </route>
+ <route path="/hbase/**?**">
+ <rewrite apply="WEBHBASE/webhbase/headers/outbound" to="response.headers"/>
+ </route>
+ <route path="/hbase/status/cluster?**">
+ <rewrite apply="WEBHBASE/webhbase/status/outbound" to="response.body"/>
+ </route>
+ <route path="/hbase/*/regions?**">
+ <rewrite apply="WEBHBASE/webhbase/regions/outbound" to="response.body"/>
+ </route>
+ </routes>
+ <dispatch classname="org.apache.hadoop.gateway.hbase.HBaseDispatch"/>
+</service>
\ No newline at end of file
diff --git a/gateway-service-definitions/src/main/resources/services/hive/0.13.0/rewrite.xml b/gateway-service-definitions/src/main/resources/services/hive/0.13.0/rewrite.xml
new file mode 100644
index 0000000..765e017
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/hive/0.13.0/rewrite.xml
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules>
+ <rule dir="IN" name="HIVE/hive/inbound" pattern="*://*:*/**/hive">
+ <rewrite template="{$serviceUrl[HIVE]}"/>
+ </rule>
+</rules>
\ No newline at end of file
diff --git a/gateway-service-definitions/src/main/resources/services/hive/0.13.0/service.xml b/gateway-service-definitions/src/main/resources/services/hive/0.13.0/service.xml
new file mode 100644
index 0000000..aba1d23
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/hive/0.13.0/service.xml
@@ -0,0 +1,22 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="HIVE" name="hive" version="0.13.0">
+ <routes>
+ <route path="/hive"/>
+ </routes>
+ <dispatch classname="org.apache.hadoop.gateway.hive.HiveDispatch"/>
+</service>
diff --git a/gateway-service-oozie/src/main/resources/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor/rewrite.xml b/gateway-service-definitions/src/main/resources/services/oozie/4.0.0/rewrite.xml
similarity index 100%
rename from gateway-service-oozie/src/main/resources/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor/rewrite.xml
rename to gateway-service-definitions/src/main/resources/services/oozie/4.0.0/rewrite.xml
diff --git a/gateway-service-definitions/src/main/resources/services/oozie/4.0.0/service.xml b/gateway-service-definitions/src/main/resources/services/oozie/4.0.0/service.xml
new file mode 100644
index 0000000..7bc7eb3
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/oozie/4.0.0/service.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="OOZIE" name="oozie" version="4.0.0">
+ <routes>
+ <route path="/oozie/**?**">
+ <rewrite apply="OOZIE/oozie/configuration" to="request.body"/>
+ </route>
+ <route path="/oozie/v1/**?**">
+ <rewrite apply="OOZIE/oozie/configuration" to="request.body"/>
+ </route>
+ <route path="/oozie/v2/**?**">
+ <rewrite apply="OOZIE/oozie/configuration" to="request.body"/>
+ </route>
+ </routes>
+</service>
diff --git a/gateway-service-definitions/src/main/resources/services/storm-logviewer/0.9.3/rewrite.xml b/gateway-service-definitions/src/main/resources/services/storm-logviewer/0.9.3/rewrite.xml
new file mode 100644
index 0000000..848a20d
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/storm-logviewer/0.9.3/rewrite.xml
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules>
+ <rule dir="IN" name="STORM/storm/inbound" pattern="*://*:*/**/storm/logviewer/{path=**}?{**}">
+ <rewrite template="{$serviceUrl[STORM-LOGVIEWER]}/{path=**}?{**}"/>
+ </rule>
+</rules>
\ No newline at end of file
diff --git a/gateway-service-definitions/src/main/resources/services/storm-logviewer/0.9.3/service.xml b/gateway-service-definitions/src/main/resources/services/storm-logviewer/0.9.3/service.xml
new file mode 100644
index 0000000..dd13e65
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/storm-logviewer/0.9.3/service.xml
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="STORM-LOGVIEWER" name="storm-logviewer" version="0.9.3">
+ <routes>
+ <route path="/storm/logviewer/**"/>
+ </routes>
+</service>
diff --git a/gateway-service-definitions/src/main/resources/services/storm/0.9.3/rewrite.xml b/gateway-service-definitions/src/main/resources/services/storm/0.9.3/rewrite.xml
new file mode 100644
index 0000000..9015ebb
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/storm/0.9.3/rewrite.xml
@@ -0,0 +1,38 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules>
+ <rule dir="IN" name="STORM/storm/inbound" pattern="*://*:*/**/storm/api/{path=**}?{**}">
+ <rewrite template="{$serviceUrl[STORM]}/api/{path=**}?{**}"/>
+ </rule>
+ <rule dir="OUT" name="STORM/storm/url/outbound">
+ <match pattern="*://*:*/{**}?{**}"/>
+ <rewrite template="{$frontend[url]}/storm/logviewer/{**}?{**}"/>
+ </rule>
+ <filter name="STORM/storm/logs/outbound">
+ <content type="*/json">
+ <buffer path="$.executorStats[*]">
+ <apply path="$.workerLogLink" rule="STORM/storm/url/outbound"/>
+ </buffer>
+ <buffer path="$.spouts[*]">
+ <apply path="$.errorWorkerLogLink" rule="STORM/storm/url/outbound"/>
+ </buffer>
+ <buffer path="$.bolts[*]">
+ <apply path="$.errorWorkerLogLink" rule="STORM/storm/url/outbound"/>
+ </buffer>
+ </content>
+ </filter>
+</rules>
\ No newline at end of file
diff --git a/gateway-service-definitions/src/main/resources/services/storm/0.9.3/service.xml b/gateway-service-definitions/src/main/resources/services/storm/0.9.3/service.xml
new file mode 100644
index 0000000..ffe20a4
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/storm/0.9.3/service.xml
@@ -0,0 +1,28 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="STORM" name="storm" version="0.9.3">
+ <routes>
+ <route path="/storm/api/**"/>
+ <route path="/storm/api/v1/topology/*">
+ <rewrite apply="STORM/storm/logs/outbound" to="response.body"/>
+ </route>
+ <route path="/storm/api/v1/topology/**/component/**?**">
+ <rewrite apply="STORM/storm/logs/outbound" to="response.body"/>
+ </route>
+ </routes>
+ <dispatch classname="org.apache.hadoop.gateway.storm.StormDispatch"/>
+</service>
diff --git a/gateway-service-webhcat/src/main/resources/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributor/rewrite.xml b/gateway-service-definitions/src/main/resources/services/webhcat/0.13.0/rewrite.xml
similarity index 100%
rename from gateway-service-webhcat/src/main/resources/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributor/rewrite.xml
rename to gateway-service-definitions/src/main/resources/services/webhcat/0.13.0/rewrite.xml
diff --git a/gateway-service-definitions/src/main/resources/services/webhcat/0.13.0/service.xml b/gateway-service-definitions/src/main/resources/services/webhcat/0.13.0/service.xml
new file mode 100644
index 0000000..c550dc1
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/webhcat/0.13.0/service.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="WEBHCAT" name="webhcat" version="0.13.0">
+ <routes>
+ <route path="/templeton/v1/?**"/>
+ <route path="/templeton/v1/**?**"/>
+ </routes>
+</service>
diff --git a/gateway-service-definitions/src/main/resources/services/webhdfs/2.4.0/rewrite.xml b/gateway-service-definitions/src/main/resources/services/webhdfs/2.4.0/rewrite.xml
new file mode 100644
index 0000000..efbd93d
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/webhdfs/2.4.0/rewrite.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules>
+
+ <rule dir="OUT" name="WEBHDFS/webhdfs/outbound" pattern="hdfs://*:*/{path=**}?{**}">
+ <rewrite template="{$frontend[url]}/webhdfs/v1/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="OUT" name="WEBHDFS/webhdfs/outbound" pattern="webhdfs://*:*/{path=**}?{**}">
+ <rewrite template="{$frontend[url]}/webhdfs/v1/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="OUT" name="WEBHDFS/webhdfs/outbound/namenode/headers/location">
+ <match pattern="{scheme}://{host}:{port}/{path=**}?{**}"/>
+ <rewrite template="{$frontend[url]}/webhdfs/data/v1/{path=**}?{scheme}?host={$hostmap(host)}?{port}?{**}"/>
+ <encrypt-query/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/hdfs" pattern="hdfs:/{path=**}?{**}">
+ <rewrite template="{$serviceMappedUrl[NAMENODE]}/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/webhdfs" pattern="webhdfs:/{path=**}?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/namenode/root" pattern="*://*:*/**/webhdfs/{version}/?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{version}/?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/namenode/file" pattern="*://*:*/**/webhdfs/{version}/{path=**}?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{version}/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/namenode/home" pattern="*://*:*/**/webhdfs/{version}/~?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{version}/user/{$username}?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/namenode/home/file" pattern="*://*:*/**/webhdfs/{version}/~/{path=**}?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{version}/user/{$username}/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/datanode">
+ <decrypt-query/>
+ <match pattern="*://*:*/**/webhdfs/data/*/{path=**}?{scheme}?{host}?{port}?{**}"/>
+ <rewrite template="{scheme}://{host}:{port}/{path=**}?{**}"/>
+ </rule>
+
+ <filter name="WEBHDFS/webhdfs/outbound/namenode/headers">
+ <content type="application/x-http-headers">
+ <apply path="Location" rule="WEBHDFS/webhdfs/outbound/namenode/headers/location"/>
+ </content>
+ </filter>
+
+</rules>
\ No newline at end of file
diff --git a/gateway-service-definitions/src/main/resources/services/webhdfs/2.4.0/service.xml b/gateway-service-definitions/src/main/resources/services/webhdfs/2.4.0/service.xml
new file mode 100644
index 0000000..f958b42
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/webhdfs/2.4.0/service.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="WEBHDFS" name="webhdfs" version="2.4.0">
+ <routes>
+ <route path="/webhdfs/v1/?**">
+ <rewrite apply="WEBHDFS/webhdfs/inbound/namenode/root" to="request.url"/>
+ </route>
+ <route path="/webhdfs/v1/**?**">
+ <rewrite apply="WEBHDFS/webhdfs/inbound/namenode/file" to="request.url"/>
+ <rewrite apply="WEBHDFS/webhdfs/outbound/namenode/headers" to="response.headers"/>
+ </route>
+ <route path="/webhdfs/v1/~?**">
+ <rewrite apply="WEBHDFS/webhdfs/inbound/namenode/home" to="request.url"/>
+ </route>
+ <route path="/webhdfs/v1/~/**?**">
+ <rewrite apply="WEBHDFS/webhdfs/inbound/namenode/home/file" to="request.url"/>
+ <rewrite apply="WEBHDFS/webhdfs/outbound/namenode/headers" to="response.headers"/>
+ </route>
+ <route path="/webhdfs/data/v1/**?**">
+ <rewrite apply="WEBHDFS/webhdfs/inbound/datanode" to="request.url"/>
+ <dispatch contributor-name="http-client" />
+ </route>
+ </routes>
+ <dispatch classname="org.apache.hadoop.gateway.hdfs.dispatch.HdfsHttpClientDispatch" ha-classname="org.apache.hadoop.gateway.hdfs.dispatch.WebHdfsHaDispatch"/>
+</service>
diff --git a/gateway-service-yarn-rm/src/main/resources/org/apache/hadoop/gateway/yarn/rm/ResourceManagerDeploymentContributor/rewrite.xml b/gateway-service-definitions/src/main/resources/services/yarn-rm/2.5.0/rewrite.xml
similarity index 100%
rename from gateway-service-yarn-rm/src/main/resources/org/apache/hadoop/gateway/yarn/rm/ResourceManagerDeploymentContributor/rewrite.xml
rename to gateway-service-definitions/src/main/resources/services/yarn-rm/2.5.0/rewrite.xml
diff --git a/gateway-service-definitions/src/main/resources/services/yarn-rm/2.5.0/service.xml b/gateway-service-definitions/src/main/resources/services/yarn-rm/2.5.0/service.xml
new file mode 100644
index 0000000..73fb771
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/yarn-rm/2.5.0/service.xml
@@ -0,0 +1,54 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="RESOURCEMANAGER" name="resourcemanager" version="2.5.0">
+ <routes>
+ <route path="/resourcemanager/v1/cluster/"/>
+ <route path="/resourcemanager/v1/cluster/**?**"/>
+ <route path="/resourcemanager/v1/cluster/apps?**">
+ <rewrite apply="RESOURCEMANAGER/resourcemanager/apps/outbound" to="response.body"/>
+ </route>
+ <route path="/resourcemanager/v1/cluster/apps?**">
+ <rewrite apply="RESOURCEMANAGER/resourcemanager/apps/outbound" to="response.body"/>
+ </route>
+ <route path="/resourcemanager/v1/cluster/apps/*?**">
+ <rewrite apply="RESOURCEMANAGER/resourcemanager/app/outbound" to="response.body"/>
+ </route>
+ <route path="/resourcemanager/v1/cluster/apps/*/appattempts?**">
+ <rewrite apply="RESOURCEMANAGER/resourcemanager/appattempts/outbound" to="response.body"/>
+ </route>
+ <route path="/resourcemanager/v1/cluster/nodes?**">
+ <rewrite apply="RESOURCEMANAGER/resourcemanager/nodes/outbound" to="response.body"/>
+ </route>
+ <route path="/resourcemanager/v1/cluster/nodes/*?**">
+ <rewrite apply="RESOURCEMANAGER/resourcemanager/nodeId/inbound" to="request.url"/>
+ <rewrite apply="RESOURCEMANAGER/resourcemanager/node/outbound" to="response.body"/>
+ </route>
+ <route path="/resourcemanager/proxy/*/ws/v1/**?**">
+ <rewrite apply="RESOURCEMANAGER/resourcemanager/inbound/proxy" to="request.url"/>
+ </route>
+ <route path="/resourcemanager/proxy/*/ws/v1/mapreduce/jobs/*/jobattempts">
+ <rewrite apply="RESOURCEMANAGER/resourcemanager/proxy/jobattempts/outbound" to="response.body"/>
+ </route>
+ <route path="/resourcemanager/proxy/*/ws/v1/mapreduce/jobs/*/tasks/*/attempts">
+ <rewrite apply="RESOURCEMANAGER/resourcemanager/proxy/taskattempts/outbound" to="response.body"/>
+ </route>
+ <route path="/resourcemanager/proxy/*/ws/v1/mapreduce/jobs/*/tasks/*/attempts/*">
+ <rewrite apply="RESOURCEMANAGER/resourcemanager/proxy/taskattempt/outbound" to="response.body"/>
+ </route>
+ </routes>
+</service>
diff --git a/gateway-service-definitions/src/test/java/org/apache/hadoop/gateway/service/definition/ServiceDefinitionTest.java b/gateway-service-definitions/src/test/java/org/apache/hadoop/gateway/service/definition/ServiceDefinitionTest.java
new file mode 100644
index 0000000..f4a92cb
--- /dev/null
+++ b/gateway-service-definitions/src/test/java/org/apache/hadoop/gateway/service/definition/ServiceDefinitionTest.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.service.definition;
+
+import org.junit.Test;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.Unmarshaller;
+import java.net.URL;
+import java.util.List;
+
+import static org.junit.Assert.*;
+
+public class ServiceDefinitionTest {
+
+ @Test
+ public void testUnmarshalling() throws Exception {
+ JAXBContext context = JAXBContext.newInstance(ServiceDefinition.class);
+ Unmarshaller unmarshaller = context.createUnmarshaller();
+ URL url = ClassLoader.getSystemResource("services/foo/1.0.0/service.xml");
+ ServiceDefinition definition = (ServiceDefinition) unmarshaller.unmarshal(url.openStream());
+ assertEquals("foo", definition.getName());
+ assertEquals("FOO", definition.getRole());
+ assertEquals("1.0.0", definition.getVersion());
+ assertEquals("custom-client", definition.getDispatch().getContributorName());
+ assertEquals("ha-client", definition.getDispatch().getHaContributorName());
+ List<Policy> policies = definition.getPolicies();
+ assertEquals(5, policies.size());
+ String[] policyOrder = new String[]{"webappsec", "authentication", "rewrite", "identity-assertion", "authorization"};
+ for (int i=0; i< policyOrder.length; i++ ) {
+ assertEquals(policyOrder[i], policies.get(i).getRole());
+ }
+ List<Route> routes = definition.getRoutes();
+ assertNotNull(routes);
+ assertEquals(1, routes.size());
+ Route route = routes.get(0);
+ assertEquals("/foo/?**", route.getPath());
+ assertEquals("http-client", route.getDispatch().getContributorName());
+ policies = route.getPolicies();
+ assertEquals(5, policies.size());
+ policyOrder = new String[]{"webappsec", "federation", "identity-assertion", "authorization", "rewrite"};
+ for (int i=0; i< policyOrder.length; i++ ) {
+ assertEquals(policyOrder[i], policies.get(i).getRole());
+ }
+ }
+
+ @Test
+ public void testUnmarshallingCommonServices() throws Exception {
+ JAXBContext context = JAXBContext.newInstance(ServiceDefinition.class);
+ Unmarshaller unmarshaller = context.createUnmarshaller();
+ URL url = ClassLoader.getSystemResource("services/yarn-rm/2.5.0/service.xml");
+ ServiceDefinition definition = (ServiceDefinition) unmarshaller.unmarshal(url.openStream());
+ assertEquals("resourcemanager", definition.getName());
+ assertEquals("RESOURCEMANAGER", definition.getRole());
+ assertEquals("2.5.0", definition.getVersion());
+ List<Route> routes = definition.getRoutes();
+ assertNotNull(routes);
+ assertEquals(12, routes.size());
+ assertNotNull(routes.get(0).getPath());
+ url = ClassLoader.getSystemResource("services/hbase/0.98.0/service.xml");
+ definition = (ServiceDefinition) unmarshaller.unmarshal(url.openStream());
+ assertNotNull(definition.getDispatch());
+ assertEquals("org.apache.hadoop.gateway.hbase.HBaseDispatch", definition.getDispatch().getClassName());
+ url = ClassLoader.getSystemResource("services/webhdfs/2.4.0/service.xml");
+ definition = (ServiceDefinition) unmarshaller.unmarshal(url.openStream());
+ assertNotNull(definition.getDispatch());
+ assertEquals("org.apache.hadoop.gateway.hdfs.dispatch.HdfsHttpClientDispatch", definition.getDispatch().getClassName());
+ assertEquals("org.apache.hadoop.gateway.hdfs.dispatch.WebHdfsHaDispatch", definition.getDispatch().getHaClassName());
+ }
+
+}
diff --git a/gateway-service-definitions/src/test/resources/services/foo/1.0.0/rewrite.xml b/gateway-service-definitions/src/test/resources/services/foo/1.0.0/rewrite.xml
new file mode 100644
index 0000000..761bd4b
--- /dev/null
+++ b/gateway-service-definitions/src/test/resources/services/foo/1.0.0/rewrite.xml
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules>
+ <rule dir="IN" name="FOO/foo/inbound" pattern="*://*:*/**/foo">
+ <rewrite template="{$serviceUrl[FOO]}"/>
+ </rule>
+</rules>
\ No newline at end of file
diff --git a/gateway-service-definitions/src/test/resources/services/foo/1.0.0/service.xml b/gateway-service-definitions/src/test/resources/services/foo/1.0.0/service.xml
new file mode 100644
index 0000000..21a10f1
--- /dev/null
+++ b/gateway-service-definitions/src/test/resources/services/foo/1.0.0/service.xml
@@ -0,0 +1,39 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="FOO" name="foo" version="1.0.0">
+ <policies>
+ <policy role="webappsec"/>
+ <policy role="authentication"/>
+ <policy role="rewrite"/>
+ <policy role="identity-assertion"/>
+ <policy role="authorization"/>
+ </policies>
+ <routes>
+ <route path="/foo/?**">
+ <rewrite apply="FOO/foo/inbound" to="request.url"/>
+ <policies>
+ <policy role="webappsec"/>
+ <policy role="federation"/>
+ <policy role="identity-assertion"/>
+ <policy role="authorization"/>
+ <policy role="rewrite"/>
+ </policies>
+ <dispatch contributor-name="http-client" />
+ </route>
+ </routes>
+ <dispatch contributor-name="custom-client" ha-contributor-name="ha-client"/>
+</service>
\ No newline at end of file
diff --git a/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributor.java b/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributor.java
deleted file mode 100644
index 15372ec..0000000
--- a/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributor.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.hbase;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptorFactory;
-import org.apache.hadoop.gateway.topology.Service;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-
-public class HBaseDeploymentContributor extends ServiceDeploymentContributorBase {
-
- private static final String RULES_RESOURCE = HBaseDeploymentContributor.class.getName().replace( '.', '/' ) + "/rewrite.xml";
- private static final String EXTERNAL_PATH = "/hbase";
-
- @Override
- public String getRole() {
- return "WEBHBASE";
- }
-
- @Override
- public String getName() {
- return "webhbase";
- }
-
- @Override
- public void contributeService( DeploymentContext context, Service service ) throws Exception {
- contributeRewriteRules( context, service );
- contributeResources( context, service );
- }
-
- private void contributeRewriteRules( DeploymentContext context, Service service ) throws URISyntaxException, IOException {
- UrlRewriteRulesDescriptor hbaseRules = loadRulesFromTemplate();
- UrlRewriteRulesDescriptor clusterRules = context.getDescriptor( "rewrite" );
- clusterRules.addRules( hbaseRules );
- }
-
- private void contributeResources( DeploymentContext context, Service service ) throws URISyntaxException {
- List<FilterParamDescriptor> params;
-
- ResourceDescriptor rootResource = context.getGatewayDescriptor().addResource();
- rootResource.role( service.getRole() );
- rootResource.pattern( EXTERNAL_PATH + "/?**" );
- addWebAppSecFilters(context, service, rootResource);
- addAuthenticationFilter( context, service, rootResource );
- params = new ArrayList<FilterParamDescriptor>();
- params.add( rootResource.createFilterParam().name( "response.headers" ).value( getQualifiedName() + "/headers/outbound" ) );
- addRewriteFilter( context, service, rootResource, params );
- addIdentityAssertionFilter( context, service, rootResource );
- addAuthorizationFilter(context, service, rootResource);
- addDispatchFilter( context, service, rootResource );
-
- ResourceDescriptor pathResource = context.getGatewayDescriptor().addResource();
- pathResource.role( service.getRole() );
- pathResource.pattern( EXTERNAL_PATH + "/**?**" );
- addWebAppSecFilters(context, service, pathResource);
- addAuthenticationFilter( context, service, pathResource );
- params = new ArrayList<FilterParamDescriptor>();
- params.add( rootResource.createFilterParam().name( "response.headers" ).value( getQualifiedName() + "/headers/outbound" ) );
- addRewriteFilter( context, service, pathResource, params );
- addIdentityAssertionFilter( context, service, pathResource );
- addAuthorizationFilter(context, service, pathResource);
- addDispatchFilter( context, service, pathResource );
-
- ResourceDescriptor statusResource = context.getGatewayDescriptor().addResource();
- statusResource.role( service.getRole() );
- statusResource.pattern( EXTERNAL_PATH + "/status/cluster?**" );
- addWebAppSecFilters(context, service, statusResource);
- addAuthenticationFilter( context, service, statusResource );
- params = new ArrayList<FilterParamDescriptor>();
- params.add( statusResource.createFilterParam().name( "response.body" ).value( getQualifiedName() + "/status/outbound" ) );
- addRewriteFilter( context, service, statusResource, params );
- addIdentityAssertionFilter( context, service, statusResource );
- addAuthorizationFilter(context, service, statusResource);
- addDispatchFilter( context, service, statusResource );
-
- ResourceDescriptor regionResource = context.getGatewayDescriptor().addResource();
- regionResource.role( service.getRole() );
- regionResource.pattern( EXTERNAL_PATH + "/*/regions?**" );
- addWebAppSecFilters(context, service, regionResource);
- addAuthenticationFilter( context, service, regionResource );
- params = new ArrayList<FilterParamDescriptor>();
- params.add( regionResource.createFilterParam().name( "response.body" ).value( getQualifiedName() + "/regions/outbound" ) );
- addRewriteFilter( context, service, regionResource, params );
- addIdentityAssertionFilter( context, service, regionResource );
- addAuthorizationFilter(context, service, regionResource);
- addDispatchFilter( context, service, regionResource );
- }
-
- private void addDispatchFilter(
- DeploymentContext context, Service service, ResourceDescriptor resource ) {
- context.contributeFilter( service, resource, "dispatch", "hbase", null );
- }
-
- private String getQualifiedName() {
- return getRole() + "/" + getName();
- }
-
- UrlRewriteRulesDescriptor loadRulesFromTemplate() throws IOException {
- InputStream stream = this.getClass().getClassLoader().getResourceAsStream( RULES_RESOURCE );
- Reader reader = new InputStreamReader( stream );
- UrlRewriteRulesDescriptor rules = UrlRewriteRulesDescriptorFactory.load( "xml", reader );
- reader.close();
- stream.close();
- return rules;
- }
-
-}
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java b/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDispatch.java
similarity index 69%
copy from gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
copy to gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDispatch.java
index d2aa441..b019aed 100644
--- a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
+++ b/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDispatch.java
@@ -15,17 +15,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.identityasserter.function;
+package org.apache.hadoop.gateway.hbase;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor;
+import org.apache.hadoop.gateway.dispatch.DefaultDispatch;
-public class UsernameFunctionDescriptor implements UrlRewriteFunctionDescriptor<UsernameFunctionDescriptor> {
-
- public static final String FUNCTION_NAME = "username";
+/**
+ * This specialized dispatch provides HBase specific features to the
+ * default HttpClientDispatch.
+ */
+public class HBaseDispatch extends DefaultDispatch {
@Override
- public String name() {
- return FUNCTION_NAME;
+ public void init() {
+ super.init();
+ setAppCookieManager(new HBaseCookieManager());
}
}
+
diff --git a/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDispatchDeploymentContributor.java b/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDispatchDeploymentContributor.java
deleted file mode 100644
index d2b4ce0..0000000
--- a/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseDispatchDeploymentContributor.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.hbase;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterDescriptor;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.topology.Provider;
-import org.apache.hadoop.gateway.topology.Service;
-
-import java.util.List;
-import java.util.Map;
-
-public class HBaseDispatchDeploymentContributor extends ProviderDeploymentContributorBase {
-
- private static final String REPLAY_BUFFER_SIZE_PARAM = "replayBufferSize";
-
- // Default global replay buffer size in KB
- public static final String DEFAULT_REPLAY_BUFFER_SIZE = "8";
-
- @Override
- public String getRole() {
- return "dispatch";
- }
-
- @Override
- public String getName() {
- return "hbase";
- }
-
- @Override
- public void contributeFilter( DeploymentContext context, Provider provider, Service service, ResourceDescriptor resource, List<FilterParamDescriptor> params ) {
- FilterDescriptor filter = resource.addFilter().name( getName() ).role( getRole() ).impl( HBaseHttpClientDispatch.class );
-
- FilterParamDescriptor filterParam = filter.param().name( REPLAY_BUFFER_SIZE_PARAM ).value( DEFAULT_REPLAY_BUFFER_SIZE );
- for ( Map.Entry<String,String> serviceParam : service.getParams().entrySet() ) {
- if ( REPLAY_BUFFER_SIZE_PARAM.equals( serviceParam.getKey() ) ) {
- filterParam.value( serviceParam.getValue() );
- }
- }
- if ( params != null ) {
- for ( FilterParamDescriptor customParam : params ) {
- if ( REPLAY_BUFFER_SIZE_PARAM.equals( customParam.name() ) ) {
- filterParam.value( customParam.value() );
- }
- }
- }
-
- if( context.getGatewayConfig().isHadoopKerberosSecured() ) {
- filter.param().name("kerberos").value("true");
- }
- }
-}
diff --git a/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseHttpClientDispatch.java b/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseHttpClientDispatch.java
index a569692..5f5025d 100644
--- a/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseHttpClientDispatch.java
+++ b/gateway-service-hbase/src/main/java/org/apache/hadoop/gateway/hbase/HBaseHttpClientDispatch.java
@@ -17,21 +17,22 @@
*/
package org.apache.hadoop.gateway.hbase;
-import org.apache.hadoop.gateway.dispatch.HttpClientDispatch;
+import org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
-/**
- * This specialized dispatch provides HBase specific features to the
- * default HttpClientDispatch.
+/**
+ * KNOX-526. Need to keep this class around for backward compatibility of deployed
+ * topologies. This is required for releases older than Apache Knox 0.6.0
*/
-public class HBaseHttpClientDispatch extends HttpClientDispatch {
+@Deprecated
+public class HBaseHttpClientDispatch extends GatewayDispatchFilter {
@Override
- public void init( FilterConfig filterConfig ) throws ServletException {
- super.init( filterConfig, new HBaseCookieManager() );
+ public void init(FilterConfig filterConfig) throws ServletException {
+ setDispatch(new HBaseDispatch());
+ super.init(filterConfig);
}
-
}
diff --git a/gateway-service-hbase/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor b/gateway-service-hbase/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
deleted file mode 100644
index 610bfbb..0000000
--- a/gateway-service-hbase/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.hbase.HBaseDispatchDeploymentContributor
diff --git a/gateway-service-hbase/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor b/gateway-service-hbase/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
deleted file mode 100644
index 5fc751f..0000000
--- a/gateway-service-hbase/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.hbase.HBaseDeploymentContributor
diff --git a/gateway-service-hbase/src/test/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributorTest.java b/gateway-service-hbase/src/test/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributorTest.java
deleted file mode 100644
index 3c8c1b1..0000000
--- a/gateway-service-hbase/src/test/java/org/apache/hadoop/gateway/hbase/HBaseDeploymentContributorTest.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.hbase;
-
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor;
-import org.junit.Test;
-
-import java.util.Iterator;
-import java.util.ServiceLoader;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.fail;
-
-public class HBaseDeploymentContributorTest {
-
- @Test
- public void testServiceLoader() throws Exception {
- ServiceLoader loader = ServiceLoader.load( ServiceDeploymentContributor.class );
- Iterator iterator = loader.iterator();
- assertThat( "Service iterator empty.", iterator.hasNext() );
- while( iterator.hasNext() ) {
- Object object = iterator.next();
- if( object instanceof HBaseDeploymentContributor ) {
- return;
- }
- }
- fail( "Failed to find " + HBaseDeploymentContributor.class.getName() + " via service loader." );
- }
-
-}
diff --git a/gateway-service-hbase/src/test/java/org/apache/hadoop/gateway/hbase/HBaseDispatchDeploymentContributorTest.java b/gateway-service-hbase/src/test/java/org/apache/hadoop/gateway/hbase/HBaseDispatchDeploymentContributorTest.java
deleted file mode 100644
index e2b4d10..0000000
--- a/gateway-service-hbase/src/test/java/org/apache/hadoop/gateway/hbase/HBaseDispatchDeploymentContributorTest.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.hbase;
-
-import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor;
-import org.junit.Test;
-
-import java.util.Iterator;
-import java.util.ServiceLoader;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.fail;
-
-public class HBaseDispatchDeploymentContributorTest {
-
- @Test
- public void testServiceLoader() throws Exception {
- ServiceLoader loader = ServiceLoader.load( ProviderDeploymentContributor.class );
- Iterator iterator = loader.iterator();
- assertThat( "Service iterator empty.", iterator.hasNext() );
- while( iterator.hasNext() ) {
- Object object = iterator.next();
- if( object instanceof HBaseDispatchDeploymentContributor ) {
- return;
- }
- }
- fail( "Failed to find " + HBaseDispatchDeploymentContributor.class.getName() + " via service loader." );
- }
-
-}
diff --git a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java
deleted file mode 100644
index eb3779f..0000000
--- a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDeploymentContributor.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.hive;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRuleDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.ext.UrlRewriteActionRewriteDescriptorExt;
-import org.apache.hadoop.gateway.topology.Service;
-
-import java.net.URISyntaxException;
-
-public class HiveDeploymentContributor extends ServiceDeploymentContributorBase {
-
- private static final String ROLE = "HIVE";
- private static final String NAME = "hive";
- private static final String EXTERNAL_PATH = "/hive";
-
- @Override
- public String getRole() {
- return ROLE;
- }
-
- @Override
- public String getName() {
- return NAME;
- }
-
- @Override
- public void contributeService( DeploymentContext context, Service service ) throws Exception {
- contributeRewriteRules( context, service );
- contributeResources( context, service );
- }
-
- private void contributeRewriteRules( DeploymentContext context, Service service ) throws URISyntaxException {
- UrlRewriteRulesDescriptor rules = context.getDescriptor( "rewrite" );
- UrlRewriteRuleDescriptor rule;
- UrlRewriteActionRewriteDescriptorExt rewrite;
-
- rule = rules.addRule( getRole() + "/" + getName() + "/inbound" )
- .directions( "inbound" )
- .pattern( "*://*:*/**" + EXTERNAL_PATH );
- rewrite = rule.addStep( "rewrite" );
- rewrite.template( service.getUrl() );
- }
-
- public void contributeResources( DeploymentContext context, Service service ) throws URISyntaxException {
- ResourceDescriptor rootResource = context.getGatewayDescriptor().addResource();
- rootResource.role( service.getRole() );
- rootResource.pattern( EXTERNAL_PATH );
- addWebAppSecFilters(context, service, rootResource);
- addAuthenticationFilter( context, service, rootResource );
- addRewriteFilter( context, service, rootResource );
- addIdentityAssertionFilter( context, service, rootResource );
- addAuthorizationFilter(context, service, rootResource);
- addDispatchFilter( context, service, rootResource );
- }
-
- private void addRewriteFilter(
- DeploymentContext context, Service service, ResourceDescriptor resource ) throws URISyntaxException {
- context.contributeFilter( service, resource, "rewrite", null, null );
- }
-
- private void addDispatchFilter( DeploymentContext context, Service service, ResourceDescriptor resource ) {
- context.contributeFilter( service, resource, "dispatch", "hive", null );
- }
-}
diff --git a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDispatch.java b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDispatch.java
new file mode 100644
index 0000000..06f2d3e
--- /dev/null
+++ b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDispatch.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hive;
+
+import org.apache.hadoop.gateway.config.Configure;
+import org.apache.hadoop.gateway.dispatch.DefaultDispatch;
+import org.apache.hadoop.gateway.security.PrimaryPrincipal;
+import org.apache.http.HttpResponse;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.Credentials;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.client.params.AuthPolicy;
+import org.apache.http.impl.auth.BasicScheme;
+import org.apache.http.impl.auth.SPNegoSchemeFactory;
+import org.apache.http.impl.client.DefaultHttpClient;
+
+import javax.security.auth.Subject;
+import java.io.IOException;
+import java.security.AccessController;
+import java.security.Principal;
+
+/**
+ * This specialized dispatch provides Hive specific features to the
+ * default HttpClientDispatch.
+ */
+public class HiveDispatch extends DefaultDispatch {
+ private static final String PASSWORD_PLACEHOLDER = "*";
+ private boolean basicAuthPreemptive = false;
+ private boolean kerberos = false;
+ private static final EmptyJaasCredentials EMPTY_JAAS_CREDENTIALS = new EmptyJaasCredentials();
+
+ @Override
+ public void init() {
+ super.init();
+ }
+
+ protected Principal getPrimaryPrincipal() {
+ Principal principal = null;
+ Subject subject = Subject.getSubject( AccessController.getContext());
+ if( subject != null ) {
+ principal = (Principal)subject.getPrincipals(PrimaryPrincipal.class).toArray()[0];
+ }
+ return principal;
+ }
+
+ protected void addCredentialsToRequest(HttpUriRequest request) {
+ if( isBasicAuthPreemptive() ) {
+ Principal principal = getPrimaryPrincipal();
+ if( principal != null ) {
+
+ UsernamePasswordCredentials credentials =
+ new UsernamePasswordCredentials( principal.getName(), PASSWORD_PLACEHOLDER );
+
+ request.addHeader(BasicScheme.authenticate(credentials,"US-ASCII",false));
+ }
+ }
+ }
+
+ @Configure
+ public void setBasicAuthPreemptive( boolean basicAuthPreemptive ) {
+ this.basicAuthPreemptive = basicAuthPreemptive;
+ }
+
+ public boolean isBasicAuthPreemptive() {
+ return basicAuthPreemptive;
+ }
+
+ public boolean isKerberos() {
+ return kerberos;
+ }
+
+ @Configure
+ public void setKerberos(boolean kerberos) {
+ this.kerberos = kerberos;
+ }
+
+ protected HttpResponse executeKerberosDispatch(HttpUriRequest outboundRequest,
+ DefaultHttpClient client) throws IOException {
+ //DefaultHttpClient client = new DefaultHttpClient();
+ SPNegoSchemeFactory spNegoSF = new SPNegoSchemeFactory(
+ /* stripPort */true);
+ // spNegoSF.setSpengoGenerator(new BouncySpnegoTokenGenerator());
+ client.getAuthSchemes().register(AuthPolicy.SPNEGO, spNegoSF);
+ client.getCredentialsProvider().setCredentials(
+ new AuthScope(/* host */null, /* port */-1, /* realm */null),
+ EMPTY_JAAS_CREDENTIALS);
+ return client.execute(outboundRequest);
+ }
+
+ private static class EmptyJaasCredentials implements Credentials {
+
+ public String getPassword() {
+ return null;
+ }
+
+ public Principal getUserPrincipal() {
+ return null;
+ }
+
+ }
+
+}
+
diff --git a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributor.java b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributor.java
deleted file mode 100644
index 00b13d9..0000000
--- a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveDispatchDeploymentContributor.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.hive;
-
-import java.util.List;
-import java.util.Map;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterDescriptor;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.topology.Provider;
-import org.apache.hadoop.gateway.topology.Service;
-
-/**
- *
- */
-public class HiveDispatchDeploymentContributor extends ProviderDeploymentContributorBase {
-
- private static final String REPLAY_BUFFER_SIZE_PARAM = "replayBufferSize";
-
- // Default global replay buffer size in KB
- public static final String DEFAULT_REPLAY_BUFFER_SIZE = "8";
-
- @Override
- public String getRole() {
- return "dispatch";
- }
-
- @Override
- public String getName() {
- return "hive";
- }
-
- @Override
- public void contributeFilter( DeploymentContext context, Provider provider, Service service, ResourceDescriptor resource, List<FilterParamDescriptor> params ) {
- FilterDescriptor filter = resource.addFilter().name( getName() ).role( getRole() ).impl( HiveHttpClientDispatch.class );
-
- FilterParamDescriptor filterParam = filter.param().name( REPLAY_BUFFER_SIZE_PARAM ).value( DEFAULT_REPLAY_BUFFER_SIZE );
- for ( Map.Entry<String,String> serviceParam : service.getParams().entrySet() ) {
- if ( REPLAY_BUFFER_SIZE_PARAM.equals( serviceParam.getKey() ) ) {
- filterParam.value( serviceParam.getValue() );
- }
- }
- if ( params != null ) {
- for ( FilterParamDescriptor customParam : params ) {
- if ( REPLAY_BUFFER_SIZE_PARAM.equals( customParam.name() ) ) {
- filterParam.value( customParam.value() );
- }
- }
- }
-
- if( context.getGatewayConfig().isHadoopKerberosSecured() ) {
- filter.param().name("kerberos").value("true");
- } else {
- filter.param().name("basicAuthPreemptive").value("true");
- }
- }
-}
diff --git a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveHttpClientDispatch.java b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveHttpClientDispatch.java
index 21d39ba..d44b123 100644
--- a/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveHttpClientDispatch.java
+++ b/gateway-service-hive/src/main/java/org/apache/hadoop/gateway/hive/HiveHttpClientDispatch.java
@@ -17,103 +17,22 @@
*/
package org.apache.hadoop.gateway.hive;
-import org.apache.hadoop.gateway.dispatch.HttpClientDispatch;
-import org.apache.hadoop.gateway.security.PrimaryPrincipal;
-import org.apache.http.Header;
-import org.apache.http.HttpResponse;
-import org.apache.http.HttpStatus;
-import org.apache.http.auth.AuthScope;
-import org.apache.http.auth.Credentials;
-import org.apache.http.auth.UsernamePasswordCredentials;
-import org.apache.http.client.ClientProtocolException;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.client.params.AuthPolicy;
-import org.apache.http.impl.auth.BasicScheme;
-import org.apache.http.impl.auth.SPNegoSchemeFactory;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.message.BasicHeader;
+import org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter;
-import javax.security.auth.Subject;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
-import java.io.IOException;
-import java.security.AccessController;
-import java.security.Principal;
-
-/**
- * This specialized dispatch provides Hive specific features to the
- * default HttpClientDispatch.
+/**
+ * KNOX-526. Need to keep this class around for backward compatibility of deployed
+ * topologies. This is required for releases older than Apache Knox 0.6.0
*/
-public class HiveHttpClientDispatch extends HttpClientDispatch {
- private static final String BASIC_AUTH_PREEMPTIVE_PARAM = "basicAuthPreemptive";
- private static final String PASSWORD_PLACEHOLDER = "*";
- private boolean basicAuthPreemptive = false;
- private static final EmptyJaasCredentials EMPTY_JAAS_CREDENTIALS = new EmptyJaasCredentials();
+@Deprecated
+public class HiveHttpClientDispatch extends GatewayDispatchFilter {
@Override
- public void init( FilterConfig filterConfig ) throws ServletException {
- super.init( filterConfig );
- String basicAuthPreemptiveString = filterConfig.getInitParameter( BASIC_AUTH_PREEMPTIVE_PARAM );
- if( basicAuthPreemptiveString != null ) {
- setBasicAuthPreemptive( Boolean.parseBoolean( basicAuthPreemptiveString ) );
- }
+ public void init(FilterConfig filterConfig) throws ServletException {
+ setDispatch(new HiveDispatch());
+ super.init(filterConfig);
}
-
- protected Principal getPrimaryPrincipal() {
- Principal principal = null;
- Subject subject = Subject.getSubject( AccessController.getContext());
- if( subject != null ) {
- principal = (Principal)subject.getPrincipals(PrimaryPrincipal.class).toArray()[0];
- }
- return principal;
- }
-
- protected void addCredentialsToRequest(HttpUriRequest request) {
- if( isBasicAuthPreemptive() ) {
- Principal principal = getPrimaryPrincipal();
- if( principal != null ) {
-
- UsernamePasswordCredentials credentials =
- new UsernamePasswordCredentials( principal.getName(), PASSWORD_PLACEHOLDER );
-
- request.addHeader(BasicScheme.authenticate(credentials,"US-ASCII",false));
- }
- }
- }
-
- public void setBasicAuthPreemptive( boolean basicAuthPreemptive ) {
- this.basicAuthPreemptive = basicAuthPreemptive;
- }
-
- public boolean isBasicAuthPreemptive() {
- return basicAuthPreemptive;
- }
-
- protected HttpResponse executeKerberosDispatch(HttpUriRequest outboundRequest,
- DefaultHttpClient client) throws IOException, ClientProtocolException {
- //DefaultHttpClient client = new DefaultHttpClient();
- SPNegoSchemeFactory spNegoSF = new SPNegoSchemeFactory(
- /* stripPort */true);
- // spNegoSF.setSpengoGenerator(new BouncySpnegoTokenGenerator());
- client.getAuthSchemes().register(AuthPolicy.SPNEGO, spNegoSF);
- client.getCredentialsProvider().setCredentials(
- new AuthScope(/* host */null, /* port */-1, /* realm */null),
- EMPTY_JAAS_CREDENTIALS);
- return client.execute(outboundRequest);
- }
-
- private static class EmptyJaasCredentials implements Credentials {
-
- public String getPassword() {
- return null;
- }
-
- public Principal getUserPrincipal() {
- return null;
- }
-
- }
-
}
diff --git a/gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor b/gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
deleted file mode 100644
index 3898804..0000000
--- a/gateway-service-hive/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.hive.HiveDeploymentContributor
diff --git a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/KnoxSSOMessages.java b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/KnoxSSOMessages.java
index cb8b137..f277ac2 100644
--- a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/KnoxSSOMessages.java
+++ b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/KnoxSSOMessages.java
@@ -34,4 +34,13 @@
@Message( level = MessageLevel.ERROR, text = "Unable to properly send needed HTTP status code: {0}, {1}")
void unableToCloseOutputStream(String message, String string);
+
+ @Message( level = MessageLevel.ERROR, text = "Unable to add cookie to response. {0}: {1}")
+ void unableAddCookieToResponse(String message, String stackTrace);
+
+ @Message( level = MessageLevel.ERROR, text = "Original URL not found in request.")
+ void originalURLNotFound();
+
+ @Message( level = MessageLevel.INFO, text = "JWT cookie successfully added.")
+ void addedJWTCookie();
}
\ No newline at end of file
diff --git a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
index 84b74e7..bce09a0 100644
--- a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
+++ b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
@@ -42,8 +42,21 @@
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.MediaType.APPLICATION_XML;
-@Path( "/knoxsso/websso" )
+@Path( WebSSOResource.RESOURCE_PATH )
public class WebSSOResource {
+ /**
+ * Request parameter that carries the URL to redirect the user back to after SSO.
+ */
+ private static final String ORIGINAL_URL_REQUEST_PARAM = "originalUrl";
+ /**
+ * Name of the cookie used to preserve the original URL across SAML redirects.
+ */
+ private static final String ORIGINAL_URL_COOKIE_NAME = "original-url";
+ /**
+ * Name of the cookie that carries the issued JWT back to the client.
+ */
+ private static final String JWT_COOKIE_NAME = "hadoop-jwt";
+ static final String RESOURCE_PATH = "/knoxsso/websso";
private static KnoxSSOMessages log = MessagesFactory.get( KnoxSSOMessages.class );
@Context
@@ -68,12 +81,16 @@
GatewayServices services = (GatewayServices) request.getServletContext()
.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
boolean removeOriginalUrlCookie = true;
- String original = getCookieValue((HttpServletRequest) request, "original-url");
+ String original = getCookieValue((HttpServletRequest) request, ORIGINAL_URL_COOKIE_NAME);
if (original == null) {
// in the case where there is no SAML redirects done before here
// we need to get it from the request parameters
removeOriginalUrlCookie = false;
- original = request.getParameter("originalUrl");
+ original = request.getParameter(ORIGINAL_URL_REQUEST_PARAM);
+ if (original == null) {
+ log.originalURLNotFound();
+ throw new WebApplicationException("Original URL not found in the request.", Response.Status.BAD_REQUEST);
+ }
}
JWTokenAuthority ts = services.getService(GatewayServices.TOKEN_SERVICE);
@@ -100,7 +117,7 @@
public void addJWTHadoopCookie(String original, JWT token) {
log.addingJWTCookie(token.toString());
- Cookie c = new Cookie("hadoop-jwt", token.toString());
+ Cookie c = new Cookie(JWT_COOKIE_NAME, token.toString());
c.setPath("/");
try {
String domain = getDomainName(original);
@@ -109,23 +126,29 @@
c.setSecure(true);
c.setMaxAge(120);
response.addCookie(c);
+ log.addedJWTCookie();
}
catch(Exception e) {
+ log.unableAddCookieToResponse(e.getMessage(), java.util.Arrays.toString(e.getStackTrace()));
throw new WebApplicationException("Unable to add JWT cookie to response.");
}
}
private void removeOriginalUrlCookie(HttpServletResponse response) {
- Cookie c = new Cookie("original-url", null);
+ Cookie c = new Cookie(ORIGINAL_URL_COOKIE_NAME, null);
c.setMaxAge(0);
- c.setPath("/knoxsso/websso");
+ c.setPath(RESOURCE_PATH);
response.addCookie(c);
}
public String getDomainName(String url) throws URISyntaxException {
- URI uri = new URI(url);
- String domain = uri.getHost();
- return domain.startsWith("www.") ? domain.substring(4) : domain.substring(domain.indexOf('.'));
+ URI uri = new URI(url);
+ String domain = uri.getHost();
+ int idx = domain.indexOf('.');
+ if (idx == -1) {
+ idx = 0;
+ }
+ return domain.startsWith("www.") ? domain.substring(4) : domain.substring(idx);
}
private String getCookieValue(HttpServletRequest request, String name) {
diff --git a/gateway-service-oozie/src/main/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor.java b/gateway-service-oozie/src/main/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor.java
deleted file mode 100644
index 0e9ffae..0000000
--- a/gateway-service-oozie/src/main/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributor.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.oozie;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptorFactory;
-import org.apache.hadoop.gateway.topology.Service;
-
-public class OozieDeploymentContributor extends ServiceDeploymentContributorBase {
-
- private static final String RULES_RESOURCE = OozieDeploymentContributor.class.getName().replace( '.', '/' ) + "/rewrite.xml";
- private static final String EXTERNAL_PATH = "/oozie";
-
- private static final String REPLAY_BUFFER_SIZE_PARAM = "replayBufferSize";
-
- // Oozie replay buffer size in KB
- private static final String DEFAULT_REPLAY_BUFFER_SIZE = "8";
-
- @Override
- public String getRole() {
- return "OOZIE";
- }
-
- @Override
- public String getName() {
- return "oozie";
- }
-
- @Override
- public void contributeService( DeploymentContext context, Service service ) throws Exception {
- contributeRewriteRules( context, service );
- contributeResources( context, service );
- }
-
- private void contributeRewriteRules( DeploymentContext context, Service service ) throws URISyntaxException, IOException {
- UrlRewriteRulesDescriptor oozieRules = loadRulesFromTemplate();
- UrlRewriteRulesDescriptor clusterRules = context.getDescriptor( "rewrite" );
- clusterRules.addRules( oozieRules );
- }
-
- public void contributeResources( DeploymentContext context, Service service ) throws URISyntaxException {
- ResourceDescriptor rootResource = context.getGatewayDescriptor().addResource();
- rootResource.role( service.getRole() );
- rootResource.pattern( EXTERNAL_PATH + "/**?**" );
- addWebAppSecFilters(context, service, rootResource);
- addAuthenticationFilter( context, service, rootResource );
- addRewriteFilter( context, service, rootResource );
- addIdentityAssertionFilter( context, service, rootResource );
- addAuthorizationFilter(context, service, rootResource);
- addDispatchFilter( context, service, rootResource );
-
- ResourceDescriptor v1Resource = context.getGatewayDescriptor().addResource();
- v1Resource.role( service.getRole() );
- v1Resource.pattern( EXTERNAL_PATH + "/v1/**?**" );
- addWebAppSecFilters(context, service, v1Resource);
- addAuthenticationFilter( context, service, v1Resource );
- addRewriteFilter( context, service, v1Resource );
- addIdentityAssertionFilter( context, service, v1Resource );
- addAuthorizationFilter(context, service, v1Resource);
- addDispatchFilter( context, service, v1Resource );
-
- ResourceDescriptor v2Resource = context.getGatewayDescriptor().addResource();
- v2Resource.role( service.getRole() );
- v2Resource.pattern( EXTERNAL_PATH + "/v2/**?**" );
- addWebAppSecFilters(context, service, v2Resource);
- addAuthenticationFilter( context, service, v2Resource );
- addRewriteFilter( context, service, v2Resource );
- addIdentityAssertionFilter( context, service, v2Resource );
- addAuthorizationFilter(context, service, v2Resource);
- addDispatchFilter( context, service, v2Resource );
- }
-
- private void addRewriteFilter(
- DeploymentContext context, Service service, ResourceDescriptor resource ) throws URISyntaxException {
- List<FilterParamDescriptor> params = new ArrayList<FilterParamDescriptor>();
- params.add( resource.createFilterParam().name( "request.body" ).value( "OOZIE/oozie/configuration" ) );
- context.contributeFilter( service, resource, "rewrite", null, params );
- }
-
- private void addDispatchFilter(DeploymentContext context, Service service,
- ResourceDescriptor resource) {
- context.contributeFilter(service, resource, "dispatch", "http-client", null );
- }
-
- UrlRewriteRulesDescriptor loadRulesFromTemplate() throws IOException {
- InputStream stream = this.getClass().getClassLoader().getResourceAsStream( RULES_RESOURCE );
- Reader reader = new InputStreamReader( stream );
- UrlRewriteRulesDescriptor rules = UrlRewriteRulesDescriptorFactory.load( "xml", reader );
- reader.close();
- stream.close();
- return rules;
- }
-
-}
\ No newline at end of file
diff --git a/gateway-service-oozie/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor b/gateway-service-oozie/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
deleted file mode 100644
index cba4689..0000000
--- a/gateway-service-oozie/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.oozie.OozieDeploymentContributor
diff --git a/gateway-service-oozie/src/test/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributorTest.java b/gateway-service-oozie/src/test/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributorTest.java
deleted file mode 100644
index fad79f0..0000000
--- a/gateway-service-oozie/src/test/java/org/apache/hadoop/gateway/oozie/OozieDeploymentContributorTest.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.oozie;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.impl.UrlRewriteRulesDescriptorImpl;
-import org.apache.hadoop.gateway.topology.Service;
-import org.easymock.EasyMock;
-import org.jboss.shrinkwrap.api.ShrinkWrap;
-import org.jboss.shrinkwrap.api.spec.WebArchive;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.util.Iterator;
-import java.util.ServiceLoader;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.notNullValue;
-import static org.junit.Assert.fail;
-
-public class OozieDeploymentContributorTest {
-
- @Test
- public void testServiceLoader() throws Exception {
- ServiceLoader loader = ServiceLoader.load( ServiceDeploymentContributor.class );
- Iterator iterator = loader.iterator();
- assertThat( "Service iterator empty.", iterator.hasNext() );
- while( iterator.hasNext() ) {
- Object object = iterator.next();
- if( object instanceof OozieDeploymentContributor ) {
- return;
- }
- }
- fail( "Failed to find " + OozieDeploymentContributor.class.getName() + " via service loader." );
- }
-
- @Test
- public void testLoadRulesFromTemplate() throws IOException, URISyntaxException {
- WebArchive webArchive = ShrinkWrap.create( WebArchive.class, "test-archive" );
-
- UrlRewriteRulesDescriptorImpl clusterRules = new UrlRewriteRulesDescriptorImpl();
-
- DeploymentContext context = EasyMock.createNiceMock( DeploymentContext.class );
- EasyMock.expect( context.getDescriptor( "rewrite" ) ).andReturn( clusterRules ).anyTimes();
- EasyMock.expect( context.getWebArchive() ).andReturn( webArchive ).anyTimes();
-
- Service service = EasyMock.createNiceMock( Service.class );
- EasyMock.expect( service.getRole() ).andReturn( "OOZIE" ).anyTimes();
- EasyMock.expect( service.getName() ).andReturn( null ).anyTimes();
- EasyMock.expect( service.getUrl() ).andReturn( "http://test-host:777" ).anyTimes();
-
- EasyMock.replay( context, service );
-
- OozieDeploymentContributor contributor = new OozieDeploymentContributor();
-
- UrlRewriteRulesDescriptor oozieRules = contributor.loadRulesFromTemplate();
-
- assertThat( oozieRules, notNullValue() );
- assertThat( oozieRules.getFilter( "OOZIE/oozie/configuration" ), notNullValue() );
- }
-
-// @Test
-// public void testDeployment() throws Exception {
-// WebArchive webArchive = ShrinkWrap.create( WebArchive.class, "test-archive" );
-//
-// UrlRewriteRulesDescriptorImpl rewriteRules = new UrlRewriteRulesDescriptorImpl();
-//
-// Map<String,String> serviceParams = new HashMap<String, String>();
-// Service service = new Service();
-// service.setRole( "OOZIE" );
-// service.setName( "oozie" );
-// service.setUrl( "http://test-host:42/test-path" );
-// service.setParams( serviceParams );
-//
-// Topology topology = new Topology();
-// topology.setName( "test-topology" );
-// topology.addService( service );
-//
-// GatewayDescriptor gateway = EasyMock.createNiceMock( GatewayDescriptor.class );
-// DeploymentContext context = EasyMock.createNiceMock( DeploymentContext.class );
-// EasyMock.expect( context.getWebArchive() ).andReturn( webArchive ).anyTimes();
-// EasyMock.expect( context.getDescriptor( "rewrite" ) ).andReturn( rewriteRules ).anyTimes();
-// EasyMock.expect( context.getWebAppDescriptor() ).andReturn( Descriptors.create( WebAppDescriptor.class ) ).anyTimes();
-// EasyMock.expect( context.getTopology() ).andReturn( topology ).anyTimes();
-// EasyMock.expect( context.getGatewayDescriptor() ).andReturn( gateway ).anyTimes();
-// Capture<Service> capturedService = new Capture<Service>();
-// Capture<ResourceDescriptor> capturedResource = new Capture<ResourceDescriptor>();
-// Capture<String> capturedRole = new Capture<String>();
-// Capture<String> capturedName = new Capture<String>();
-// Capture<List<FilterParamDescriptor>> capturedParams = new Capture<List<FilterParamDescriptor>>();
-// context.contributeFilter( capture(capturedService) , capture(capturedResource), capture(capturedRole), capture(capturedName), capture(capturedParams) );
-// EasyMock.expectLastCall().anyTimes();
-// EasyMock.replay( gateway, context );
-//
-// OozieDeploymentContributor contributor = new OozieDeploymentContributor();
-//
-// assertThat( contributor.getRole(), CoreMatchers.is( "OOZIE" ) );
-// assertThat( contributor.getName(), CoreMatchers.is( "oozie" ) );
-//
-// // Just make sure it doesn't blow up.
-// contributor.initializeContribution( context );
-//
-// contributor.contributeService( context, service );
-//
-// // Just make sure it doesn't blow up.
-// contributor.finalizeContribution( context );
-//
-// assertThat( capturedRole.getValue(), is( "dispatch" ) );
-// assertThat( capturedName.getValue(), is( "http-client" ) );
-// }
-
-}
diff --git a/gateway-service-oozie/pom.xml b/gateway-service-storm/pom.xml
similarity index 62%
copy from gateway-service-oozie/pom.xml
copy to gateway-service-storm/pom.xml
index 00f8cc3..3111f79 100644
--- a/gateway-service-oozie/pom.xml
+++ b/gateway-service-storm/pom.xml
@@ -19,14 +19,14 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <artifactId>gateway</artifactId>
<groupId>org.apache.knox</groupId>
+ <artifactId>gateway</artifactId>
<version>0.6.0-SNAPSHOT</version>
</parent>
- <artifactId>gateway-service-oozie</artifactId>
+ <artifactId>gateway-service-storm</artifactId>
- <name>gateway-service-oozie</name>
- <description>The extensions to the gateway for supporting Oozie.</description>
+ <name>gateway-service-storm</name>
+ <description>The extension to the gateway for supporting Storm.</description>
<licenses>
<license>
@@ -45,26 +45,11 @@
<groupId>${gateway-group}</groupId>
<artifactId>gateway-provider-rewrite</artifactId>
</dependency>
-
<dependency>
- <groupId>org.jboss.shrinkwrap</groupId>
- <artifactId>shrinkwrap-api</artifactId>
- <version>1.0.1</version>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
</dependency>
<dependency>
- <groupId>org.jboss.shrinkwrap</groupId>
- <artifactId>shrinkwrap-impl-base</artifactId>
- </dependency>
- <dependency>
- <groupId>org.jboss.shrinkwrap.descriptors</groupId>
- <artifactId>shrinkwrap-descriptors-api-javaee</artifactId>
- </dependency>
- <dependency>
- <groupId>org.jboss.shrinkwrap.descriptors</groupId>
- <artifactId>shrinkwrap-descriptors-impl-javaee</artifactId>
- </dependency>
-
- <dependency>
<groupId>${gateway-group}</groupId>
<artifactId>gateway-test-utils</artifactId>
<scope>test</scope>
@@ -74,22 +59,6 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
- <dependency>
- <groupId>org.hamcrest</groupId>
- <artifactId>hamcrest-core</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.hamcrest</groupId>
- <artifactId>hamcrest-library</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.easymock</groupId>
- <artifactId>easymock</artifactId>
- <scope>test</scope>
- </dependency>
-
</dependencies>
-</project>
\ No newline at end of file
+</project>
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java b/gateway-service-storm/src/main/java/org/apache/hadoop/gateway/storm/StormDispatch.java
similarity index 69%
copy from gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
copy to gateway-service-storm/src/main/java/org/apache/hadoop/gateway/storm/StormDispatch.java
index d2aa441..abca519 100644
--- a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
+++ b/gateway-service-storm/src/main/java/org/apache/hadoop/gateway/storm/StormDispatch.java
@@ -15,17 +15,21 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.identityasserter.function;
+package org.apache.hadoop.gateway.storm;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor;
+import org.apache.hadoop.gateway.dispatch.DefaultDispatch;
-public class UsernameFunctionDescriptor implements UrlRewriteFunctionDescriptor<UsernameFunctionDescriptor> {
+import java.util.Set;
- public static final String FUNCTION_NAME = "username";
+/**
+ * This specialized dispatch provides Storm specific features to the
+ * default dispatch.
+ */
+public class StormDispatch extends DefaultDispatch {
@Override
- public String name() {
- return FUNCTION_NAME;
+ public Set<String> getOutboundResponseExcludeHeaders() {
+ return null;
}
-
}
+
diff --git a/gateway-service-webhcat/src/main/java/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributor.java b/gateway-service-webhcat/src/main/java/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributor.java
deleted file mode 100644
index 9ee7a9a..0000000
--- a/gateway-service-webhcat/src/main/java/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributor.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.webhcat;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.net.URISyntaxException;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptorFactory;
-import org.apache.hadoop.gateway.topology.Service;
-
-public class WebHCatDeploymentContributor extends ServiceDeploymentContributorBase {
-
- private static final String RULES_RESOURCE = WebHCatDeploymentContributor.class.getName().replace( '.', '/' ) + "/rewrite.xml";
- private static final String WEBHCAT_EXTERNAL_PATH = "/templeton/v1";
-
- @Override
- public String getRole() {
- return "WEBHCAT";
- }
-
- @Override
- public String getName() {
- return "webhcat";
- }
-
- @Override
- public void contributeService( DeploymentContext context, Service service ) throws Exception {
- contributeRewriteRules( context, service );
- contributeResources( context, service );
- }
-
- private void contributeRewriteRules( DeploymentContext context, Service service ) throws IOException {
- UrlRewriteRulesDescriptor hbaseRules = loadRulesFromTemplate();
- UrlRewriteRulesDescriptor clusterRules = context.getDescriptor( "rewrite" );
- clusterRules.addRules( hbaseRules );
- }
-
- private UrlRewriteRulesDescriptor loadRulesFromTemplate() throws IOException {
- InputStream stream = this.getClass().getClassLoader().getResourceAsStream( RULES_RESOURCE );
- Reader reader = new InputStreamReader( stream );
- UrlRewriteRulesDescriptor rules = UrlRewriteRulesDescriptorFactory.load( "xml", reader );
- reader.close();
- stream.close();
- return rules;
- }
-
- private void contributeResources( DeploymentContext context, Service service ) throws URISyntaxException {
- ResourceDescriptor rootResource = context.getGatewayDescriptor().addResource();
- rootResource.role( service.getRole() );
- rootResource.pattern( WEBHCAT_EXTERNAL_PATH + "/?**" );
- addWebAppSecFilters( context, service, rootResource );
- addAuthenticationFilter( context, service, rootResource );
- addRewriteFilter( context, service, rootResource, null );
- addIdentityAssertionFilter( context, service, rootResource );
- addAuthorizationFilter( context, service, rootResource );
- addDispatchFilter( context, service, rootResource );
-
- ResourceDescriptor pathResource = context.getGatewayDescriptor().addResource();
- pathResource.role( service.getRole() );
- pathResource.pattern( WEBHCAT_EXTERNAL_PATH + "/**?**" );
- addWebAppSecFilters( context, service, pathResource );
- addAuthenticationFilter( context, service, pathResource );
- addRewriteFilter( context, service, pathResource, null );
- addIdentityAssertionFilter( context, service, pathResource );
- addAuthorizationFilter( context, service, pathResource );
- addDispatchFilter( context, service, pathResource );
- }
-
- private void addDispatchFilter(
- DeploymentContext context, Service service, ResourceDescriptor resource ) {
- context.contributeFilter( service, resource, "dispatch", "http-client", null );
- }
-}
diff --git a/gateway-service-webhcat/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor b/gateway-service-webhcat/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
deleted file mode 100644
index 1b74d89..0000000
--- a/gateway-service-webhcat/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.webhcat.WebHCatDeploymentContributor
\ No newline at end of file
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/NameNodeHaDispatchDeploymentContributor.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/NameNodeHaDispatchDeploymentContributor.java
deleted file mode 100644
index c7b3ff9..0000000
--- a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/NameNodeHaDispatchDeploymentContributor.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.hdfs;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.hdfs.dispatch.WebHdfsHaHttpClientDispatch;
-import org.apache.hadoop.gateway.topology.Provider;
-import org.apache.hadoop.gateway.topology.Service;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-public class NameNodeHaDispatchDeploymentContributor extends ProviderDeploymentContributorBase {
-
- private static final String ROLE = "dispatch";
-
- private static final String NAME = "ha-hdfs";
-
- @Override
- public String getRole() {
- return ROLE;
- }
-
- @Override
- public String getName() {
- return NAME;
- }
-
- @Override
- public void contributeFilter(DeploymentContext context, Provider provider, Service service, ResourceDescriptor resource, List<FilterParamDescriptor> params) {
- if (params == null) {
- params = new ArrayList<FilterParamDescriptor>();
- }
- params.add(resource.createFilterParam().name(WebHdfsHaHttpClientDispatch.RESOURCE_ROLE_ATTRIBUTE).value(resource.role()));
- Map<String, String> providerParams = provider.getParams();
- for (Map.Entry<String, String> entry : providerParams.entrySet()) {
- params.add(resource.createFilterParam().name(entry.getKey().toLowerCase()).value(entry.getValue()));
- }
- resource.addFilter().name(getName()).role(getRole()).impl(WebHdfsHaHttpClientDispatch.class).params(params);
- }
-}
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
deleted file mode 100644
index 0e63597..0000000
--- a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDeploymentContributor.java
+++ /dev/null
@@ -1,173 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.hdfs;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptorFactory;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletFilter;
-import org.apache.hadoop.gateway.topology.Provider;
-import org.apache.hadoop.gateway.topology.Service;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-public class WebHdfsDeploymentContributor extends ServiceDeploymentContributorBase {
-
- private static final String RULES_RESOURCE = WebHdfsDeploymentContributor.class.getName().replace( '.', '/' ) + "/rewrite.xml";
- private static final String WEBHDFS_EXTERNAL_PATH = "/webhdfs/v1";
- private static final String DATANODE_EXTERNAL_PATH = "/webhdfs/data/v1";
-
- @Override
- public String getRole() {
- return "WEBHDFS";
- }
-
- @Override
- public String getName() {
- return "webhdfs";
- }
-
- @Override
- public void contributeService( DeploymentContext context, Service service ) throws Exception {
- contributeRewriteRules( context, service );
- contributeNameNodeResource( context, service );
- contributeDataNodeResource( context, service );
- }
-
- private void contributeRewriteRules( DeploymentContext context, Service service ) throws URISyntaxException, IOException {
- UrlRewriteRulesDescriptor serviceRules = loadRulesFromClassPath();
- UrlRewriteRulesDescriptor clusterRules = context.getDescriptor( "rewrite" );
- clusterRules.addRules( serviceRules );
- }
-
- public void contributeNameNodeResource( DeploymentContext context, Service service ) throws URISyntaxException {
- List<FilterParamDescriptor> params;
- ResourceDescriptor rootResource = context.getGatewayDescriptor().addResource();
- rootResource.role( service.getRole() );
- rootResource.pattern( WEBHDFS_EXTERNAL_PATH + "/?**" );
- addWebAppSecFilters(context, service, rootResource);
- addAuthenticationFilter( context, service, rootResource );
- params = new ArrayList<FilterParamDescriptor>();
- params.add( rootResource.createFilterParam().
- name( UrlRewriteServletFilter.REQUEST_URL_RULE_PARAM ).value( getQualifiedName() + "/inbound/namenode/root" ) );
- addRewriteFilter( context, service, rootResource, params );
- addIdentityAssertionFilter( context, service, rootResource );
- addAuthorizationFilter( context, service, rootResource );
- String dispatchName = getDispatchNameForNN( context );
- String dispatchRole = "dispatch";
- addDispatchFilter( context, service, rootResource, dispatchRole, dispatchName );
-
- ResourceDescriptor fileResource = context.getGatewayDescriptor().addResource();
- fileResource.role( service.getRole() );
- fileResource.pattern( WEBHDFS_EXTERNAL_PATH + "/**?**" );
- addWebAppSecFilters(context, service, fileResource);
- addAuthenticationFilter( context, service, fileResource );
- params = new ArrayList<FilterParamDescriptor>();
- params.add( fileResource.createFilterParam().
- name( UrlRewriteServletFilter.REQUEST_URL_RULE_PARAM ).value( getQualifiedName() + "/inbound/namenode/file" ) );
- params.add( fileResource.createFilterParam().
- name( UrlRewriteServletFilter.RESPONSE_HEADERS_FILTER_PARAM ).value( getQualifiedName() + "/outbound/namenode/headers" ) );
- addRewriteFilter( context, service, fileResource, params );
- addIdentityAssertionFilter( context, service, fileResource );
- addAuthorizationFilter( context, service, fileResource );
- addDispatchFilter( context, service, fileResource, dispatchRole, dispatchName );
-
- ResourceDescriptor homeResource = context.getGatewayDescriptor().addResource();
- homeResource.role( service.getRole() );
- homeResource.pattern( WEBHDFS_EXTERNAL_PATH + "/~?**" );
- addWebAppSecFilters(context, service, homeResource);
- addAuthenticationFilter( context, service, homeResource );
- params = new ArrayList<FilterParamDescriptor>();
- params.add( homeResource.createFilterParam().
- name( UrlRewriteServletFilter.REQUEST_URL_RULE_PARAM ).value( getQualifiedName() + "/inbound/namenode/home" ) );
- addRewriteFilter( context, service, homeResource, params );
- addIdentityAssertionFilter( context, service, homeResource );
- addAuthorizationFilter( context, service, homeResource );
- addDispatchFilter( context, service, homeResource, dispatchRole, dispatchName );
-
- ResourceDescriptor homeFileResource = context.getGatewayDescriptor().addResource();
- homeFileResource.role( service.getRole() );
- homeFileResource.pattern( WEBHDFS_EXTERNAL_PATH + "/~/**?**" );
- addWebAppSecFilters(context, service, homeFileResource);
- addAuthenticationFilter( context, service, homeFileResource );
- params = new ArrayList<FilterParamDescriptor>();
- params.add( homeFileResource.createFilterParam().
- name( UrlRewriteServletFilter.REQUEST_URL_RULE_PARAM ).value( getQualifiedName() + "/inbound/namenode/home/file" ) );
- params.add( homeFileResource.createFilterParam().
- name( UrlRewriteServletFilter.RESPONSE_HEADERS_FILTER_PARAM ).value( getQualifiedName() + "/outbound/namenode/headers" ) );
- addRewriteFilter( context, service, homeFileResource, params );
- addIdentityAssertionFilter( context, service, homeFileResource );
- addAuthorizationFilter( context, service, homeFileResource );
- addDispatchFilter( context, service, homeFileResource, dispatchRole, dispatchName );
- }
-
- public void contributeDataNodeResource( DeploymentContext context, Service service ) throws URISyntaxException {
- List<FilterParamDescriptor> params;
- ResourceDescriptor fileResource = context.getGatewayDescriptor().addResource();
- fileResource.role( service.getRole() );
- fileResource.pattern( DATANODE_EXTERNAL_PATH + "/**?**" );
- addWebAppSecFilters(context, service, fileResource);
- addAuthenticationFilter( context, service, fileResource );
- addIdentityAssertionFilter( context, service, fileResource );
- addAuthorizationFilter( context, service, fileResource );
- params = new ArrayList<FilterParamDescriptor>();
- params.add( fileResource.createFilterParam().
- name( UrlRewriteServletFilter.REQUEST_URL_RULE_PARAM ).value( getQualifiedName() + "/inbound/datanode" ) );
- addRewriteFilter( context, service, fileResource, params );
- addDispatchFilter( context, service, fileResource, "dispatch", "http-client" );
- }
-
- String getQualifiedName() {
- return getRole() + "/" + getName();
- }
-
- UrlRewriteRulesDescriptor loadRulesFromClassPath() throws IOException {
- InputStream stream = this.getClass().getClassLoader().getResourceAsStream( RULES_RESOURCE );
- Reader reader = new InputStreamReader( stream );
- UrlRewriteRulesDescriptor rules = UrlRewriteRulesDescriptorFactory.load( "xml", reader );
- reader.close();
- stream.close();
- return rules;
- }
-
- /**
- * Returns the name of the dispatch to use by checking to see if 'HA' is enabled.
- */
- private String getDispatchNameForNN(DeploymentContext context) {
- Provider provider = getProviderByRole(context, "ha");
- if (provider != null && provider.isEnabled()) {
- Map<String, String> params = provider.getParams();
- if (params != null) {
- if (params.containsKey(getRole())) {
- return "ha-hdfs";
- }
- }
- }
- return "hdfs";
- }
-}
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDispatchDeploymentContributor.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDispatchDeploymentContributor.java
deleted file mode 100644
index 78353d1..0000000
--- a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/WebHdfsDispatchDeploymentContributor.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.hdfs;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.hdfs.dispatch.HdfsDispatch;
-import org.apache.hadoop.gateway.topology.Provider;
-import org.apache.hadoop.gateway.topology.Service;
-
-import java.util.List;
-
-public class WebHdfsDispatchDeploymentContributor extends ProviderDeploymentContributorBase {
-
- @Override
- public String getRole() {
- return "dispatch";
- }
-
- @Override
- public String getName() {
- return "hdfs";
- }
-
- @Override
- public void contributeFilter( DeploymentContext context, Provider provider, Service service, ResourceDescriptor resource, List<FilterParamDescriptor> params ) {
- resource.addFilter().role( getRole() ).name( getName() ).impl( HdfsDispatch.class ).params(params);
- }
-
-}
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/HdfsDispatch.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/HdfsDispatch.java
index 2ccc41e..f4ec34e 100644
--- a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/HdfsDispatch.java
+++ b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/HdfsDispatch.java
@@ -17,29 +17,21 @@
*/
package org.apache.hadoop.gateway.hdfs.dispatch;
-import org.apache.hadoop.gateway.dispatch.HttpClientDispatch;
-import org.apache.http.HttpEntity;
+import org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter;
+
+import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import java.io.IOException;
-public class HdfsDispatch extends HttpClientDispatch {
+/***
+ * KNOX-526. Need to keep this class around for backward compatibility of deployed
+ * topologies. This is required for releases older than Apache Knox 0.6.0
+ */
+@Deprecated
+public class HdfsDispatch extends GatewayDispatchFilter {
- public HdfsDispatch() throws ServletException {
- super();
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ setDispatch(new HdfsHttpClientDispatch());
+ super.init(filterConfig);
}
-
- //@Override
- /**
- * This method ensures that the request InputStream is not acquired
- * prior to a dispatch to a component such as a namenode that doesn't
- * the request body. The side effect of this is that the client does
- * not get a 100 continue from Knox which will trigger the client to
- * send the entire payload before redirect to the target component
- * like a datanode and have to send it again.
- */
- protected HttpEntity createRequestEntity(HttpServletRequest request)
- throws IOException {
- return null;
- }
}
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/HdfsHttpClientDispatch.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/HdfsHttpClientDispatch.java
new file mode 100644
index 0000000..c03de7a
--- /dev/null
+++ b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/HdfsHttpClientDispatch.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hdfs.dispatch;
+
+import org.apache.hadoop.gateway.dispatch.DefaultDispatch;
+import org.apache.http.HttpEntity;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+
+public class HdfsHttpClientDispatch extends DefaultDispatch {
+
+ public HdfsHttpClientDispatch() throws ServletException {
+ super();
+ }
+
+ //@Override
+ /**
+ * This method ensures that the request InputStream is not acquired
+ * prior to a dispatch to a component such as a namenode that doesn't
+ * the request body. The side effect of this is that the client does
+ * not get a 100 continue from Knox which will trigger the client to
+ * send the entire payload before redirect to the target component
+ * like a datanode and have to send it again.
+ */
+ protected HttpEntity createRequestEntity(HttpServletRequest request)
+ throws IOException {
+ return null;
+ }
+}
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaDispatch.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaDispatch.java
new file mode 100644
index 0000000..d0bfd34
--- /dev/null
+++ b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaDispatch.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.hdfs.dispatch;
+
+import org.apache.hadoop.gateway.config.Configure;
+import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
+import org.apache.hadoop.gateway.ha.provider.HaProvider;
+import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
+import org.apache.hadoop.gateway.ha.provider.impl.HaServiceConfigConstants;
+import org.apache.hadoop.gateway.hdfs.i18n.WebHdfsMessages;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.methods.HttpRequestBase;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.entity.BufferedHttpEntity;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.net.URI;
+import java.util.concurrent.atomic.AtomicInteger;
+
+public class WebHdfsHaDispatch extends HdfsHttpClientDispatch {
+
+ private static final String FAILOVER_COUNTER_ATTRIBUTE = "dispatch.ha.failover.counter";
+
+ private static final String RETRY_COUNTER_ATTRIBUTE = "dispatch.ha.retry.counter";
+
+ public static final String RESOURCE_ROLE = "WEBHDFS";
+
+ private static final WebHdfsMessages LOG = MessagesFactory.get(WebHdfsMessages.class);
+
+ private int maxFailoverAttempts = HaServiceConfigConstants.DEFAULT_MAX_FAILOVER_ATTEMPTS;
+
+ private int failoverSleep = HaServiceConfigConstants.DEFAULT_FAILOVER_SLEEP;
+
+ private int maxRetryAttempts = HaServiceConfigConstants.DEFAULT_MAX_RETRY_ATTEMPTS;
+
+ private int retrySleep = HaServiceConfigConstants.DEFAULT_RETRY_SLEEP;
+
+ private HaProvider haProvider;
+
+ /**
+ * @throws javax.servlet.ServletException
+ */
+ public WebHdfsHaDispatch() throws ServletException {
+ super();
+ }
+
+ @Override
+ public void init() {
+ super.init();
+ if (haProvider != null) {
+ HaServiceConfig serviceConfig = haProvider.getHaDescriptor().getServiceConfig(RESOURCE_ROLE);
+ maxFailoverAttempts = serviceConfig.getMaxFailoverAttempts();
+ failoverSleep = serviceConfig.getFailoverSleep();
+ maxRetryAttempts = serviceConfig.getMaxRetryAttempts();
+ retrySleep = serviceConfig.getRetrySleep();
+ }
+ }
+
+ public HaProvider getHaProvider() {
+ return haProvider;
+ }
+
+ @Configure
+ public void setHaProvider(HaProvider haProvider) {
+ this.haProvider = haProvider;
+ }
+
+ @Override
+ protected void executeRequest(HttpUriRequest outboundRequest, HttpServletRequest inboundRequest, HttpServletResponse outboundResponse) throws IOException {
+ HttpResponse inboundResponse = null;
+ try {
+ inboundResponse = executeOutboundRequest(outboundRequest);
+ writeOutboundResponse(outboundRequest, inboundRequest, outboundResponse, inboundResponse);
+ } catch (StandbyException e) {
+ LOG.errorReceivedFromStandbyNode(e);
+ failoverRequest(outboundRequest, inboundRequest, outboundResponse, inboundResponse, e);
+ } catch (SafeModeException e) {
+ LOG.errorReceivedFromSafeModeNode(e);
+ retryRequest(outboundRequest, inboundRequest, outboundResponse, inboundResponse, e);
+ } catch (IOException e) {
+ LOG.errorConnectingToServer(outboundRequest.getURI().toString(), e);
+ failoverRequest(outboundRequest, inboundRequest, outboundResponse, inboundResponse, e);
+ }
+ }
+
+ /**
+ * Checks for specific outbound response codes/content to trigger a retry or failover
+ */
+ @Override
+ protected void writeOutboundResponse(HttpUriRequest outboundRequest, HttpServletRequest inboundRequest, HttpServletResponse outboundResponse, HttpResponse inboundResponse) throws IOException {
+ if (inboundResponse.getStatusLine().getStatusCode() == 403) {
+ BufferedHttpEntity entity = new BufferedHttpEntity(inboundResponse.getEntity());
+ inboundResponse.setEntity(entity);
+ ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+ inboundResponse.getEntity().writeTo(outputStream);
+ String body = new String(outputStream.toByteArray());
+ if (body.contains("StandbyException")) {
+ throw new StandbyException();
+ }
+ if (body.contains("SafeModeException") || body.contains("RetriableException")) {
+ throw new SafeModeException();
+ }
+ }
+ super.writeOutboundResponse(outboundRequest, inboundRequest, outboundResponse, inboundResponse);
+ }
+
+ private void failoverRequest(HttpUriRequest outboundRequest, HttpServletRequest inboundRequest, HttpServletResponse outboundResponse, HttpResponse inboundResponse, Exception exception) throws IOException {
+ LOG.failingOverRequest(outboundRequest.getURI().toString());
+ AtomicInteger counter = (AtomicInteger) inboundRequest.getAttribute(FAILOVER_COUNTER_ATTRIBUTE);
+ if (counter == null) {
+ counter = new AtomicInteger(0);
+ }
+ inboundRequest.setAttribute(FAILOVER_COUNTER_ATTRIBUTE, counter);
+ if (counter.incrementAndGet() <= maxFailoverAttempts) {
+ haProvider.markFailedURL(RESOURCE_ROLE, outboundRequest.getURI().toString());
+ //null out target url so that rewriters run again
+ inboundRequest.setAttribute(AbstractGatewayFilter.TARGET_REQUEST_URL_ATTRIBUTE_NAME, null);
+ URI uri = getDispatchUrl(inboundRequest);
+ ((HttpRequestBase) outboundRequest).setURI(uri);
+ if (failoverSleep > 0) {
+ try {
+ Thread.sleep(failoverSleep);
+ } catch (InterruptedException e) {
+ LOG.failoverSleepFailed(RESOURCE_ROLE, e);
+ }
+ }
+ executeRequest(outboundRequest, inboundRequest, outboundResponse);
+ } else {
+ LOG.maxFailoverAttemptsReached(maxFailoverAttempts, RESOURCE_ROLE);
+ if (inboundResponse != null) {
+ writeOutboundResponse(outboundRequest, inboundRequest, outboundResponse, inboundResponse);
+ } else {
+ throw new IOException(exception);
+ }
+ }
+ }
+
+ private void retryRequest(HttpUriRequest outboundRequest, HttpServletRequest inboundRequest, HttpServletResponse outboundResponse, HttpResponse inboundResponse, Exception exception) throws IOException {
+ LOG.retryingRequest(outboundRequest.getURI().toString());
+ AtomicInteger counter = (AtomicInteger) inboundRequest.getAttribute(RETRY_COUNTER_ATTRIBUTE);
+ if (counter == null) {
+ counter = new AtomicInteger(0);
+ }
+ inboundRequest.setAttribute(RETRY_COUNTER_ATTRIBUTE, counter);
+ if (counter.incrementAndGet() <= maxRetryAttempts) {
+ if (retrySleep > 0) {
+ try {
+ Thread.sleep(retrySleep);
+ } catch (InterruptedException e) {
+ LOG.retrySleepFailed(RESOURCE_ROLE, e);
+ }
+ }
+ executeRequest(outboundRequest, inboundRequest, outboundResponse);
+ } else {
+ LOG.maxRetryAttemptsReached(maxRetryAttempts, RESOURCE_ROLE, outboundRequest.getURI().toString());
+ if (inboundResponse != null) {
+ writeOutboundResponse(outboundRequest, inboundRequest, outboundResponse, inboundResponse);
+ } else {
+ throw new IOException(exception);
+ }
+ }
+ }
+
+ private static URI getDispatchUrl(HttpServletRequest request) {
+ StringBuffer str = request.getRequestURL();
+ String query = request.getQueryString();
+ if ( query != null ) {
+ str.append('?');
+ str.append(query);
+ }
+ URI url = URI.create(str.toString());
+ return url;
+ }
+
+}
diff --git a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaHttpClientDispatch.java b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaHttpClientDispatch.java
index a0aa014..bb1e623 100644
--- a/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaHttpClientDispatch.java
+++ b/gateway-service-webhdfs/src/main/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaHttpClientDispatch.java
@@ -17,161 +17,22 @@
*/
package org.apache.hadoop.gateway.hdfs.dispatch;
-import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
-import org.apache.hadoop.gateway.ha.provider.HaProvider;
-import org.apache.hadoop.gateway.ha.provider.HaServiceConfig;
-import org.apache.hadoop.gateway.ha.provider.HaServletContextListener;
-import org.apache.hadoop.gateway.hdfs.i18n.WebHdfsMessages;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.methods.HttpRequestBase;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.entity.BufferedHttpEntity;
+import org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.net.URI;
-import java.util.concurrent.atomic.AtomicInteger;
-public class WebHdfsHaHttpClientDispatch extends HdfsDispatch {
- private static final String FAILOVER_COUNTER_ATTRIBUTE = "dispatch.ha.failover.counter";
+/***
+ * KNOX-526. Need to keep this class around for backward compatibility of deployed
+ * topologies. This is required for releases older than Apache Knox 0.6.0
+ */
+@Deprecated
+public class WebHdfsHaHttpClientDispatch extends GatewayDispatchFilter {
- private static final String RETRY_COUNTER_ATTRIBUTE = "dispatch.ha.retry.counter";
-
- public static final String RESOURCE_ROLE_ATTRIBUTE = "resource.role";
-
- private static final WebHdfsMessages LOG = MessagesFactory.get(WebHdfsMessages.class);
-
- private int maxFailoverAttempts;
-
- private int failoverSleep;
-
- private int maxRetryAttempts;
-
- private int retrySleep;
-
- private String resourceRole;
-
- private HaProvider haProvider;
-
- /**
- * @throws ServletException
- */
- public WebHdfsHaHttpClientDispatch() throws ServletException {
- super();
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ setDispatch(new WebHdfsHaDispatch());
+ super.init(filterConfig);
}
-
- @Override
- public void init(FilterConfig filterConfig) throws ServletException {
- super.init(filterConfig);
- resourceRole = filterConfig.getInitParameter(RESOURCE_ROLE_ATTRIBUTE);
- LOG.initializingForResourceRole(resourceRole);
- haProvider = HaServletContextListener.getHaProvider(filterConfig.getServletContext());
- HaServiceConfig serviceConfig = haProvider.getHaDescriptor().getServiceConfig(resourceRole);
- maxFailoverAttempts = serviceConfig.getMaxFailoverAttempts();
- failoverSleep = serviceConfig.getFailoverSleep();
- maxRetryAttempts = serviceConfig.getMaxRetryAttempts();
- retrySleep = serviceConfig.getRetrySleep();
- }
-
- @Override
- protected void executeRequest(HttpUriRequest outboundRequest, HttpServletRequest inboundRequest, HttpServletResponse outboundResponse) throws IOException {
- HttpResponse inboundResponse = null;
- try {
- inboundResponse = executeOutboundRequest(outboundRequest);
- writeOutboundResponse(outboundRequest, inboundRequest, outboundResponse, inboundResponse);
- } catch (StandbyException e) {
- LOG.errorReceivedFromStandbyNode(e);
- failoverRequest(outboundRequest, inboundRequest, outboundResponse, inboundResponse, e);
- } catch (SafeModeException e) {
- LOG.errorReceivedFromSafeModeNode(e);
- retryRequest(outboundRequest, inboundRequest, outboundResponse, inboundResponse, e);
- } catch (IOException e) {
- LOG.errorConnectingToServer(outboundRequest.getURI().toString(), e);
- failoverRequest(outboundRequest, inboundRequest, outboundResponse, inboundResponse, e);
- }
- }
-
- /**
- * Checks for specific outbound response codes/content to trigger a retry or failover
- */
- @Override
- protected void writeOutboundResponse(HttpUriRequest outboundRequest, HttpServletRequest inboundRequest, HttpServletResponse outboundResponse, HttpResponse inboundResponse) throws IOException {
- if (inboundResponse.getStatusLine().getStatusCode() == 403) {
- BufferedHttpEntity entity = new BufferedHttpEntity(inboundResponse.getEntity());
- inboundResponse.setEntity(entity);
- ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
- inboundResponse.getEntity().writeTo(outputStream);
- String body = new String(outputStream.toByteArray());
- if (body.contains("StandbyException")) {
- throw new StandbyException();
- }
- if (body.contains("SafeModeException") || body.contains("RetriableException")) {
- throw new SafeModeException();
- }
- }
- super.writeOutboundResponse(outboundRequest, inboundRequest, outboundResponse, inboundResponse);
- }
-
- private void failoverRequest(HttpUriRequest outboundRequest, HttpServletRequest inboundRequest, HttpServletResponse outboundResponse, HttpResponse inboundResponse, Exception exception) throws IOException {
- LOG.failingOverRequest(outboundRequest.getURI().toString());
- AtomicInteger counter = (AtomicInteger) inboundRequest.getAttribute(FAILOVER_COUNTER_ATTRIBUTE);
- if (counter == null) {
- counter = new AtomicInteger(0);
- }
- inboundRequest.setAttribute(FAILOVER_COUNTER_ATTRIBUTE, counter);
- if (counter.incrementAndGet() <= maxFailoverAttempts) {
- haProvider.markFailedURL(resourceRole, outboundRequest.getURI().toString());
- //null out target url so that rewriters run again
- inboundRequest.setAttribute(AbstractGatewayFilter.TARGET_REQUEST_URL_ATTRIBUTE_NAME, null);
- URI uri = getDispatchUrl(inboundRequest);
- ((HttpRequestBase) outboundRequest).setURI(uri);
- if (failoverSleep > 0) {
- try {
- Thread.sleep(failoverSleep);
- } catch (InterruptedException e) {
- LOG.failoverSleepFailed(resourceRole, e);
- }
- }
- executeRequest(outboundRequest, inboundRequest, outboundResponse);
- } else {
- LOG.maxFailoverAttemptsReached(maxFailoverAttempts, resourceRole);
- if (inboundResponse != null) {
- writeOutboundResponse(outboundRequest, inboundRequest, outboundResponse, inboundResponse);
- } else {
- throw new IOException(exception);
- }
- }
- }
-
- private void retryRequest(HttpUriRequest outboundRequest, HttpServletRequest inboundRequest, HttpServletResponse outboundResponse, HttpResponse inboundResponse, Exception exception) throws IOException {
- LOG.retryingRequest(outboundRequest.getURI().toString());
- AtomicInteger counter = (AtomicInteger) inboundRequest.getAttribute(RETRY_COUNTER_ATTRIBUTE);
- if (counter == null) {
- counter = new AtomicInteger(0);
- }
- inboundRequest.setAttribute(RETRY_COUNTER_ATTRIBUTE, counter);
- if (counter.incrementAndGet() <= maxRetryAttempts) {
- if (retrySleep > 0) {
- try {
- Thread.sleep(retrySleep);
- } catch (InterruptedException e) {
- LOG.retrySleepFailed(resourceRole, e);
- }
- }
- executeRequest(outboundRequest, inboundRequest, outboundResponse);
- } else {
- LOG.maxRetryAttemptsReached(maxRetryAttempts, resourceRole, outboundRequest.getURI().toString());
- if (inboundResponse != null) {
- writeOutboundResponse(outboundRequest, inboundRequest, outboundResponse, inboundResponse);
- } else {
- throw new IOException(exception);
- }
- }
- }
}
diff --git a/gateway-service-webhdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor b/gateway-service-webhdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
deleted file mode 100644
index 6fde03f..0000000
--- a/gateway-service-webhdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
+++ /dev/null
@@ -1,20 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.hdfs.NameNodeHaDispatchDeploymentContributor
-org.apache.hadoop.gateway.hdfs.WebHdfsDispatchDeploymentContributor
diff --git a/gateway-service-webhdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor b/gateway-service-webhdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
index 0a3d6ee..d918906 100644
--- a/gateway-service-webhdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
+++ b/gateway-service-webhdfs/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
@@ -16,6 +16,5 @@
# limitations under the License.
##########################################################################
-org.apache.hadoop.gateway.hdfs.WebHdfsDeploymentContributor
org.apache.hadoop.gateway.hdfs.NameNodeDeploymentContributor
org.apache.hadoop.gateway.hdfs.JobTrackerDeploymentContributor
\ No newline at end of file
diff --git a/gateway-service-webhdfs/src/test/java/org/apache/hadoop/gateway/hdfs/NameNodeHaDispatchDeploymentContributorTest.java b/gateway-service-webhdfs/src/test/java/org/apache/hadoop/gateway/hdfs/NameNodeHaDispatchDeploymentContributorTest.java
deleted file mode 100644
index 596ac81..0000000
--- a/gateway-service-webhdfs/src/test/java/org/apache/hadoop/gateway/hdfs/NameNodeHaDispatchDeploymentContributorTest.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.hdfs;
-
-import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor;
-import org.junit.Test;
-
-import java.util.Iterator;
-import java.util.ServiceLoader;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.fail;
-
-public class NameNodeHaDispatchDeploymentContributorTest {
-
- @Test
- public void testServiceLoader() throws Exception {
- ServiceLoader loader = ServiceLoader.load( ProviderDeploymentContributor.class );
- Iterator iterator = loader.iterator();
- assertThat( "Service iterator empty.", iterator.hasNext() );
- while( iterator.hasNext() ) {
- Object object = iterator.next();
- if( object instanceof NameNodeHaDispatchDeploymentContributor) {
- return;
- }
- }
- fail( "Failed to find " + NameNodeHaDispatchDeploymentContributor.class.getName() + " via service loader." );
- }
-}
diff --git a/gateway-service-webhdfs/src/test/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaHttpClientDispatchTest.java b/gateway-service-webhdfs/src/test/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaDispatchTest.java
similarity index 92%
rename from gateway-service-webhdfs/src/test/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaHttpClientDispatchTest.java
rename to gateway-service-webhdfs/src/test/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaDispatchTest.java
index ae861a9..422218f 100644
--- a/gateway-service-webhdfs/src/test/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaHttpClientDispatchTest.java
+++ b/gateway-service-webhdfs/src/test/java/org/apache/hadoop/gateway/hdfs/dispatch/WebHdfsHaDispatchTest.java
@@ -27,6 +27,7 @@
import org.apache.hadoop.gateway.ha.provider.impl.HaDescriptorFactory;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.BasicHttpParams;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
@@ -47,9 +48,9 @@
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
-public class WebHdfsHaHttpClientDispatchTest {
+public class WebHdfsHaDispatchTest {
- private class InstrumentedWebHdfsHaHttpClientDispatch extends WebHdfsHaHttpClientDispatch {
+ private class InstrumentedWebHdfsHaHttpClientDispatch extends WebHdfsHaDispatch {
public InstrumentedWebHdfsHaHttpClientDispatch() throws ServletException {
}
@@ -69,12 +70,11 @@
EasyMock.expect(context.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(haProvider).anyTimes();
FilterConfig config = EasyMock.createNiceMock( FilterConfig.class );
EasyMock.expect(config.getServletContext()).andReturn(context).anyTimes();
- EasyMock.expect(config.getInitParameter( WebHdfsHaHttpClientDispatch.RESOURCE_ROLE_ATTRIBUTE )).andReturn("test-role").anyTimes();
EasyMock.expect(config.getInitParameter(EasyMock.anyObject(String.class))).andReturn(null).anyTimes();
InstrumentedWebHdfsHaHttpClientDispatch dispatch = new InstrumentedWebHdfsHaHttpClientDispatch();
EasyMock.replay(context,config);
- dispatch.init(config);
+ dispatch.init();
assertThat( dispatch.getAppCookieManager(), notNullValue() );
}
@@ -94,7 +94,6 @@
FilterConfig filterConfig = EasyMock.createNiceMock(FilterConfig.class);
ServletContext servletContext = EasyMock.createNiceMock(ServletContext.class);
- EasyMock.expect(filterConfig.getInitParameter(WebHdfsHaHttpClientDispatch.RESOURCE_ROLE_ATTRIBUTE)).andReturn(serviceName).anyTimes();
EasyMock.expect(filterConfig.getServletContext()).andReturn(servletContext).anyTimes();
EasyMock.expect(servletContext.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(provider).anyTimes();
@@ -124,8 +123,10 @@
}).once();
EasyMock.replay(filterConfig, servletContext, outboundRequest, inboundRequest, outboundResponse);
Assert.assertEquals(uri1.toString(), provider.getActiveURL(serviceName));
- WebHdfsHaHttpClientDispatch dispatch = new WebHdfsHaHttpClientDispatch();
- dispatch.init(filterConfig);
+ WebHdfsHaDispatch dispatch = new WebHdfsHaDispatch();
+ dispatch.setHttpClient(new DefaultHttpClient());
+ dispatch.setHaProvider(provider);
+ dispatch.init();
long startTime = System.currentTimeMillis();
try {
dispatch.executeRequest(outboundRequest, inboundRequest, outboundResponse);
diff --git a/gateway-service-yarn-rm/pom.xml b/gateway-service-yarn-rm/pom.xml
deleted file mode 100644
index fc9db48..0000000
--- a/gateway-service-yarn-rm/pom.xml
+++ /dev/null
@@ -1,61 +0,0 @@
-<?xml version="1.0"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.knox</groupId>
- <artifactId>gateway</artifactId>
- <version>0.6.0-SNAPSHOT</version>
- </parent>
- <artifactId>gateway-service-yarn-rm</artifactId>
- <name>gateway-service-yarn-rm</name>
- <description>The extension to the gateway for supporting YARN Resource Manager REST API.</description>
-
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
-
- <dependencies>
- <dependency>
- <groupId>${gateway-group}</groupId>
- <artifactId>gateway-spi</artifactId>
- </dependency>
- <dependency>
- <groupId>${gateway-group}</groupId>
- <artifactId>gateway-provider-rewrite</artifactId>
- </dependency>
-
- <dependency>
- <groupId>${gateway-group}</groupId>
- <artifactId>gateway-test-utils</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
-
- </dependencies>
-
-</project>
diff --git a/gateway-service-yarn-rm/src/main/java/org/apache/hadoop/gateway/yarn/rm/ResourceManagerDeploymentContributor.java b/gateway-service-yarn-rm/src/main/java/org/apache/hadoop/gateway/yarn/rm/ResourceManagerDeploymentContributor.java
deleted file mode 100644
index 21e1b2e..0000000
--- a/gateway-service-yarn-rm/src/main/java/org/apache/hadoop/gateway/yarn/rm/ResourceManagerDeploymentContributor.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.yarn.rm;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptorFactory;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletFilter;
-import org.apache.hadoop.gateway.topology.Service;
-
-public class ResourceManagerDeploymentContributor extends
- ServiceDeploymentContributorBase {
- private static final String RULES_RESOURCE = ResourceManagerDeploymentContributor.class
- .getName().replace( '.', '/' ) + "/rewrite.xml";
- private static final String EXTERNAL_PATH = "/resourcemanager";
- private static final String PROXY_PATH = "/resourcemanager/proxy";
-
- @Override
- public String getRole() {
- return "RESOURCEMANAGER";
- }
-
- @Override
- public String getName() {
- return "resourcemanager";
- }
-
- @Override
- public void contributeService(DeploymentContext context, Service service)
- throws Exception {
- contributeRewriteRules( context, service );
- contributeResources( context, service );
- }
-
- private void contributeRewriteRules(DeploymentContext context, Service service)
- throws URISyntaxException, IOException {
- UrlRewriteRulesDescriptor serviceRules = loadRulesFromClassPath();
- UrlRewriteRulesDescriptor clusterRules = context.getDescriptor( "rewrite" );
- clusterRules.addRules( serviceRules );
- }
-
- private UrlRewriteRulesDescriptor loadRulesFromClassPath() throws IOException {
- InputStream stream = this.getClass().getClassLoader()
- .getResourceAsStream( RULES_RESOURCE );
- Reader reader = new InputStreamReader( stream );
- UrlRewriteRulesDescriptor rules = UrlRewriteRulesDescriptorFactory.load(
- "xml", reader );
- reader.close();
- stream.close();
- return rules;
- }
-
- private void contributeResources(DeploymentContext context, Service service)
- throws URISyntaxException {
- Map<String, String> filterParams = new HashMap<String, String>();
-
- contributeResource( context, service, EXTERNAL_PATH + "/v1/cluster/", null );
-
- contributeResource( context, service, EXTERNAL_PATH + "/v1/cluster/**?**", null );
-
- filterParams.clear();
- filterParams.put( UrlRewriteServletFilter.RESPONSE_BODY_FILTER_PARAM, getQualifiedName() + "/apps/outbound" );
- contributeResource( context, service, EXTERNAL_PATH + "/v1/cluster/apps?**", filterParams );
-
- filterParams.clear();
- filterParams.put( UrlRewriteServletFilter.RESPONSE_BODY_FILTER_PARAM, getQualifiedName() + "/app/outbound" );
- contributeResource( context, service, EXTERNAL_PATH + "/v1/cluster/apps/*?**", filterParams );
-
- filterParams.clear();
- filterParams.put( UrlRewriteServletFilter.RESPONSE_BODY_FILTER_PARAM, getQualifiedName() + "/appattempts/outbound" );
- contributeResource( context, service, EXTERNAL_PATH + "/v1/cluster/apps/*/appattempts?**", filterParams );
-
- filterParams.clear();
- filterParams.put( UrlRewriteServletFilter.RESPONSE_BODY_FILTER_PARAM, getQualifiedName() + "/nodes/outbound" );
- contributeResource( context, service, EXTERNAL_PATH + "/v1/cluster/nodes?**", filterParams );
-
- filterParams.clear();
- filterParams.put( UrlRewriteServletFilter.REQUEST_URL_RULE_PARAM, getQualifiedName() + "/nodeId/inbound" );
- filterParams.put( UrlRewriteServletFilter.RESPONSE_BODY_FILTER_PARAM, getQualifiedName() + "/node/outbound" );
- contributeResource( context, service, EXTERNAL_PATH + "/v1/cluster/nodes/*?**", filterParams );
-
- filterParams.clear();
- filterParams.put( UrlRewriteServletFilter.REQUEST_URL_RULE_PARAM, getQualifiedName() + "/inbound/proxy" );
- contributeResource( context, service, PROXY_PATH + "/*/ws/v1/**?**", filterParams );
-
- filterParams.clear();
- filterParams.put( UrlRewriteServletFilter.RESPONSE_BODY_FILTER_PARAM, getQualifiedName() + "/proxy/jobattempts/outbound" );
- contributeResource( context, service, PROXY_PATH + "/*/ws/v1/mapreduce/jobs/*/jobattempts", filterParams );
-
-
- filterParams.clear();
- filterParams.put( UrlRewriteServletFilter.RESPONSE_BODY_FILTER_PARAM, getQualifiedName() + "/proxy/taskattempts/outbound" );
- contributeResource( context, service, PROXY_PATH + "/*/ws/v1/mapreduce/jobs/*/tasks/*/attempts", filterParams );
-
- filterParams.clear();
- filterParams.put( UrlRewriteServletFilter.RESPONSE_BODY_FILTER_PARAM, getQualifiedName() + "/proxy/taskattempt/outbound" );
- contributeResource( context, service, PROXY_PATH + "/*/ws/v1/mapreduce/jobs/*/tasks/*/attempts/*", filterParams );
- }
-
- private void contributeResource( DeploymentContext context, Service service, String pattern, Map<String, String> filterParams ) throws URISyntaxException {
- List<FilterParamDescriptor> params = new ArrayList<FilterParamDescriptor>();
- ResourceDescriptor resource = context.getGatewayDescriptor().addResource();
- resource.role( service.getRole() );
- resource.pattern( pattern );
- addWebAppSecFilters( context, service, resource );
- addAuthenticationFilter( context, service, resource );
- addIdentityAssertionFilter( context, service, resource );
- addAuthorizationFilter( context, service, resource );
- if ( filterParams != null ) {
- for( Entry<String, String> filterParam : filterParams.entrySet() ) {
- params.add( resource.createFilterParam().name( filterParam.getKey() ).value( filterParam.getValue() ) );
- }
- }
- addRewriteFilter( context, service, resource, params );
- addDispatchFilter( context, service, resource, "dispatch", "http-client" );
- }
-
- private String getQualifiedName() {
- return getRole() + "/" + getName();
- }
-}
diff --git a/gateway-service-yarn-rm/src/test/java/org/apache/hadoop/gateway/yarn/rm/ResourceManagerDeploymentContributorTest.java b/gateway-service-yarn-rm/src/test/java/org/apache/hadoop/gateway/yarn/rm/ResourceManagerDeploymentContributorTest.java
deleted file mode 100644
index 485e1f2..0000000
--- a/gateway-service-yarn-rm/src/test/java/org/apache/hadoop/gateway/yarn/rm/ResourceManagerDeploymentContributorTest.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.yarn.rm;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.fail;
-
-import java.util.Iterator;
-import java.util.ServiceLoader;
-
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor;
-import org.junit.Test;
-
-public class ResourceManagerDeploymentContributorTest {
-
- @SuppressWarnings("rawtypes")
- @Test
- public void testServiceLoader() throws Exception {
- ServiceLoader loader = ServiceLoader.load( ServiceDeploymentContributor.class );
- Iterator iterator = loader.iterator();
- assertThat( "Service iterator empty.", iterator.hasNext() );
- while( iterator.hasNext() ) {
- Object object = iterator.next();
- if( object instanceof ResourceManagerDeploymentContributor ) {
- return;
- }
- }
- fail( "Failed to find " + ResourceManagerDeploymentContributor.class.getName() + " via service loader." );
- }
-}
diff --git a/gateway-spi/pom.xml b/gateway-spi/pom.xml
index 0fb2b28..5edb2f7 100644
--- a/gateway-spi/pom.xml
+++ b/gateway-spi/pom.xml
@@ -48,6 +48,10 @@
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
+ <artifactId>gateway-util-configinjector</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>${gateway-group}</groupId>
<artifactId>gateway-util-urltemplate</artifactId>
</dependency>
@@ -75,11 +79,17 @@
<groupId>org.jboss.shrinkwrap.descriptors</groupId>
<artifactId>shrinkwrap-descriptors-impl-javaee</artifactId>
</dependency>
- <dependency>
- <groupId>com.nimbusds</groupId>
- <artifactId>nimbus-jose-jwt</artifactId>
- <version>3.9</version>
- </dependency>
+ <dependency>
+ <groupId>com.nimbusds</groupId>
+ <artifactId>nimbus-jose-jwt</artifactId>
+ <scope>compile</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcprov-jdk15on</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
<dependency>
<groupId>commons-net</groupId>
<artifactId>commons-net</artifactId>
@@ -96,7 +106,11 @@
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
</dependency>
-
+ <dependency>
+ <groupId>commons-collections</groupId>
+ <artifactId>commons-collections</artifactId>
+ </dependency>
+
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/SpiGatewayMessages.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/SpiGatewayMessages.java
index 9b575e3..34ec105 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/SpiGatewayMessages.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/SpiGatewayMessages.java
@@ -48,4 +48,11 @@
@Message( level = MessageLevel.ERROR, text = "Failed Knox->Hadoop SPNegotiation authentication for URL: {0}" )
void failedSPNegoAuthn(String uri);
+
+ @Message( level = MessageLevel.WARN, text = "Error occurred when closing HTTP client : {0}" )
+ void errorClosingHttpClient(@StackTrace(level=MessageLevel.WARN) Exception e);
+
+ @Message( level = MessageLevel.WARN, text = "Skipping unencodable parameter {0}={1}, {2}: {3}" )
+ void skippingUnencodableParameter( String name, String value, String encoding, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
}
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/FilterConfigurationAdapter.java
similarity index 61%
copy from gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
copy to gateway-spi/src/main/java/org/apache/hadoop/gateway/config/FilterConfigurationAdapter.java
index d2aa441..882597c 100644
--- a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/FilterConfigurationAdapter.java
@@ -15,17 +15,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.identityasserter.function;
+package org.apache.hadoop.gateway.config;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor;
+import javax.servlet.FilterConfig;
-public class UsernameFunctionDescriptor implements UrlRewriteFunctionDescriptor<UsernameFunctionDescriptor> {
+public class FilterConfigurationAdapter implements ConfigurationAdapter {
- public static final String FUNCTION_NAME = "username";
+ private FilterConfig config;
- @Override
- public String name() {
- return FUNCTION_NAME;
+ public FilterConfigurationAdapter(FilterConfig config) {
+ this.config = config;
}
+ @Override
+ public Object getConfigurationValue(String name) throws ConfigurationException {
+ Object value = config.getInitParameter(name);
+ if (value == null) {
+ value = config.getServletContext().getAttribute(name);
+ }
+ return value;
+ }
}
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/FilterConfigurationAdapterDescriptor.java
similarity index 68%
copy from gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
copy to gateway-spi/src/main/java/org/apache/hadoop/gateway/config/FilterConfigurationAdapterDescriptor.java
index ec57043..4d106e1 100644
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/FilterConfigurationAdapterDescriptor.java
@@ -1,4 +1,3 @@
-
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -16,16 +15,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.provider.federation;
+package org.apache.hadoop.gateway.config;
-import junit.framework.TestCase;
+import org.apache.hadoop.gateway.config.spi.AbstractConfigurationAdapterDescriptor;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
-import org.junit.Test;
+import javax.servlet.FilterConfig;
-public class PreAuthSSOTest extends TestCase {
- @Test
- public void testPreAuth() throws Exception {
- assertTrue(true);
+public class FilterConfigurationAdapterDescriptor extends AbstractConfigurationAdapterDescriptor {
+
+ public FilterConfigurationAdapterDescriptor() {
+ add(FilterConfig.class, FilterConfigurationAdapter.class);
}
}
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
index a8ddc83..966539a 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
@@ -52,6 +52,12 @@
*/
String getGatewayDataDir();
+ /**
+ * The location of the gateway services definition's root directory
+ * @return The location of the gateway services top level directory.
+ */
+ String getGatewayServicesDir();
+
String getHadoopConfDir();
String getGatewayHost();
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributor.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributor.java
index ba21928..4b2b110 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributor.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributor.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.gateway.deploy;
import org.apache.hadoop.gateway.topology.Service;
+import org.apache.hadoop.gateway.topology.Version;
public interface ServiceDeploymentContributor {
@@ -27,6 +28,13 @@
// The name of this service deployment contributor. Not used yet.
String getName();
+ /**
+ * Returns the version of the deployment contributor. This helps in providing versioned
+ * contributions for service versions.
+ * @return the version
+ */
+ Version getVersion();
+
// Called after provider initializeContribution methods and in arbitrary order relative to other service contributors.
void initializeContribution( DeploymentContext context );
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java
index 4d33fa8..f82bad5 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java
@@ -21,6 +21,7 @@
import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Service;
+import org.apache.hadoop.gateway.topology.Version;
import java.net.URISyntaxException;
import java.util.Collection;
@@ -28,6 +29,11 @@
public abstract class ServiceDeploymentContributorBase extends DeploymentContributorBase implements ServiceDeploymentContributor {
+ @Override
+ public Version getVersion() {
+ return new Version();
+ }
+
public void initializeContribution( DeploymentContext context ) {
// Noop.
}
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/AbstractGatewayDispatch.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/AbstractGatewayDispatch.java
index 1bf5fc0..b5ff6d4 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/AbstractGatewayDispatch.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/AbstractGatewayDispatch.java
@@ -17,13 +17,11 @@
*/
package org.apache.hadoop.gateway.dispatch;
-import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
import org.apache.hadoop.gateway.filter.GatewayResponse;
import org.apache.hadoop.io.IOUtils;
+import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpUriRequest;
-import javax.servlet.FilterChain;
-import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
@@ -32,54 +30,15 @@
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
-import java.util.Collections;
import java.util.Enumeration;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
-public abstract class AbstractGatewayDispatch extends AbstractGatewayFilter implements Dispatch {
+public abstract class AbstractGatewayDispatch implements Dispatch {
- private static Map<String,Adapter> METHOD_ADAPTERS = createMethodAdapters();
private static int STREAM_COPY_BUFFER_SIZE = 4096;
private static final List<String> EXCLUDE_HEADERS = Arrays.asList( "Host", "Authorization", "Content-Length", "Transfer-Encoding" );
- private static Map<String,Adapter> createMethodAdapters() {
- Map<String,Adapter> map = new HashMap<String,Adapter>();
- map.put( "GET", new GetAdapter() );
- map.put( "POST", new PostAdapter() );
- map.put( "PUT", new PutAdapter() );
- map.put( "DELETE", new DeleteAdapter() );
- map.put( "OPTIONS", new OptionsAdapter() );
- return Collections.unmodifiableMap( map );
- }
-
- @Override
- protected void doFilter( HttpServletRequest request, HttpServletResponse response, FilterChain chain )
- throws IOException, ServletException {
- String method = request.getMethod().toUpperCase();
- Adapter adapter = METHOD_ADAPTERS.get( method );
- if( adapter != null ) {
- try {
- adapter.doMethod( this, request, response );
- } catch( URISyntaxException e ) {
- throw new ServletException( e );
- }
- } else {
- response.sendError( HttpServletResponse.SC_METHOD_NOT_ALLOWED );
- }
- }
-
- protected static URI getDispatchUrl( HttpServletRequest request ) {
- StringBuffer str = request.getRequestURL();
- String query = request.getQueryString();
- if( query != null ) {
- str.append( '?' );
- str.append( query );
- }
- URI url = URI.create( str.toString() );
- return url;
- }
+ protected HttpClient client;
protected void writeResponse( HttpServletRequest request, HttpServletResponse response, InputStream stream )
throws IOException {
@@ -98,6 +57,16 @@
// }
}
+ @Override
+ public HttpClient getHttpClient() {
+ return client;
+ }
+
+ @Override
+ public void setHttpClient(HttpClient client) {
+ this.client = client;
+ }
+
public void doGet( URI url, HttpServletRequest request, HttpServletResponse response )
throws IOException, URISyntaxException {
response.sendError( HttpServletResponse.SC_METHOD_NOT_ALLOWED );
@@ -122,56 +91,16 @@
throws IOException, URISyntaxException {
response.sendError( HttpServletResponse.SC_METHOD_NOT_ALLOWED );
}
-
- private interface Adapter {
- public void doMethod( Dispatch dispatch, HttpServletRequest request, HttpServletResponse response )
- throws IOException, ServletException, URISyntaxException;
- }
-
- private static class GetAdapter implements Adapter {
- public void doMethod( Dispatch dispatch, HttpServletRequest request, HttpServletResponse response )
- throws IOException, ServletException, URISyntaxException {
- dispatch.doGet( getDispatchUrl( request ), request, response );
- }
- }
-
- private static class PostAdapter implements Adapter {
- public void doMethod( Dispatch dispatch, HttpServletRequest request, HttpServletResponse response )
- throws IOException, ServletException, URISyntaxException {
- dispatch.doPost( getDispatchUrl( request ), request, response );
- }
- }
-
- private static class PutAdapter implements Adapter {
- public void doMethod( Dispatch dispatch, HttpServletRequest request, HttpServletResponse response )
- throws IOException, ServletException, URISyntaxException {
- dispatch.doPut( getDispatchUrl( request ), request, response );
- }
- }
-
- private static class DeleteAdapter implements Adapter {
- public void doMethod( Dispatch dispatch, HttpServletRequest request, HttpServletResponse response )
- throws IOException, ServletException, URISyntaxException {
- dispatch.doDelete( getDispatchUrl( request ), request, response );
- }
- }
-
- private static class OptionsAdapter implements Adapter {
- public void doMethod( Dispatch dispatch, HttpServletRequest request, HttpServletResponse response )
- throws IOException, ServletException, URISyntaxException {
- dispatch.doOptions( getDispatchUrl( request ), request, response );
- }
- }
public static void copyRequestHeaderFields(HttpUriRequest outboundRequest,
HttpServletRequest inboundRequest) {
Enumeration<String> headerNames = inboundRequest.getHeaderNames();
while( headerNames.hasMoreElements() ) {
- String name = (String) headerNames.nextElement();
+ String name = headerNames.nextElement();
if ( !outboundRequest.containsHeader( name )
&& !EXCLUDE_HEADERS.contains( name ) ) {
- String vaule = inboundRequest.getHeader( name );
- outboundRequest.addHeader( name, vaule );
+ String value = inboundRequest.getHeader( name );
+ outboundRequest.addHeader( name, value );
}
}
}
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/DefaultDispatch.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/DefaultDispatch.java
new file mode 100644
index 0000000..1ee9e49
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/DefaultDispatch.java
@@ -0,0 +1,318 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.dispatch;
+
+import org.apache.hadoop.gateway.SpiGatewayMessages;
+import org.apache.hadoop.gateway.SpiGatewayResources;
+import org.apache.hadoop.gateway.audit.api.Action;
+import org.apache.hadoop.gateway.audit.api.ActionOutcome;
+import org.apache.hadoop.gateway.audit.api.AuditServiceFactory;
+import org.apache.hadoop.gateway.audit.api.Auditor;
+import org.apache.hadoop.gateway.audit.api.ResourceType;
+import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
+import org.apache.hadoop.gateway.config.Configure;
+import org.apache.hadoop.gateway.config.Default;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
+import org.apache.http.Header;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpOptions;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.InputStreamEntity;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.message.BasicHeader;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ *
+ */
+public class DefaultDispatch extends AbstractGatewayDispatch {
+
+ // private static final String CT_APP_WWW_FORM_URL_ENCODED = "application/x-www-form-urlencoded";
+ // private static final String CT_APP_XML = "application/xml";
+ protected static final String Q_DELEGATION_EQ = "?delegation=";
+ protected static final String AMP_DELEGATION_EQ = "&delegation=";
+ protected static final String COOKIE = "Cookie";
+ protected static final String SET_COOKIE = "Set-Cookie";
+ protected static final String WWW_AUTHENTICATE = "WWW-Authenticate";
+ protected static final String NEGOTIATE = "Negotiate";
+
+ protected static SpiGatewayMessages LOG = MessagesFactory.get(SpiGatewayMessages.class);
+ protected static SpiGatewayResources RES = ResourcesFactory.get(SpiGatewayResources.class);
+ protected static Auditor auditor = AuditServiceFactory.getAuditService().getAuditor(AuditConstants.DEFAULT_AUDITOR_NAME,
+ AuditConstants.KNOX_SERVICE_NAME, AuditConstants.KNOX_COMPONENT_NAME);
+
+ protected AppCookieManager appCookieManager;
+
+ private int replayBufferSize = 0;
+ private Set<String> outboundResponseExcludeHeaders;
+
+ @Override
+ public void init() {
+ setAppCookieManager(new AppCookieManager());
+ outboundResponseExcludeHeaders = new HashSet<String>();
+ outboundResponseExcludeHeaders.add(SET_COOKIE);
+ outboundResponseExcludeHeaders.add(WWW_AUTHENTICATE);
+ }
+
+ @Override
+ public void destroy() {
+
+ }
+
+ public void setAppCookieManager(AppCookieManager appCookieManager) {
+ this.appCookieManager = appCookieManager;
+ }
+
+ protected void executeRequest(
+ HttpUriRequest outboundRequest,
+ HttpServletRequest inboundRequest,
+ HttpServletResponse outboundResponse)
+ throws IOException {
+ HttpResponse inboundResponse = executeOutboundRequest(outboundRequest);
+ writeOutboundResponse(outboundRequest, inboundRequest, outboundResponse, inboundResponse);
+ }
+
+ protected HttpResponse executeOutboundRequest(HttpUriRequest outboundRequest) throws IOException {
+ LOG.dispatchRequest(outboundRequest.getMethod(), outboundRequest.getURI());
+ HttpResponse inboundResponse = null;
+
+ try {
+ String query = outboundRequest.getURI().getQuery();
+ if (!"true".equals(System.getProperty(GatewayConfig.HADOOP_KERBEROS_SECURED))) {
+ // Hadoop cluster not Kerberos enabled
+ addCredentialsToRequest(outboundRequest);
+ inboundResponse = client.execute(outboundRequest);
+ } else if (query.contains(Q_DELEGATION_EQ) ||
+ // query string carries delegation token
+ query.contains(AMP_DELEGATION_EQ)) {
+ inboundResponse = client.execute(outboundRequest);
+ } else {
+ // Kerberos secured, no delegation token in query string
+ inboundResponse = executeKerberosDispatch(outboundRequest, client);
+ }
+ } catch (IOException e) {
+ // we do not want to expose back end host. port end points to clients, see JIRA KNOX-58
+ LOG.dispatchServiceConnectionException(outboundRequest.getURI(), e);
+ auditor.audit(Action.DISPATCH, outboundRequest.getURI().toString(), ResourceType.URI, ActionOutcome.FAILURE);
+ throw new IOException(RES.dispatchConnectionError());
+ } finally {
+ if (inboundResponse != null) {
+ int statusCode = inboundResponse.getStatusLine().getStatusCode();
+ if (statusCode != 201) {
+ LOG.dispatchResponseStatusCode(statusCode);
+ } else {
+ Header location = inboundResponse.getFirstHeader("Location");
+ if (location == null) {
+ LOG.dispatchResponseStatusCode(statusCode);
+ } else {
+ LOG.dispatchResponseCreatedStatusCode(statusCode, location.getValue());
+ }
+ }
+ auditor.audit(Action.DISPATCH, outboundRequest.getURI().toString(), ResourceType.URI, ActionOutcome.SUCCESS, RES.responseStatus(statusCode));
+ } else {
+ auditor.audit(Action.DISPATCH, outboundRequest.getURI().toString(), ResourceType.URI, ActionOutcome.UNAVAILABLE);
+ }
+
+ }
+ return inboundResponse;
+ }
+
+ protected void writeOutboundResponse(HttpUriRequest outboundRequest, HttpServletRequest inboundRequest, HttpServletResponse outboundResponse, HttpResponse inboundResponse) throws IOException {
+ // Copy the client respond header to the server respond.
+ outboundResponse.setStatus(inboundResponse.getStatusLine().getStatusCode());
+ Header[] headers = inboundResponse.getAllHeaders();
+ Set<String> excludeHeaders = getOutboundResponseExcludeHeaders();
+ boolean hasExcludeHeaders = false;
+ if ((excludeHeaders != null) && !(excludeHeaders.isEmpty())) {
+ hasExcludeHeaders = true;
+ }
+ for ( Header header : headers ) {
+ String name = header.getName();
+ if (hasExcludeHeaders && excludeHeaders.contains(name)) {
+ continue;
+ }
+ String value = header.getValue();
+ outboundResponse.addHeader(name, value);
+ }
+
+ HttpEntity entity = inboundResponse.getEntity();
+ if ( entity != null ) {
+ Header contentType = entity.getContentType();
+ if ( contentType != null ) {
+ outboundResponse.setContentType(contentType.getValue());
+ }
+ //KM[ If this is set here it ends up setting the content length to the content returned from the server.
+ // This length might not match if the the content is rewritten.
+ // long contentLength = entity.getContentLength();
+ // if( contentLength <= Integer.MAX_VALUE ) {
+ // outboundResponse.setContentLength( (int)contentLength );
+ // }
+ //]
+ writeResponse(inboundRequest, outboundResponse, entity.getContent());
+ }
+ }
+
+ /**
+ * This method provides a hook for specialized credential propagation
+ * in subclasses.
+ *
+ * @param outboundRequest
+ */
+ protected void addCredentialsToRequest(HttpUriRequest outboundRequest) {
+ }
+
+ protected HttpResponse executeKerberosDispatch(HttpUriRequest outboundRequest,
+ HttpClient client) throws IOException {
+ HttpResponse inboundResponse;
+ outboundRequest.removeHeaders(COOKIE);
+ String appCookie = appCookieManager.getCachedAppCookie();
+ if (appCookie != null) {
+ outboundRequest.addHeader(new BasicHeader(COOKIE, appCookie));
+ }
+ inboundResponse = client.execute(outboundRequest);
+ // if inBoundResponse has status 401 and header WWW-Authenticate: Negotiate
+ // refresh hadoop.auth.cookie and attempt one more time
+ int statusCode = inboundResponse.getStatusLine().getStatusCode();
+ if (statusCode == HttpStatus.SC_UNAUTHORIZED) {
+ Header[] wwwAuthHeaders = inboundResponse.getHeaders(WWW_AUTHENTICATE);
+ if (wwwAuthHeaders != null && wwwAuthHeaders.length != 0 &&
+ wwwAuthHeaders[0].getValue().trim().startsWith(NEGOTIATE)) {
+ appCookie = appCookieManager.getAppCookie(outboundRequest, true);
+ outboundRequest.removeHeaders(COOKIE);
+ outboundRequest.addHeader(new BasicHeader(COOKIE, appCookie));
+ client = new DefaultHttpClient();
+ inboundResponse = client.execute(outboundRequest);
+ } else {
+ // no supported authentication type found
+ // we would let the original response propagate
+ }
+ } else {
+ // not a 401 Unauthorized status code
+ // we would let the original response propagate
+ }
+ return inboundResponse;
+ }
+
+ protected HttpEntity createRequestEntity(HttpServletRequest request)
+ throws IOException {
+
+ String contentType = request.getContentType();
+ int contentLength = request.getContentLength();
+ InputStream contentStream = request.getInputStream();
+
+ HttpEntity entity;
+ if (contentType == null) {
+ entity = new InputStreamEntity(contentStream, contentLength);
+ } else {
+ entity = new InputStreamEntity(contentStream, contentLength, ContentType.parse(contentType));
+ }
+
+
+ if ("true".equals(System.getProperty(GatewayConfig.HADOOP_KERBEROS_SECURED))) {
+
+ //Check if delegation token is supplied in the request
+ boolean delegationTokenPresent = false;
+ String queryString = request.getQueryString();
+ if (queryString != null) {
+ delegationTokenPresent = queryString.startsWith("delegation=") ||
+ queryString.contains("&delegation=");
+ }
+ if (!delegationTokenPresent && getReplayBufferSize() > 0) {
+ entity = new CappedBufferHttpEntity(entity, getReplayBufferSize() * 1024);
+ }
+ }
+
+ return entity;
+ }
+
+ @Override
+ public void doGet(URI url, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, URISyntaxException {
+ HttpGet method = new HttpGet(url);
+ // https://issues.apache.org/jira/browse/KNOX-107 - Service URLs not rewritten for WebHDFS GET redirects
+ method.getParams().setBooleanParameter("http.protocol.handle-redirects", false);
+ copyRequestHeaderFields(method, request);
+ executeRequest(method, request, response);
+ }
+
+ @Override
+ public void doOptions(URI url, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, URISyntaxException {
+ HttpOptions method = new HttpOptions(url);
+ executeRequest(method, request, response);
+ }
+
+ @Override
+ public void doPut(URI url, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, URISyntaxException {
+ HttpPut method = new HttpPut(url);
+ HttpEntity entity = createRequestEntity(request);
+ method.setEntity(entity);
+ copyRequestHeaderFields(method, request);
+ executeRequest(method, request, response);
+ }
+
+ @Override
+ public void doPost(URI url, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, URISyntaxException {
+ HttpPost method = new HttpPost(url);
+ HttpEntity entity = createRequestEntity(request);
+ method.setEntity(entity);
+ copyRequestHeaderFields(method, request);
+ executeRequest(method, request, response);
+ }
+
+ @Override
+ public void doDelete(URI url, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, URISyntaxException {
+ HttpDelete method = new HttpDelete(url);
+ copyRequestHeaderFields(method, request);
+ executeRequest(method, request, response);
+ }
+
+ protected int getReplayBufferSize() {
+ return replayBufferSize;
+ }
+
+ @Configure
+ protected void setReplayBufferSize(@Default("8") int size) {
+ replayBufferSize = size;
+ }
+
+ public Set<String> getOutboundResponseExcludeHeaders() {
+ return outboundResponseExcludeHeaders;
+ }
+}
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/Dispatch.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/Dispatch.java
index 072a2c6..0ce1339 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/Dispatch.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/Dispatch.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.gateway.dispatch;
+import org.apache.http.client.HttpClient;
+
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@@ -26,6 +28,14 @@
public interface Dispatch {
+ void init();
+
+ void destroy();
+
+ HttpClient getHttpClient();
+
+ void setHttpClient(HttpClient httpClient);
+
void doGet( URI url, HttpServletRequest request, HttpServletResponse response )
throws IOException, ServletException, URISyntaxException;
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/GatewayDispatchFilter.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/GatewayDispatchFilter.java
new file mode 100644
index 0000000..c23d9dd
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/GatewayDispatchFilter.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.dispatch;
+
+import org.apache.hadoop.gateway.SpiGatewayMessages;
+import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.http.client.CookieStore;
+import org.apache.http.cookie.Cookie;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.apache.hadoop.gateway.config.ConfigurationInjectorBuilder.configuration;
+
+public class GatewayDispatchFilter extends AbstractGatewayFilter {
+
+ private static Map<String, Adapter> METHOD_ADAPTERS = createMethodAdapters();
+
+ protected static SpiGatewayMessages LOG = MessagesFactory.get(SpiGatewayMessages.class);
+
+ private Dispatch dispatch;
+
+ private CloseableHttpClient httpClient;
+
+ private static Map<String, Adapter> createMethodAdapters() {
+ Map<String, Adapter> map = new HashMap<String, Adapter>();
+ map.put("GET", new GetAdapter());
+ map.put("POST", new PostAdapter());
+ map.put("PUT", new PutAdapter());
+ map.put("DELETE", new DeleteAdapter());
+ map.put("OPTIONS", new OptionsAdapter());
+ return Collections.unmodifiableMap(map);
+ }
+
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ super.init(filterConfig);
+ if (dispatch == null) {
+ String dispatchImpl = filterConfig.getInitParameter("dispatch-impl");
+ dispatch = newDispatch(dispatchImpl);
+ }
+ configuration().target(dispatch).source(filterConfig).inject();
+ httpClient = HttpClients.custom().setDefaultCookieStore(new NoCookieStore()).build();
+ //[sumit] this can perhaps be stashed in the servlet context to increase sharing of the client
+ dispatch.setHttpClient(httpClient);
+ dispatch.init();
+ }
+
+ @Override
+ public void destroy() {
+ dispatch.destroy();
+ try {
+ httpClient.close();
+ } catch ( IOException e ) {
+ LOG.errorClosingHttpClient(e);
+ }
+ }
+
+ public Dispatch getDispatch() {
+ return dispatch;
+ }
+
+ public void setDispatch(Dispatch dispatch) {
+ this.dispatch = dispatch;
+ }
+
+ @Override
+ protected void doFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain) throws IOException, ServletException {
+ String method = request.getMethod().toUpperCase();
+ Adapter adapter = METHOD_ADAPTERS.get(method);
+ if ( adapter != null ) {
+ try {
+ adapter.doMethod(dispatch, request, response);
+ } catch ( URISyntaxException e ) {
+ throw new ServletException(e);
+ }
+ } else {
+ response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
+ }
+ }
+
+ protected static URI getDispatchUrl(HttpServletRequest request) {
+ StringBuffer str = request.getRequestURL();
+ String query = request.getQueryString();
+ if ( query != null ) {
+ str.append('?');
+ str.append(query);
+ }
+ URI url = URI.create(str.toString());
+ return url;
+ }
+
+ private interface Adapter {
+ public void doMethod(Dispatch dispatch, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, ServletException, URISyntaxException;
+ }
+
+ private static class GetAdapter implements Adapter {
+ public void doMethod(Dispatch dispatch, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, ServletException, URISyntaxException {
+ dispatch.doGet(getDispatchUrl(request), request, response);
+ }
+ }
+
+ private static class PostAdapter implements Adapter {
+ public void doMethod(Dispatch dispatch, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, ServletException, URISyntaxException {
+ dispatch.doPost(getDispatchUrl(request), request, response);
+ }
+ }
+
+ private static class PutAdapter implements Adapter {
+ public void doMethod(Dispatch dispatch, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, ServletException, URISyntaxException {
+ dispatch.doPut(getDispatchUrl(request), request, response);
+ }
+ }
+
+ private static class DeleteAdapter implements Adapter {
+ public void doMethod(Dispatch dispatch, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, ServletException, URISyntaxException {
+ dispatch.doDelete(getDispatchUrl(request), request, response);
+ }
+ }
+
+ private static class OptionsAdapter implements Adapter {
+ public void doMethod(Dispatch dispatch, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, ServletException, URISyntaxException {
+ dispatch.doOptions(getDispatchUrl(request), request, response);
+ }
+ }
+
+ private Dispatch newDispatch(String dispatchImpl) throws ServletException {
+ try {
+ ClassLoader loader = Thread.currentThread().getContextClassLoader();
+ if ( loader == null ) {
+ loader = this.getClass().getClassLoader();
+ }
+ Class<Dispatch> clazz = (Class) loader.loadClass(dispatchImpl);
+ return clazz.newInstance();
+ } catch ( Exception e ) {
+ throw new ServletException(e);
+ }
+ }
+
+ private class NoCookieStore implements CookieStore {
+ @Override
+ public void addCookie(Cookie cookie) {
+ //no op
+ }
+
+ @Override
+ public List<Cookie> getCookies() {
+ return Collections.EMPTY_LIST;
+ }
+
+ @Override
+ public boolean clearExpired(Date date) {
+ return true;
+ }
+
+ @Override
+ public void clear() {
+ //no op
+ }
+ }
+}
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatch.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatch.java
index 4de9730..46fb43e 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatch.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatch.java
@@ -17,293 +17,19 @@
*/
package org.apache.hadoop.gateway.dispatch;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.net.URISyntaxException;
-
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import org.apache.hadoop.gateway.SpiGatewayMessages;
-import org.apache.hadoop.gateway.SpiGatewayResources;
-import org.apache.hadoop.gateway.audit.api.Action;
-import org.apache.hadoop.gateway.audit.api.ActionOutcome;
-import org.apache.hadoop.gateway.audit.api.AuditServiceFactory;
-import org.apache.hadoop.gateway.audit.api.Auditor;
-import org.apache.hadoop.gateway.audit.api.ResourceType;
-import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
-import org.apache.http.Header;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpResponse;
-import org.apache.http.HttpStatus;
-import org.apache.http.client.ClientProtocolException;
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpOptions;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.InputStreamEntity;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.message.BasicHeader;
-
-/**
- *
+/***
+ * KNOX-526. Need to keep this class around for backward compatibility of deployed
+ * topologies. This is required for releases older than Apache Knox 0.6.0
*/
-public class HttpClientDispatch extends AbstractGatewayDispatch {
+@Deprecated
+public class HttpClientDispatch extends GatewayDispatchFilter {
- private static final String REPLAY_BUFFER_SIZE = "replayBufferSize";
-
- // private static final String CT_APP_WWW_FORM_URL_ENCODED = "application/x-www-form-urlencoded";
- // private static final String CT_APP_XML = "application/xml";
- protected static final String Q_DELEGATION_EQ = "?delegation=";
- protected static final String AMP_DELEGATION_EQ = "&delegation=";
- protected static final String COOKIE = "Cookie";
- protected static final String SET_COOKIE = "Set-Cookie";
- protected static final String WWW_AUTHENTICATE = "WWW-Authenticate";
- protected static final String NEGOTIATE = "Negotiate";
-
- protected static SpiGatewayMessages LOG = MessagesFactory.get(SpiGatewayMessages.class);
- protected static SpiGatewayResources RES = ResourcesFactory.get(SpiGatewayResources.class);
- protected static Auditor auditor = AuditServiceFactory.getAuditService().getAuditor(AuditConstants.DEFAULT_AUDITOR_NAME,
- AuditConstants.KNOX_SERVICE_NAME, AuditConstants.KNOX_COMPONENT_NAME);
-
- protected AppCookieManager appCookieManager;
-
- protected static final String REPLAY_BUFFER_SIZE_PARAM = "replayBufferSize";
-
- private int replayBufferSize = 0;
-
- @Override
- public void init(FilterConfig filterConfig) throws ServletException {
- this.init(filterConfig, new AppCookieManager());
- }
-
- protected void init(FilterConfig filterConfig, AppCookieManager cookieManager) throws ServletException {
- super.init(filterConfig);
- appCookieManager = cookieManager;
- String replayBufferSizeString = filterConfig.getInitParameter(REPLAY_BUFFER_SIZE_PARAM);
- if (replayBufferSizeString != null) {
- setReplayBufferSize(Integer.valueOf(replayBufferSizeString));
- }
- }
-
- protected void executeRequest(
- HttpUriRequest outboundRequest,
- HttpServletRequest inboundRequest,
- HttpServletResponse outboundResponse)
- throws IOException {
- HttpResponse inboundResponse = executeOutboundRequest(outboundRequest);
- writeOutboundResponse(outboundRequest, inboundRequest, outboundResponse, inboundResponse);
- }
-
- protected HttpResponse executeOutboundRequest(HttpUriRequest outboundRequest) throws IOException {
- LOG.dispatchRequest(outboundRequest.getMethod(), outboundRequest.getURI());
- HttpResponse inboundResponse = null;
- DefaultHttpClient client = new DefaultHttpClient();
-
- try {
- String query = outboundRequest.getURI().getQuery();
- if (!"true".equals(System.getProperty(GatewayConfig.HADOOP_KERBEROS_SECURED))) {
- // Hadoop cluster not Kerberos enabled
- addCredentialsToRequest(outboundRequest);
- inboundResponse = client.execute(outboundRequest);
- } else if (query.contains(Q_DELEGATION_EQ) ||
- // query string carries delegation token
- query.contains(AMP_DELEGATION_EQ)) {
- inboundResponse = client.execute(outboundRequest);
- } else {
- // Kerberos secured, no delegation token in query string
- inboundResponse = executeKerberosDispatch(outboundRequest, client);
- }
- } catch (IOException e) {
- // we do not want to expose back end host. port end points to clients, see JIRA KNOX-58
- LOG.dispatchServiceConnectionException(outboundRequest.getURI(), e);
- auditor.audit(Action.DISPATCH, outboundRequest.getURI().toString(), ResourceType.URI, ActionOutcome.FAILURE);
- throw new IOException(RES.dispatchConnectionError());
- } finally {
- if (inboundResponse != null) {
- int statusCode = inboundResponse.getStatusLine().getStatusCode();
- if (statusCode != 201) {
- LOG.dispatchResponseStatusCode(statusCode);
- } else {
- Header location = inboundResponse.getFirstHeader("Location");
- if (location == null) {
- LOG.dispatchResponseStatusCode(statusCode);
- } else {
- LOG.dispatchResponseCreatedStatusCode(statusCode, location.getValue());
- }
- }
- auditor.audit(Action.DISPATCH, outboundRequest.getURI().toString(), ResourceType.URI, ActionOutcome.SUCCESS, RES.responseStatus(statusCode));
- } else {
- auditor.audit(Action.DISPATCH, outboundRequest.getURI().toString(), ResourceType.URI, ActionOutcome.UNAVAILABLE);
- }
-
- }
- return inboundResponse;
- }
-
- protected void writeOutboundResponse(HttpUriRequest outboundRequest, HttpServletRequest inboundRequest, HttpServletResponse outboundResponse, HttpResponse inboundResponse) throws IOException {
- // Copy the client respond header to the server respond.
- outboundResponse.setStatus(inboundResponse.getStatusLine().getStatusCode());
- Header[] headers = inboundResponse.getAllHeaders();
- for (Header header : headers) {
- String name = header.getName();
- if (name.equals(SET_COOKIE) || name.equals(WWW_AUTHENTICATE)) {
- continue;
- }
- String value = header.getValue();
- outboundResponse.addHeader(name, value);
- }
-
- HttpEntity entity = inboundResponse.getEntity();
- if (entity != null) {
- Header contentType = entity.getContentType();
- if (contentType != null) {
- outboundResponse.setContentType(contentType.getValue());
- }
- //KM[ If this is set here it ends up setting the content length to the content returned from the server.
- // This length might not match if the the content is rewritten.
- // long contentLength = entity.getContentLength();
- // if( contentLength <= Integer.MAX_VALUE ) {
- // outboundResponse.setContentLength( (int)contentLength );
- // }
- //]
- writeResponse(inboundRequest, outboundResponse, entity.getContent());
- }
- }
-
- /**
- * This method provides a hook for specialized credential propagation
- * in subclasses.
- *
- * @param outboundRequest
- */
- protected void addCredentialsToRequest(HttpUriRequest outboundRequest) {
- }
-
- protected HttpResponse executeKerberosDispatch(HttpUriRequest outboundRequest,
- DefaultHttpClient client) throws IOException, ClientProtocolException {
- HttpResponse inboundResponse;
- outboundRequest.removeHeaders(COOKIE);
- String appCookie = appCookieManager.getCachedAppCookie();
- if (appCookie != null) {
- outboundRequest.addHeader(new BasicHeader(COOKIE, appCookie));
- }
- inboundResponse = client.execute(outboundRequest);
- // if inBoundResponse has status 401 and header WWW-Authenticate: Negoitate
- // refresh hadoop.auth.cookie and attempt one more time
- int statusCode = inboundResponse.getStatusLine().getStatusCode();
- if (statusCode == HttpStatus.SC_UNAUTHORIZED) {
- Header[] wwwAuthHeaders = inboundResponse.getHeaders(WWW_AUTHENTICATE);
- if (wwwAuthHeaders != null && wwwAuthHeaders.length != 0 &&
- wwwAuthHeaders[0].getValue().trim().startsWith(NEGOTIATE)) {
- appCookie = appCookieManager.getAppCookie(outboundRequest, true);
- outboundRequest.removeHeaders(COOKIE);
- outboundRequest.addHeader(new BasicHeader(COOKIE, appCookie));
- client = new DefaultHttpClient();
- inboundResponse = client.execute(outboundRequest);
- } else {
- // no supported authentication type found
- // we would let the original response propagate
- }
- } else {
- // not a 401 Unauthorized status code
- // we would let the original response propagate
- }
- return inboundResponse;
- }
-
- protected HttpEntity createRequestEntity(HttpServletRequest request)
- throws IOException {
-
- String contentType = request.getContentType();
- int contentLength = request.getContentLength();
- InputStream contentStream = request.getInputStream();
-
- HttpEntity entity;
- if (contentType == null) {
- entity = new InputStreamEntity(contentStream, contentLength);
- } else {
- entity = new InputStreamEntity(contentStream, contentLength, ContentType.parse(contentType));
- }
-
-
- if ("true".equals(System.getProperty(GatewayConfig.HADOOP_KERBEROS_SECURED))) {
-
- //Check if delegation token is supplied in the request
- boolean delegationTokenPresent = false;
- String queryString = request.getQueryString();
- if (queryString != null) {
- delegationTokenPresent = queryString.startsWith("delegation=") ||
- queryString.contains("&delegation=");
- }
- if (!delegationTokenPresent && getReplayBufferSize() > 0) {
- entity = new CappedBufferHttpEntity(entity, getReplayBufferSize() * 1024);
- }
- }
-
- return entity;
- }
-
- @Override
- public void doGet(URI url, HttpServletRequest request, HttpServletResponse response)
- throws IOException, URISyntaxException {
- HttpGet method = new HttpGet(url);
- // https://issues.apache.org/jira/browse/KNOX-107 - Service URLs not rewritten for WebHDFS GET redirects
- method.getParams().setBooleanParameter("http.protocol.handle-redirects", false);
- copyRequestHeaderFields(method, request);
- executeRequest(method, request, response);
- }
-
- @Override
- public void doOptions(URI url, HttpServletRequest request, HttpServletResponse response)
- throws IOException, URISyntaxException {
- HttpOptions method = new HttpOptions(url);
- executeRequest(method, request, response);
- }
-
- @Override
- public void doPut(URI url, HttpServletRequest request, HttpServletResponse response)
- throws IOException, URISyntaxException {
- HttpPut method = new HttpPut(url);
- HttpEntity entity = createRequestEntity(request);
- method.setEntity(entity);
- copyRequestHeaderFields(method, request);
- executeRequest(method, request, response);
- }
-
- @Override
- public void doPost(URI url, HttpServletRequest request, HttpServletResponse response)
- throws IOException, URISyntaxException {
- HttpPost method = new HttpPost(url);
- HttpEntity entity = createRequestEntity(request);
- method.setEntity(entity);
- copyRequestHeaderFields(method, request);
- executeRequest(method, request, response);
- }
-
- @Override
- public void doDelete(URI url, HttpServletRequest request, HttpServletResponse response)
- throws IOException, URISyntaxException {
- HttpDelete method = new HttpDelete(url);
- copyRequestHeaderFields(method, request);
- executeRequest(method, request, response);
- }
-
- protected int getReplayBufferSize() {
- return replayBufferSize;
- }
-
- protected void setReplayBufferSize(int size) {
- replayBufferSize = size;
- }
-
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ setDispatch(new DefaultDispatch());
+ super.init(filterConfig);
+ }
}
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/impl/X509CertificateUtil.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/impl/X509CertificateUtil.java
index 17c75cf..936356d 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/impl/X509CertificateUtil.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/impl/X509CertificateUtil.java
@@ -20,6 +20,7 @@
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.math.BigInteger;
import java.security.GeneralSecurityException;
@@ -86,22 +87,34 @@
certificateSerialNumberObject);
// info.set(X509CertInfo.SUBJECT, new CertificateSubjectName(owner));
- Class<?> certificateSubjectNameClass = Class.forName(getCertificateSubjectNameModuleName());
- Constructor<?> certificateSubjectNameConstr = certificateSubjectNameClass
- .getConstructor(new Class[] { x500NameClass });
- Object certificateSubjectNameObject = certificateSubjectNameConstr
- .newInstance(x500NameObject);
- methodSET.invoke(certInfoObject, getSetField(certInfoObject, "SUBJECT"),
- certificateSubjectNameObject);
+ try {
+ Class<?> certificateSubjectNameClass = Class.forName(getCertificateSubjectNameModuleName());
+ Constructor<?> certificateSubjectNameConstr = certificateSubjectNameClass
+ .getConstructor(new Class[] { x500NameClass });
+ Object certificateSubjectNameObject = certificateSubjectNameConstr
+ .newInstance(x500NameObject);
+ methodSET.invoke(certInfoObject, getSetField(certInfoObject, "SUBJECT"),
+ certificateSubjectNameObject);
+ }
+ catch (InvocationTargetException ite) {
+ methodSET.invoke(certInfoObject, getSetField(certInfoObject, "SUBJECT"),
+ x500NameObject);
+ }
// info.set(X509CertInfo.ISSUER, new CertificateIssuerName(owner));
- Class<?> certificateIssuerNameClass = Class.forName(getCertificateIssuerNameModuleName());
- Constructor<?> certificateIssuerNameConstr = certificateIssuerNameClass
- .getConstructor(new Class[] { x500NameClass });
- Object certificateIssuerNameObject = certificateIssuerNameConstr
- .newInstance(x500NameObject);
- methodSET.invoke(certInfoObject, getSetField(certInfoObject, "ISSUER"),
- certificateIssuerNameObject);
+ try {
+ Class<?> certificateIssuerNameClass = Class.forName(getCertificateIssuerNameModuleName());
+ Constructor<?> certificateIssuerNameConstr = certificateIssuerNameClass
+ .getConstructor(new Class[] { x500NameClass });
+ Object certificateIssuerNameObject = certificateIssuerNameConstr
+ .newInstance(x500NameObject);
+ methodSET.invoke(certInfoObject, getSetField(certInfoObject, "ISSUER"),
+ certificateIssuerNameObject);
+ }
+ catch (InvocationTargetException ite) {
+ methodSET.invoke(certInfoObject, getSetField(certInfoObject, "ISSUER"),
+ x500NameObject);
+ }
// info.set(X509CertInfo.KEY, new CertificateX509Key(pair.getPublic()));
Class<?> certificateX509KeyClass = Class.forName(getCertificateX509KeyModuleName());
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java
index 9763174..955f72f 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java
@@ -27,6 +27,7 @@
private String role;
private String name;
+ private Version version;
private Map<String, String> params = new LinkedHashMap<String, String>();
private List<String> urls;
@@ -46,6 +47,14 @@
this.name = name;
}
+ public Version getVersion() {
+ return version;
+ }
+
+ public void setVersion(Version version) {
+ this.version = version;
+ }
+
public List<String> getUrls() {
if ( urls == null ) {
urls = new ArrayList<String>();
@@ -94,4 +103,24 @@
params.put(param.getName(), param.getValue());
}
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof Service)) {
+ return false;
+ }
+ Service that = (Service) object;
+ String thatName = that.getName();
+ if (thatName != null && !(thatName.equals(name))) {
+ return false;
+ }
+ String thatRole = that.getRole();
+ if (thatRole != null && !thatRole.equals(role)) {
+ return false;
+ }
+ Version thatVersion = that.getVersion();
+ if (thatVersion != null && !(thatVersion.equals(version))) {
+ return false;
+ }
+ return true;
+ }
}
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java
index c8d611d..7be8301 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java
@@ -17,6 +17,9 @@
*/
package org.apache.hadoop.gateway.topology;
+import org.apache.commons.collections.map.HashedMap;
+import org.apache.commons.collections.map.MultiKeyMap;
+
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
@@ -32,7 +35,12 @@
public List<Provider> providerList = new ArrayList<Provider>();
private Map<String,Map<String,Provider>> providerMap = new HashMap<String,Map<String,Provider>>();
public List<Service> services = new ArrayList<Service>();
- private Map<String, Map<String, Service>> serviceMap = new HashMap<String, Map<String, Service>>();
+
+ private MultiKeyMap serviceMap;
+
+ public Topology() {
+ serviceMap = MultiKeyMap.decorate(new HashedMap());
+ }
public URI getUri() {
return uri;
@@ -62,27 +70,13 @@
return services;
}
- public Service getService( String role, String name ) {
- Service service = null;
- Map<String, Service> nameMap = serviceMap.get( role );
- if( nameMap != null) {
- service = nameMap.get( name );
- if ( service == null && !nameMap.values().isEmpty() ) {
- service = (Service) nameMap.values().toArray()[0];
- }
- }
- return service;
+ public Service getService(String role, String name, Version version) {
+ return (Service)serviceMap.get(role, name, version);
}
public void addService( Service service ) {
services.add( service );
- String role = service.getRole();
- Map<String, Service> nameMap = serviceMap.get( role );
- if( nameMap == null ) {
- nameMap = new HashMap<String, Service>();
- serviceMap.put( role, nameMap );
- }
- nameMap.put( service.getName(), service );
+ serviceMap.put(service.getRole(), service.getName(), service.getVersion(), service);
}
public Collection<Provider> getProviders() {
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Version.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Version.java
new file mode 100644
index 0000000..475f694
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Version.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology;
+
/**
 * A simple three-part (major.minor.patch) version, comparable and usable as a map key.
 *
 * <p>Missing components default to 0, so "1" parses as 1.0.0 and "1.2" as 1.2.0.
 * {@link #compareTo(Version)} orders by major, then minor, then patch, and is
 * consistent with {@link #equals(Object)}.
 */
public class Version implements Comparable<Version> {

  private int major;

  private int minor;

  private int patch;

  /** Creates version 0.0.0. */
  public Version() {
  }

  /**
   * Creates a version by parsing a dotted string such as "1.2.3".
   *
   * @param version dotted version string; a null value leaves the version at 0.0.0
   * @throws NumberFormatException if any present component is not an integer
   */
  public Version(String version) {
    setVersion(version);
  }

  /**
   * Creates a version from explicit components.
   *
   * @param major major component
   * @param minor minor component
   * @param patch patch component
   */
  public Version(int major, int minor, int patch) {
    this.major = major;
    this.minor = minor;
    this.patch = patch;
  }

  public int getMajor() {
    return major;
  }

  public void setMajor(int major) {
    this.major = major;
  }

  public int getMinor() {
    return minor;
  }

  public void setMinor(int minor) {
    this.minor = minor;
  }

  public int getPatch() {
    return patch;
  }

  public void setPatch(int patch) {
    this.patch = patch;
  }

  /**
   * Replaces this version's components by parsing a dotted string.
   *
   * @param version dotted version string; ignored when null (components unchanged)
   * @throws NumberFormatException if any present component is not an integer
   */
  public void setVersion(String version) {
    if (version != null) {
      parseVersion(version);
    }
  }

  // Splits on '.' and assigns up to three components; extra components are ignored,
  // missing ones keep their current value (0 for a fresh instance).
  private void parseVersion(String version) {
    String[] parts = version.split("\\.");
    int length = parts.length;
    if (length >= 1) {
      major = Integer.parseInt(parts[0]);
    }
    if (length >= 2) {
      minor = Integer.parseInt(parts[1]);
    }
    if (length >= 3) {
      patch = Integer.parseInt(parts[2]);
    }
  }

  /**
   * Orders lexicographically by (major, minor, patch).
   *
   * @return negative, zero, or positive as this version is before, equal to,
   *         or after the given version
   */
  @Override
  public int compareTo(Version version) {
    int result = Integer.compare(major, version.major);
    if (result == 0) {
      result = Integer.compare(minor, version.minor);
    }
    if (result == 0) {
      result = Integer.compare(patch, version.patch);
    }
    return result;
  }

  /** Returns the canonical "major.minor.patch" form, e.g. "1.2.3". */
  @Override
  public String toString() {
    // StringBuilder: no synchronization needed for this method-local buffer.
    StringBuilder buffer = new StringBuilder();
    buffer.append(major);
    buffer.append(".");
    buffer.append(minor);
    buffer.append(".");
    buffer.append(patch);
    return buffer.toString();
  }

  /** Two versions are equal when all three components match. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof Version)) {
      return false;
    }
    Version that = (Version) o;
    return major == that.major && minor == that.minor && patch == that.patch;
  }

  /** Hash of the canonical string form; equal versions hash equally. */
  @Override
  public int hashCode() {
    return toString().hashCode();
  }
}
diff --git a/gateway-service-yarn-rm/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor b/gateway-spi/src/main/resources/META-INF/services/org.apache.hadoop.gateway.config.spi.ConfigurationAdapterDescriptor
old mode 100644
new mode 100755
similarity index 92%
rename from gateway-service-yarn-rm/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
rename to gateway-spi/src/main/resources/META-INF/services/org.apache.hadoop.gateway.config.spi.ConfigurationAdapterDescriptor
index 3db76c7..4c1cad9
--- a/gateway-service-yarn-rm/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
+++ b/gateway-spi/src/main/resources/META-INF/services/org.apache.hadoop.gateway.config.spi.ConfigurationAdapterDescriptor
@@ -15,5 +15,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
-
-org.apache.hadoop.gateway.yarn.rm.ResourceManagerDeploymentContributor
\ No newline at end of file
+org.apache.hadoop.gateway.config.FilterConfigurationAdapterDescriptor
diff --git a/gateway-spi/src/test/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatchTest.java b/gateway-spi/src/test/java/org/apache/hadoop/gateway/dispatch/DefaultDispatchTest.java
similarity index 90%
rename from gateway-spi/src/test/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatchTest.java
rename to gateway-spi/src/test/java/org/apache/hadoop/gateway/dispatch/DefaultDispatchTest.java
index b9b26b6..592db57 100644
--- a/gateway-spi/src/test/java/org/apache/hadoop/gateway/dispatch/HttpClientDispatchTest.java
+++ b/gateway-spi/src/test/java/org/apache/hadoop/gateway/dispatch/DefaultDispatchTest.java
@@ -39,12 +39,13 @@
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.BasicHttpParams;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.junit.Test;
-public class HttpClientDispatchTest {
+public class DefaultDispatchTest {
// Make sure Hadoop cluster topology isn't exposed to client when there is a connectivity issue.
@Test
@@ -75,7 +76,8 @@
EasyMock.replay( outboundRequest, inboundRequest, outboundResponse );
- HttpClientDispatch dispatch = new HttpClientDispatch();
+ DefaultDispatch dispatch = new DefaultDispatch();
+ dispatch.setHttpClient(new DefaultHttpClient());
try {
dispatch.executeRequest( outboundRequest, inboundRequest, outboundResponse );
fail( "Should have thrown IOException" );
@@ -89,13 +91,13 @@
@Test
public void testCallToSecureClusterWithDelegationTpken() throws URISyntaxException, IOException {
System.setProperty(GatewayConfig.HADOOP_KERBEROS_SECURED, "true");
- HttpClientDispatch httpClientDispatch = new HttpClientDispatch();
+ DefaultDispatch defaultDispatch = new DefaultDispatch();
ServletInputStream inputStream = EasyMock.createNiceMock( ServletInputStream.class );
HttpServletRequest inboundRequest = EasyMock.createNiceMock( HttpServletRequest.class );
EasyMock.expect(inboundRequest.getQueryString()).andReturn( "delegation=123").anyTimes();
EasyMock.expect(inboundRequest.getInputStream()).andReturn( inputStream).anyTimes();
EasyMock.replay( inboundRequest );
- HttpEntity httpEntity = httpClientDispatch.createRequestEntity(inboundRequest);
+ HttpEntity httpEntity = defaultDispatch.createRequestEntity(inboundRequest);
System.setProperty(GatewayConfig.HADOOP_KERBEROS_SECURED, "false");
assertFalse("buffering in the presence of delegation token",
(httpEntity instanceof CappedBufferHttpEntity));
@@ -104,14 +106,14 @@
@Test
public void testCallToSecureClusterWithoutDelegationTpken() throws URISyntaxException, IOException {
System.setProperty(GatewayConfig.HADOOP_KERBEROS_SECURED, "true");
- HttpClientDispatch httpClientDispatch = new HttpClientDispatch();
- httpClientDispatch.setReplayBufferSize(10);
+ DefaultDispatch defaultDispatch = new DefaultDispatch();
+ defaultDispatch.setReplayBufferSize(10);
ServletInputStream inputStream = EasyMock.createNiceMock( ServletInputStream.class );
HttpServletRequest inboundRequest = EasyMock.createNiceMock( HttpServletRequest.class );
EasyMock.expect(inboundRequest.getQueryString()).andReturn( "a=123").anyTimes();
EasyMock.expect(inboundRequest.getInputStream()).andReturn( inputStream).anyTimes();
EasyMock.replay( inboundRequest );
- HttpEntity httpEntity = httpClientDispatch.createRequestEntity(inboundRequest);
+ HttpEntity httpEntity = defaultDispatch.createRequestEntity(inboundRequest);
System.setProperty(GatewayConfig.HADOOP_KERBEROS_SECURED, "false");
assertTrue("not buffering in the absence of delegation token",
(httpEntity instanceof CappedBufferHttpEntity));
diff --git a/gateway-spi/src/test/java/org/apache/hadoop/gateway/topology/VersionTest.java b/gateway-spi/src/test/java/org/apache/hadoop/gateway/topology/VersionTest.java
new file mode 100644
index 0000000..a01b804
--- /dev/null
+++ b/gateway-spi/src/test/java/org/apache/hadoop/gateway/topology/VersionTest.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+public class VersionTest {
+
+ @Test
+ public void testDefaultVersion() {
+ Version version = new Version();
+ assertEquals(0, version.getMajor());
+ assertEquals(0, version.getMinor());
+ assertEquals(0, version.getPatch());
+ assertEquals("0.0.0", version.toString());
+ }
+
+ @Test
+ public void testVersion() {
+ Version version = new Version("1.2.3");
+ assertEquals(1, version.getMajor());
+ assertEquals(2, version.getMinor());
+ assertEquals(3, version.getPatch());
+ assertEquals("1.2.3", version.toString());
+ version = new Version(4, 5, 6);
+ assertEquals(4, version.getMajor());
+ assertEquals(5, version.getMinor());
+ assertEquals(6, version.getPatch());
+ assertEquals("4.5.6", version.toString());
+ Version other = new Version("4.5.6");
+ assertTrue(version.equals(other));
+ }
+}
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
index f99c10f..7e830ea 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
@@ -23,7 +23,6 @@
import com.jayway.restassured.specification.ResponseSpecification;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
-
import org.apache.commons.io.filefilter.WildcardFileFilter;
import org.apache.hadoop.test.TestUtils;
import org.apache.hadoop.test.category.FunctionalTests;
@@ -42,6 +41,7 @@
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.AfterClass;
+import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
@@ -64,17 +64,13 @@
import java.util.Map.Entry;
import static com.jayway.restassured.RestAssured.given;
-import static org.hamcrest.CoreMatchers.anyOf;
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.not;
-import static org.hamcrest.CoreMatchers.startsWith;
+import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.text.IsEmptyString.isEmptyString;
import static org.xmlmatchers.XmlMatchers.isEquivalentTo;
import static org.xmlmatchers.transform.XmlConverters.the;
import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
-import static org.hamcrest.text.IsEmptyString.isEmptyString;
@Category( { FunctionalTests.class, MediumTests.class } )
public class GatewayBasicFuncTest {
@@ -133,9 +129,9 @@
Log.setLog( new NoOpLogger() );
GatewayTestConfig config = new GatewayTestConfig();
config.setGatewayPath( "gateway" );
- driver.setResourceBase( GatewayBasicFuncTest.class );
- driver.setupLdap( findFreePort() );
- driver.setupService( "WEBHDFS", "http://" + TEST_HOST + ":50070/webhdfs", "/cluster/webhdfs", USE_MOCK_SERVICES );
+ driver.setResourceBase(GatewayBasicFuncTest.class);
+ driver.setupLdap(findFreePort());
+ driver.setupService("WEBHDFS", "http://" + TEST_HOST + ":50070/webhdfs", "/cluster/webhdfs", USE_MOCK_SERVICES);
driver.setupService( "DATANODE", "http://" + TEST_HOST + ":50075/webhdfs", "/cluster/webhdfs/data", USE_MOCK_SERVICES );
driver.setupService( "WEBHCAT", "http://" + TEST_HOST + ":50111/templeton", "/cluster/templeton", USE_MOCK_SERVICES );
driver.setupService( "OOZIE", "http://" + TEST_HOST + ":11000/oozie", "/cluster/oozie", USE_MOCK_SERVICES );
@@ -144,6 +140,9 @@
driver.setupService( "NAMENODE", "hdfs://" + TEST_HOST + ":8020", null, USE_MOCK_SERVICES );
driver.setupService( "JOBTRACKER", "thrift://" + TEST_HOST + ":8021", null, USE_MOCK_SERVICES );
driver.setupService( "RESOURCEMANAGER", "http://" + TEST_HOST + ":8088/ws", "/cluster/resourcemanager", USE_MOCK_SERVICES );
+ driver.setupService( "FALCON", "http://" + TEST_HOST + ":15000", "/cluster/falcon", USE_MOCK_SERVICES );
+ driver.setupService( "STORM", "http://" + TEST_HOST + ":8477", "/cluster/storm", USE_MOCK_SERVICES );
+ driver.setupService( "STORM-LOGVIEWER", "http://" + TEST_HOST + ":8477", "/cluster/storm", USE_MOCK_SERVICES );
driver.setupGateway( config, "cluster", createTopology(), USE_GATEWAY );
}
@@ -170,18 +169,18 @@
.addTag( "gateway" )
.addTag( "provider" )
.addTag( "role" ).addText( "webappsec" )
- .addTag( "name" ).addText( "WebAppSec" )
- .addTag( "enabled" ).addText( "true" )
+ .addTag("name").addText("WebAppSec")
+ .addTag("enabled").addText("true")
.addTag( "param" )
- .addTag( "name" ).addText( "csrf.enabled" )
- .addTag( "value" ).addText( "true" ).gotoParent().gotoParent()
- .addTag( "provider" )
- .addTag( "role" ).addText( "authentication" )
- .addTag( "name" ).addText( "ShiroProvider" )
- .addTag( "enabled" ).addText( "true" )
+ .addTag("name").addText("csrf.enabled")
+ .addTag("value").addText("true").gotoParent().gotoParent()
+ .addTag("provider")
+ .addTag("role").addText("authentication")
+ .addTag("name").addText("ShiroProvider")
+ .addTag("enabled").addText("true")
.addTag( "param" )
- .addTag( "name" ).addText( "main.ldapRealm" )
- .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
+ .addTag("name").addText("main.ldapRealm")
+ .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
.addTag( "param" )
.addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
.addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
@@ -194,21 +193,21 @@
.addTag( "param" )
.addTag( "name" ).addText( "urls./**" )
.addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
- .addTag( "provider" )
- .addTag( "role" ).addText( "identity-assertion" )
- .addTag( "enabled" ).addText( "true" )
- .addTag( "name" ).addText( "Default" ).gotoParent()
- .addTag( "provider" )
+ .addTag("provider")
+ .addTag("role").addText("identity-assertion")
+ .addTag("enabled").addText("true")
+ .addTag("name").addText("Default").gotoParent()
+ .addTag("provider")
.addTag( "role" ).addText( "authorization" )
.addTag( "enabled" ).addText( "true" )
- .addTag( "name" ).addText( "AclsAuthz" ).gotoParent()
- .addTag( "param" )
- .addTag( "name" ).addText( "webhdfs-acl" )
- .addTag( "value" ).addText( "hdfs;*;*" ).gotoParent()
+ .addTag("name").addText("AclsAuthz").gotoParent()
+ .addTag("param")
+ .addTag("name").addText( "webhdfs-acl" )
+ .addTag("value").addText( "hdfs;*;*" ).gotoParent()
.gotoRoot()
- .addTag( "service" )
- .addTag( "role" ).addText( "WEBHDFS" )
- .addTag( "url" ).addText( driver.getRealUrl( "WEBHDFS" ) ).gotoParent()
+ .addTag("service")
+ .addTag("role").addText("WEBHDFS")
+ .addTag("url").addText(driver.getRealUrl("WEBHDFS")).gotoParent()
.addTag( "service" )
.addTag( "role" ).addText( "NAMENODE" )
.addTag( "url" ).addText( driver.getRealUrl( "NAMENODE" ) ).gotoParent()
@@ -230,9 +229,18 @@
.addTag( "service" )
.addTag( "role" ).addText( "WEBHBASE" )
.addTag( "url" ).addText( driver.getRealUrl( "WEBHBASE" ) ).gotoParent()
- .addTag( "service" )
+ .addTag("service")
.addTag( "role" ).addText( "RESOURCEMANAGER" )
.addTag( "url" ).addText( driver.getRealUrl( "RESOURCEMANAGER" ) ).gotoParent()
+ .addTag( "service" )
+ .addTag( "role" ).addText( "FALCON" )
+ .addTag( "url" ).addText( driver.getRealUrl( "FALCON" ) ).gotoParent()
+ .addTag( "service" )
+ .addTag( "role" ).addText( "STORM" )
+ .addTag( "url" ).addText( driver.getRealUrl( "STORM" ) ).gotoParent()
+ .addTag( "service" )
+ .addTag( "role" ).addText( "STORM-LOGVIEWER" )
+ .addTag( "url" ).addText( driver.getRealUrl( "STORM-LOGVIEWER" ) ).gotoParent()
.gotoRoot();
// System.out.println( "GATEWAY=" + xml.toString() );
return xml;
@@ -289,7 +297,7 @@
String password = "hdfs-password";
InetSocketAddress gatewayAddress = driver.gateway.getAddresses()[0];
String gatewayHostName = gatewayAddress.getHostName();
- String gatewayAddrName = InetAddress.getByName( gatewayHostName ).getHostAddress();
+ String gatewayAddrName = InetAddress.getByName(gatewayHostName).getHostAddress();
driver.getMock( "WEBHDFS" )
.expect()
@@ -300,7 +308,7 @@
.queryParam( "user.name", username )
.respond()
.status( HttpStatus.SC_TEMPORARY_REDIRECT )
- .header( "Location", driver.getRealUrl( "DATANODE" ) + "/v1" + root + "/dir/file?op=CREATE&user.name=hdfs" );
+ .header("Location", driver.getRealUrl("DATANODE") + "/v1" + root + "/dir/file?op=CREATE&user.name=hdfs");
Response response = given()
//.log().all()
.auth().preemptive().basic( username, password )
@@ -319,8 +327,8 @@
startsWith( "http://" + gatewayAddrName + ":" + gatewayAddress.getPort() + "/" ) ) );
MatcherAssert.assertThat( location, containsString( "?_=" ) );
}
- MatcherAssert.assertThat( location, not( containsString( "host=" ) ) );
- MatcherAssert.assertThat( location, not( containsString( "port=" ) ) );
+ MatcherAssert.assertThat(location, not(containsString("host=")));
+ MatcherAssert.assertThat(location, not(containsString("port=")));
}
@Test
@@ -363,7 +371,7 @@
.respond()
.status( HttpStatus.SC_OK )
.content( driver.getResourceBytes( "webhdfs-success.json" ) )
- .contentType( "application/json" );
+ .contentType("application/json");
given()
//.log().all()
.auth().preemptive().basic( username, password )
@@ -869,10 +877,10 @@
// Post the data to HDFS
// Post the script to HDFS
- driver.createFile( user, pass, null, root + "/script.hive", "777", "text/plain", "script.hive", 307, 201, 200 );
+ driver.createFile(user, pass, null, root + "/script.hive", "777", "text/plain", "script.hive", 307, 201, 200);
// Submit the job
- driver.submitHive( user, pass, group, root + "/script.hive", root + "/output", 200 );
+ driver.submitHive(user, pass, group, root + "/script.hive", root + "/output", 200);
// Check job status (if possible)
// Check output (if possible)
@@ -1465,7 +1473,7 @@
.statusCode( HttpStatus.SC_CREATED )
.contentType( ContentType.XML )
.header( "Location", startsWith( driver.getUrl( "WEBHBASE" ) + path ) )
- .when().put( driver.getUrl( "WEBHBASE" ) + path );
+ .when().put(driver.getUrl("WEBHBASE") + path);
driver.assertComplete();
driver.getMock( "WEBHBASE" )
@@ -1473,10 +1481,10 @@
.method( "PUT" )
.pathInfo( path )
.respond()
- .status( HttpStatus.SC_CREATED )
- .content( driver.getResourceBytes( resourceName + ".json" ) )
- .contentType( ContentType.JSON.toString() )
- .header( "Location", driver.getRealUrl( "WEBHBASE" ) + path );
+ .status(HttpStatus.SC_CREATED)
+ .content(driver.getResourceBytes(resourceName + ".json"))
+ .contentType(ContentType.JSON.toString())
+ .header("Location", driver.getRealUrl("WEBHBASE") + path);
given()
.auth().preemptive().basic( username, password )
@@ -1496,16 +1504,16 @@
.status( HttpStatus.SC_CREATED )
.content( driver.getResourceBytes( resourceName + ".protobuf" ) )
.contentType( "application/x-protobuf" )
- .header( "Location", driver.getRealUrl( "WEBHBASE" ) + path );
+ .header("Location", driver.getRealUrl("WEBHBASE") + path);
given()
- .auth().preemptive().basic( username, password )
- .header( "X-XSRF-Header", "jksdhfkhdsf" )
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
.expect()
- .statusCode( HttpStatus.SC_CREATED )
- .contentType( "application/x-protobuf" )
- .header( "Location", startsWith( driver.getUrl( "WEBHBASE" ) + path ) )
- .when().put( driver.getUrl( "WEBHBASE" ) + path );
+ .statusCode(HttpStatus.SC_CREATED)
+ .contentType("application/x-protobuf")
+ .header("Location", startsWith(driver.getUrl("WEBHBASE") + path))
+ .when().put(driver.getUrl("WEBHBASE") + path);
driver.assertComplete();
}
@@ -1521,11 +1529,11 @@
.expect()
.method( "GET" )
.pathInfo( path )
- .header( "Accept", ContentType.XML.toString() )
+ .header("Accept", ContentType.XML.toString())
.respond()
- .status( HttpStatus.SC_OK )
- .content( driver.getResourceBytes( resourceName + ".xml" ) )
- .contentType( ContentType.XML.toString() );
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes(resourceName + ".xml"))
+ .contentType(ContentType.XML.toString());
Response response = given()
.auth().preemptive().basic( username, password )
@@ -1538,19 +1546,19 @@
MatcherAssert
.assertThat(
- the( response.getBody().asString() ),
- isEquivalentTo( the( driver.getResourceString( resourceName + ".xml", UTF8 ) ) ) );
+ the(response.getBody().asString()),
+ isEquivalentTo(the(driver.getResourceString(resourceName + ".xml", UTF8))));
driver.assertComplete();
- driver.getMock( "WEBHBASE" )
+ driver.getMock("WEBHBASE")
.expect()
- .method( "GET" )
- .pathInfo( path )
- .header( "Accept", ContentType.JSON.toString() )
+ .method("GET")
+ .pathInfo(path)
+ .header("Accept", ContentType.JSON.toString())
.respond()
- .status( HttpStatus.SC_OK )
- .content( driver.getResourceBytes( resourceName + ".json" ) )
- .contentType( ContentType.JSON.toString() );
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes(resourceName + ".json"))
+ .contentType(ContentType.JSON.toString());
response = given()
.auth().preemptive().basic( username, password )
@@ -1562,7 +1570,7 @@
.when().get( driver.getUrl( "WEBHBASE" ) + path );
MatcherAssert
- .assertThat( response.getBody().asString(), sameJSONAs( driver.getResourceString( resourceName + ".json", UTF8 ) ) );
+ .assertThat(response.getBody().asString(), sameJSONAs(driver.getResourceString(resourceName + ".json", UTF8)));
driver.assertComplete();
driver.getMock( "WEBHBASE" )
@@ -1573,7 +1581,7 @@
.respond()
.status( HttpStatus.SC_OK )
.content( driver.getResourceBytes( resourceName + ".protobuf" ) )
- .contentType( "application/x-protobuf" );
+ .contentType("application/x-protobuf");
response = given()
.auth().preemptive().basic( username, password )
@@ -1608,7 +1616,7 @@
.content( driver.getResourceBytes( resourceName + ".xml" ) )
.contentType( ContentType.XML.toString() )
.respond()
- .status( HttpStatus.SC_OK );
+ .status(HttpStatus.SC_OK);
given()
.auth().preemptive().basic( username, password )
@@ -1618,7 +1626,7 @@
.contentType( ContentType.XML.toString() )
.expect()
.statusCode( HttpStatus.SC_OK )
- .when().put( driver.getUrl( "WEBHBASE" ) + multipleRowPath );
+ .when().put(driver.getUrl("WEBHBASE") + multipleRowPath);
driver.assertComplete();
driver.getMock( "WEBHBASE" )
@@ -1638,18 +1646,18 @@
.contentType( ContentType.JSON.toString() )
.expect()
.statusCode( HttpStatus.SC_OK )
- .when().put( driver.getUrl( "WEBHBASE" ) + singleRowPath );
+ .when().put(driver.getUrl("WEBHBASE") + singleRowPath);
driver.assertComplete();
- driver.getMock( "WEBHBASE" )
+ driver.getMock("WEBHBASE")
.expect()
- .method( "PUT" )
- .pathInfo( multipleRowPath )
+ .method("PUT")
+ .pathInfo(multipleRowPath)
//.header( "Content-Type", "application/x-protobuf" )
- .contentType( "application/x-protobuf" )
- .content( driver.getResourceBytes( resourceName + ".protobuf" ) )
+ .contentType("application/x-protobuf")
+ .content(driver.getResourceBytes(resourceName + ".protobuf"))
.respond()
- .status( HttpStatus.SC_OK );
+ .status(HttpStatus.SC_OK);
given()
.auth().preemptive().basic( username, password )
@@ -1723,7 +1731,7 @@
.contentType( "application/x-protobuf" )
.expect()
.statusCode( HttpStatus.SC_OK )
- .when().post( driver.getUrl( "WEBHBASE" ) + multipleRowPath );
+ .when().post(driver.getUrl("WEBHBASE") + multipleRowPath);
driver.assertComplete();
}
@@ -1737,52 +1745,52 @@
String familyId = "family";
String columnId = "column";
- driver.getMock( "WEBHBASE" )
+ driver.getMock("WEBHBASE")
.expect()
- .from( "testHBaseDeleteDataFromTable-1" )
- .method( "DELETE" )
- .pathInfo( "/" + tableId + "/" + rowId )
+ .from("testHBaseDeleteDataFromTable-1")
+ .method("DELETE")
+ .pathInfo("/" + tableId + "/" + rowId)
.respond()
- .status( HttpStatus.SC_OK );
+ .status(HttpStatus.SC_OK);
given()
- .auth().preemptive().basic( username, password )
- .header( "X-XSRF-Header", "jksdhfkhdsf" )
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
.expect()
.statusCode( HttpStatus.SC_OK )
- .when().delete( driver.getUrl( "WEBHBASE" ) + "/" + tableId + "/" + rowId );
+ .when().delete(driver.getUrl("WEBHBASE") + "/" + tableId + "/" + rowId);
driver.assertComplete();
driver.getMock( "WEBHBASE" )
.expect()
- .from( "testHBaseDeleteDataFromTable-2" )
- .method( "DELETE" )
- .pathInfo( "/" + tableId + "/" + rowId + "/" + familyId )
+ .from("testHBaseDeleteDataFromTable-2")
+ .method("DELETE")
+ .pathInfo("/" + tableId + "/" + rowId + "/" + familyId)
.respond()
.status( HttpStatus.SC_OK );
given()
- .auth().preemptive().basic( username, password )
- .header( "X-XSRF-Header", "jksdhfkhdsf" )
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
.expect()
.statusCode( HttpStatus.SC_OK )
- .when().delete( driver.getUrl( "WEBHBASE" ) + "/" + tableId + "/" + rowId + "/" + familyId );
+ .when().delete(driver.getUrl("WEBHBASE") + "/" + tableId + "/" + rowId + "/" + familyId);
driver.assertComplete();
- driver.getMock( "WEBHBASE" )
+ driver.getMock("WEBHBASE")
.expect()
- .from( "testHBaseDeleteDataFromTable-3" )
- .method( "DELETE" )
- .pathInfo( "/" + tableId + "/" + rowId + "/" + familyId + ":" + columnId )
+ .from("testHBaseDeleteDataFromTable-3")
+ .method("DELETE")
+ .pathInfo("/" + tableId + "/" + rowId + "/" + familyId + ":" + columnId)
.respond()
- .status( HttpStatus.SC_OK );
+ .status(HttpStatus.SC_OK);
given()
- .auth().preemptive().basic( username, password )
- .header( "X-XSRF-Header", "jksdhfkhdsf" )
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
.expect()
.statusCode( HttpStatus.SC_OK )
- .when().delete( driver.getUrl( "WEBHBASE" ) + "/" + tableId + "/" + rowId + "/" + familyId + ":" + columnId );
+ .when().delete(driver.getUrl("WEBHBASE") + "/" + tableId + "/" + rowId + "/" + familyId + ":" + columnId);
driver.assertComplete();
}
@@ -1799,15 +1807,15 @@
String rowsWithKeyPath = "/table/row";
String rowsWithKeyAndColumnPath = "/table/row/family:col";
- driver.getMock( "WEBHBASE" )
+ driver.getMock("WEBHBASE")
.expect()
- .method( "GET" )
- .pathInfo( allRowsPath )
- .header( "Accept", ContentType.XML.toString() )
+ .method("GET")
+ .pathInfo(allRowsPath)
+ .header("Accept", ContentType.XML.toString())
.respond()
- .status( HttpStatus.SC_OK )
- .content( driver.getResourceBytes( resourceName + ".xml" ) )
- .contentType( ContentType.XML.toString() );
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes(resourceName + ".xml"))
+ .contentType(ContentType.XML.toString());
Response response = given()
.auth().preemptive().basic( username, password )
@@ -1820,8 +1828,8 @@
MatcherAssert
.assertThat(
- the( response.getBody().asString() ),
- isEquivalentTo( the( driver.getResourceString( resourceName + ".xml", UTF8 ) ) ) );
+ the(response.getBody().asString()),
+ isEquivalentTo(the(driver.getResourceString(resourceName + ".xml", UTF8))));
driver.assertComplete();
driver.getMock( "WEBHBASE" )
@@ -1832,7 +1840,7 @@
.respond()
.status( HttpStatus.SC_OK )
.content( driver.getResourceBytes( resourceName + ".xml" ) )
- .contentType( ContentType.XML.toString() );
+ .contentType(ContentType.XML.toString());
response = given()
.auth().preemptive().basic( username, password )
@@ -1845,8 +1853,8 @@
MatcherAssert
.assertThat(
- the( response.getBody().asString() ),
- isEquivalentTo( the( driver.getResourceString( resourceName + ".xml", UTF8 ) ) ) );
+ the(response.getBody().asString()),
+ isEquivalentTo(the(driver.getResourceString(resourceName + ".xml", UTF8))));
driver.assertComplete();
driver.getMock( "WEBHBASE" )
@@ -1907,13 +1915,13 @@
String scannerId = "13705290446328cff5ed";
//Create scanner for table using PUT and POST requests
- driver.getMock( "WEBHBASE" )
+ driver.getMock("WEBHBASE")
.expect()
- .method( "PUT" )
- .pathInfo( scannerPath )
- .header( "Content-Type", ContentType.XML.toString() )
+ .method("PUT")
+ .pathInfo(scannerPath)
+ .header("Content-Type", ContentType.XML.toString())
.respond()
- .status( HttpStatus.SC_CREATED );
+ .status(HttpStatus.SC_CREATED);
given()
.auth().preemptive().basic( username, password )
@@ -1936,7 +1944,7 @@
.respond()
.status( HttpStatus.SC_OK )
.content( driver.getResourceBytes( tableDataResourceName + ".xml" ) )
- .contentType( ContentType.XML.toString() );
+ .contentType(ContentType.XML.toString());
Response response = given()
.auth().preemptive().basic( username, password )
@@ -1949,8 +1957,8 @@
MatcherAssert
.assertThat(
- the( response.getBody().asString() ),
- isEquivalentTo( the( driver.getResourceString( tableDataResourceName + ".xml", UTF8 ) ) ) );
+ the(response.getBody().asString()),
+ isEquivalentTo(the(driver.getResourceString(tableDataResourceName + ".xml", UTF8))));
driver.assertComplete();
//Delete scanner
@@ -1967,7 +1975,7 @@
.header("X-XSRF-Header", "jksdhfkhdsf")
.expect()
.statusCode( HttpStatus.SC_OK )
- .when().delete( driver.getUrl( "WEBHBASE" ) + scannerPath + "/" + scannerId );
+ .when().delete(driver.getUrl("WEBHBASE") + scannerPath + "/" + scannerId);
driver.assertComplete();
}
@@ -1995,14 +2003,14 @@
String username = "hdfs";
String password = "hdfs-password";
- driver.getMock( "WEBHDFS" )
+ driver.getMock("WEBHDFS")
.expect()
- .method( "GET" )
- .pathInfo( "/v1" + root + "/dir" )
- .queryParam( "op", "LISTSTATUS" )
- .queryParam( "user.name", username )
+ .method("GET")
+ .pathInfo("/v1" + root + "/dir")
+ .queryParam("op", "LISTSTATUS")
+ .queryParam("user.name", username)
.respond()
- .status( HttpStatus.SC_OK );
+ .status(HttpStatus.SC_OK);
given()
// .log().all()
.auth().preemptive().basic( username, password )
@@ -2021,25 +2029,25 @@
getYarnRmResource( "/v1/cluster/", ContentType.JSON, "yarn/cluster-info" );
getYarnRmResource( "/v1/cluster/", ContentType.XML, "yarn/cluster-info" );
getYarnRmResource( "/v1/cluster/info/", ContentType.JSON, "yarn/cluster-info" );
- getYarnRmResource( "/v1/cluster/info/", ContentType.XML, "yarn/cluster-info" );
+ getYarnRmResource("/v1/cluster/info/", ContentType.XML, "yarn/cluster-info");
}
@Test
public void testYarnRmGetClusterMetrics() throws Exception {
getYarnRmResource( "/v1/cluster/metrics/", ContentType.JSON, "yarn/cluster-metrics" );
- getYarnRmResource( "/v1/cluster/metrics/", ContentType.XML, "yarn/cluster-metrics" );
+ getYarnRmResource("/v1/cluster/metrics/", ContentType.XML, "yarn/cluster-metrics");
}
@Test
public void testYarnRnGetScheduler() throws Exception {
getYarnRmResource( "/v1/cluster/scheduler/", ContentType.JSON, "yarn/scheduler" );
- getYarnRmResource( "/v1/cluster/scheduler/", ContentType.XML, "yarn/scheduler" );
+ getYarnRmResource("/v1/cluster/scheduler/", ContentType.XML, "yarn/scheduler");
}
@Test
public void getYarnRmAppstatistics() throws Exception {
getYarnRmResource( "/v1/cluster/appstatistics/", ContentType.JSON, "yarn/appstatistics" );
- getYarnRmResource( "/v1/cluster/appstatistics/", ContentType.XML, "yarn/appstatistics" );
+ getYarnRmResource("/v1/cluster/appstatistics/", ContentType.XML, "yarn/appstatistics");
}
@Test
@@ -2105,7 +2113,7 @@
mockRequestMatcher.respond()
.status( HttpStatus.SC_OK )
.content( driver.getResourceBytes( resource ) )
- .contentType( contentType.toString() );
+ .contentType(contentType.toString());
given()
// .log().all()
@@ -2129,7 +2137,7 @@
.content( "apps.app[1].amHostHttpAddress", isEmptyString() )
.content( "apps.app[2].id", is( "application_1399541193872_0009" ) )
.when()
- .get( gatewayPath + gatewayPathQuery );
+ .get(gatewayPath + gatewayPathQuery);
driver.assertComplete();
}
@@ -2342,7 +2350,7 @@
@Test
public void testYarnRmAppattempts() throws Exception {
getYarnRmAppattempts( ContentType.JSON );
- getYarnRmAppattempts( ContentType.XML );
+ getYarnRmAppattempts(ContentType.XML);
}
private void getYarnRmAppattempts( ContentType contentType ) throws Exception {
@@ -2398,7 +2406,7 @@
params.put( "healthy", "true" );
getYarnRmNodes( ContentType.JSON, params );
- getYarnRmNodes( ContentType.XML, params );
+ getYarnRmNodes(ContentType.XML, params);
}
private void getYarnRmNodes( ContentType contentType, Map<String, String> params ) throws Exception {
@@ -2681,7 +2689,7 @@
case XML:
MatcherAssert
.assertThat( the( response.getBody().asString() ),
- isEquivalentTo( the( driver.getResourceString( resource, UTF8 ) ) ) );
+ isEquivalentTo(the(driver.getResourceString(resource, UTF8))) );
break;
default:
break;
@@ -2718,12 +2726,12 @@
Response response = given()
// .log().all()
- .auth().preemptive().basic( username, password )
- .header( "X-XSRF-Header", "jksdhfkhdsf" )
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
.expect()
// .log().all()
- .statusCode( HttpStatus.SC_OK ).contentType( contentType ).when()
- .get( gatewayPath );
+ .statusCode(HttpStatus.SC_OK).contentType(contentType).when()
+ .get(gatewayPath);
assertThat( response.body().asString(), not( containsString( "host.yarn.com" ) ) );
@@ -2743,7 +2751,7 @@
private String findHadoopExamplesJar() throws IOException {
String pattern = "hadoop-examples-*.jar";
File dir = new File( System.getProperty( "user.dir" ), "hadoop-examples/target" );
- File file = findFile( dir, pattern );
+ File file = findFile(dir, pattern);
if( file == null || !file.exists() ) {
file = findFile( new File( System.getProperty( "user.dir" ), "../hadoop-examples/target" ), pattern );
}
@@ -2752,4 +2760,467 @@
}
return file.toURI().toString();
}
+
+ @Test
+ public void testFalconAdmin() throws Exception {
+ String resourceName = "falcon/version";
+ String path = "/api/admin/version";
+ testGetFalconResource(resourceName, path, ContentType.XML);
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/config-runtime";
+ path = "/api/admin/config/runtime";
+ testGetFalconResource(resourceName, path, ContentType.XML);
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/config-deploy";
+ path = "/api/admin/config/deploy";
+ testGetFalconResource(resourceName, path, ContentType.XML);
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/config-startup";
+ path = "/api/admin/config/startup";
+ testGetFalconResource(resourceName, path, ContentType.XML);
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+
+ String username = "hdfs";
+ String password = "hdfs-password";
+ resourceName = "falcon/stack.txt";
+ path = "/api/admin/config/stack";
+ String gatewayPath = driver.getUrl( "FALCON" ) + path;
+
+ driver.getMock("FALCON")
+ .expect()
+ .method("GET")
+ .pathInfo(path)
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes(resourceName))
+ .contentType(ContentType.TEXT.toString());
+
+ Response response = given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .expect()
+ .statusCode(HttpStatus.SC_OK)
+ .when().get( gatewayPath );
+
+ Assert.assertEquals(response.getBody().asString(), driver.getResourceString( resourceName, UTF8 ) );
+ driver.assertComplete();
+ }
+
+ @Test
+ public void testFalconEntities() throws Exception {
+ String resourceName = "falcon/entity-status-process";
+ String path = "/api/entities/status/process/cleanseEmailProcess";
+ testGetFalconResource(resourceName, path, ContentType.XML);
+
+ resourceName = "falcon/oregonCluster";
+ path = "/api/entities/definition/cluster/primaryCluster";
+ testGetFalconResource(resourceName, path, ContentType.XML);
+
+ resourceName = "falcon/entity-list-cluster";
+ path = "/api/entities/list/cluster";
+ testGetFalconResource(resourceName, path, ContentType.XML);
+
+// resourceName = "falcon/entity-summary-feed";
+// path = "/api/entities/summary/feed?cluster=primaryCluster&filterBy=STATUS:RUNNING&fields=status&tags=consumer=consumer@xyz.com&orderBy=name&offset=0&numResults=1&numInstances=2";
+// testGetFalconResource(resourceName, path, ContentType.XML);
+// testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/entity-dependency-process";
+ path = "/api/entities/dependencies/process/cleanseEmailProcess";
+ testGetFalconResource(resourceName, path, ContentType.XML);
+
+ String postResource = "falcon/oregonCluster.xml";
+ String responseResource = "falcon/entity-validate-cluster.xml";
+ path = "/api/entities/validate/cluster";
+ testPostFalconResource(postResource, responseResource, path, ContentType.XML);
+
+ postResource = "falcon/rawEmailFeed.xml";
+ responseResource = "falcon/entity-submit-feed.json";
+ path = "/api/entities/submit/feed";
+ testPostFalconResource(postResource, responseResource, path, ContentType.JSON);
+
+ postResource = "falcon/rawEmailFeed.xml";
+ responseResource = "falcon/entity-update-feed.xml";
+ path = "/api/entities/update/feed";
+ testPostFalconResource(postResource, responseResource, path, ContentType.XML);
+
+ postResource = "falcon/cleanseEmailProcess.xml";
+ responseResource = "falcon/entity-submit-schedule-process.json";
+ path = "/api/entities/submitAndSchedule/process";
+ testPostFalconResource(postResource, responseResource, path, ContentType.JSON);
+
+ postResource = null;
+ responseResource = "falcon/entity-schedule-feed.xml";
+ path = "/api/entities/schedule/feed/rawEmailFeed";
+ testPostFalconResource(postResource, responseResource, path, ContentType.XML);
+
+ responseResource = "falcon/entity-resume-feed.xml";
+ path = "/api/entities/resume/feed/rawEmailFeed";
+ testPostFalconResource(postResource, responseResource, path, ContentType.XML);
+
+ }
+
+ @Test
+ public void testFalconFeedAndProcess() throws Exception {
+ String resourceName = "falcon/instance-running-process";
+ String path = "/api/instance/running/process/cleanseEmailProcess";
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/instance-params-process";
+ path = "/api/instance/params/process/cleanseEmailProcess";
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/instance-status-process";
+ path = "/api/instance/status/process/cleanseEmailProcess";
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+ }
+
+ @Test
+ public void testFalconMetadataLineage() throws Exception {
+ String resourceName = "falcon/metadata-lineage-vertices-all";
+ String path = "/api/metadata/lineage/vertices/all";
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/metadata-lineage-vertices-id";
+ path = "/api/metadata/lineage/vertices/76";
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/metadata-lineage-vertices-direction";
+ path = "/api/metadata/lineage/vertices/76/out";
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/metadata-lineage-edges-all";
+ path = "/api/metadata/lineage/edges/all";
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/metadata-lineage-edges-id";
+ path = "/api/metadata/lineage/edges/Q2v-4-4m";
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ String username = "hdfs";
+ String password = "hdfs-password";
+ resourceName = "falcon/metadata-lineage-vertices-key.json";
+ path = "/api/metadata/lineage/vertices";
+ String gatewayPath = driver.getUrl( "FALCON" ) + path + "?key=name&value=rawEmailIngestProcess";
+
+ driver.getMock("FALCON")
+ .expect()
+ .method("GET")
+ .pathInfo(path)
+ .queryParam("user.name", username)
+ .queryParam("key", "name")
+ .queryParam("value", "rawEmailIngestProcess")
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes(resourceName))
+ .contentType(ContentType.JSON.toString());
+
+ Response response = given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .expect()
+ .statusCode(HttpStatus.SC_OK)
+ .when().get( gatewayPath );
+
+ MatcherAssert.assertThat(response.getBody().asString(),
+ sameJSONAs(driver.getResourceString(resourceName, UTF8)));
+ driver.assertComplete();
+ }
+
+ @Test
+ public void testFalconMetadataDiscovery() throws Exception {
+ String resourceName = "falcon/metadata-disc-process-entity";
+ String path = "/api/metadata/discovery/process_entity/list";
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/metadata-disc-cluster-entity";
+ path = "/api/metadata/discovery/cluster_entity/list";
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+
+ resourceName = "falcon/metadata-disc-cluster-relations";
+ path = "/api/metadata/discovery/cluster_entity/primaryCluster/relations";
+ testGetFalconResource(resourceName, path, ContentType.JSON);
+ }
+
+ private void testGetFalconResource(String resourceName, String path, ContentType contentType) throws IOException {
+ String username = "hdfs";
+ String password = "hdfs-password";
+ String gatewayPath = driver.getUrl( "FALCON" ) + path;
+
+ switch( contentType ) {
+ case JSON:
+ resourceName += ".json";
+ break;
+ case XML:
+ resourceName += ".xml";
+ break;
+ default:
+ break;
+ }
+
+ driver.getMock("FALCON")
+ .expect()
+ .method("GET")
+ .pathInfo(path)
+ .queryParam("user.name", username)
+ .header("Accept", contentType.toString())
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes(resourceName))
+ .contentType(contentType.toString());
+
+ Response response = given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .header("Accept", contentType.toString())
+ .expect()
+// .log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType( contentType )
+ .when().get( gatewayPath );
+
+ switch( contentType ) {
+ case JSON:
+ MatcherAssert.assertThat( response.getBody().asString(),
+ sameJSONAs( driver.getResourceString( resourceName, UTF8 ) ) );
+ break;
+ case XML:
+ MatcherAssert
+ .assertThat( the( response.getBody().asString() ),
+ isEquivalentTo( the( driver.getResourceString( resourceName, UTF8 ) ) ) );
+ break;
+ default:
+ break;
+ }
+ driver.assertComplete();
+ }
+
+ private void testPostFalconResource(String postResource, String responseResource, String path, ContentType contentType) throws IOException {
+ String username = "hdfs";
+ String password = "hdfs-password";
+ String gatewayPath = driver.getUrl( "FALCON" ) + path;
+ Response response;
+
+ if (postResource != null) {
+ driver.getMock("FALCON")
+ .expect()
+ .method("POST")
+ .content(driver.getResourceBytes(postResource))
+ .header("Accept", contentType.toString())
+ .pathInfo(path)
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes(responseResource))
+ .contentType(contentType.toString());
+
+ response = given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .header("Accept", contentType.toString())
+ .content(driver.getResourceBytes(postResource))
+ .expect()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(contentType.toString())
+ .when().post(gatewayPath);
+ } else {
+ driver.getMock("FALCON")
+ .expect()
+ .method("POST")
+ .header("Accept", contentType.toString())
+ .pathInfo(path)
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes(responseResource))
+ .contentType(contentType.toString());
+
+ response = given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .header("Accept", contentType.toString())
+ .expect()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(contentType.toString())
+ .when().post(gatewayPath);
+ }
+
+ switch( contentType ) {
+ case JSON:
+ MatcherAssert.assertThat( response.getBody().asString(),
+ sameJSONAs( driver.getResourceString( responseResource, UTF8 ) ) );
+ break;
+ case XML:
+ MatcherAssert
+ .assertThat( the( response.getBody().asString() ),
+ isEquivalentTo( the( driver.getResourceString( responseResource, UTF8 ) ) ) );
+ break;
+ default:
+ break;
+ }
+ driver.assertComplete();
+ }
+
+ @Test
+ public void testStormUiApi() throws Exception {
+ String resourceName = "storm/cluster-configuration.json";
+ String path = "/api/v1/cluster/configuration";
+ testGetStormResource(resourceName, path);
+
+ resourceName = "storm/cluster-summary.json";
+ path = "/api/v1/cluster/summary";
+ testGetStormResource(resourceName, path);
+
+ resourceName = "storm/supervisor-summary.json";
+ path = "/api/v1/supervisor/summary";
+ testGetStormResource(resourceName, path);
+
+ resourceName = "storm/topology-summary.json";
+ path = "/api/v1/topology/summary";
+ testGetStormResource(resourceName, path);
+
+ String username = "hdfs";
+ String password = "hdfs-password";
+
+ InetSocketAddress gatewayAddress = driver.gateway.getAddresses()[0];
+ String gatewayHostName = gatewayAddress.getHostName();
+ String gatewayAddrName = InetAddress.getByName( gatewayHostName ).getHostAddress();
+
+ resourceName = "storm/topology-id.json";
+ path = "/api/v1/topology/WordCount-1-1424792039";
+ String gatewayPath = driver.getUrl( "STORM" ) + path;
+ driver.getMock("STORM")
+ .expect()
+ .method("GET")
+ .pathInfo(path)
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes(resourceName))
+ .contentType(ContentType.JSON.toString());
+
+ Response response = given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .header("Accept", ContentType.JSON.toString())
+ .expect()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType( ContentType.JSON.toString() )
+ .when().get( gatewayPath );
+
+ String link = response.getBody().jsonPath().getString("spouts[0].errorWorkerLogLink");
+ MatcherAssert.assertThat(link, anyOf(
+ startsWith("http://" + gatewayHostName + ":" + gatewayAddress.getPort() + "/"),
+ startsWith("http://" + gatewayAddrName + ":" + gatewayAddress.getPort() + "/")));
+ MatcherAssert.assertThat( link, containsString("/storm/logviewer") );
+
+ driver.assertComplete();
+
+ resourceName = "storm/topology-component-id.json";
+ path = "/api/v1/topology/WordCount-1-1424792039/component/spout";
+ gatewayPath = driver.getUrl( "STORM" ) + path;
+ driver.getMock("STORM")
+ .expect()
+ .method("GET")
+ .pathInfo(path)
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes(resourceName))
+ .contentType(ContentType.JSON.toString());
+
+ response = given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .header("Accept", ContentType.JSON.toString())
+ .expect()
+// .log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType( ContentType.JSON.toString() )
+ .when().get( gatewayPath );
+
+
+ link = response.getBody().jsonPath().getString("executorStats[0].workerLogLink");
+ MatcherAssert.assertThat(link, anyOf(
+ startsWith("http://" + gatewayHostName + ":" + gatewayAddress.getPort() + "/"),
+ startsWith("http://" + gatewayAddrName + ":" + gatewayAddress.getPort() + "/")));
+ MatcherAssert.assertThat( link, containsString("/storm/logviewer") );
+
+ driver.assertComplete();
+
+ path = "/api/v1/topology/WordCount-1-1424792039/activate";
+ testPostStormResource(path);
+
+ path = "/api/v1/topology/WordCount-1-1424792039/deactivate";
+ testPostStormResource(path);
+
+ path = "/api/v1/topology/WordCount-1-1424792039/rebalance/20";
+ testPostStormResource(path);
+
+ path = "/api/v1/topology/WordCount-1-1424792039/kill/20";
+ testPostStormResource(path);
+
+ }
+
+ private void testGetStormResource(String resourceName, String path) throws IOException {
+ String username = "hdfs";
+ String password = "hdfs-password";
+ String gatewayPath = driver.getUrl( "STORM" ) + path;
+
+ driver.getMock("STORM")
+ .expect()
+ .method("GET")
+ .pathInfo(path)
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_OK)
+ .content(driver.getResourceBytes(resourceName))
+ .contentType(ContentType.JSON.toString());
+
+ Response response = given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .header("Accept", ContentType.JSON.toString())
+ .expect()
+// .log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType( ContentType.JSON.toString() )
+ .when().get( gatewayPath );
+
+ MatcherAssert.assertThat(response.getBody().asString(),
+ sameJSONAs(driver.getResourceString(resourceName, UTF8)));
+ driver.assertComplete();
+ }
+
+ private void testPostStormResource(String path) throws IOException {
+ String username = "hdfs";
+ String password = "hdfs-password";
+ String gatewayPath = driver.getUrl( "STORM" ) + path;
+
+ driver.getMock("STORM")
+ .expect()
+ .method("POST")
+ .pathInfo(path)
+ .queryParam("user.name", username)
+ .respond()
+ .status(HttpStatus.SC_MOVED_TEMPORARILY)
+ .contentType(ContentType.JSON.toString());
+
+ Response response = given()
+ .auth().preemptive().basic(username, password)
+ .header("X-XSRF-Header", "jksdhfkhdsf")
+ .header("X-CSRF-Token", "H/8xIWCYQo4ZDWLvV9k0FAkjD0omWI8beVTp2mEPRxCbJmWBTYhRMhIV9LGIY3E51OAj+s6T7eQChpGJ")
+ .header("Accept", ContentType.JSON.toString())
+ .expect()
+ .statusCode(HttpStatus.SC_MOVED_TEMPORARILY)
+ .contentType( ContentType.JSON.toString() )
+ .when().post( gatewayPath );
+
+ driver.assertComplete();
+ }
}
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
index 6faccac..af707b1 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
@@ -157,6 +157,18 @@
} catch (ServiceLifecycleException e) {
e.printStackTrace(); // I18N not required.
}
+ File stacksDir = new File( config.getGatewayServicesDir() );
+ stacksDir.mkdirs();
+ //TODO: [sumit] This is a hack for now, need to find a better way to locate the source resources for 'stacks' to be tested
+ String pathToStacksSource = "gateway-service-definitions/src/main/resources/services";
+ File stacksSourceDir = new File( targetDir.getParent(), pathToStacksSource);
+ if (!stacksSourceDir.exists()) {
+ stacksSourceDir = new File( targetDir.getParentFile().getParent(), pathToStacksSource);
+ }
+ if (stacksSourceDir.exists()) {
+ FileUtils.copyDirectoryToDirectory(stacksSourceDir, stacksDir);
+ }
+
gateway = GatewayServer.startGateway( config, srvcs );
MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
@@ -170,6 +182,7 @@
FileUtils.deleteQuietly( new File( config.getGatewaySecurityDir() ) );
FileUtils.deleteQuietly( new File( config.getGatewayDeploymentDir() ) );
FileUtils.deleteQuietly( new File( config.getGatewayDataDir() ) );
+ FileUtils.deleteQuietly( new File( config.getGatewayServicesDir() ) );
for( Service service : services.values() ) {
service.server.stop();
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
index 4340500..baf206b 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -234,5 +234,9 @@
// public void setKerberosLoginConfig(String kerberosLoginConfig) {
// this.kerberosLoginConfig = kerberosLoginConfig;
// }
-
+
+ @Override
+ public String getGatewayServicesDir() {
+ return gatewayHomeDir + "/data/services";
+ }
}
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
index fc699b1..99fd5ab 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
@@ -82,6 +82,7 @@
// ((GatewayTestConfig) config).setDeploymentDir( "clusters" );
+ addStacksDir(config, targetDir);
DefaultGatewayServices srvcs = new DefaultGatewayServices();
Map<String,String> options = new HashMap<String,String>();
options.put("persist-master", "false");
@@ -135,6 +136,7 @@
((GatewayTestConfig) config).setGatewayHomeDir( gatewayDir.getAbsolutePath() );
File deployDir = new File( config.getGatewayDeploymentDir() );
deployDir.mkdirs();
+ addStacksDir(config, targetDir);
DefaultGatewayServices srvcs = new DefaultGatewayServices();
Map<String,String> options = new HashMap<String,String>();
@@ -189,6 +191,7 @@
((GatewayTestConfig) config).setGatewayHomeDir( gatewayDir.getAbsolutePath() );
File deployDir = new File( config.getGatewayDeploymentDir() );
deployDir.mkdirs();
+ addStacksDir(config, targetDir);
DefaultGatewayServices srvcs = new DefaultGatewayServices();
Map<String,String> options = new HashMap<String,String>();
@@ -228,8 +231,8 @@
topology.addProvider( authorizer );
WebArchive war = DeploymentFactory.createDeployment( config, topology );
- //File dir = new File( System.getProperty( "user.dir" ) );
- //File file = war.as( ExplodedExporter.class ).exportExploded( dir, "test-cluster.war" );
+// File dir = new File( System.getProperty( "user.dir" ) );
+// File file = war.as( ExplodedExporter.class ).exportExploded( dir, "test-cluster.war" );
Document web = parse( war.get( "WEB-INF/web.xml" ).getAsset().openStream() );
assertThat( web, hasXPath( "/web-app/servlet/servlet-name", equalTo( "test-cluster" ) ) );
@@ -264,8 +267,8 @@
assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/class", equalTo( "org.apache.hadoop.gateway.filter.AclsAuthorizationFilter" ) ) );
assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[7]/role", equalTo( "dispatch" ) ) );
- assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[7]/name", equalTo( "hdfs" ) ) );
- assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[7]/class", equalTo( "org.apache.hadoop.gateway.hdfs.dispatch.HdfsDispatch" ) ) );
+ assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[7]/name", equalTo( "webhdfs" ) ) );
+ assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[7]/class", equalTo( "org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter" ) ) );
assertThat( gateway, hasXPath( "/gateway/resource[2]/pattern", equalTo( "/webhdfs/v1/**?**" ) ) );
//assertThat( gateway, hasXPath( "/gateway/resource[2]/target", equalTo( "http://localhost:50070/webhdfs/v1/{path=**}?{**}" ) ) );
@@ -290,8 +293,8 @@
assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/class", equalTo( "org.apache.hadoop.gateway.filter.AclsAuthorizationFilter" ) ) );
assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[7]/role", equalTo( "dispatch" ) ) );
- assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[7]/name", equalTo( "hdfs" ) ) );
- assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[7]/class", equalTo( "org.apache.hadoop.gateway.hdfs.dispatch.HdfsDispatch" ) ) );
+ assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[7]/name", equalTo( "webhdfs" ) ) );
+ assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[7]/class", equalTo( "org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter" ) ) );
}
@@ -379,6 +382,7 @@
((GatewayTestConfig) config).setGatewayHomeDir(gatewayDir.getAbsolutePath());
File deployDir = new File(config.getGatewayDeploymentDir());
deployDir.mkdirs();
+ addStacksDir(config, targetDir);
DefaultGatewayServices srvcs = new DefaultGatewayServices();
Map<String, String> options = new HashMap<String, String>();
@@ -453,6 +457,7 @@
((GatewayTestConfig) config).setGatewayHomeDir(gatewayDir.getAbsolutePath());
File deployDir = new File(config.getGatewayDeploymentDir());
deployDir.mkdirs();
+ addStacksDir(config, targetDir);
DefaultGatewayServices srvcs = new DefaultGatewayServices();
Map<String, String> options = new HashMap<String, String>();
@@ -499,7 +504,7 @@
WebArchive war = DeploymentFactory.createDeployment( config, topology );
Document doc = parse( war.get( "WEB-INF/gateway.xml" ).getAsset().openStream() );
- //dump( doc );
+// dump( doc );
Node resourceNode, filterNode, paramNode;
String value;
@@ -538,6 +543,25 @@
return builder.parse( source );
}
+ private void addStacksDir(GatewayConfig config, File targetDir) {
+ File stacksDir = new File( config.getGatewayServicesDir() );
+ stacksDir.mkdirs();
+ //TODO: [sumit] This is a hack for now, need to find a better way to locate the source resources for 'stacks' to be tested
+ String pathToStacksSource = "gateway-service-definitions/src/main/resources/services";
+ File stacksSourceDir = new File( targetDir.getParent(), pathToStacksSource);
+ if (!stacksSourceDir.exists()) {
+ stacksSourceDir = new File( targetDir.getParentFile().getParent(), pathToStacksSource);
+ }
+ if (stacksSourceDir.exists()) {
+ try {
+ FileUtils.copyDirectoryToDirectory(stacksSourceDir, stacksDir);
+ } catch ( IOException e) {
+ fail(e.getMessage());
+ }
+ }
+
+ }
+
private void dump( Document document ) throws TransformerException {
Transformer transformer = TransformerFactory.newInstance().newTransformer();
transformer.setOutputProperty( OutputKeys.INDENT, "yes" );
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributorTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributorTest.java
deleted file mode 100644
index 8b3303d..0000000
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/webhcat/WebHCatDeploymentContributorTest.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.webhcat;
-
-import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor;
-import org.junit.Test;
-
-import java.util.Iterator;
-import java.util.ServiceLoader;
-
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.fail;
-
-public class WebHCatDeploymentContributorTest {
-
- @Test
- public void testServiceLoader() throws Exception {
- ServiceLoader loader = ServiceLoader.load( ServiceDeploymentContributor.class );
- Iterator iterator = loader.iterator();
- assertThat( "Service iterator empty.", iterator.hasNext() );
- while( iterator.hasNext() ) {
- Object object = iterator.next();
- if( object instanceof WebHCatDeploymentContributor ) {
- return;
- }
- }
- fail( "Failed to find " + WebHCatDeploymentContributor.class.getName() + " via service loader." );
- }
-
-}
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/cleanseEmailProcess.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/cleanseEmailProcess.xml
new file mode 100644
index 0000000..45ae7e3
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/cleanseEmailProcess.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Hourly process, cleanses raw data
+ -->
+<process name="cleanseEmailProcess" xmlns="uri:falcon:process:0.1">
+
+ <tags>pipeline=churnAnalysisDataPipeline,owner=ETLGroup</tags>
+
+
+ <clusters>
+ <cluster name="primaryCluster">
+ <validity start="2014-02-28T00:00Z" end="2016-03-31T00:00Z"/>
+ </cluster>
+ </clusters>
+
+ <parallel>1</parallel>
+ <order>FIFO</order>
+ <frequency>hours(1)</frequency>
+
+ <inputs>
+ <input name="input" feed="rawEmailFeed" start="now(0,0)" end="now(0,0)" />
+ </inputs>
+
+ <outputs>
+ <output name="output" feed="cleansedEmailFeed" instance="now(0,0)" />
+ </outputs>
+
+ <workflow name="emailCleanseWorkflow" version="5.0"
+ engine="pig" path="/user/ambari-qa/falcon/demo/apps/pig/id.pig" />
+
+ <retry policy="periodic" delay="minutes(15)" attempts="3" />
+ <!-- <late-process policy="exp-backoff" delay="hours(1)">
+ <late-input input="input" workflow-path="/apps/clickstream/late" />
+ </late-process> -->
+
+</process>
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/cleansedEmailFeed.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/cleansedEmailFeed.xml
new file mode 100644
index 0000000..698c30b
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/cleansedEmailFeed.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ A feed representing hourly cleansed customer email data
+ -->
+<feed description="Cleansed customer emails" name="cleansedEmailFeed"
+ xmlns="uri:falcon:feed:0.1">
+
+ <tags>owner=USMarketing,classification=Secure,externalSource=USProdEmailServers,externalTarget=BITools</tags>
+ <groups>churnAnalysisDataPipeline</groups>
+
+ <frequency>hours(1)</frequency>
+
+ <clusters>
+ <cluster name="primaryCluster" type="source">
+ <validity start="2014-02-28T00:00Z" end="2016-03-31T00:00Z"/>
+ <retention limit="days(90)" action="delete"/>
+
+ <locations>
+ <location type="data"
+ path="/user/ambari-qa/falcon/demo/primary/processed/enron/${YEAR}-${MONTH}-${DAY}-${HOUR}"/>
+ </locations>
+ </cluster>
+
+ <cluster name="backupCluster" type="target">
+ <validity start="2014-02-28T00:00Z" end="2016-03-31T00:00Z"/>
+ <retention limit="months(36)" action="delete"/>
+
+ <locations>
+ <location type="data"
+ path="/falcon/demo/bcp/processed/enron/${YEAR}-${MONTH}-${DAY}-${HOUR}"/>
+ </locations>
+ </cluster>
+
+ </clusters>
+
+ <locations>
+ <location type="data"
+ path="/user/ambari-qa/falcon/demo/processed/enron/${YEAR}-${MONTH}-${DAY}-${HOUR}"/>
+ </locations>
+
+ <ACL owner="ambari-qa" group="users" permission="0755"/>
+ <schema location="/none" provider="none"/>
+</feed>
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-build.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-build.json
new file mode 100644
index 0000000..957a53e
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-build.json
@@ -0,0 +1,32 @@
+{
+ "properties": [
+ {
+ "key": "vc.source.url",
+ "value": "scm:git:https://git-wip-us.apache.org/repos/asf/incubator-falcon.git/falcon-webapp"
+ },
+ {
+ "key": "build.epoch",
+ "value": "1416432530059"
+ },
+ {
+ "key": "project.version",
+ "value": "0.6.0.2.2.0.0-2041"
+ },
+ {
+ "key": "build.user",
+ "value": "jenkins"
+ },
+ {
+ "key": "vc.revision",
+ "value": "5fa2772890a189327d5d62a3353290c54613246c"
+ },
+ {
+ "key": "domain",
+ "value": "all"
+ },
+ {
+ "key": "build.version",
+ "value": "0.6.0.2.2.0.0-2041-r5fa2772890a189327d5d62a3353290c54613246c"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-build.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-build.xml
new file mode 100644
index 0000000..8e7b3a3
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-build.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" standalone="yes"?>
+<properties>
+ <properties>
+ <key>vc.source.url</key>
+ <value>scm:git:https://git-wip-us.apache.org/repos/asf/incubator-falcon.git/falcon-webapp</value>
+ </properties>
+ <properties>
+ <key>build.epoch</key>
+ <value>1416432530059</value>
+ </properties>
+ <properties>
+ <key>project.version</key>
+ <value>0.6.0.2.2.0.0-2041</value>
+ </properties>
+ <properties>
+ <key>build.user</key>
+ <value>jenkins</value>
+ </properties>
+ <properties>
+ <key>vc.revision</key>
+ <value>5fa2772890a189327d5d62a3353290c54613246c</value>
+ </properties>
+ <properties>
+ <key>domain</key>
+ <value>all</value>
+ </properties>
+ <properties>
+ <key>build.version</key>
+ <value>0.6.0.2.2.0.0-2041-r5fa2772890a189327d5d62a3353290c54613246c</value>
+ </properties>
+</properties>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-deploy.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-deploy.json
new file mode 100644
index 0000000..8cb1f5d
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-deploy.json
@@ -0,0 +1 @@
+{"properties":[{"key":"deploy.mode","value":"embedded"},{"key":"domain","value":"all"}]}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-deploy.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-deploy.xml
new file mode 100644
index 0000000..ad5b167
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-deploy.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0" standalone="yes"?>
+<properties>
+ <properties>
+ <key>deploy.mode</key>
+ <value>embedded</value>
+ </properties>
+ <properties>
+ <key>domain</key>
+ <value>all</value>
+ </properties>
+</properties>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-runtime.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-runtime.json
new file mode 100644
index 0000000..4bc21da
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-runtime.json
@@ -0,0 +1,24 @@
+{
+ "properties": [
+ {
+ "key": "log.cleanup.frequency.days.retention",
+ "value": "days(7)"
+ },
+ {
+ "key": "log.cleanup.frequency.months.retention",
+ "value": "months(3)"
+ },
+ {
+ "key": "domain",
+ "value": "falcon"
+ },
+ {
+ "key": "log.cleanup.frequency.hours.retention",
+ "value": "minutes(1)"
+ },
+ {
+ "key": "log.cleanup.frequency.minutes.retention",
+ "value": "hours(6)"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-runtime.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-runtime.xml
new file mode 100644
index 0000000..5377277
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-runtime.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" standalone="yes"?>
+<properties>
+ <properties>
+ <key>log.cleanup.frequency.days.retention</key>
+ <value>days(7)</value>
+ </properties>
+ <properties>
+ <key>log.cleanup.frequency.months.retention</key>
+ <value>months(3)</value>
+ </properties>
+ <properties>
+ <key>domain</key>
+ <value>falcon</value>
+ </properties>
+ <properties>
+ <key>log.cleanup.frequency.hours.retention</key>
+ <value>minutes(1)</value>
+ </properties>
+ <properties>
+ <key>log.cleanup.frequency.minutes.retention</key>
+ <value>hours(6)</value>
+ </properties>
+</properties>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-startup.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-startup.json
new file mode 100644
index 0000000..096dbf0
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-startup.json
@@ -0,0 +1,188 @@
+{
+ "properties": [
+ {
+ "key": "ProcessInstanceManager.impl",
+ "value": "org.apache.falcon.resource.InstanceManager"
+ },
+ {
+ "key": "oozie.process.workflow.builder",
+ "value": "org.apache.falcon.workflow.OozieProcessWorkflowBuilder"
+ },
+ {
+ "key": "falcon.http.authentication.kerberos.name.rules",
+ "value": "DEFAULT"
+ },
+ {
+ "key": "catalog.service.impl",
+ "value": "org.apache.falcon.catalog.HiveCatalogService"
+ },
+ {
+ "key": "falcon.graph.storage.directory",
+ "value": "/hadoop/falcon/data/lineage/graphdb"
+ },
+ {
+ "key": "ConfigSyncService.impl",
+ "value": "org.apache.falcon.resource.ConfigSyncService"
+ },
+ {
+ "key": "journal.impl",
+ "value": "org.apache.falcon.transaction.SharedFileSystemJournal"
+ },
+ {
+ "key": "max.retry.failure.count",
+ "value": "1"
+ },
+ {
+ "key": "dfs.namenode.kerberos.principal",
+ "value": "nn/_HOST@EXAMPLE.COM"
+ },
+ {
+ "key": "application.services",
+ "value": "org.apache.falcon.security.AuthenticationInitializationService,org.apache.falcon.workflow.WorkflowJobEndNotificationService, org.apache.falcon.service.ProcessSubscriberService,org.apache.falcon.entity.store.ConfigurationStore,org.apache.falcon.rerun.service.RetryService,org.apache.falcon.rerun.service.LateRunService,org.apache.falcon.service.LogCleanupService,org.apache.falcon.metadata.MetadataMappingService"
+ },
+ {
+ "key": "broker.ttlInMins",
+ "value": "4320"
+ },
+ {
+ "key": "falcon.http.authentication.token.validity",
+ "value": "36000"
+ },
+ {
+ "key": "broker.impl.class",
+ "value": "org.apache.activemq.ActiveMQConnectionFactory"
+ },
+ {
+ "key": "domain",
+ "value": "falcon"
+ },
+ {
+ "key": "shared.libs",
+ "value": "activemq-core,ant,geronimo-j2ee-management,hadoop-distcp,jms,json-simple,oozie-client,spring-jms"
+ },
+ {
+ "key": "system.lib.location",
+ "value": "/usr/hdp/current/falcon-server/server/webapp/falcon/WEB-INF/lib"
+ },
+ {
+ "key": "falcon.service.authentication.kerberos.principal",
+ "value": "falcon/_HOST@EXAMPLE.COM"
+ },
+ {
+ "key": "config.store.uri",
+ "value": "file:///hadoop/falcon/store"
+ },
+ {
+ "key": "falcon.authentication.type",
+ "value": "simple"
+ },
+ {
+ "key": "falcon.security.authorization.admin.groups",
+ "value": "falcon"
+ },
+ {
+ "key": "falcon.graph.blueprints.graph",
+ "value": "com.thinkaurelius.titan.core.TitanFactory"
+ },
+ {
+ "key": "falcon.http.authentication.simple.anonymous.allowed",
+ "value": "true"
+ },
+ {
+ "key": "falcon.http.authentication.blacklisted.users",
+ "value": ""
+ },
+ {
+ "key": "falcon.security.authorization.provider",
+ "value": "org.apache.falcon.security.DefaultAuthorizationProvider"
+ },
+ {
+ "key": "falcon.http.authentication.signature.secret",
+ "value": "falcon"
+ },
+ {
+ "key": "configstore.listeners",
+ "value": "org.apache.falcon.entity.v0.EntityGraph,org.apache.falcon.entity.ColoClusterRelation,org.apache.falcon.group.FeedGroupMap,org.apache.falcon.service.SharedLibraryHostingService"
+ },
+ {
+ "key": "falcon.http.authentication.kerberos.keytab",
+ "value": "/etc/security/keytabs/spnego.service.keytab"
+ },
+ {
+ "key": "falcon.http.authentication.type",
+ "value": "simple"
+ },
+ {
+ "key": "oozie.feed.workflow.builder",
+ "value": "org.apache.falcon.workflow.OozieFeedWorkflowBuilder"
+ },
+ {
+ "key": "falcon.security.authorization.superusergroup",
+ "value": "falcon"
+ },
+ {
+ "key": "internal.queue.size",
+ "value": "1000"
+ },
+ {
+ "key": "SchedulableEntityManager.impl",
+ "value": "org.apache.falcon.resource.SchedulableEntityManager"
+ },
+ {
+ "key": "falcon.security.authorization.enabled",
+ "value": "false"
+ },
+ {
+ "key": "falcon.graph.preserve.history",
+ "value": "false"
+ },
+ {
+ "key": "falcon.service.authentication.kerberos.keytab",
+ "value": "/etc/security/keytabs/falcon.service.keytab"
+ },
+ {
+ "key": "broker.url",
+ "value": "tcp://sandbox.hortonworks.com:61616"
+ },
+ {
+ "key": "falcon.graph.storage.backend",
+ "value": "berkeleyje"
+ },
+ {
+ "key": "falcon.graph.serialize.path",
+ "value": "/hadoop/falcon/data/lineage"
+ },
+ {
+ "key": "falcon.cleanup.service.frequency",
+ "value": "days(1)"
+ },
+ {
+ "key": "retry.recorder.path",
+ "value": "/var/log/falcon/retry"
+ },
+ {
+ "key": "falcon.http.authentication.cookie.domain",
+ "value": "EXAMPLE.COM"
+ },
+ {
+ "key": "falcon.enableTLS",
+ "value": "false"
+ },
+ {
+ "key": "entity.topic",
+ "value": "FALCON.ENTITY.TOPIC"
+ },
+ {
+ "key": "falcon.security.authorization.admin.users",
+ "value": "falcon,ambari-qa"
+ },
+ {
+ "key": "workflow.engine.impl",
+ "value": "org.apache.falcon.workflow.engine.OozieWorkflowEngine"
+ },
+ {
+ "key": "falcon.http.authentication.kerberos.principal",
+ "value": "HTTP/_HOST@EXAMPLE.COM"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-startup.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-startup.xml
new file mode 100644
index 0000000..05315fb
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/config-startup.xml
@@ -0,0 +1,192 @@
+<?xml version="1.0" standalone="yes"?>
+<properties>
+ <properties>
+ <key>ProcessInstanceManager.impl</key>
+ <value>org.apache.falcon.resource.InstanceManager</value>
+ </properties>
+ <properties>
+ <key>oozie.process.workflow.builder</key>
+ <value>org.apache.falcon.workflow.OozieProcessWorkflowBuilder</value>
+ </properties>
+ <properties>
+ <key>falcon.http.authentication.kerberos.name.rules</key>
+ <value>DEFAULT</value>
+ </properties>
+ <properties>
+ <key>catalog.service.impl</key>
+ <value>org.apache.falcon.catalog.HiveCatalogService</value>
+ </properties>
+ <properties>
+ <key>falcon.graph.storage.directory</key>
+ <value>/hadoop/falcon/data/lineage/graphdb</value>
+ </properties>
+ <properties>
+ <key>ConfigSyncService.impl</key>
+ <value>org.apache.falcon.resource.ConfigSyncService</value>
+ </properties>
+ <properties>
+ <key>journal.impl</key>
+ <value>org.apache.falcon.transaction.SharedFileSystemJournal</value>
+ </properties>
+ <properties>
+ <key>max.retry.failure.count</key>
+ <value>1</value>
+ </properties>
+ <properties>
+ <key>dfs.namenode.kerberos.principal</key>
+ <value>nn/_HOST@EXAMPLE.COM</value>
+ </properties>
+ <properties>
+ <key>application.services</key>
+ <value>
+ org.apache.falcon.security.AuthenticationInitializationService,org.apache.falcon.workflow.WorkflowJobEndNotificationService,
+ org.apache.falcon.service.ProcessSubscriberService,org.apache.falcon.entity.store.ConfigurationStore,org.apache.falcon.rerun.service.RetryService,org.apache.falcon.rerun.service.LateRunService,org.apache.falcon.service.LogCleanupService,org.apache.falcon.metadata.MetadataMappingService
+ </value>
+ </properties>
+ <properties>
+ <key>broker.ttlInMins</key>
+ <value>4320</value>
+ </properties>
+ <properties>
+ <key>falcon.http.authentication.token.validity</key>
+ <value>36000</value>
+ </properties>
+ <properties>
+ <key>broker.impl.class</key>
+ <value>org.apache.activemq.ActiveMQConnectionFactory</value>
+ </properties>
+ <properties>
+ <key>domain</key>
+ <value>falcon</value>
+ </properties>
+ <properties>
+ <key>shared.libs</key>
+ <value>activemq-core,ant,geronimo-j2ee-management,hadoop-distcp,jms,json-simple,oozie-client,spring-jms</value>
+ </properties>
+ <properties>
+ <key>system.lib.location</key>
+ <value>/usr/hdp/current/falcon-server/server/webapp/falcon/WEB-INF/lib</value>
+ </properties>
+ <properties>
+ <key>falcon.service.authentication.kerberos.principal</key>
+ <value>falcon/_HOST@EXAMPLE.COM</value>
+ </properties>
+ <properties>
+ <key>config.store.uri</key>
+ <value>file:///hadoop/falcon/store</value>
+ </properties>
+ <properties>
+ <key>falcon.authentication.type</key>
+ <value>simple</value>
+ </properties>
+ <properties>
+ <key>falcon.security.authorization.admin.groups</key>
+ <value>falcon</value>
+ </properties>
+ <properties>
+ <key>falcon.graph.blueprints.graph</key>
+ <value>com.thinkaurelius.titan.core.TitanFactory</value>
+ </properties>
+ <properties>
+ <key>falcon.http.authentication.simple.anonymous.allowed</key>
+ <value>true</value>
+ </properties>
+ <properties>
+ <key>falcon.http.authentication.blacklisted.users</key>
+ <value></value>
+ </properties>
+ <properties>
+ <key>falcon.security.authorization.provider</key>
+ <value>org.apache.falcon.security.DefaultAuthorizationProvider</value>
+ </properties>
+ <properties>
+ <key>falcon.http.authentication.signature.secret</key>
+ <value>falcon</value>
+ </properties>
+ <properties>
+ <key>configstore.listeners</key>
+ <value>
+ org.apache.falcon.entity.v0.EntityGraph,org.apache.falcon.entity.ColoClusterRelation,org.apache.falcon.group.FeedGroupMap,org.apache.falcon.service.SharedLibraryHostingService
+ </value>
+ </properties>
+ <properties>
+ <key>falcon.http.authentication.kerberos.keytab</key>
+ <value>/etc/security/keytabs/spnego.service.keytab</value>
+ </properties>
+ <properties>
+ <key>falcon.http.authentication.type</key>
+ <value>simple</value>
+ </properties>
+ <properties>
+ <key>oozie.feed.workflow.builder</key>
+ <value>org.apache.falcon.workflow.OozieFeedWorkflowBuilder</value>
+ </properties>
+ <properties>
+ <key>falcon.security.authorization.superusergroup</key>
+ <value>falcon</value>
+ </properties>
+ <properties>
+ <key>internal.queue.size</key>
+ <value>1000</value>
+ </properties>
+ <properties>
+ <key>SchedulableEntityManager.impl</key>
+ <value>org.apache.falcon.resource.SchedulableEntityManager</value>
+ </properties>
+ <properties>
+ <key>falcon.security.authorization.enabled</key>
+ <value>false</value>
+ </properties>
+ <properties>
+ <key>falcon.graph.preserve.history</key>
+ <value>false</value>
+ </properties>
+ <properties>
+ <key>falcon.service.authentication.kerberos.keytab</key>
+ <value>/etc/security/keytabs/falcon.service.keytab</value>
+ </properties>
+ <properties>
+ <key>broker.url</key>
+ <value>tcp://sandbox.hortonworks.com:61616</value>
+ </properties>
+ <properties>
+ <key>falcon.graph.storage.backend</key>
+ <value>berkeleyje</value>
+ </properties>
+ <properties>
+ <key>falcon.graph.serialize.path</key>
+ <value>/hadoop/falcon/data/lineage</value>
+ </properties>
+ <properties>
+ <key>falcon.cleanup.service.frequency</key>
+ <value>days(1)</value>
+ </properties>
+ <properties>
+ <key>retry.recorder.path</key>
+ <value>/var/log/falcon/retry</value>
+ </properties>
+ <properties>
+ <key>falcon.http.authentication.cookie.domain</key>
+ <value>EXAMPLE.COM</value>
+ </properties>
+ <properties>
+ <key>falcon.enableTLS</key>
+ <value>false</value>
+ </properties>
+ <properties>
+ <key>entity.topic</key>
+ <value>FALCON.ENTITY.TOPIC</value>
+ </properties>
+ <properties>
+ <key>falcon.security.authorization.admin.users</key>
+ <value>falcon,ambari-qa</value>
+ </properties>
+ <properties>
+ <key>workflow.engine.impl</key>
+ <value>org.apache.falcon.workflow.engine.OozieWorkflowEngine</value>
+ </properties>
+ <properties>
+ <key>falcon.http.authentication.kerberos.principal</key>
+ <value>HTTP/_HOST@EXAMPLE.COM</value>
+ </properties>
+</properties>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/emailIngestProcess.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/emailIngestProcess.xml
new file mode 100644
index 0000000..6131a99
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/emailIngestProcess.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Hourly process, ingests raw emails from all US west email servers
+ -->
+<process name="rawEmailIngestProcess" xmlns="uri:falcon:process:0.1">
+
+ <tags>pipeline=churnAnalysisDataPipeline,owner=ETLGroup,externalSystem=USWestEmailServers</tags>
+
+
+ <clusters>
+ <cluster name="primaryCluster">
+ <validity start="2014-02-28T00:00Z" end="2016-03-31T00:00Z"/>
+ </cluster>
+ </clusters>
+
+ <parallel>1</parallel>
+ <order>FIFO</order>
+ <frequency>hours(1)</frequency>
+
+ <outputs>
+ <output name="output" feed="rawEmailFeed" instance="now(0,0)" />
+ </outputs>
+
+ <workflow name="emailIngestWorkflow" version="2.0.0"
+ engine="oozie" path="/user/ambari-qa/falcon/demo/apps/ingest/fs" />
+
+ <retry policy="periodic" delay="minutes(15)" attempts="3" />
+
+</process>
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-delete-process.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-delete-process.json
new file mode 100644
index 0000000..9337345
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-delete-process.json
@@ -0,0 +1,5 @@
+{
+ "requestId": "falcon\/17ff6ca6-1c8a-459f-9ba8-8fec480e384a\n",
+ "message": "falcon\/SampleProcess(cluster) removed successfully\n",
+ "status": "SUCCEEDED"
+}
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-dependency-process.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-dependency-process.xml
new file mode 100644
index 0000000..770645b
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-dependency-process.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" standalone="yes"?>
+<entities>
+ <entity>
+ <type>feed</type>
+ <name>cleansedEmailFeed</name>
+ <list>
+ <tag>Output</tag>
+ </list>
+ </entity>
+ <entity>
+ <type>cluster</type>
+ <name>primaryCluster</name>
+ <list></list>
+ </entity>
+ <entity>
+ <type>feed</type>
+ <name>rawEmailFeed</name>
+ <list>
+ <tag>Input</tag>
+ </list>
+ </entity>
+</entities>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-list-cluster.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-list-cluster.xml
new file mode 100644
index 0000000..74ee87b
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-list-cluster.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0" standalone="yes"?>
+<entities>
+ <entity>
+ <type>CLUSTER</type>
+ <name>backupCluster</name>
+ </entity>
+ <entity>
+ <type>CLUSTER</type>
+ <name>primaryCluster</name>
+ </entity>
+</entities>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-resume-feed.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-resume-feed.xml
new file mode 100644
index 0000000..782f952
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-resume-feed.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" standalone="yes"?>
+<result>
+ <status>SUCCEEDED</status>
+ <message>default/rawEmailFeed(feed) resumed successfully
+ </message>
+ <requestId>default/null
+ </requestId>
+</result>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-schedule-feed.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-schedule-feed.xml
new file mode 100644
index 0000000..d9ae798
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-schedule-feed.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" standalone="yes"?>
+<result>
+ <status>SUCCEEDED</status>
+ <message>default/rawEmailFeed(feed) scheduled successfully
+ </message>
+ <requestId>default/null
+ </requestId>
+</result>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-status-process.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-status-process.xml
new file mode 100644
index 0000000..3bf418c
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-status-process.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" standalone="yes"?>
+<result>
+ <status>SUCCEEDED</status>
+ <message>default/RUNNING
+ </message>
+ <requestId>default/null
+ </requestId>
+</result>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-submit-feed.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-submit-feed.json
new file mode 100644
index 0000000..65289e6
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-submit-feed.json
@@ -0,0 +1,5 @@
+{
+ "requestId": "default\/d72a41f7-6420-487b-8199-62d66e492e35\n",
+ "message": "default\/Submit successful (feed) SampleInput\n",
+ "status": "SUCCEEDED"
+}
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-submit-schedule-process.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-submit-schedule-process.json
new file mode 100644
index 0000000..3562c06
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-submit-schedule-process.json
@@ -0,0 +1,5 @@
+{
+ "requestId": "schedule\/default\/b5b40931-175b-4b15-8f2b-02ef2e66f06b\n\nsubmit\/default\/b5b40931-175b-4b15-8f2b-02ef2e66f06b\n\n",
+ "message": "schedule\/default\/SampleProcess(process) scheduled successfully\n\nsubmit\/default\/Submit successful (process) SampleProcess\n\n",
+ "status": "SUCCEEDED"
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-summary-feed.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-summary-feed.json
new file mode 100644
index 0000000..451d8ca
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-summary-feed.json
@@ -0,0 +1 @@
+{"status":"SUCCEEDED","message":"Entity Summary Result"}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-summary-feed.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-summary-feed.xml
new file mode 100644
index 0000000..717607a
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-summary-feed.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" standalone="yes"?>
+<entitySummaryResult>
+ <status>SUCCEEDED</status>
+ <message>Entity Summary Result</message>
+</entitySummaryResult>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-update-feed.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-update-feed.xml
new file mode 100644
index 0000000..f4b611b
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-update-feed.xml
@@ -0,0 +1,10 @@
+<?xml version="1.0" standalone="yes"?>
+<result>
+ <status>SUCCEEDED</status>
+ <message>falcon/update/default/Updated successfully
+
+ </message>
+ <requestId>falcon/update/default/null
+
+ </requestId>
+</result>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-validate-cluster.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-validate-cluster.xml
new file mode 100644
index 0000000..673e984
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/entity-validate-cluster.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" standalone="yes"?>
+<result>
+ <status>SUCCEEDED</status>
+ <message>Validated successfully (CLUSTER) primaryCluster</message>
+</result>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-params-process.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-params-process.json
new file mode 100644
index 0000000..ee71e3c
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-params-process.json
@@ -0,0 +1 @@
+{"status":"SUCCEEDED","message":"default/PARAMS\n","requestId":"default/null\n"}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-process-logs.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-process-logs.json
new file mode 100644
index 0000000..f2073c7
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-process-logs.json
@@ -0,0 +1,77 @@
+{
+ "status": "SUCCEEDED",
+ "message": "default/STATUS\n",
+ "requestId": "default/null\n",
+ "instances": [
+ {
+ "instance": "2014-03-07T11:00Z",
+ "status": "WAITING",
+ "logFile": "-",
+ "cluster": "primaryCluster",
+ "details": ""
+ },
+ {
+ "instance": "2014-03-07T10:00Z",
+ "status": "WAITING",
+ "logFile": "-",
+ "cluster": "primaryCluster",
+ "details": ""
+ },
+ {
+ "instance": "2014-03-07T09:00Z",
+ "status": "WAITING",
+ "logFile": "-",
+ "cluster": "primaryCluster",
+ "details": ""
+ },
+ {
+ "instance": "2014-03-07T08:00Z",
+ "status": "WAITING",
+ "logFile": "-",
+ "cluster": "primaryCluster",
+ "details": ""
+ },
+ {
+ "instance": "2014-03-07T07:00Z",
+ "status": "WAITING",
+ "logFile": "-",
+ "cluster": "primaryCluster",
+ "details": ""
+ },
+ {
+ "instance": "2014-03-07T06:00Z",
+ "status": "WAITING",
+ "logFile": "-",
+ "cluster": "primaryCluster",
+ "details": ""
+ },
+ {
+ "instance": "2014-03-07T05:00Z",
+ "status": "WAITING",
+ "logFile": "-",
+ "cluster": "primaryCluster",
+ "details": ""
+ },
+ {
+ "instance": "2014-03-07T04:00Z",
+ "status": "WAITING",
+ "logFile": "-",
+ "cluster": "primaryCluster",
+ "details": ""
+ },
+ {
+ "instance": "2014-03-07T03:00Z",
+ "status": "WAITING",
+ "logFile": "-",
+ "cluster": "primaryCluster",
+ "details": ""
+ },
+ {
+ "instance": "2014-03-07T02:00Z",
+ "status": "WAITING",
+ "logFile": "-",
+ "cluster": "primaryCluster",
+ "details": ""
+ }
+ ]
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-running-process.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-running-process.json
new file mode 100644
index 0000000..1b49692
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-running-process.json
@@ -0,0 +1,13 @@
+{
+ "status": "SUCCEEDED",
+ "message": "default/Running Instances\n",
+ "requestId": "default/null\n",
+ "instances": [
+ {
+ "instance": "2014-02-28T03:00Z",
+ "status": "RUNNING",
+ "cluster": "primaryCluster",
+ "startTime": "2015-02-20T03:09:07Z"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-status-process.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-status-process.json
new file mode 100644
index 0000000..ea21a22
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/instance-status-process.json
@@ -0,0 +1 @@
+{"status":"SUCCEEDED","message":"default/STATUS\n","requestId":"default/null\n"}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-disc-cluster-entity.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-disc-cluster-entity.json
new file mode 100644
index 0000000..e5366ec
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-disc-cluster-entity.json
@@ -0,0 +1 @@
+{"results":["primaryCluster","backupCluster"],"totalSize":2}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-disc-cluster-relations.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-disc-cluster-relations.json
new file mode 100644
index 0000000..8e3c776
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-disc-cluster-relations.json
@@ -0,0 +1,106 @@
+{
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "primaryCluster",
+ "type": "CLUSTER_ENTITY",
+ "_id": 4,
+ "_type": "vertex",
+ "inVertices": [
+ {
+ "name": "rawEmailFeed/2014-02-28T00:00Z",
+ "type": "FEED_INSTANCE",
+ "label": "stored-in"
+ },
+ {
+ "name": "rawEmailFeed",
+ "type": "FEED_ENTITY",
+ "label": "stored-in"
+ },
+ {
+ "name": "rawEmailFeed/2014-02-28T01:00Z",
+ "type": "FEED_INSTANCE",
+ "label": "stored-in"
+ },
+ {
+ "name": "cleansedEmailFeed/2014-02-28T00:00Z",
+ "type": "FEED_INSTANCE",
+ "label": "stored-in"
+ },
+ {
+ "name": "rawEmailFeed/2014-02-28T02:00Z",
+ "type": "FEED_INSTANCE",
+ "label": "stored-in"
+ },
+ {
+ "name": "cleansedEmailFeed",
+ "type": "FEED_ENTITY",
+ "label": "stored-in"
+ },
+ {
+ "name": "cleansedEmailFeed/2014-02-28T02:00Z",
+ "type": "FEED_INSTANCE",
+ "label": "stored-in"
+ },
+ {
+ "name": "cleansedEmailFeed/2014-02-28T01:00Z",
+ "type": "FEED_INSTANCE",
+ "label": "stored-in"
+ },
+ {
+ "name": "rawEmailFeed/2014-02-28T03:00Z",
+ "type": "FEED_INSTANCE",
+ "label": "stored-in"
+ },
+ {
+ "name": "cleanseEmailProcess",
+ "type": "PROCESS_ENTITY",
+ "label": "runs-on"
+ },
+ {
+ "name": "rawEmailIngestProcess/2014-02-28T00:00Z",
+ "type": "PROCESS_INSTANCE",
+ "label": "runs-on"
+ },
+ {
+ "name": "rawEmailIngestProcess/2014-02-28T01:00Z",
+ "type": "PROCESS_INSTANCE",
+ "label": "runs-on"
+ },
+ {
+ "name": "cleanseEmailProcess/2014-02-28T00:00Z",
+ "type": "PROCESS_INSTANCE",
+ "label": "runs-on"
+ },
+ {
+ "name": "rawEmailIngestProcess/2014-02-28T02:00Z",
+ "type": "PROCESS_INSTANCE",
+ "label": "runs-on"
+ },
+ {
+ "name": "cleanseEmailProcess/2014-02-28T02:00Z",
+ "type": "PROCESS_INSTANCE",
+ "label": "runs-on"
+ },
+ {
+ "name": "cleanseEmailProcess/2014-02-28T01:00Z",
+ "type": "PROCESS_INSTANCE",
+ "label": "runs-on"
+ },
+ {
+ "name": "rawEmailIngestProcess/2014-02-28T03:00Z",
+ "type": "PROCESS_INSTANCE",
+ "label": "runs-on"
+ },
+ {
+ "name": "rawEmailIngestProcess",
+ "type": "PROCESS_ENTITY",
+ "label": "runs-on"
+ }
+ ],
+ "outVertices": [
+ {
+ "name": "USWestOregon",
+ "type": "COLO",
+ "label": "collocated"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-disc-process-entity.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-disc-process-entity.json
new file mode 100644
index 0000000..e6055bb
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-disc-process-entity.json
@@ -0,0 +1 @@
+{"results":["rawEmailIngestProcess","cleanseEmailProcess"],"totalSize":2}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-edges-all.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-edges-all.json
new file mode 100644
index 0000000..b13d0d8
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-edges-all.json
@@ -0,0 +1,559 @@
+{
+ "results": [
+ {
+ "_id": "Q2v-4-4m",
+ "_type": "edge",
+ "_outV": 4,
+ "_inV": 8,
+ "_label": "collocated"
+ },
+ {
+ "_id": "Q2N-c-4m",
+ "_type": "edge",
+ "_outV": 12,
+ "_inV": 16,
+ "_label": "collocated"
+ },
+ {
+ "_id": "Q3t-k-4u",
+ "_type": "edge",
+ "_outV": 20,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "Q41-k-4C",
+ "_type": "edge",
+ "_outV": 20,
+ "_inV": 28,
+ "_label": "externalSystem"
+ },
+ {
+ "_id": "Q4z-k-4K",
+ "_type": "edge",
+ "_outV": 20,
+ "_inV": 32,
+ "_label": "classification"
+ },
+ {
+ "_id": "Q57-k-4S",
+ "_type": "edge",
+ "_outV": 20,
+ "_inV": 36,
+ "_label": "grouped-as"
+ },
+ {
+ "_id": "Q5x-k-50",
+ "_type": "edge",
+ "_outV": 20,
+ "_inV": 4,
+ "_label": "stored-in"
+ },
+ {
+ "_id": "Qb5-k-6a",
+ "_type": "edge",
+ "_outV": 20,
+ "_inV": 72,
+ "_label": "input"
+ },
+ {
+ "_id": "Q5H-E-4u",
+ "_type": "edge",
+ "_outV": 40,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "Q6p-E-4K",
+ "_type": "edge",
+ "_outV": 40,
+ "_inV": 48,
+ "_label": "classification"
+ },
+ {
+ "_id": "Q7x-E-4S",
+ "_type": "edge",
+ "_outV": 40,
+ "_inV": 36,
+ "_label": "grouped-as"
+ },
+ {
+ "_id": "Q7z-E-50",
+ "_type": "edge",
+ "_outV": 40,
+ "_inV": 4,
+ "_label": "stored-in"
+ },
+ {
+ "_id": "Q6f-E-58",
+ "_type": "edge",
+ "_outV": 40,
+ "_inV": 44,
+ "_label": "owner"
+ },
+ {
+ "_id": "Q6X-E-5g",
+ "_type": "edge",
+ "_outV": 40,
+ "_inV": 52,
+ "_label": "externalSource"
+ },
+ {
+ "_id": "Q7v-E-5o",
+ "_type": "edge",
+ "_outV": 40,
+ "_inV": 56,
+ "_label": "externalTarget"
+ },
+ {
+ "_id": "Q8J-Y-4u",
+ "_type": "edge",
+ "_outV": 60,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "Q9t-Y-4C",
+ "_type": "edge",
+ "_outV": 60,
+ "_inV": 28,
+ "_label": "externalSystem"
+ },
+ {
+ "_id": "Q9r-Y-58",
+ "_type": "edge",
+ "_outV": 60,
+ "_inV": 68,
+ "_label": "owner"
+ },
+ {
+ "_id": "Q9h-Y-5M",
+ "_type": "edge",
+ "_outV": 60,
+ "_inV": 64,
+ "_label": "pipeline"
+ },
+ {
+ "_id": "Q9T-Y-5U",
+ "_type": "edge",
+ "_outV": 60,
+ "_inV": 4,
+ "_label": "runs-on"
+ },
+ {
+ "_id": "Qaj-Y-62",
+ "_type": "edge",
+ "_outV": 60,
+ "_inV": 20,
+ "_label": "output"
+ },
+ {
+ "_id": "Qaz-1a-4u",
+ "_type": "edge",
+ "_outV": 72,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "QaD-1a-58",
+ "_type": "edge",
+ "_outV": 72,
+ "_inV": 68,
+ "_label": "owner"
+ },
+ {
+ "_id": "QaB-1a-5M",
+ "_type": "edge",
+ "_outV": 72,
+ "_inV": 64,
+ "_label": "pipeline"
+ },
+ {
+ "_id": "QaF-1a-5U",
+ "_type": "edge",
+ "_outV": 72,
+ "_inV": 4,
+ "_label": "runs-on"
+ },
+ {
+ "_id": "Qb7-1a-62",
+ "_type": "edge",
+ "_outV": 72,
+ "_inV": 40,
+ "_label": "output"
+ },
+ {
+ "_id": "Qef-1e-4u",
+ "_type": "edge",
+ "_outV": 76,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "Qed-1e-5U",
+ "_type": "edge",
+ "_outV": 76,
+ "_inV": 4,
+ "_label": "runs-on"
+ },
+ {
+ "_id": "Qev-1e-62",
+ "_type": "edge",
+ "_outV": 76,
+ "_inV": 80,
+ "_label": "output"
+ },
+ {
+ "_id": "Qeb-1e-6W",
+ "_type": "edge",
+ "_outV": 76,
+ "_inV": 60,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "Qet-1i-4u",
+ "_type": "edge",
+ "_outV": 80,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "Qer-1i-50",
+ "_type": "edge",
+ "_outV": 80,
+ "_inV": 4,
+ "_label": "stored-in"
+ },
+ {
+ "_id": "Qg1-1i-6a",
+ "_type": "edge",
+ "_outV": 80,
+ "_inV": 92,
+ "_label": "input"
+ },
+ {
+ "_id": "Qep-1i-6W",
+ "_type": "edge",
+ "_outV": 80,
+ "_inV": 20,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "QeZ-1m-4u",
+ "_type": "edge",
+ "_outV": 84,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "QeX-1m-5U",
+ "_type": "edge",
+ "_outV": 84,
+ "_inV": 4,
+ "_label": "runs-on"
+ },
+ {
+ "_id": "Qff-1m-62",
+ "_type": "edge",
+ "_outV": 84,
+ "_inV": 88,
+ "_label": "output"
+ },
+ {
+ "_id": "QeV-1m-6W",
+ "_type": "edge",
+ "_outV": 84,
+ "_inV": 60,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "Qfd-1q-4u",
+ "_type": "edge",
+ "_outV": 88,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "Qfb-1q-50",
+ "_type": "edge",
+ "_outV": 88,
+ "_inV": 4,
+ "_label": "stored-in"
+ },
+ {
+ "_id": "QiJ-1q-6a",
+ "_type": "edge",
+ "_outV": 88,
+ "_inV": 116,
+ "_label": "input"
+ },
+ {
+ "_id": "Qf9-1q-6W",
+ "_type": "edge",
+ "_outV": 88,
+ "_inV": 20,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "QfJ-1u-4u",
+ "_type": "edge",
+ "_outV": 92,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "QfH-1u-5U",
+ "_type": "edge",
+ "_outV": 92,
+ "_inV": 4,
+ "_label": "runs-on"
+ },
+ {
+ "_id": "QfZ-1u-62",
+ "_type": "edge",
+ "_outV": 92,
+ "_inV": 96,
+ "_label": "output"
+ },
+ {
+ "_id": "QfF-1u-6W",
+ "_type": "edge",
+ "_outV": 92,
+ "_inV": 72,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "QfX-1y-4u",
+ "_type": "edge",
+ "_outV": 96,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "QfV-1y-50",
+ "_type": "edge",
+ "_outV": 96,
+ "_inV": 4,
+ "_label": "stored-in"
+ },
+ {
+ "_id": "QfT-1y-6W",
+ "_type": "edge",
+ "_outV": 96,
+ "_inV": 40,
+ "_label": "instance-of"
+ },
+ {
+ "timestamp": "2015-02-19T20:55Z",
+ "_id": "Qhb-1y-74",
+ "_type": "edge",
+ "_outV": 96,
+ "_inV": 12,
+ "_label": "replicated-to"
+ },
+ {
+ "_id": "Qgv-1C-4u",
+ "_type": "edge",
+ "_outV": 100,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "Qgt-1C-5U",
+ "_type": "edge",
+ "_outV": 100,
+ "_inV": 4,
+ "_label": "runs-on"
+ },
+ {
+ "_id": "QgL-1C-62",
+ "_type": "edge",
+ "_outV": 100,
+ "_inV": 104,
+ "_label": "output"
+ },
+ {
+ "_id": "Qgr-1C-6W",
+ "_type": "edge",
+ "_outV": 100,
+ "_inV": 60,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "QgJ-1G-4u",
+ "_type": "edge",
+ "_outV": 104,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "QgH-1G-50",
+ "_type": "edge",
+ "_outV": 104,
+ "_inV": 4,
+ "_label": "stored-in"
+ },
+ {
+ "_id": "QhX-1G-6a",
+ "_type": "edge",
+ "_outV": 104,
+ "_inV": 108,
+ "_label": "input"
+ },
+ {
+ "_id": "QgF-1G-6W",
+ "_type": "edge",
+ "_outV": 104,
+ "_inV": 20,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "QhF-1K-4u",
+ "_type": "edge",
+ "_outV": 108,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "QhD-1K-5U",
+ "_type": "edge",
+ "_outV": 108,
+ "_inV": 4,
+ "_label": "runs-on"
+ },
+ {
+ "_id": "QhV-1K-62",
+ "_type": "edge",
+ "_outV": 108,
+ "_inV": 112,
+ "_label": "output"
+ },
+ {
+ "_id": "QhB-1K-6W",
+ "_type": "edge",
+ "_outV": 108,
+ "_inV": 72,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "QhT-1O-4u",
+ "_type": "edge",
+ "_outV": 112,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "QhR-1O-50",
+ "_type": "edge",
+ "_outV": 112,
+ "_inV": 4,
+ "_label": "stored-in"
+ },
+ {
+ "_id": "QhP-1O-6W",
+ "_type": "edge",
+ "_outV": 112,
+ "_inV": 40,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "Qir-1S-4u",
+ "_type": "edge",
+ "_outV": 116,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "Qip-1S-5U",
+ "_type": "edge",
+ "_outV": 116,
+ "_inV": 4,
+ "_label": "runs-on"
+ },
+ {
+ "_id": "QiH-1S-62",
+ "_type": "edge",
+ "_outV": 116,
+ "_inV": 120,
+ "_label": "output"
+ },
+ {
+ "_id": "Qin-1S-6W",
+ "_type": "edge",
+ "_outV": 116,
+ "_inV": 72,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "QiF-1W-4u",
+ "_type": "edge",
+ "_outV": 120,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "QiD-1W-50",
+ "_type": "edge",
+ "_outV": 120,
+ "_inV": 4,
+ "_label": "stored-in"
+ },
+ {
+ "_id": "QiB-1W-6W",
+ "_type": "edge",
+ "_outV": 120,
+ "_inV": 40,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "Qjd-20-4u",
+ "_type": "edge",
+ "_outV": 124,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "Qjb-20-5U",
+ "_type": "edge",
+ "_outV": 124,
+ "_inV": 4,
+ "_label": "runs-on"
+ },
+ {
+ "_id": "Qjt-20-62",
+ "_type": "edge",
+ "_outV": 124,
+ "_inV": 128,
+ "_label": "output"
+ },
+ {
+ "_id": "Qj9-20-6W",
+ "_type": "edge",
+ "_outV": 124,
+ "_inV": 60,
+ "_label": "instance-of"
+ },
+ {
+ "_id": "Qjr-24-4u",
+ "_type": "edge",
+ "_outV": 128,
+ "_inV": 24,
+ "_label": "owned-by"
+ },
+ {
+ "_id": "Qjp-24-50",
+ "_type": "edge",
+ "_outV": 128,
+ "_inV": 4,
+ "_label": "stored-in"
+ },
+ {
+ "_id": "Qjn-24-6W",
+ "_type": "edge",
+ "_outV": 128,
+ "_inV": 20,
+ "_label": "instance-of"
+ }
+ ],
+ "totalSize": 79
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-edges-id.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-edges-id.json
new file mode 100644
index 0000000..0501d75
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-edges-id.json
@@ -0,0 +1 @@
+{"results":{"_id":"Q2v-4-4m","_type":"edge","_outV":4,"_inV":8,"_label":"collocated"}}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-all.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-all.json
new file mode 100644
index 0000000..e61083e
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-all.json
@@ -0,0 +1,238 @@
+{
+ "results": [
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "primaryCluster",
+ "type": "cluster-entity",
+ "_id": 4,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "USWestOregon",
+ "type": "data-center",
+ "_id": 8,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "backupCluster",
+ "type": "cluster-entity",
+ "_id": 12,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "USEastVirginia",
+ "type": "data-center",
+ "_id": 16,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "rawEmailFeed",
+ "type": "feed-entity",
+ "_id": 20,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "ambari-qa",
+ "type": "user",
+ "_id": 24,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "USWestEmailServers",
+ "type": "classification",
+ "_id": 28,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "secure",
+ "type": "classification",
+ "_id": 32,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "churnAnalysisDataPipeline",
+ "type": "group",
+ "_id": 36,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "cleansedEmailFeed",
+ "type": "feed-entity",
+ "_id": 40,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "USMarketing",
+ "type": "classification",
+ "_id": 44,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "Secure",
+ "type": "classification",
+ "_id": 48,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "USProdEmailServers",
+ "type": "classification",
+ "_id": 52,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "BITools",
+ "type": "classification",
+ "_id": 56,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "rawEmailIngestProcess",
+ "type": "process-entity",
+ "version": "2.0.0",
+ "_id": 60,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "churnAnalysisDataPipeline",
+ "type": "classification",
+ "_id": 64,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "ETLGroup",
+ "type": "classification",
+ "_id": 68,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "cleanseEmailProcess",
+ "type": "process-entity",
+ "version": "5.0",
+ "_id": 72,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:55Z",
+ "name": "rawEmailIngestProcess/2014-02-28T00:00Z",
+ "type": "process-instance",
+ "version": "2.0.0",
+ "_id": 76,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:55Z",
+ "name": "rawEmailFeed/2014-02-28T00:00Z",
+ "type": "feed-instance",
+ "_id": 80,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:55Z",
+ "name": "rawEmailIngestProcess/2014-02-28T01:00Z",
+ "type": "process-instance",
+ "version": "2.0.0",
+ "_id": 84,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:55Z",
+ "name": "rawEmailFeed/2014-02-28T01:00Z",
+ "type": "feed-instance",
+ "_id": 88,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:56Z",
+ "name": "cleanseEmailProcess/2014-02-28T00:00Z",
+ "type": "process-instance",
+ "version": "5.0",
+ "_id": 92,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:56Z",
+ "name": "cleansedEmailFeed/2014-02-28T00:00Z",
+ "type": "feed-instance",
+ "_id": 96,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:55Z",
+ "name": "rawEmailIngestProcess/2014-02-28T02:00Z",
+ "type": "process-instance",
+ "version": "2.0.0",
+ "_id": 100,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:55Z",
+ "name": "rawEmailFeed/2014-02-28T02:00Z",
+ "type": "feed-instance",
+ "_id": 104,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:56Z",
+ "name": "cleanseEmailProcess/2014-02-28T02:00Z",
+ "type": "process-instance",
+ "version": "5.0",
+ "_id": 108,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:56Z",
+ "name": "cleansedEmailFeed/2014-02-28T02:00Z",
+ "type": "feed-instance",
+ "_id": 112,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:56Z",
+ "name": "cleanseEmailProcess/2014-02-28T01:00Z",
+ "type": "process-instance",
+ "version": "5.0",
+ "_id": 116,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:56Z",
+ "name": "cleansedEmailFeed/2014-02-28T01:00Z",
+ "type": "feed-instance",
+ "_id": 120,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:55Z",
+ "name": "rawEmailIngestProcess/2014-02-28T03:00Z",
+ "type": "process-instance",
+ "version": "2.0.0",
+ "_id": 124,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:55Z",
+ "name": "rawEmailFeed/2014-02-28T03:00Z",
+ "type": "feed-instance",
+ "_id": 128,
+ "_type": "vertex"
+ }
+ ],
+ "totalSize": 32
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-direction.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-direction.json
new file mode 100644
index 0000000..624c15b
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-direction.json
@@ -0,0 +1,34 @@
+{
+ "results": [
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "ambari-qa",
+ "type": "user",
+ "_id": 24,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "primaryCluster",
+ "type": "cluster-entity",
+ "_id": 4,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:55Z",
+ "name": "rawEmailFeed/2014-02-28T00:00Z",
+ "type": "feed-instance",
+ "_id": 80,
+ "_type": "vertex"
+ },
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "rawEmailIngestProcess",
+ "type": "process-entity",
+ "version": "2.0.0",
+ "_id": 60,
+ "_type": "vertex"
+ }
+ ],
+ "totalSize": 4
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-id.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-id.json
new file mode 100644
index 0000000..777b5f9
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-id.json
@@ -0,0 +1,10 @@
+{
+ "results": {
+ "timestamp": "2015-02-19T20:55Z",
+ "name": "rawEmailIngestProcess/2014-02-28T00:00Z",
+ "type": "process-instance",
+ "version": "2.0.0",
+ "_id": 76,
+ "_type": "vertex"
+ }
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-key.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-key.json
new file mode 100644
index 0000000..dba930f
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/metadata-lineage-vertices-key.json
@@ -0,0 +1,13 @@
+{
+ "results": [
+ {
+ "timestamp": "2015-02-19T20:54Z",
+ "name": "rawEmailIngestProcess",
+ "type": "process-entity",
+ "version": "2.0.0",
+ "_id": 60,
+ "_type": "vertex"
+ }
+ ],
+ "totalSize": 1
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/oregonCluster.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/oregonCluster.xml
new file mode 100644
index 0000000..3d0c7a6
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/oregonCluster.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<cluster colo="USWestOregon" description="oregonHadoopCluster" name="primaryCluster" xmlns="uri:falcon:cluster:0.1">
+ <interfaces>
+ <interface type="readonly" endpoint="hftp://sandbox.hortonworks.com:50070" version="2.2.0" />
+ <interface type="write" endpoint="hdfs://sandbox.hortonworks.com:8020" version="2.2.0" />
+ <interface type="execute" endpoint="sandbox.hortonworks.com:8050" version="2.2.0" />
+ <interface type="workflow" endpoint="http://sandbox.hortonworks.com:11000/oozie/" version="4.0.0" />
+ <interface type="messaging" endpoint="tcp://sandbox.hortonworks.com:61616?daemon=true" version="5.1.6" />
+ </interfaces>
+ <locations>
+ <location name="staging" path="/apps/falcon/primaryCluster/staging" />
+ <location name="temp" path="/tmp" />
+ <location name="working" path="/apps/falcon/primaryCluster/working" />
+ </locations>
+</cluster>
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/rawEmailFeed.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/rawEmailFeed.xml
new file mode 100644
index 0000000..eb4aa15
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/rawEmailFeed.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ A feed representing Hourly customer email data retained for 90 days
+ -->
+<feed description="Raw customer email feed" name="rawEmailFeed"
+ xmlns="uri:falcon:feed:0.1">
+
+ <tags>externalSystem=USWestEmailServers,classification=secure</tags>
+ <groups>churnAnalysisDataPipeline</groups>
+ <frequency>hours(1)</frequency>
+ <late-arrival cut-off="hours(4)"/>
+ <clusters>
+ <cluster name="primaryCluster" type="source">
+ <validity start="2014-02-28T00:00Z" end="2016-03-31T00:00Z"/>
+ <retention limit="days(90)" action="delete"/>
+ </cluster>
+ </clusters>
+
+ <locations>
+ <location type="data"
+ path="/user/ambari-qa/falcon/demo/primary/input/enron/${YEAR}-${MONTH}-${DAY}-${HOUR}"/>
+ <location type="stats" path="/none"/>
+ <location type="meta" path="/none"/>
+ </locations>
+
+ <ACL owner="ambari-qa" group="users" permission="0755"/>
+ <schema location="/none" provider="none"/>
+</feed>
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/stack.txt b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/stack.txt
new file mode 100644
index 0000000..f1af193
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/stack.txt
@@ -0,0 +1,40 @@
+Reference Handler
+State: WAITING
+java.lang.Object.wait(Native Method)
+java.lang.Object.wait(Object.java:503)
+java.lang.ref.Reference$ReferenceHandler.run(Reference.java:133)Finalizer
+State: WAITING
+java.lang.Object.wait(Native Method)
+java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:135)
+java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:151)
+java.lang.ref.Finalizer$FinalizerThread.run(Finalizer.java:209)Signal Dispatcher
+State: RUNNABLE
+main
+State: WAITING
+java.lang.Object.wait(Native Method)
+java.lang.Object.wait(Object.java:503)
+org.mortbay.thread.QueuedThreadPool.join(QueuedThreadPool.java:298)
+org.mortbay.jetty.Server.join(Server.java:332)
+org.apache.falcon.util.EmbeddedServer.start(EmbeddedServer.java:58)
+org.apache.falcon.Main.main(Main.java:83)KahaDB Scheduler
+State: TIMED_WAITING
+java.lang.Object.wait(Native Method)
+java.util.TimerThread.mainLoop(Timer.java:552)
+java.util.TimerThread.run(Timer.java:505)ActiveMQ Journal Checkpoint Worker
+State: TIMED_WAITING
+java.lang.Thread.sleep(Native Method)
+org.apache.activemq.store.kahadb.MessageDatabase$3.run(MessageDatabase.java:286)ActiveMQ Data File Writer
+State: WAITING
+java.lang.Object.wait(Native Method)
+java.lang.Object.wait(Object.java:503)
+org.apache.kahadb.journal.DataFileAppender.processQueue(DataFileAppender.java:325)
+org.apache.kahadb.journal.DataFileAppender$2.run(DataFileAppender.java:216)ActiveMQ Broker[localhost] Scheduler
+State: TIMED_WAITING
+java.lang.Object.wait(Native Method)
+java.util.TimerThread.mainLoop(Timer.java:552)
+java.util.TimerThread.run(Timer.java:505)KahaDB Scheduler
+State: TIMED_WAITING
+java.lang.Object.wait(Native Method)
+java.util.TimerThread.mainLoop(Timer.java:552)
+java.util.TimerThread.run(Timer.java:505)JobScheduler:JMS
+State: TIMED_WAITING
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/version.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/version.json
new file mode 100644
index 0000000..e6e3e63
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/version.json
@@ -0,0 +1,16 @@
+{
+ "properties": [
+ {
+ "key": "Version",
+ "value": "0.6.0.2.2.0.0-2041-r5fa2772890a189327d5d62a3353290c54613246c"
+ },
+ {
+ "key": "Mode",
+ "value": "embedded"
+ },
+ {
+ "key": "Hadoop",
+ "value": "2.6.0.2.2.0.0-2041-r7d56f02902b436d46efba030651a2fbe7c1cf1e9"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/version.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/version.xml
new file mode 100644
index 0000000..dae6ecc
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/version.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" standalone="yes"?>
+<properties>
+ <properties>
+ <key>Version</key>
+ <value>0.6.0.2.2.0.0-2041-r5fa2772890a189327d5d62a3353290c54613246c</value>
+ </properties>
+ <properties>
+ <key>Mode</key>
+ <value>embedded</value>
+ </properties>
+ <properties>
+ <key>Hadoop</key>
+ <value>2.6.0.2.2.0.0-2041-r7d56f02902b436d46efba030651a2fbe7c1cf1e9</value>
+ </properties>
+</properties>
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/virginiaCluster.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/virginiaCluster.xml
new file mode 100644
index 0000000..c3e90d4
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/falcon/virginiaCluster.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<cluster colo="USEastVirginia" description="virginiaHadoopCluster" name="backupCluster" xmlns="uri:falcon:cluster:0.1">
+ <interfaces>
+ <interface type="readonly" endpoint="hftp://sandbox.hortonworks.com:50070" version="2.2.0" />
+ <interface type="write" endpoint="hdfs://sandbox.hortonworks.com:8020" version="2.2.0" />
+ <interface type="execute" endpoint="sandbox.hortonworks.com:8050" version="2.2.0" />
+ <interface type="workflow" endpoint="http://sandbox.hortonworks.com:11000/oozie/" version="4.0.0" />
+ <interface type="messaging" endpoint="tcp://sandbox.hortonworks.com:61616?daemon=true" version="5.1.6" />
+ </interfaces>
+ <locations>
+ <location name="staging" path="/apps/falcon/backupCluster/staging" />
+ <location name="temp" path="/tmp" />
+ <location name="working" path="/apps/falcon/backupCluster/working" />
+ </locations>
+</cluster>
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/cluster-configuration.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/cluster-configuration.json
new file mode 100644
index 0000000..c8261eb
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/cluster-configuration.json
@@ -0,0 +1,141 @@
+{
+ "dev.zookeeper.path": "/tmp/dev-storm-zookeeper",
+ "topology.tick.tuple.freq.secs": null,
+ "topology.builtin.metrics.bucket.size.secs": 60,
+ "topology.fall.back.on.java.serialization": true,
+ "supervisor.run.worker.as.user": false,
+ "topology.max.error.report.per.interval": 5,
+ "storm.group.mapping.service": "backtype.storm.security.auth.ShellBasedGroupsMapping",
+ "zmq.linger.millis": 5000,
+ "topology.skip.missing.kryo.registrations": false,
+ "storm.messaging.netty.client_worker_threads": 1,
+ "ui.childopts": "-Xmx220m",
+ "storm.zookeeper.session.timeout": 20000,
+ "ui.filter.params": null,
+ "nimbus.reassign": true,
+ "storm.auth.simple-acl.admins": [],
+ "storm.group.mapping.service.cache.duration.secs": 120,
+ "topology.trident.batch.emit.interval.millis": 500,
+ "drpc.authorizer.acl.filename": "drpc-auth-acl.yaml",
+ "storm.messaging.netty.flush.check.interval.ms": 10,
+ "ui.header.buffer.bytes": 4096,
+ "nimbus.monitor.freq.secs": 10,
+ "logviewer.childopts": "-Xmx128m ",
+ "java.library.path": "/usr/local/lib:/opt/local/lib:/usr/lib:/usr/hdp/current/storm-client/lib",
+ "supervisor.supervisors": [],
+ "topology.executor.send.buffer.size": 1024,
+ "storm.local.dir": "/hadoop/storm",
+ "storm.messaging.netty.buffer_size": 5242880,
+ "supervisor.worker.start.timeout.secs": 120,
+ "drpc.authorizer.acl.strict": false,
+ "storm.nimbus.retry.times": 5,
+ "topology.enable.message.timeouts": true,
+ "nimbus.cleanup.inbox.freq.secs": 600,
+ "nimbus.inbox.jar.expiration.secs": 3600,
+ "drpc.worker.threads": 64,
+ "storm.meta.serialization.delegate": "backtype.storm.serialization.DefaultSerializationDelegate",
+ "topology.worker.shared.thread.pool.size": 4,
+ "nimbus.host": "sandbox.hortonworks.com",
+ "storm.messaging.netty.min_wait_ms": 100,
+ "storm.zookeeper.port": 2181,
+ "transactional.zookeeper.port": null,
+ "ui.http.creds.plugin": "backtype.storm.security.auth.DefaultHttpCredentialsPlugin",
+ "topology.executor.receive.buffer.size": 1024,
+ "logs.users": null,
+ "transactional.zookeeper.servers": null,
+ "storm.zookeeper.root": "/storm",
+ "storm.zookeeper.retry.intervalceiling.millis": 30000,
+ "supervisor.enable": true,
+ "storm.messaging.netty.server_worker_threads": 1,
+ "storm.zookeeper.servers": ["sandbox.hortonworks.com"],
+ "transactional.zookeeper.root": "/transactional",
+ "topology.acker.executors": null,
+ "storm.auth.simple-acl.users": [],
+ "storm.zookeeper.auth.user": null,
+ "topology.testing.always.try.serialize": false,
+ "topology.transfer.buffer.size": 1024,
+ "storm.principal.tolocal": "backtype.storm.security.auth.DefaultPrincipalToLocal",
+ "topology.worker.childopts": null,
+ "drpc.queue.size": 128,
+ "worker.childopts": "-Xmx768m -javaagent:/usr/hdp/current/storm-client/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=sandbox.hortonworks.com,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-client/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM",
+ "storm.auth.simple-acl.users.commands": [],
+ "supervisor.heartbeat.frequency.secs": 5,
+ "topology.error.throttle.interval.secs": 10,
+ "storm.nimbus.retry.interval.millis": 2000,
+ "ui.users": null,
+ "zmq.hwm": 0,
+ "drpc.port": 3772,
+ "supervisor.monitor.frequency.secs": 3,
+ "drpc.childopts": "-Xmx220m",
+ "topology.receiver.buffer.size": 8,
+ "task.heartbeat.frequency.secs": 3,
+ "topology.tasks": null,
+ "storm.messaging.netty.max_retries": 30,
+ "topology.spout.wait.strategy": "backtype.storm.spout.SleepSpoutWaitStrategy",
+ "nimbus.thrift.max_buffer_size": 1048576,
+ "drpc.invocations.threads": 64,
+ "drpc.https.port": -1,
+ "supervisor.supervisors.commands": [],
+ "topology.max.spout.pending": null,
+ "ui.filter": null,
+ "logviewer.cleanup.age.mins": 10080,
+ "storm.zookeeper.retry.interval": 1000,
+ "topology.sleep.spout.wait.strategy.time.ms": 1,
+ "nimbus.topology.validator": "backtype.storm.nimbus.DefaultTopologyValidator",
+ "supervisor.slots.ports": [
+ 6700,
+ 6701
+ ],
+ "storm.messaging.netty.authentication": false,
+ "topology.environment": null,
+ "topology.debug": false,
+ "nimbus.thrift.threads": 64,
+ "nimbus.task.launch.secs": 120,
+ "nimbus.supervisor.timeout.secs": 60,
+ "drpc.http.creds.plugin": "backtype.storm.security.auth.DefaultHttpCredentialsPlugin",
+ "topology.message.timeout.secs": 30,
+ "task.refresh.poll.secs": 10,
+ "topology.workers": 1,
+ "supervisor.childopts": "-Xmx256m -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=56431 -javaagent:/usr/hdp/current/storm-supervisor/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=sandbox.hortonworks.com,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-supervisor/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM",
+ "storm.auth.simple-white-list.users": [],
+ "nimbus.thrift.port": 6627,
+ "drpc.https.keystore.type": "JKS",
+ "topology.stats.sample.rate": 0.05,
+ "task.credentials.poll.secs": 30,
+ "worker.heartbeat.frequency.secs": 1,
+ "ui.actions.enabled": true,
+ "topology.tuple.serializer": "backtype.storm.serialization.types.ListDelegateSerializer",
+ "drpc.https.keystore.password": "",
+ "topology.disruptor.wait.strategy": "com.lmax.disruptor.BlockingWaitStrategy",
+ "topology.multilang.serializer": "backtype.storm.multilang.JsonSerializer",
+ "drpc.max_buffer_size": 1048576,
+ "nimbus.task.timeout.secs": 30,
+ "storm.zookeeper.connection.timeout": 15000,
+ "topology.kryo.factory": "backtype.storm.serialization.DefaultKryoFactory",
+ "drpc.invocations.port": 3773,
+ "logviewer.port": 8005,
+ "zmq.threads": 1,
+ "storm.zookeeper.retry.times": 5,
+ "topology.worker.receiver.thread.count": 1,
+ "storm.thrift.transport": "backtype.storm.security.auth.SimpleTransportPlugin",
+ "topology.state.synchronization.timeout.secs": 60,
+ "supervisor.worker.timeout.secs": 30,
+ "nimbus.file.copy.expiration.secs": 600,
+ "nimbus.credential.renewers.freq.secs": 600,
+ "storm.messaging.transport": "backtype.storm.messaging.netty.Context",
+ "worker.gc.childopts": "",
+ "logviewer.appender.name": "A1",
+ "storm.messaging.netty.max_wait_ms": 1000,
+ "storm.zookeeper.auth.password": null,
+ "drpc.http.port": 3774,
+ "drpc.request.timeout.secs": 600,
+ "storm.local.mode.zmq": false,
+ "ui.port": 8744,
+ "nimbus.childopts": "-Xmx220m -javaagent:/usr/hdp/current/storm-client/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=sandbox.hortonworks.com,port=8649,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-client/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM",
+ "storm.cluster.mode": "distributed",
+ "topology.optimize": true,
+ "topology.max.task.parallelism": null,
+ "storm.messaging.netty.transfer.batch.size": 262144,
+ "storm.nimbus.retry.intervalceiling.millis": 60000,
+ "topology.classpath": null
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/cluster-summary.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/cluster-summary.json
new file mode 100644
index 0000000..857cae0
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/cluster-summary.json
@@ -0,0 +1,12 @@
+{
+ "slotsFree": 0,
+ "executorsTotal": 28,
+ "supervisors": 1,
+ "user": null,
+ "topologies": 1,
+ "slotsTotal": 2,
+ "stormVersion": "0.9.3.2.2.0.0-2041",
+ "slotsUsed": 2,
+ "tasksTotal": 28,
+ "nimbusUptime": "4h 54m 52s"
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/supervisor-summary.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/supervisor-summary.json
new file mode 100644
index 0000000..f965e00
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/supervisor-summary.json
@@ -0,0 +1,11 @@
+{
+ "supervisors": [
+ {
+ "id": "9bda48ca-7f71-4829-b463-16fd97cf0391",
+ "host": "sandbox.hortonworks.com",
+ "uptime": "4h 55m 1s",
+ "slotsTotal": 2,
+ "slotsUsed": 2
+ }
+ ]
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/topology-component-id.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/topology-component-id.json
new file mode 100644
index 0000000..c178a2c
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/topology-component-id.json
@@ -0,0 +1,122 @@
+{
+ "executors": 5,
+ "componentErrors": [],
+ "topologyId": "WordCount-1-1424792039",
+ "name": "WordCount",
+ "user": null,
+ "executorStats": [
+ {
+ "workerLogLink": "http://sandbox.hortonworks.com:8005/log?file=WordCount-1-1424792039-worker-6700.log",
+ "emitted": 124600,
+ "port": 6700,
+ "completeLatency": "0.000",
+ "transferred": 124600,
+ "host": "sandbox.hortonworks.com",
+ "acked": 0,
+ "uptime": "4h 49m 30s",
+ "id": "[24-24]",
+ "failed": 0
+ },
+ {
+ "workerLogLink": "http://sandbox.hortonworks.com:8005/log?file=WordCount-1-1424792039-worker-6701.log",
+ "emitted": 124560,
+ "port": 6701,
+ "completeLatency": "0.000",
+ "transferred": 124560,
+ "host": "sandbox.hortonworks.com",
+ "acked": 0,
+ "uptime": "4h 49m 30s",
+ "id": "[25-25]",
+ "failed": 0
+ },
+ {
+ "workerLogLink": "http://sandbox.hortonworks.com:8005/log?file=WordCount-1-1424792039-worker-6700.log",
+ "emitted": 124540,
+ "port": 6700,
+ "completeLatency": "0.000",
+ "transferred": 124540,
+ "host": "sandbox.hortonworks.com",
+ "acked": 0,
+ "uptime": "4h 49m 30s",
+ "id": "[26-26]",
+ "failed": 0
+ },
+ {
+ "workerLogLink": "http://sandbox.hortonworks.com:8005/log?file=WordCount-1-1424792039-worker-6701.log",
+ "emitted": 124580,
+ "port": 6701,
+ "completeLatency": "0.000",
+ "transferred": 124580,
+ "host": "sandbox.hortonworks.com",
+ "acked": 0,
+ "uptime": "4h 49m 30s",
+ "id": "[27-27]",
+ "failed": 0
+ },
+ {
+ "workerLogLink": "http://sandbox.hortonworks.com:8005/log?file=WordCount-1-1424792039-worker-6700.log",
+ "emitted": 124660,
+ "port": 6700,
+ "completeLatency": "0.000",
+ "transferred": 124660,
+ "host": "sandbox.hortonworks.com",
+ "acked": 0,
+ "uptime": "4h 49m 30s",
+ "id": "[28-28]",
+ "failed": 0
+ }
+ ],
+ "tasks": 5,
+ "window": ":all-time",
+ "spoutSummary": [
+ {
+ "windowPretty": "10m 0s",
+ "window": "600",
+ "emitted": 0,
+ "transferred": 0,
+ "completeLatency": "0.000",
+ "acked": 0,
+ "failed": 0
+ },
+ {
+ "windowPretty": "3h 0m 0s",
+ "window": "10800",
+ "emitted": 273620,
+ "transferred": 273620,
+ "completeLatency": "0.000",
+ "acked": 0,
+ "failed": 0
+ },
+ {
+ "windowPretty": "1d 0h 0m 0s",
+ "window": "86400",
+ "emitted": 622940,
+ "transferred": 622940,
+ "completeLatency": "0.000",
+ "acked": 0,
+ "failed": 0
+ },
+ {
+ "windowPretty": "All time",
+ "window": ":all-time",
+ "emitted": 622940,
+ "transferred": 622940,
+ "completeLatency": "0.000",
+ "acked": 0,
+ "failed": 0
+ }
+ ],
+ "componentType": "spout",
+ "windowHint": "All time",
+ "id": "spout",
+ "outputStats": [
+ {
+ "stream": "default",
+ "emitted": 622940,
+ "transferred": 622940,
+ "completeLatency": "0",
+ "acked": 0,
+ "failed": 0
+ }
+ ]
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/topology-id.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/topology-id.json
new file mode 100644
index 0000000..9f48f98
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/topology-id.json
@@ -0,0 +1,284 @@
+{
+ "msgTimeout": 30,
+ "spouts": [
+ {
+ "executors": 5,
+ "emitted": 622940,
+ "errorLapsedSecs": null,
+ "completeLatency": "0.000",
+ "transferred": 622940,
+ "acked": 0,
+ "errorPort": "",
+ "spoutId": "spout",
+ "tasks": 5,
+ "errorHost": "sandbox.hortonworks.com",
+ "lastError": "",
+ "errorWorkerLogLink": "http://sandbox.hortonworks.com:8005/log?file=WordCount-1-1424792039-worker-.log",
+ "failed": 0
+ }
+ ],
+ "executorsTotal": 28,
+ "uptime": "4h 51m 26s",
+ "schedulerInfo": null,
+ "visualizationTable": [
+ {
+ ":row": [
+ {
+ ":stream": "default",
+ ":sani-stream": "default1544803905",
+ ":checked": true
+ },
+ {
+ ":stream": "__ack_ack",
+ ":sani-stream": "s__ack_ack1278315507",
+ ":checked": false
+ },
+ {
+ ":stream": "__ack_init",
+ ":sani-stream": "s__ack_init973324006",
+ ":checked": false
+ },
+ {
+ ":stream": "__ack_fail",
+ ":sani-stream": "s__ack_fail973222132",
+ ":checked": false
+ }
+ ]
+ }
+ ],
+ "name": "WordCount",
+ "user": null,
+ "antiForgeryToken": "H/8xIWCYQo4ZDWLvV9k0FAkjD0omWI8beVTp2mEPRxCbJmWBTYhRMhIV9LGIY3E51OAj+s6T7eQChpGJ",
+ "workersTotal": 2,
+ "topologyStats": [
+ {
+ "windowPretty": "10m 0s",
+ "window": "600",
+ "emitted": 0,
+ "transferred": 0,
+ "completeLatency": "0.000",
+ "acked": 0,
+ "failed": 0
+ },
+ {
+ "windowPretty": "3h 0m 0s",
+ "window": "10800",
+ "emitted": 4143380,
+ "transferred": 2222000,
+ "completeLatency": "0.000",
+ "acked": 2221840,
+ "failed": 0
+ },
+ {
+ "windowPretty": "1d 0h 0m 0s",
+ "window": "86400",
+ "emitted": 8594080,
+ "transferred": 4608660,
+ "completeLatency": "0.000",
+ "acked": 4608260,
+ "failed": 0
+ },
+ {
+ "windowPretty": "All time",
+ "window": ":all-time",
+ "emitted": 8594080,
+ "transferred": 4608660,
+ "completeLatency": "0.000",
+ "acked": 4608260,
+ "failed": 0
+ }
+ ],
+ "window": ":all-time",
+ "status": "INACTIVE",
+ "owner": "",
+ "tasksTotal": 28,
+ "configuration": {
+ "storm.id": "WordCount-1-1424792039",
+ "dev.zookeeper.path": "/tmp/dev-storm-zookeeper",
+ "topology.tick.tuple.freq.secs": null,
+ "topology.builtin.metrics.bucket.size.secs": 60,
+ "topology.fall.back.on.java.serialization": true,
+ "supervisor.run.worker.as.user": false,
+ "topology.max.error.report.per.interval": 5,
+ "storm.group.mapping.service": "backtype.storm.security.auth.ShellBasedGroupsMapping",
+ "zmq.linger.millis": 5000,
+ "topology.skip.missing.kryo.registrations": false,
+ "storm.messaging.netty.client_worker_threads": 1,
+ "ui.childopts": "-Xmx220m",
+ "storm.zookeeper.session.timeout": 20000,
+ "ui.filter.params": null,
+ "nimbus.reassign": true,
+ "storm.auth.simple-acl.admins": [],
+ "storm.group.mapping.service.cache.duration.secs": 120,
+ "topology.trident.batch.emit.interval.millis": 500,
+ "drpc.authorizer.acl.filename": "drpc-auth-acl.yaml",
+ "storm.messaging.netty.flush.check.interval.ms": 10,
+ "ui.header.buffer.bytes": 4096,
+ "nimbus.monitor.freq.secs": 10,
+ "logviewer.childopts": "-Xmx128m ",
+ "java.library.path": "/usr/local/lib:/opt/local/lib:/usr/lib:/usr/hdp/current/storm-client/lib",
+ "supervisor.supervisors": [],
+ "topology.executor.send.buffer.size": 1024,
+ "storm.local.dir": "/hadoop/storm",
+ "storm.messaging.netty.buffer_size": 5242880,
+ "supervisor.worker.start.timeout.secs": 120,
+ "drpc.authorizer.acl.strict": false,
+ "storm.nimbus.retry.times": 5,
+ "topology.enable.message.timeouts": true,
+ "nimbus.cleanup.inbox.freq.secs": 600,
+ "nimbus.inbox.jar.expiration.secs": 3600,
+ "drpc.worker.threads": 64,
+ "storm.meta.serialization.delegate": "backtype.storm.serialization.DefaultSerializationDelegate",
+ "topology.worker.shared.thread.pool.size": 4,
+ "nimbus.host": "sandbox.hortonworks.com",
+ "storm.messaging.netty.min_wait_ms": 100,
+ "storm.zookeeper.port": 2181,
+ "transactional.zookeeper.port": null,
+ "ui.http.creds.plugin": "backtype.storm.security.auth.DefaultHttpCredentialsPlugin",
+ "topology.executor.receive.buffer.size": 1024,
+ "logs.users": null,
+ "transactional.zookeeper.servers": null,
+ "topology.users": [],
+ "storm.zookeeper.root": "/storm",
+ "storm.zookeeper.retry.intervalceiling.millis": 30000,
+ "supervisor.enable": true,
+ "storm.messaging.netty.server_worker_threads": 1,
+ "storm.zookeeper.servers": ["sandbox.hortonworks.com"],
+ "transactional.zookeeper.root": "/transactional",
+ "topology.acker.executors": null,
+ "storm.auth.simple-acl.users": [],
+ "topology.kryo.decorators": [],
+ "topology.name": "WordCount",
+ "topology.submitter.principal": "",
+ "storm.zookeeper.auth.user": null,
+ "topology.testing.always.try.serialize": false,
+ "topology.transfer.buffer.size": 1024,
+ "storm.principal.tolocal": "backtype.storm.security.auth.DefaultPrincipalToLocal",
+ "topology.worker.childopts": null,
+ "drpc.queue.size": 128,
+ "worker.childopts": "-Xmx768m -javaagent:/usr/hdp/current/storm-client/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=sandbox.hortonworks.com,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-client/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Worker_%ID%_JVM",
+ "storm.auth.simple-acl.users.commands": [],
+ "supervisor.heartbeat.frequency.secs": 5,
+ "topology.error.throttle.interval.secs": 10,
+ "storm.nimbus.retry.interval.millis": 2000,
+ "ui.users": null,
+ "zmq.hwm": 0,
+ "drpc.port": 3772,
+ "supervisor.monitor.frequency.secs": 3,
+ "drpc.childopts": "-Xmx220m",
+ "topology.receiver.buffer.size": 8,
+ "topology.submitter.user": "",
+ "task.heartbeat.frequency.secs": 3,
+ "topology.tasks": null,
+ "storm.messaging.netty.max_retries": 30,
+ "topology.spout.wait.strategy": "backtype.storm.spout.SleepSpoutWaitStrategy",
+ "nimbus.thrift.max_buffer_size": 1048576,
+ "drpc.invocations.threads": 64,
+ "drpc.https.port": -1,
+ "supervisor.supervisors.commands": [],
+ "topology.max.spout.pending": null,
+ "ui.filter": null,
+ "logviewer.cleanup.age.mins": 10080,
+ "storm.zookeeper.retry.interval": 1000,
+ "topology.sleep.spout.wait.strategy.time.ms": 1,
+ "nimbus.topology.validator": "backtype.storm.nimbus.DefaultTopologyValidator",
+ "supervisor.slots.ports": [
+ 6700,
+ 6701
+ ],
+ "storm.messaging.netty.authentication": false,
+ "topology.environment": null,
+ "topology.debug": true,
+ "nimbus.thrift.threads": 64,
+ "nimbus.task.launch.secs": 120,
+ "nimbus.supervisor.timeout.secs": 60,
+ "topology.kryo.register": null,
+ "drpc.http.creds.plugin": "backtype.storm.security.auth.DefaultHttpCredentialsPlugin",
+ "topology.message.timeout.secs": 30,
+ "task.refresh.poll.secs": 10,
+ "topology.workers": 3,
+ "supervisor.childopts": "-Xmx256m -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=56431 -javaagent:/usr/hdp/current/storm-supervisor/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=sandbox.hortonworks.com,port=8650,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-supervisor/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Supervisor_JVM",
+ "storm.auth.simple-white-list.users": [],
+ "storm.zookeeper.superACL": null,
+ "nimbus.thrift.port": 6627,
+ "drpc.https.keystore.type": "JKS",
+ "topology.stats.sample.rate": 0.05,
+ "task.credentials.poll.secs": 30,
+ "worker.heartbeat.frequency.secs": 1,
+ "ui.actions.enabled": true,
+ "topology.tuple.serializer": "backtype.storm.serialization.types.ListDelegateSerializer",
+ "drpc.https.keystore.password": "",
+ "topology.disruptor.wait.strategy": "com.lmax.disruptor.BlockingWaitStrategy",
+ "topology.multilang.serializer": "backtype.storm.multilang.JsonSerializer",
+ "drpc.max_buffer_size": 1048576,
+ "nimbus.task.timeout.secs": 30,
+ "storm.zookeeper.connection.timeout": 15000,
+ "topology.kryo.factory": "backtype.storm.serialization.DefaultKryoFactory",
+ "drpc.invocations.port": 3773,
+ "logviewer.port": 8005,
+ "zmq.threads": 1,
+ "storm.zookeeper.retry.times": 5,
+ "topology.worker.receiver.thread.count": 1,
+ "storm.thrift.transport": "backtype.storm.security.auth.SimpleTransportPlugin",
+ "topology.state.synchronization.timeout.secs": 60,
+ "supervisor.worker.timeout.secs": 30,
+ "nimbus.file.copy.expiration.secs": 600,
+ "nimbus.credential.renewers.freq.secs": 600,
+ "storm.messaging.transport": "backtype.storm.messaging.netty.Context",
+ "worker.gc.childopts": "",
+ "logviewer.appender.name": "A1",
+ "storm.messaging.netty.max_wait_ms": 1000,
+ "storm.zookeeper.auth.password": null,
+ "drpc.http.port": 3774,
+ "drpc.request.timeout.secs": 600,
+ "storm.local.mode.zmq": false,
+ "ui.port": 8744,
+ "nimbus.childopts": "-Xmx220m -javaagent:/usr/hdp/current/storm-client/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=sandbox.hortonworks.com,port=8649,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-client/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM",
+ "storm.cluster.mode": "distributed",
+ "topology.optimize": true,
+ "topology.max.task.parallelism": null,
+ "storm.messaging.netty.transfer.batch.size": 262144,
+ "storm.nimbus.retry.intervalceiling.millis": 60000,
+ "topology.classpath": null
+ },
+ "windowHint": "All time",
+ "id": "WordCount-1-1424792039",
+ "bolts": [
+ {
+ "executors": 12,
+ "emitted": 3985420,
+ "errorLapsedSecs": null,
+ "transferred": 0,
+ "acked": 3985540,
+ "errorPort": "",
+ "executeLatency": "0.199",
+ "tasks": 12,
+ "executed": 3985560,
+ "processLatency": "0.107",
+ "boltId": "count",
+ "errorHost": "sandbox.hortonworks.com",
+ "lastError": "",
+ "errorWorkerLogLink": "http://sandbox.hortonworks.com:8005/log?file=WordCount-1-1424792039-worker-.log",
+ "capacity": "0.000",
+ "failed": 0
+ },
+ {
+ "executors": 8,
+ "emitted": 3985720,
+ "errorLapsedSecs": 16907,
+ "transferred": 3985720,
+ "acked": 622720,
+ "errorPort": 6701,
+ "executeLatency": "0.063",
+ "tasks": 8,
+ "executed": 622660,
+ "processLatency": "33.837",
+ "boltId": "split",
+ "errorHost": "sandbox.hortonworks.com",
+ "lastError": "java.lang.RuntimeException: backtype.storm.multilang.NoOutputException: Pipe to subprocess seems to be broken! No output read.\nSerializer Exception:\n\n\n\tat backtype.storm.utils.ShellProcess.readShellMs",
+ "errorWorkerLogLink": "http://sandbox.hortonworks.com:8005/log?file=WordCount-1-1424792039-worker-6701.log",
+ "capacity": "0.000",
+ "failed": 0
+ }
+ ]
+}
\ No newline at end of file
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/topology-summary.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/topology-summary.json
new file mode 100644
index 0000000..aaee9f1
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayBasicFuncTest/storm/topology-summary.json
@@ -0,0 +1,15 @@
+{
+ "topologies": [
+ {
+ "executorsTotal": 28,
+ "uptime": "4h 48m 23s",
+ "schedulerInfo": null,
+ "name": "WordCount",
+ "workersTotal": 2,
+ "status": "INACTIVE",
+ "owner": "",
+ "tasksTotal": 28,
+ "id": "WordCount-1-1424792039"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/gateway-service-webhcat/pom.xml b/gateway-util-configinjector/pom.xml
old mode 100644
new mode 100755
similarity index 69%
rename from gateway-service-webhcat/pom.xml
rename to gateway-util-configinjector/pom.xml
index 3f81cc4..c04f56b
--- a/gateway-service-webhcat/pom.xml
+++ b/gateway-util-configinjector/pom.xml
@@ -19,39 +19,42 @@
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
+
<parent>
<groupId>org.apache.knox</groupId>
<artifactId>gateway</artifactId>
<version>0.6.0-SNAPSHOT</version>
</parent>
- <artifactId>gateway-service-webhcat</artifactId>
+ <artifactId>gateway-util-configinjector</artifactId>
- <name>gateway-service-webhcat</name>
- <description>The extension to the gateway for supporting WebHCat.</description>
-
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
+ <name>gateway-util-configinjector</name>
+ <description>A lightweight config injection utility</description>
<dependencies>
<dependency>
- <groupId>${gateway-group}</groupId>
- <artifactId>gateway-spi</artifactId>
+ <groupId>commons-beanutils</groupId>
+ <artifactId>commons-beanutils</artifactId>
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
- <artifactId>gateway-provider-rewrite</artifactId>
+ <artifactId>gateway-test-utils</artifactId>
+ <scope>test</scope>
</dependency>
-
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.hamcrest</groupId>
+ <artifactId>hamcrest-core</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.hamcrest</groupId>
+ <artifactId>hamcrest-library</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
</project>
\ No newline at end of file
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Alias.java
old mode 100644
new mode 100755
similarity index 65%
copy from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Alias.java
index 7a88a26..b002e7a
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Alias.java
@@ -15,10 +15,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.picketlink;
+package org.apache.hadoop.gateway.config;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+@Target( { ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER} )
+@Retention( RetentionPolicy.RUNTIME )
+@Documented
+public @interface Alias {
+ public String value();
}
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationAdapter.java
old mode 100644
new mode 100755
similarity index 79%
copy from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationAdapter.java
index 7a88a26..401258f
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationAdapter.java
@@ -15,10 +15,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.picketlink;
+package org.apache.hadoop.gateway.config;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
+public interface ConfigurationAdapter {
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+ Object getConfigurationValue(String name) throws ConfigurationException;
+
}
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationBinding.java
old mode 100644
new mode 100755
similarity index 79%
rename from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
rename to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationBinding.java
index 7a88a26..a2b2b67
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationBinding.java
@@ -15,10 +15,10 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.picketlink;
+package org.apache.hadoop.gateway.config;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
+public interface ConfigurationBinding {
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+ String getConfigurationName(String name);
+
}
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationException.java
old mode 100644
new mode 100755
similarity index 73%
copy from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationException.java
index 7a88a26..c979cc6
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationException.java
@@ -15,10 +15,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.picketlink;
+package org.apache.hadoop.gateway.config;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
+public class ConfigurationException extends RuntimeException {
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+ public ConfigurationException( String message, Throwable cause ) {
+ super( message, cause );
+ }
+
+ public ConfigurationException( String message ) {
+ super( message );
+ }
+
}
diff --git a/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationInjectorBuilder.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationInjectorBuilder.java
new file mode 100755
index 0000000..e0fadbd
--- /dev/null
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/ConfigurationInjectorBuilder.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config;
+
+import org.apache.hadoop.gateway.config.impl.ConfigurationAdapterFactory;
+import org.apache.hadoop.gateway.config.impl.DefaultConfigurationBinding;
+import org.apache.hadoop.gateway.config.impl.MappedConfigurationBinding;
+import org.apache.hadoop.gateway.config.spi.ConfigurationInjector;
+
+import java.util.Iterator;
+import java.util.ServiceLoader;
+
+public class ConfigurationInjectorBuilder {
+
+  private static final ConfigurationBinding DEFAULT_BINDING = new DefaultConfigurationBinding();
+ private static ConfigurationInjector INSTANCE = null;
+
+ private static synchronized ConfigurationInjector getInjector() {
+ if( INSTANCE == null ) {
+ INSTANCE = createInjector();
+ }
+ return INSTANCE;
+ }
+
+ private static synchronized ConfigurationInjector createInjector() {
+ ConfigurationInjector injector = null;
+ ServiceLoader<ConfigurationInjector> loader = ServiceLoader.load( ConfigurationInjector.class );
+ if( loader != null ) {
+ Iterator<ConfigurationInjector> iterator = loader.iterator();
+ if( iterator != null ) {
+ while( iterator.hasNext() ) {
+ injector = iterator.next();
+ break;
+ }
+ }
+ }
+ if( injector == null ) {
+ throw new ConfigurationException( String.format(
+ "Failed to load an implementation of %s", ConfigurationInjector.class.getName() ) );
+ }
+ return injector;
+ }
+
+ private Object target = null;
+ private ConfigurationAdapter source = null;
+ private ConfigurationBinding binding = null;
+
+ public static ConfigurationInjectorBuilder configuration() {
+ return new ConfigurationInjectorBuilder();
+ }
+
+ public ConfigurationInjectorBuilder target( Object target ) {
+ this.target = target;
+ return this;
+ }
+
+ public ConfigurationInjectorBuilder source( Object source ) {
+ this.source = ConfigurationAdapterFactory.get(source);
+ return this;
+ }
+
+ public ConfigurationInjectorBuilder source( ConfigurationAdapter adapter ) {
+ this.source = adapter;
+ return this;
+ }
+
+ public ConfigurationInjectorBuilder binding( ConfigurationBinding binding ) {
+ this.binding = binding;
+ return this;
+ }
+
+ public ConfigurationInjectorBuilder bind( String targetName, String sourceName ) {
+ ((MappedConfigurationBinding)binding()).bind( targetName, sourceName );
+ return this;
+ }
+
+ public ConfigurationBinding binding() {
+ if( binding == null ) {
+ binding = new MappedConfigurationBinding();
+ }
+ return binding;
+ }
+
+ public void inject() throws ConfigurationException {
+ ConfigurationInjector injector = getInjector();
+ if( binding == null ) {
+ binding = DEFAULT_BINDING;
+ }
+ injector.configure( target, source, binding );
+ }
+
+}
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Configure.java
old mode 100644
new mode 100755
similarity index 66%
copy from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Configure.java
index 7a88a26..895648f
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Configure.java
@@ -15,10 +15,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.picketlink;
+package org.apache.hadoop.gateway.config;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+@Target( { ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER} )
+@Retention( RetentionPolicy.RUNTIME )
+@Documented
+public @interface Configure {
}
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Default.java
old mode 100644
new mode 100755
similarity index 67%
copy from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Default.java
index 7a88a26..1ef9042
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Default.java
@@ -15,10 +15,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.picketlink;
+package org.apache.hadoop.gateway.config;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+@Target( { ElementType.PARAMETER } )
+@Retention( RetentionPolicy.RUNTIME )
+@Documented
+public @interface Default {
+ public String value();
}
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Optional.java
old mode 100644
new mode 100755
similarity index 69%
copy from gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Optional.java
index ec57043..1299e75
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/Optional.java
@@ -1,4 +1,3 @@
-
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -16,16 +15,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.provider.federation;
+package org.apache.hadoop.gateway.config;
-import junit.framework.TestCase;
+import java.lang.annotation.Documented;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
-import org.junit.Test;
-
-public class PreAuthSSOTest extends TestCase {
- @Test
- public void testPreAuth() throws Exception {
- assertTrue(true);
- }
+@Target( { ElementType.FIELD } )
+@Retention( RetentionPolicy.RUNTIME )
+@Documented
+public @interface Optional {
}
diff --git a/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapter.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapter.java
new file mode 100755
index 0000000..67459a4
--- /dev/null
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapter.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config.impl;
+
+import org.apache.commons.beanutils.PropertyUtils;
+import org.apache.hadoop.gateway.config.ConfigurationAdapter;
+import org.apache.hadoop.gateway.config.ConfigurationException;
+
+public class BeanConfigurationAdapter implements ConfigurationAdapter {
+
+ private Object bean;
+
+ public BeanConfigurationAdapter( Object bean ) {
+ this.bean = bean;
+ }
+
+ @Override
+ public Object getConfigurationValue( String name ) throws ConfigurationException {
+ try {
+ return PropertyUtils.getSimpleProperty( bean, name );
+ } catch( Exception e ) {
+      throw new ConfigurationException( String.format( "Failed to get property %s from bean of type %s", name, bean.getClass().getName() ), e );
+ }
+ }
+}
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapterDescriptor.java
old mode 100644
new mode 100755
similarity index 69%
copy from gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapterDescriptor.java
index ec57043..92a6cca
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapterDescriptor.java
@@ -1,4 +1,3 @@
-
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -16,16 +15,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.provider.federation;
+package org.apache.hadoop.gateway.config.impl;
-import junit.framework.TestCase;
+import org.apache.hadoop.gateway.config.spi.AbstractConfigurationAdapterDescriptor;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
-import org.junit.Test;
+import java.util.Map;
-public class PreAuthSSOTest extends TestCase {
- @Test
- public void testPreAuth() throws Exception {
- assertTrue(true);
+public class BeanConfigurationAdapterDescriptor extends AbstractConfigurationAdapterDescriptor {
+
+ public BeanConfigurationAdapterDescriptor() {
+ add( Object.class, BeanConfigurationAdapter.class );
}
+
}
diff --git a/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/ConfigurationAdapterFactory.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/ConfigurationAdapterFactory.java
new file mode 100755
index 0000000..73cd97b
--- /dev/null
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/ConfigurationAdapterFactory.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config.impl;
+
+
+import org.apache.hadoop.gateway.config.ConfigurationAdapter;
+import org.apache.hadoop.gateway.config.ConfigurationException;
+import org.apache.hadoop.gateway.config.spi.ConfigurationAdapterDescriptor;
+
+import java.lang.reflect.Constructor;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.ServiceLoader;
+
+public class ConfigurationAdapterFactory {
+
+ private static Map<Class<?>, Class<? extends ConfigurationAdapter>> ADAPTERS = null;
+
+ private static synchronized Map<Class<?>, Class<? extends ConfigurationAdapter>> getAdapters() {
+ if( ADAPTERS == null ) {
+ loadAdapters();
+ }
+ return ADAPTERS;
+ }
+
+ private static void loadAdapters() {
+ Map<Class<?>, Class<? extends ConfigurationAdapter>> all =
+ new HashMap<Class<?>, Class<? extends ConfigurationAdapter>>();
+ ServiceLoader<ConfigurationAdapterDescriptor> loader = ServiceLoader.load( ConfigurationAdapterDescriptor.class );
+ if( loader != null ) {
+ Iterator<ConfigurationAdapterDescriptor> i = loader.iterator();
+ if( i != null ) {
+ while( i.hasNext() ) {
+ ConfigurationAdapterDescriptor descriptor = i.next();
+ Map<Class<?>, Class<? extends ConfigurationAdapter>> add = descriptor.providedConfigurationAdapters();
+ if( add != null ) {
+ all.putAll( add );
+ }
+ }
+ }
+ }
+ ADAPTERS = Collections.unmodifiableMap( all );
+ }
+
+ public static ConfigurationAdapter get( Object config ) throws ConfigurationException {
+ if( config == null ) {
+ throw new NullPointerException( "Configuration adapter instantiation impossible for null config object." );
+ }
+ try {
+ Map<Class<?>, Class<? extends ConfigurationAdapter>> adapters = getAdapters();
+ Class configType = config.getClass();
+ Class adapterType = findAdapterTypeForConfigTypeOrParent( adapters, configType );
+ if( adapterType == null ) {
+ throw new ConfigurationException( "No configuration adapter found for config type " + configType.getName() );
+ }
+ Constructor c = findConstructorForConfigType( adapterType, configType );
+ if( !c.isAccessible() ) {
+ c.setAccessible( true );
+ }
+ Object adapter = c.newInstance( config );
+ return ConfigurationAdapter.class.cast( adapter );
+ } catch( ConfigurationException e ) {
+ throw e;
+ } catch( Exception e ) {
+ throw new ConfigurationException( "Configuration adapter instantiation failed.", e );
+ }
+ }
+
+ public static Constructor findConstructorForConfigType( Class<?> adapterType, Class<?> configType ) throws NoSuchMethodException {
+ Constructor constructor = null;
+ Constructor[] constructors = adapterType.getConstructors();
+ for( Constructor candidate : constructors ) {
+ Class<?>[] paramTypes = candidate.getParameterTypes();
+ if( paramTypes.length == 1 ) {
+ Class<?> paramType = paramTypes[0];
+ if( paramType.isAssignableFrom( configType ) ) {
+ constructor = candidate;
+ break;
+ }
+ }
+ }
+ if( constructor == null ) {
+ throw new NoSuchMethodException( "No constructor for " + adapterType.getName() + " that will accept " + configType.getName() );
+ }
+ return constructor;
+ }
+
+ public static Class<? extends ConfigurationAdapter> findAdapterTypeForConfigTypeOrParent(
+ Map<Class<?>, Class<? extends ConfigurationAdapter>> adapters, Class<?> configType ) {
+ Class<? extends ConfigurationAdapter> adapterType = null;
+ while( configType != null ) {
+ adapterType = findAdapterTypeForConfigType( adapters, configType );
+ if( adapterType != null ) {
+ break;
+ }
+ configType = configType.getSuperclass();
+ }
+ return adapterType;
+ }
+
+ public static Class<? extends ConfigurationAdapter> findAdapterTypeForConfigType(
+ Map<Class<?>, Class<? extends ConfigurationAdapter>> adapters, Class<?> configType ) {
+ Class<? extends ConfigurationAdapter> adapterType = adapters.get( configType );
+ if( adapterType == null ) {
+ for( Class interfaceType : configType.getInterfaces() ) {
+ adapterType = findAdapterTypeForConfigTypeOrParent( adapters, interfaceType );
+ if( adapterType != null ) {
+ break;
+ }
+ }
+ }
+ return adapterType;
+ }
+
+}
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/DefaultConfigurationBinding.java
old mode 100644
new mode 100755
similarity index 74%
copy from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/DefaultConfigurationBinding.java
index 7a88a26..5629bc1
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/DefaultConfigurationBinding.java
@@ -15,10 +15,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.picketlink;
+package org.apache.hadoop.gateway.config.impl;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.config.ConfigurationBinding;
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+public class DefaultConfigurationBinding implements ConfigurationBinding {
+
+ @Override
+ public String getConfigurationName( String name ) {
+ return name;
+ }
+
}
diff --git a/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/DefaultConfigurationInjector.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/DefaultConfigurationInjector.java
new file mode 100755
index 0000000..8b86ba1
--- /dev/null
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/DefaultConfigurationInjector.java
@@ -0,0 +1,224 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config.impl;
+
+import org.apache.commons.beanutils.ConvertUtilsBean2;
+import org.apache.hadoop.gateway.config.*;
+import org.apache.hadoop.gateway.config.spi.ConfigurationInjector;
+
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+
+public class DefaultConfigurationInjector implements ConfigurationInjector {
+
+ private static ConvertUtilsBean2 DEFAULT_CONVERTER = new ConvertUtilsBean2();
+
+ @Override
+ public void configure( Object target, ConfigurationAdapter adapter, ConfigurationBinding binding )
+ throws ConfigurationException {
+ Class type = target.getClass();
+ while( type != null ) {
+ injectClass( type, target, adapter, binding );
+ type = type.getSuperclass();
+ }
+ }
+
+ private void injectClass( Class type, Object target, ConfigurationAdapter config, ConfigurationBinding binding )
+ throws ConfigurationException {
+ Field[] fields = type.getDeclaredFields();
+ for( Field field : fields ) {
+ injectFieldValue( field, target, config, binding );
+ }
+ Method[] methods = type.getDeclaredMethods();
+ for( Method method : methods ) {
+ injectMethodValue( method, target, config, binding );
+ }
+ }
+
+ private void injectFieldValue( Field field, Object target, ConfigurationAdapter adapter, ConfigurationBinding binding )
+ throws ConfigurationException {
+ Configure annotation = field.getAnnotation( Configure.class );
+ if( annotation != null ) {
+ Alias alias = field.getAnnotation( Alias.class );
+ String name = getConfigName( field, alias );
+ String bind = getBindName( target, name, binding );
+ Object value = retrieveValue( target, bind, name, field.getType(), adapter, binding );
+ if( value == null ) {
+ Optional optional = field.getAnnotation( Optional.class );
+ if( optional == null ) {
+ throw new ConfigurationException( String.format(
+ "Failed to find configuration for %s bound to %s of %s via %s",
+ bind, name, target.getClass().getName(), adapter.getClass().getName() ) );
+ }
+ } else {
+ try {
+ if( !field.isAccessible() ) {
+ field.setAccessible( true );
+ }
+ field.set( target, value );
+ } catch( Exception e ) {
+ throw new ConfigurationException( String.format(
+ "Failed to inject field configuration property %s of %s",
+ name, target.getClass().getName() ), e );
+ }
+ }
+ }
+ }
+
+ private void injectMethodValue( Method method, Object target, ConfigurationAdapter adapter, ConfigurationBinding binding )
+ throws ConfigurationException {
+ Configure methodTag = method.getAnnotation( Configure.class );
+ if( methodTag != null ) {
+ Alias aliasTag = method.getAnnotation( Alias.class );
+ String methodName = getConfigName( method, aliasTag );
+ Class[] argTypes = method.getParameterTypes();
+ Object[] args = new Object[ argTypes.length ];
+ Annotation[][] argTags = method.getParameterAnnotations();
+ for( int i=0; i<argTypes.length; i++ ) {
+ String argName = getConfigName( methodName, argTags[i] );
+ String bndName = getBindName( target, argName, binding );
+ Object argValue = retrieveValue( target, bndName, argName, argTypes[i], adapter, binding );
+ if( argValue == null ) {
+ Default defTag = findAnnotation( argTags[i], Default.class );
+ if( defTag != null ) {
+ String strValue = defTag.value();
+ argValue = convertValue( target, argName, strValue, argTypes[i] );
+ } else {
+          throw new ConfigurationException( String.format(
+              "Failed to find configuration for %s bound to %s of %s via %s",
+              bndName, argName, target.getClass().getName(), adapter.getClass().getName() ) );
+ }
+ }
+ args[ i ] = argValue;
+ }
+ if( !method.isAccessible() ) {
+ method.setAccessible( true );
+ }
+ try {
+ method.invoke( target, args );
+ } catch( Exception e ) {
+ throw new ConfigurationException( String.format(
+ "Failed to inject method configuration via %s of %s",
+ methodName, target.getClass().getName() ), e );
+ }
+ }
+ }
+
+ private Object convertValue( Object target, String name, Object strValue, Class<?> type ) {
+ Object objValue = null;
+ try {
+ objValue = DEFAULT_CONVERTER.convert( strValue, type );
+ } catch( Exception e ) {
+ throw new ConfigurationException( String.format(
+ "Failed to convert configuration for %s of %s to %s",
+ name, target.getClass().getName(), type.getName() ), e );
+ }
+ return objValue;
+ }
+
+ private Object retrieveValue( Object target, String bind, String name, Class<?> type, ConfigurationAdapter adapter, ConfigurationBinding binding ) {
+ Object value;
+ try {
+ value = adapter.getConfigurationValue( bind );
+ } catch( Exception e ) {
+ throw new ConfigurationException( String.format(
+ "Failed to retrieve configuration for %s bound to %s of %s via %s",
+ bind, name, target.getClass().getName(), adapter.getClass().getName() ), e );
+ }
+ value = convertValue( target, name, value, type );
+ return value;
+ }
+
+ private <T extends Annotation> T findAnnotation( Annotation[] annotations, Class<T> type ) {
+ T found = null;
+ for( Annotation current : annotations ) {
+ if( type.isAssignableFrom( current.getClass() ) ) {
+ found = (T)current;
+ break;
+ }
+ }
+ return found;
+ }
+
+ private static String pickName( String implied, Alias explicit ) {
+ String name = implied;
+ if( explicit != null ) {
+ String tagValue = explicit.value().trim();
+ if( tagValue.length() > 0 ) {
+ name = tagValue;
+ }
+ }
+ return name;
+ }
+
+ private static String getBindName( Object target, String name, ConfigurationBinding binding ) {
+ String bind = null;
+ try {
+ bind = binding.getConfigurationName( name );
+ } catch( Exception e ) {
+ throw new ConfigurationException( String.format(
+ "Failed to bind configuration for %s of %s via %s",
+ name, target.getClass().getName(), binding.getClass().getName() ), e );
+ }
+ if( bind == null ) {
+ bind = name;
+ }
+ return bind;
+ }
+
+ private static String getConfigName( Field field, Alias tag ) {
+ return pickName( field.getName(), tag );
+ }
+
+ private static String getConfigName( String name, Annotation[] tags ) {
+ if( tags != null ) {
+ for( Annotation tag : tags ) {
+ if( tag != null && tag instanceof Alias ) {
+ Alias aliasTag = Alias.class.cast( tag );
+ String aliasValue = aliasTag.value().trim();
+ if( aliasValue.length() > 0 ) {
+ name = aliasValue;
+ break;
+ }
+ }
+ }
+ }
+ return name;
+ }
+
+ private static String getConfigName( Method method, Alias tag ) {
+ return pickName( getConfigName( method ), tag );
+ }
+
+ private static String getConfigName( Method method ) {
+ String methodName = method.getName();
+ StringBuilder name = new StringBuilder( methodName.length() );
+ if( methodName != null &&
+ methodName.length() > 3 &&
+ methodName.startsWith( "set" ) &&
+ Character.isUpperCase( methodName.charAt( 3 ) ) ) {
+ name.append( methodName.substring( 3 ) );
+ name.setCharAt( 0, Character.toLowerCase( name.charAt( 0 ) ) );
+ } else {
+      name.append( methodName );
+ }
+ return name.toString();
+ }
+
+}
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/MapConfigurationAdapter.java
old mode 100644
new mode 100755
similarity index 67%
copy from gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/MapConfigurationAdapter.java
index d2aa441..d0556f4
--- a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/MapConfigurationAdapter.java
@@ -15,17 +15,23 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.identityasserter.function;
+package org.apache.hadoop.gateway.config.impl;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor;
+import org.apache.hadoop.gateway.config.ConfigurationAdapter;
-public class UsernameFunctionDescriptor implements UrlRewriteFunctionDescriptor<UsernameFunctionDescriptor> {
+import java.util.Map;
- public static final String FUNCTION_NAME = "username";
+public class MapConfigurationAdapter implements ConfigurationAdapter {
+
+ private Map config;
+
+ public MapConfigurationAdapter( Map map ) {
+ this.config = map;
+ }
@Override
- public String name() {
- return FUNCTION_NAME;
+ public Object getConfigurationValue( String name ) {
+ return config.get( name );
}
}
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/MapConfigurationAdapterDescriptor.java
old mode 100644
new mode 100755
similarity index 69%
copy from gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/MapConfigurationAdapterDescriptor.java
index ec57043..4099483
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/MapConfigurationAdapterDescriptor.java
@@ -1,4 +1,3 @@
-
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -16,16 +15,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.provider.federation;
+package org.apache.hadoop.gateway.config.impl;
-import junit.framework.TestCase;
+import org.apache.hadoop.gateway.config.spi.AbstractConfigurationAdapterDescriptor;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
-import org.junit.Test;
+import java.util.Map;
-public class PreAuthSSOTest extends TestCase {
- @Test
- public void testPreAuth() throws Exception {
- assertTrue(true);
+public class MapConfigurationAdapterDescriptor extends AbstractConfigurationAdapterDescriptor {
+
+ public MapConfigurationAdapterDescriptor() {
+ add( Map.class, MapConfigurationAdapter.class );
}
+
}
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/MappedConfigurationBinding.java
old mode 100644
new mode 100755
similarity index 61%
copy from gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/MappedConfigurationBinding.java
index d2aa441..161869f
--- a/gateway-provider-identity-assertion-pseudo/src/main/java/org/apache/hadoop/gateway/identityasserter/function/UsernameFunctionDescriptor.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/MappedConfigurationBinding.java
@@ -15,17 +15,24 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.identityasserter.function;
+package org.apache.hadoop.gateway.config.impl;
-import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor;
+import org.apache.hadoop.gateway.config.ConfigurationBinding;
-public class UsernameFunctionDescriptor implements UrlRewriteFunctionDescriptor<UsernameFunctionDescriptor> {
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
- public static final String FUNCTION_NAME = "username";
+public class MappedConfigurationBinding implements ConfigurationBinding {
+
+ private Map<String,String> map = new ConcurrentHashMap<String, String>();
+
+ public void bind( String targetName, String sourceName ) {
+ map.put( targetName, sourceName );
+ }
@Override
- public String name() {
- return FUNCTION_NAME;
+ public String getConfigurationName( String name ) {
+ return map.get( name );
}
}
diff --git a/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/PropertiesConfigurationAdapter.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/PropertiesConfigurationAdapter.java
new file mode 100755
index 0000000..6240d44
--- /dev/null
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/PropertiesConfigurationAdapter.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config.impl;
+
+import org.apache.hadoop.gateway.config.ConfigurationAdapter;
+import org.apache.hadoop.gateway.config.ConfigurationException;
+
+import java.util.Properties;
+
+public class PropertiesConfigurationAdapter implements ConfigurationAdapter {
+
+ private Properties properties;
+
+ public PropertiesConfigurationAdapter( Properties properties ) {
+ this.properties = properties;
+ }
+
+ @Override
+ public Object getConfigurationValue( String name ) throws ConfigurationException {
+ return properties.getProperty( name );
+ }
+
+}
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/PropertiesConfigurationAdapterDescriptor.java
old mode 100644
new mode 100755
similarity index 67%
copy from gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/PropertiesConfigurationAdapterDescriptor.java
index ec57043..b93e49a
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/impl/PropertiesConfigurationAdapterDescriptor.java
@@ -1,4 +1,3 @@
-
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -16,16 +15,16 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.provider.federation;
+package org.apache.hadoop.gateway.config.impl;
-import junit.framework.TestCase;
+import org.apache.hadoop.gateway.config.spi.AbstractConfigurationAdapterDescriptor;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
-import org.junit.Test;
+import java.util.Properties;
-public class PreAuthSSOTest extends TestCase {
- @Test
- public void testPreAuth() throws Exception {
- assertTrue(true);
+public class PropertiesConfigurationAdapterDescriptor extends AbstractConfigurationAdapterDescriptor {
+
+ public PropertiesConfigurationAdapterDescriptor() {
+ add( Properties.class, PropertiesConfigurationAdapter.class );
}
+
}
diff --git a/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/spi/AbstractConfigurationAdapterDescriptor.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/spi/AbstractConfigurationAdapterDescriptor.java
new file mode 100755
index 0000000..5372911
--- /dev/null
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/spi/AbstractConfigurationAdapterDescriptor.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config.spi;
+
+import org.apache.hadoop.gateway.config.ConfigurationAdapter;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public abstract class AbstractConfigurationAdapterDescriptor implements ConfigurationAdapterDescriptor {
+
+  private final Map<Class<?>, Class<? extends ConfigurationAdapter>> adapters =
+      new HashMap<Class<?>, Class<? extends ConfigurationAdapter>>();
+
+  protected AbstractConfigurationAdapterDescriptor() {
+  }
+
+  protected void add( Class<?> configType, Class<? extends ConfigurationAdapter> adapterType ) {
+    adapters.put( configType, adapterType );
+  }
+
+  @Override
+  public Map<Class<?>, Class<? extends ConfigurationAdapter>> providedConfigurationAdapters() {
+    return adapters;
+  }
+
+}
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/spi/ConfigurationAdapterDescriptor.java
old mode 100644
new mode 100755
similarity index 74%
copy from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/spi/ConfigurationAdapterDescriptor.java
index 7a88a26..3e304d4
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/spi/ConfigurationAdapterDescriptor.java
@@ -15,10 +15,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.picketlink;
+package org.apache.hadoop.gateway.config.spi;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.config.ConfigurationAdapter;
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+import java.util.Map;
+
+public interface ConfigurationAdapterDescriptor {
+
+ Map<Class<?>,Class<? extends ConfigurationAdapter>> providedConfigurationAdapters();
+
}
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/spi/ConfigurationInjector.java
old mode 100644
new mode 100755
similarity index 71%
copy from gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
copy to gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/spi/ConfigurationInjector.java
index 7a88a26..0bbd1d9
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkAuthMessages.java
+++ b/gateway-util-configinjector/src/main/java/org/apache/hadoop/gateway/config/spi/ConfigurationInjector.java
@@ -15,10 +15,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.picketlink;
+package org.apache.hadoop.gateway.config.spi;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.config.ConfigurationAdapter;
+import org.apache.hadoop.gateway.config.ConfigurationBinding;
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkAuthMessages {
+public interface ConfigurationInjector {
+
+ void configure(Object target, ConfigurationAdapter adapter, ConfigurationBinding binding);
+
}
diff --git a/gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor b/gateway-util-configinjector/src/main/resources/META-INF/services/org.apache.hadoop.gateway.config.spi.ConfigurationAdapterDescriptor
old mode 100644
new mode 100755
similarity index 80%
copy from gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor
copy to gateway-util-configinjector/src/main/resources/META-INF/services/org.apache.hadoop.gateway.config.spi.ConfigurationAdapterDescriptor
index b42eb32..5a240e0
--- a/gateway-provider-identity-assertion-pseudo/src/main/resources/META-INF/services/org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteFunctionDescriptor
+++ b/gateway-util-configinjector/src/main/resources/META-INF/services/org.apache.hadoop.gateway.config.spi.ConfigurationAdapterDescriptor
@@ -15,5 +15,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
-
-org.apache.hadoop.gateway.identityasserter.function.UsernameFunctionDescriptor
\ No newline at end of file
+org.apache.hadoop.gateway.config.impl.MapConfigurationAdapterDescriptor
+org.apache.hadoop.gateway.config.impl.PropertiesConfigurationAdapterDescriptor
+org.apache.hadoop.gateway.config.impl.BeanConfigurationAdapterDescriptor
\ No newline at end of file
diff --git a/gateway-service-yarn-rm/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor b/gateway-util-configinjector/src/main/resources/META-INF/services/org.apache.hadoop.gateway.config.spi.ConfigurationInjector
old mode 100644
new mode 100755
similarity index 92%
copy from gateway-service-yarn-rm/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
copy to gateway-util-configinjector/src/main/resources/META-INF/services/org.apache.hadoop.gateway.config.spi.ConfigurationInjector
index 3db76c7..e3a0d65
--- a/gateway-service-yarn-rm/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ServiceDeploymentContributor
+++ b/gateway-util-configinjector/src/main/resources/META-INF/services/org.apache.hadoop.gateway.config.spi.ConfigurationInjector
@@ -15,5 +15,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
-
-org.apache.hadoop.gateway.yarn.rm.ResourceManagerDeploymentContributor
\ No newline at end of file
+org.apache.hadoop.gateway.config.impl.DefaultConfigurationInjector
\ No newline at end of file
diff --git a/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/AdapterSampleTest.java b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/AdapterSampleTest.java
new file mode 100755
index 0000000..f3e392e
--- /dev/null
+++ b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/AdapterSampleTest.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config;
+
+import org.junit.Test;
+
+import java.util.Hashtable;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class AdapterSampleTest {
+
+ public static class Target {
+ @Configure
+ private String username = null;
+ }
+
+ public static class Adapter implements ConfigurationAdapter {
+ private Hashtable config;
+ public Adapter( Hashtable config ) {
+ this.config = config;
+ }
+ @Override
+ public Object getConfigurationValue( String name ) throws ConfigurationException {
+ Object value = config.get( name.toUpperCase() );
+ return value == null ? null : value.toString();
+ }
+ }
+
+ static Hashtable config = new Hashtable();
+ static{ config.put( "USERNAME", "somebody" ); }
+
+ @Test
+ public void sample() {
+ Target target = new Target();
+ Adapter adapter = new Adapter( config );
+ ConfigurationInjectorBuilder.configuration().target( target ).source( adapter ).inject();
+ assertThat( target.username, is( "somebody" ) );
+ }
+
+}
diff --git a/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/FuncTest.java b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/FuncTest.java
new file mode 100755
index 0000000..f4898ac
--- /dev/null
+++ b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/FuncTest.java
@@ -0,0 +1,379 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config;
+
+import org.apache.hadoop.gateway.config.impl.MappedConfigurationBinding;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.core.AllOf.allOf;
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.fail;
+
+import static org.apache.hadoop.gateway.config.ConfigurationInjectorBuilder.*;
+
+public class FuncTest {
+
+ public static class TestBean {
+ @Configure
+ String stringMember = "stringDefault";
+
+ @Configure
+ int intMember = 1;
+
+ @Configure
+ Integer integerMember = Integer.valueOf( 1 );
+
+ @Configure
+ public void setStringProp( String s ) {
+ stringPropField = s;
+ }
+ protected String stringPropField = "stringDefault";
+
+ @Configure
+ @Alias("altStringProp")
+ public void setNamedStringProp( String s ) {
+ stringPropFieldAlt = s;
+ }
+ protected String stringPropFieldAlt = "stringDefault";
+
+ @Configure
+ public void setNamedArgMethod( @Configure @Alias("altArgStringProp") String s ) {
+ stringPropFieldAltArg = s;
+ }
+ protected String stringPropFieldAltArg = "stringDefault";
+
+ @Configure
+ public void setMultiArgs(
+ @Configure @Alias("multiArg1") String s,
+ @Configure @Alias("multiArg2") Integer i,
+ @Configure @Alias("multiArg3") int n ) {
+ multiArgStringField = s;
+ multiArgIntegerField = i;
+ multiArgIntField = n;
+ }
+ String multiArgStringField = "default";
+ Integer multiArgIntegerField = 0;
+ int multiArgIntField = 0;
+
+ }
+
+ @Test
+ public void testMapOfStrings() {
+
+ Map<String,String> testConfig = new HashMap<String,String>();
+ testConfig.put( "stringMember", "stringValue" );
+ testConfig.put( "intMember", "2" );
+ testConfig.put( "integerMember", "2" );
+ testConfig.put( "stringProp", "stringValue" );
+ testConfig.put( "altStringProp", "stringValue" );
+ testConfig.put( "altArgStringProp", "stringValue" );
+ testConfig.put( "multiArg1", "stringValue" );
+ testConfig.put( "multiArg2", "42" );
+ testConfig.put( "multiArg3", "42" );
+
+ TestBean testBean = new TestBean();
+
+ configuration().target( testBean ).source( testConfig ).inject();
+
+ assertThat( testBean.stringMember, is( "stringValue" ) );
+ assertThat( testBean.intMember, is( 2 ) );
+ assertThat( testBean.integerMember, is( new Integer(2) ) );
+ assertThat( testBean.stringPropField, is( "stringValue" ) );
+ assertThat( testBean.stringPropFieldAlt, is( "stringValue" ) );
+ assertThat( testBean.stringPropFieldAltArg, is( "stringValue" ) );
+ assertThat( testBean.multiArgStringField, is( "stringValue" ) );
+ assertThat( testBean.multiArgIntegerField, is( 42 ) );
+ assertThat( testBean.multiArgIntField, is( 42 ) );
+ }
+
+ @Test
+ public void testProperties() {
+
+ Properties testConfig = new Properties();
+ testConfig.put( "stringMember", "stringValue" );
+ testConfig.put( "intMember", "2" );
+ testConfig.put( "integerMember", "2" );
+ testConfig.put( "stringProp", "stringValue" );
+ testConfig.put( "altStringProp", "stringValue" );
+ testConfig.put( "altArgStringProp", "stringValue" );
+ testConfig.put( "multiArg1", "stringValue" );
+ testConfig.put( "multiArg2", "42" );
+ testConfig.put( "multiArg3", "42" );
+
+ TestBean testBean = new TestBean();
+
+ configuration().target( testBean ).source( testConfig ).inject();
+
+ assertThat( testBean.stringMember, is( "stringValue" ) );
+ assertThat( testBean.intMember, is( 2 ) );
+ assertThat( testBean.integerMember, is( new Integer(2) ) );
+ assertThat( testBean.stringPropField, is( "stringValue" ) );
+ assertThat( testBean.stringPropFieldAlt, is( "stringValue" ) );
+ assertThat( testBean.stringPropFieldAltArg, is( "stringValue" ) );
+ assertThat( testBean.multiArgStringField, is( "stringValue" ) );
+ assertThat( testBean.multiArgIntegerField, is( 42 ) );
+ assertThat( testBean.multiArgIntField, is( 42 ) );
+ }
+
+ public static class TestAdapter implements ConfigurationAdapter {
+
+ private Map<String,String> config;
+
+ public TestAdapter( Map<String,String> config ) {
+ this.config = config;
+ }
+
+ @Override
+ public String getConfigurationValue( String name ) {
+ return config.get( name );
+ }
+
+ }
+
+ @Test
+ public void testExplicitProvider() {
+
+ Map<String,String> testConfig = new HashMap<String,String>();
+ testConfig.put( "stringMember", "stringValue" );
+ testConfig.put( "intMember", "2" );
+ testConfig.put( "integerMember", "2" );
+ testConfig.put( "stringProp", "stringValue" );
+ testConfig.put( "altStringProp", "stringValue" );
+ testConfig.put( "altArgStringProp", "stringValue" );
+ testConfig.put( "multiArg1", "stringValue" );
+ testConfig.put( "multiArg2", "42" );
+ testConfig.put( "multiArg3", "42" );
+
+ TestBean testBean = new TestBean();
+
+ configuration().target( testBean ).source( new TestAdapter( testConfig ) ).inject();
+
+ assertThat( testBean.stringMember, is( "stringValue" ) );
+ assertThat( testBean.intMember, is( 2 ) );
+ assertThat( testBean.integerMember, is( new Integer(2) ) );
+ assertThat( testBean.stringPropField, is( "stringValue" ) );
+ assertThat( testBean.stringPropFieldAlt, is( "stringValue" ) );
+ assertThat( testBean.stringPropFieldAltArg, is( "stringValue" ) );
+ assertThat( testBean.multiArgStringField, is( "stringValue" ) );
+ assertThat( testBean.multiArgIntegerField, is( 42 ) );
+ assertThat( testBean.multiArgIntField, is( 42 ) );
+ }
+
+ @Test
+ public void testMapOfObjects() {
+
+ Map<Object,Object> testConfig = new HashMap<Object,Object>();
+ testConfig.put( "stringMember", "stringValue" );
+ testConfig.put( "intMember", 42 );
+ testConfig.put( "integerMember", new Integer(42) );
+ testConfig.put( "stringProp", "stringValue" );
+ testConfig.put( "altStringProp", "stringValue" );
+ testConfig.put( "altArgStringProp", "stringValue" );
+ testConfig.put( "multiArg1", "stringValue" );
+ testConfig.put( "multiArg2", new Integer(42) );
+ testConfig.put( "multiArg3", "42" );
+
+ TestBean testBean = new TestBean();
+
+ configuration().target( testBean ).source( testConfig ).inject();
+
+ assertThat( testBean.stringMember, is( "stringValue" ) );
+ assertThat( testBean.intMember, is( 42 ) );
+ assertThat( testBean.integerMember, is( new Integer(42) ) );
+ assertThat( testBean.stringPropField, is( "stringValue" ) );
+ assertThat( testBean.stringPropFieldAlt, is( "stringValue" ) );
+ assertThat( testBean.stringPropFieldAltArg, is( "stringValue" ) );
+ assertThat( testBean.multiArgStringField, is( "stringValue" ) );
+ assertThat( testBean.multiArgIntegerField, is( 42 ) );
+ assertThat( testBean.multiArgIntField, is( 42 ) );
+ }
+
+ public class Target {
+ @Configure @Alias("user.name")
+ private String user;
+ }
+
+ public class Adapter implements ConfigurationAdapter {
+ @Override
+ public Object getConfigurationValue( String name ) throws ConfigurationException {
+ return System.getProperty( name );
+ }
+ }
+
+ @Test
+ public void testFactoryConfigurationDirect() {
+ Target target = new Target();
+ configuration().target( target ).source( System.getProperties() ).inject();
+ assertThat( target.user, is( System.getProperty( "user.name" ) ) );
+ }
+
+ @Test
+ public void testFactoryConfigurationAdapter() {
+ Target target = new Target();
+ configuration().target( target ).source( new Adapter() ).inject();
+ assertThat( target.user, is( System.getProperty( "user.name" ) ) );
+ }
+
+ @Test
+ public void testMissingRequiredFieldConfiguration() {
+ class RequiredFieldTarget {
+ @Configure
+ private String required;
+ }
+ RequiredFieldTarget target = new RequiredFieldTarget();
+ try {
+ configuration().target( target ).source( System.getProperties() ).inject();
+ fail( "Expected an exception because the configuration values could not be populated." );
+ } catch ( ConfigurationException e ) {
+ assertThat( e.getMessage(), allOf(containsString("Failed"),containsString( "find" ),containsString( "required" )) );
+ }
+ }
+
+ @Test
+ public void testMissingOptionalFieldConfiguration() {
+ class OptionalFieldTarget {
+ @Configure
+ @Optional
+ private String optional = "default";
+ }
+ OptionalFieldTarget target = new OptionalFieldTarget();
+ configuration().target( target ).source( System.getProperties() ).inject();
+ assertThat( target.optional, is("default") );
+ }
+
+ @Test
+ public void testMissingRequiredConfigurationParameter() {
+ class Target {
+ private String field;
+ @Configure
+ public void setRequired(String value) {
+ field = value;
+ }
+ }
+ Target target = new Target();
+ try {
+ configuration().target( target ).source( System.getProperties() ).inject();
+ fail( "Expected an exception because the configuration values could not be populated." );
+ } catch ( ConfigurationException e ) {
+ assertThat( e.getMessage(), allOf(containsString("Failed"),containsString( "find" ),containsString( "required" )) );
+ }
+ }
+
+ @Test
+ public void testMissingRequiredConfigurationParameterWithDefault() {
+ class Target {
+ private String field;
+ @Configure
+ public void setRequired(@Default("default")String value) {
+ field = value;
+ }
+ }
+ Target target = new Target();
+ configuration().target( target ).source( System.getProperties() ).inject();
+ assertThat( target.field, is( "default" ) );
+ }
+
+ @Test
+ public void testTwoMissingRequiredConfigurationParameterWithDefault() {
+ class Target {
+ private String field1;
+ private String field2;
+ @Configure
+ public void setRequired(@Default("default1")String value1, @Default("default2")String value2) {
+ field1 = value1;
+ field2 = value2;
+ }
+ }
+ Target target = new Target();
+ configuration().target( target ).source( System.getProperties() ).inject();
+ assertThat( target.field1, is( "default1" ) );
+ assertThat( target.field2, is("default2") );
+ }
+
+ @Test
+ public void testFieldBinding() {
+ class Target {
+ @Configure
+ private String user;
+ }
+ class Binding extends MappedConfigurationBinding {
+ Binding() {
+ bind("user","user.name");
+ }
+ }
+ Target target = new Target();
+ Properties source = System.getProperties();
+ ConfigurationBinding binding = new Binding();
+ configuration().target( target ).source( source ).binding( binding ).inject();
+ assertThat( target.user, is(System.getProperty("user.name")));
+
+ }
+
+ @Test
+ public void testFieldBindingUsingBuilderBinding() {
+ class Target {
+ @Configure
+ private String user;
+ }
+ Target target = new Target();
+ Properties source = System.getProperties();
+ configuration().target( target ).source( source ).bind( "user", "user.name" ).inject();
+ assertThat( target.user, is(System.getProperty("user.name")));
+
+ }
+
+ @Test
+ public void testFieldBindingUsingBuilderBindingFactory() {
+ class Target {
+ @Configure
+ private String user;
+ }
+ Target target = new Target();
+ Properties source = System.getProperties();
+ ConfigurationBinding binding = configuration().bind( "user", "user.name" ).binding();
+ configuration().target( target ).source( source ).binding( binding ).inject();
+ assertThat( target.user, is( System.getProperty( "user.name" ) ) );
+
+ }
+
+ public static class UserBean {
+ public String getPrincipal() {
+ return "test-user";
+ }
+ }
+
+ @Test
+ public void testBeanAdapter() {
+ Target target = new Target();
+ UserBean bean = new UserBean();
+ configuration()
+ .target( target )
+ .source( bean )
+ .bind( "user.name", "principal" )
+ .inject();
+ assertThat( target.user, is( "test-user" ) );
+
+ }
+
+}
diff --git a/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/MapFieldSampleTest.java b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/MapFieldSampleTest.java
new file mode 100755
index 0000000..b9336b5
--- /dev/null
+++ b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/MapFieldSampleTest.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config;
+
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class MapFieldSampleTest {
+
+ public static class Target {
+ @Configure
+ private int retryLimit = 3;
+ }
+
+ static Map<String,String> config = new HashMap<String,String>();
+ static { config.put( "retryLimit", "5" ); }
+
+ @Test
+ public void sample() {
+ Target target = new Target();
+ ConfigurationInjectorBuilder.configuration().target( target ).source( config ).inject();
+ assertThat( target.retryLimit, is(5) );
+ }
+
+}
diff --git a/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/MapMethodSampleTest.java b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/MapMethodSampleTest.java
new file mode 100755
index 0000000..96f46ec
--- /dev/null
+++ b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/MapMethodSampleTest.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config;
+
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class MapMethodSampleTest {
+
+ public static class Target {
+ private int limit = 3;
+
+ @Configure
+ public void setRetryLimit( int value ) {
+ limit = value;
+ }
+ }
+
+ static Map<String,String> config = new HashMap<String,String>();
+ static { config.put( "retryLimit", "5" ); }
+
+ @Test
+ public void sample() {
+ Target target = new Target();
+ ConfigurationInjectorBuilder.configuration().target( target ).source( config ).inject();
+ assertThat( target.limit, is( 5 ) );
+ }
+
+}
diff --git a/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/PropertiesFactorySampleTest.java b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/PropertiesFactorySampleTest.java
new file mode 100755
index 0000000..2adbe72
--- /dev/null
+++ b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/PropertiesFactorySampleTest.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config;
+
+import org.junit.Test;
+
+import static org.apache.hadoop.gateway.config.ConfigurationInjectorBuilder.*;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class PropertiesFactorySampleTest {
+
+ public static class Target {
+ @Configure @Alias("user.name")
+ private String user = "nobody";
+ }
+
+ @Test
+ public void sampleDirect() {
+ Target target = new Target();
+ configuration().target( target ).source( System.getProperties() ).inject();
+ assertThat( target.user, is( System.getProperty( "user.name" ) ) );
+ }
+
+}
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/PropertiesFieldSampleTest.java
old mode 100644
new mode 100755
similarity index 61%
copy from gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
copy to gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/PropertiesFieldSampleTest.java
index ec57043..db0af61
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
+++ b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/PropertiesFieldSampleTest.java
@@ -1,4 +1,3 @@
-
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -16,16 +15,25 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.provider.federation;
+package org.apache.hadoop.gateway.config;
-import junit.framework.TestCase;
-
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
import org.junit.Test;
-public class PreAuthSSOTest extends TestCase {
- @Test
- public void testPreAuth() throws Exception {
- assertTrue(true);
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class PropertiesFieldSampleTest {
+
+ public static class Target {
+ @Configure @Alias("user.name")
+ private String user = "nobody";
}
+
+ @Test
+ public void sample() {
+ Target target = new Target();
+ ConfigurationInjectorBuilder.configuration().target( target ).source( System.getProperties() ).inject();
+ assertThat( target.user, is( System.getProperty( "user.name" ) ) );
+ }
+
}
diff --git a/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/PropertiesMethodSampleTest.java b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/PropertiesMethodSampleTest.java
new file mode 100755
index 0000000..4b72e3b
--- /dev/null
+++ b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/PropertiesMethodSampleTest.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config;
+
+import org.junit.Test;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class PropertiesMethodSampleTest {
+
+ public static class Target {
+
+ private String user = "nobody";
+ private String home = "nowhere";
+ private String temp = "nowhere";
+
+ @Configure
+ @Alias("user.name")
+ public void setUser( String value ) {
+ user = value;
+ }
+
+ @Configure
+ public void setDirs(
+ @Alias("user.dir") String home,
+ @Alias("java.io.tmpdir") String temp ) {
+ this.home = home;
+ this.temp = temp;
+ }
+ }
+
+ @Test
+ public void sample() {
+ Target target = new Target();
+ ConfigurationInjectorBuilder.configuration().target( target ).source( System.getProperties() ).inject();
+ assertThat( target.user, is( System.getProperty( "user.name" ) ) );
+ assertThat( target.home, is( System.getProperty( "user.dir" ) ) );
+ assertThat( target.temp, is( System.getProperty( "java.io.tmpdir" ) ) );
+ }
+
+}
diff --git a/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/UsageTest.java b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/UsageTest.java
new file mode 100755
index 0000000..2fe963a
--- /dev/null
+++ b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/UsageTest.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config;
+
+import org.junit.Test;
+import static org.apache.hadoop.gateway.config.ConfigurationInjectorBuilder.configuration;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+
+public class UsageTest {
+
+ class Target {
+ @Configure
+ private String user;
+ }
+
+ @Test
+ public void usage() {
+ Target target = new Target();
+ configuration()
+ .target( target )
+ .source( System.getProperties() )
+ .bind( "user", "user.name" )
+ .inject();
+ assertThat( target.user, is(System.getProperty("user.name")));
+ }
+
+}
diff --git a/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapterDescriptorTest.java b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapterDescriptorTest.java
new file mode 100755
index 0000000..80913e2
--- /dev/null
+++ b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapterDescriptorTest.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.config.impl;
+
+import org.apache.hadoop.gateway.config.ConfigurationAdapter;
+import org.apache.hadoop.gateway.config.spi.ConfigurationAdapterDescriptor;
+import org.junit.Test;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.ServiceLoader;
+
+import static junit.framework.TestCase.fail;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.hasKey;
+
+public class BeanConfigurationAdapterDescriptorTest {
+
+ @Test
+ public void testServiceLoader() {
+ ServiceLoader<ConfigurationAdapterDescriptor> loader = ServiceLoader.load( ConfigurationAdapterDescriptor.class );
+ Iterator<ConfigurationAdapterDescriptor> i = loader.iterator();
+ while( i.hasNext() ) {
+ if( i.next() instanceof BeanConfigurationAdapterDescriptor ) {
+ return;
+ }
+ }
+ fail( "Failed to load BeanConfigurationAdapterDescriptor" );
+ }
+
+ @Test
+ public void testDescriptor() {
+ ConfigurationAdapterDescriptor descriptor = new BeanConfigurationAdapterDescriptor();
+ Map<Class<?>,Class<? extends ConfigurationAdapter>> map = descriptor.providedConfigurationAdapters();
+ assertThat( map, hasKey( (Class)Object.class ) );
+ Class<? extends ConfigurationAdapter> type = map.get( Object.class );
+ assertThat(
+ "Descriptor didn't return " + BeanConfigurationAdapter.class.getName(),
+ type == BeanConfigurationAdapter.class );
+ }
+
+}
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapterTest.java
old mode 100644
new mode 100755
similarity index 62%
copy from gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
copy to gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapterTest.java
index ec57043..0d36eb6
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/provider/federation/PreAuthSSOTest.java
+++ b/gateway-util-configinjector/src/test/java/org/apache/hadoop/gateway/config/impl/BeanConfigurationAdapterTest.java
@@ -1,4 +1,3 @@
-
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -16,16 +15,26 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.gateway.provider.federation;
+package org.apache.hadoop.gateway.config.impl;
-import junit.framework.TestCase;
-
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
import org.junit.Test;
-public class PreAuthSSOTest extends TestCase {
- @Test
- public void testPreAuth() throws Exception {
- assertTrue(true);
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+public class BeanConfigurationAdapterTest {
+
+ public static class Bean {
+ public String getValue() {
+ return "beanValue";
+ };
}
+
+ @Test
+ public void test() {
+ Bean bean = new Bean();
+ BeanConfigurationAdapter adapter = new BeanConfigurationAdapter( bean );
+ assertThat( adapter.getConfigurationValue( "value" ).toString(), is( "beanValue" ) );
+ }
+
}
diff --git a/hsso-release/pom.xml b/hsso-release/pom.xml
index df85c52..c5abf8e 100644
--- a/hsso-release/pom.xml
+++ b/hsso-release/pom.xml
@@ -119,21 +119,13 @@
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
- <artifactId>gateway-service-oozie</artifactId>
- </dependency>
- <dependency>
- <groupId>${gateway-group}</groupId>
- <artifactId>gateway-service-webhcat</artifactId>
+ <artifactId>gateway-service-definitions</artifactId>
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
<artifactId>gateway-service-tgs</artifactId>
</dependency>
<dependency>
- <groupId>${gateway-group}</groupId>
- <artifactId>gateway-service-yarn-rm</artifactId>
- </dependency>
- <dependency>
<groupId>${gateway-group}</groupId>
<artifactId>gateway-provider-rewrite</artifactId>
</dependency>
diff --git a/pom.xml b/pom.xml
index e23f699..e412a81 100644
--- a/pom.xml
+++ b/pom.xml
@@ -36,6 +36,7 @@
<modules>
<module>gateway-test-utils</module>
<module>gateway-util-common</module>
+ <module>gateway-util-configinjector</module>
<module>gateway-util-launcher</module>
<module>gateway-util-urltemplate</module>
<module>gateway-test-ldap</module>
@@ -57,6 +58,8 @@
<module>gateway-provider-security-hadoopauth</module>
<module>gateway-provider-security-shiro</module>
<module>gateway-provider-security-authz-acls</module>
+ <module>gateway-provider-identity-assertion-common</module>
+ <module>gateway-provider-identity-assertion-concat</module>
<module>gateway-provider-security-picketlink</module>
<module>gateway-provider-identity-assertion-pseudo</module>
<module>gateway-provider-jersey</module>
@@ -67,10 +70,9 @@
<module>gateway-service-hive</module>
<module>gateway-service-knoxsso</module>
<module>gateway-service-webhdfs</module>
- <module>gateway-service-oozie</module>
- <module>gateway-service-webhcat</module>
<module>gateway-service-tgs</module>
- <module>gateway-service-yarn-rm</module>
+ <module>gateway-service-storm</module>
+ <module>gateway-service-definitions</module>
<module>gateway-shell</module>
<module>gateway-shell-launcher</module>
<module>knox-cli-launcher</module>
@@ -353,6 +355,11 @@
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
+ <artifactId>gateway-util-configinjector</artifactId>
+ <version>${gateway-version}</version>
+ </dependency>
+ <dependency>
+ <groupId>${gateway-group}</groupId>
<artifactId>gateway-util-launcher</artifactId>
<version>${gateway-version}</version>
</dependency>
@@ -418,6 +425,16 @@
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
+ <artifactId>gateway-provider-identity-assertion-common</artifactId>
+ <version>${gateway-version}</version>
+ </dependency>
+ <dependency>
+ <groupId>${gateway-group}</groupId>
+ <artifactId>gateway-provider-identity-assertion-concat</artifactId>
+ <version>${gateway-version}</version>
+ </dependency>
+ <dependency>
+ <groupId>${gateway-group}</groupId>
<artifactId>gateway-provider-identity-assertion-pseudo</artifactId>
<version>${gateway-version}</version>
</dependency>
@@ -498,17 +515,12 @@
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
- <artifactId>gateway-service-webhcat</artifactId>
+ <artifactId>gateway-service-storm</artifactId>
<version>${gateway-version}</version>
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
- <artifactId>gateway-service-yarn-rm</artifactId>
- <version>${gateway-version}</version>
- </dependency>
- <dependency>
- <groupId>${gateway-group}</groupId>
- <artifactId>gateway-service-oozie</artifactId>
+ <artifactId>gateway-service-definitions</artifactId>
<version>${gateway-version}</version>
</dependency>
<dependency>
@@ -562,7 +574,6 @@
<artifactId>jboss-logging</artifactId>
<version>3.2.0.Final</version>
</dependency>
-
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet</artifactId>
@@ -573,7 +584,18 @@
<artifactId>jersey-server</artifactId>
<version>2.6</version>
</dependency>
-
+ <dependency>
+ <groupId>com.nimbusds</groupId>
+ <artifactId>nimbus-jose-jwt</artifactId>
+ <version>3.9</version>
+ <scope>compile</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.bouncycastle</groupId>
+ <artifactId>bcprov-jdk15on</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
@@ -631,7 +653,7 @@
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
- <version>4.2.5</version>
+ <version>4.3.6</version>
</dependency>
<!--
@@ -841,7 +863,16 @@
<artifactId>commons-net</artifactId>
<version>1.4.1</version>
</dependency>
-
+ <dependency>
+ <groupId>commons-collections</groupId>
+ <artifactId>commons-collections</artifactId>
+ <version>3.2.1</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-beanutils</groupId>
+ <artifactId>commons-beanutils</artifactId>
+ <version>1.9.2</version>
+ </dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-digester3</artifactId>
@@ -908,6 +939,12 @@
<version>1.2.3</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.shiro</groupId>
+ <artifactId>shiro-ehcache</artifactId>
+ <version>1.2.3</version>
+ </dependency>
+
<!-- Html pull parser. EPLv1 license -->
<dependency>
<groupId>net.htmlparser.jericho</groupId>