AMBARI-18335. After upgrading cluster from HDP-2.4.x to HDP-2.5.x and added atlas service - missing kafka security properties (rlevas)
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
new file mode 100644
index 0000000..f1eab38
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/UpgradeUserKerberosDescriptor.java
@@ -0,0 +1,205 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.serveraction.upgrades;
+
+import com.google.inject.Inject;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.actionmanager.HostRoleCommand;
+import org.apache.ambari.server.actionmanager.HostRoleStatus;
+import org.apache.ambari.server.agent.CommandReport;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.orm.dao.ArtifactDAO;
+import org.apache.ambari.server.orm.entities.ArtifactEntity;
+import org.apache.ambari.server.serveraction.AbstractServerAction;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosDescriptorUpdateHelper;
+import org.apache.ambari.server.state.stack.upgrade.Direction;
+import org.apache.commons.lang.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.TreeMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * Update the user-defined Kerberos Descriptor to work with the current stack.
+ *
+ * @see org.apache.ambari.server.state.kerberos.KerberosDescriptorUpdateHelper
+ */
+public class UpgradeUserKerberosDescriptor extends AbstractServerAction {
+ private static final Logger LOG = LoggerFactory.getLogger(UpgradeUserKerberosDescriptor.class);
+
+ /**
+ * The upgrade direction.
+ *
+ * @see Direction
+ */
+ private static final String UPGRADE_DIRECTION_KEY = "upgrade_direction";
+
+ /**
+ * The original "current" stack of the cluster before the upgrade started.
+ * This is the same regardless of whether the current direction is
+ * {@link Direction#UPGRADE} or {@link Direction#DOWNGRADE}.
+ *
+ * @see Direction
+ */
+ private static final String ORIGINAL_STACK_KEY = "original_stack";
+
+ /**
+ * The target upgrade stack before the upgrade started. This is the same
+ * regardless of whether the current direction is {@link Direction#UPGRADE} or
+ * {@link Direction#DOWNGRADE}.
+ *
+ * @see Direction
+ */
+ private static final String TARGET_STACK_KEY = "target_stack";
+
+ @Inject
+ private ArtifactDAO artifactDAO;
+
+ @Inject
+ private Clusters clusters;
+
+ @Inject
+ private AmbariMetaInfo ambariMetaInfo;
+
+ @Inject
+ private KerberosDescriptorFactory kerberosDescriptorFactory;
+
+ protected UpgradeUserKerberosDescriptor() {
+ }
+
+ /**
+ * Updates the cluster's user-defined Kerberos descriptor artifact to work with the new stack.
+ * <p/>
+ * Merges the changes between the previous and new stack Kerberos descriptors into the user-specified descriptor and persists the result.
+ */
+ @Override
+ public CommandReport execute(ConcurrentMap<String, Object> requestSharedDataContext)
+ throws AmbariException, InterruptedException {
+ HostRoleCommand hostRoleCommand = getHostRoleCommand();
+ String clusterName = hostRoleCommand.getExecutionCommandWrapper().getExecutionCommand().getClusterName();
+ Cluster cluster = clusters.getCluster(clusterName);
+ List<String> messages = new ArrayList<String>();
+ List<String> errorMessages = new ArrayList<String>();
+
+ if (cluster != null) {
+ logMessage(messages, "Obtaining the user-defined Kerberos descriptor");
+
+ TreeMap<String, String> foreignKeys = new TreeMap<String, String>();
+ foreignKeys.put("cluster", String.valueOf(cluster.getClusterId()));
+
+ ArtifactEntity entity = artifactDAO.findByNameAndForeignKeys("kerberos_descriptor", foreignKeys);
+ KerberosDescriptor userDescriptor = (entity == null) ? null : kerberosDescriptorFactory.createInstance(entity.getArtifactData());
+
+ if (userDescriptor != null) {
+ StackId originalStackId = getStackIdFromCommandParams(ORIGINAL_STACK_KEY);
+ StackId targetStackId = getStackIdFromCommandParams(TARGET_STACK_KEY);
+ boolean isDowngrade = isDowngrade();
+
+ StackId newVersion = (isDowngrade) ? originalStackId : targetStackId;
+ StackId previousVersion = (isDowngrade) ? targetStackId : originalStackId;
+ KerberosDescriptor newDescriptor = null;
+ KerberosDescriptor previousDescriptor = null;
+
+ if (newVersion == null) {
+ logErrorMessage(messages, errorMessages, "The new stack version information was not found.");
+ } else {
+ logMessage(messages, String.format("Obtaining new stack Kerberos descriptor for %s.", newVersion.toString()));
+ newDescriptor = ambariMetaInfo.getKerberosDescriptor(newVersion.getStackName(), newVersion.getStackVersion());
+
+ if (newDescriptor == null) {
+ logErrorMessage(messages, errorMessages, String.format("The Kerberos descriptor for the new stack version, %s, was not found.", newVersion.toString()));
+ }
+ }
+
+ if (previousVersion == null) {
+ logErrorMessage(messages, errorMessages, "The previous stack version information was not found.");
+ } else {
+ logMessage(messages, String.format("Obtaining previous stack Kerberos descriptor for %s.", previousVersion.toString()));
+ previousDescriptor = ambariMetaInfo.getKerberosDescriptor(previousVersion.getStackName(), previousVersion.getStackVersion());
+
+ if (previousDescriptor == null) {
+ logErrorMessage(messages, errorMessages, String.format("The Kerberos descriptor for the previous stack version, %s, was not found.", previousVersion.toString()));
+ }
+ }
+
+ if (errorMessages.isEmpty()) {
+ logMessage(messages, "Updating the user-specified Kerberos descriptor.");
+
+ KerberosDescriptor updatedDescriptor = KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(
+ previousDescriptor,
+ newDescriptor,
+ userDescriptor);
+
+ logMessage(messages, "Storing updated user-specified Kerberos descriptor.");
+
+ entity.setArtifactData(updatedDescriptor.toMap());
+ artifactDAO.merge(entity);
+
+ logMessage(messages, "Successfully updated the user-specified Kerberos descriptor.");
+ }
+ } else {
+ logMessage(messages, "A user-specified Kerberos descriptor was not found. No updates are necessary.");
+ }
+ } else {
+ logErrorMessage(messages, errorMessages, String.format("The cluster named %s was not found.", clusterName));
+ }
+
+ if (!errorMessages.isEmpty()) {
+ logErrorMessage(messages, errorMessages, "No updates have been performed due to previous issues.");
+ }
+
+ return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", StringUtils.join(messages, "\n"), StringUtils.join(errorMessages, "\n"));
+ }
+
+ /**
+ * Determines if upgrade direction is {@link Direction#UPGRADE} or {@link Direction#DOWNGRADE}.
+ *
+ * @return {@code true} if {@link Direction#DOWNGRADE}; {@code false} if {@link Direction#UPGRADE}
+ */
+ private boolean isDowngrade() {
+ return Direction.DOWNGRADE.name().equalsIgnoreCase(getCommandParameterValue(UPGRADE_DIRECTION_KEY));
+ }
+
+ private StackId getStackIdFromCommandParams(String commandParamKey) {
+ String stackId = getCommandParameterValue(commandParamKey);
+ if (stackId == null) {
+ return null;
+ } else {
+ return new StackId(stackId);
+ }
+ }
+
+ private void logMessage(List<String> messages, String message) {
+ LOG.info(message);
+ messages.add(message);
+ }
+
+ private void logErrorMessage(List<String> messages, List<String> errorMessages, String message) {
+ LOG.error(message);
+ messages.add(message);
+ errorMessages.add(message);
+ }
+}
\ No newline at end of file
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptor.java
index 84a9111..2112fcc 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptor.java
@@ -18,8 +18,8 @@
package org.apache.ambari.server.state.kerberos;
-import java.util.HashMap;
import java.util.Map;
+import java.util.TreeMap;
/**
* AbstractKerberosDescriptor is the base class for all Kerberos*Descriptor and associated classes.
@@ -65,7 +65,7 @@
* @return a Map of date representing this AbstractKerberosDescriptor implementation
*/
public Map<String, Object> toMap() {
- HashMap<String, Object> dataMap = new HashMap<String, Object>();
+ TreeMap<String, Object> dataMap = new TreeMap<String, Object>();
String name = getName();
if (name != null) {
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
index 39ebdaf..ad2437a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
@@ -23,12 +23,12 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
+import java.util.TreeMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.TreeSet;
import java.util.regex.Pattern;
/**
@@ -165,8 +165,8 @@
/**
* Returns a specific named child container
- * @param name the name of the child container to retrieve
*
+ * @param name the name of the child container to retrieve
* @return an {@link AbstractKerberosDescriptorContainer}
*/
public abstract AbstractKerberosDescriptorContainer getChildContainer(String name);
@@ -193,6 +193,17 @@
}
/**
+ * Set the {@link KerberosIdentityDescriptor} for this {@link AbstractKerberosDescriptorContainer}.
+ *
+ * @param identities a {@link List} of {@link KerberosIdentityDescriptor}s
+ */
+ public void setIdentities(List<KerberosIdentityDescriptor> identities) {
+ this.identities = (identities == null)
+ ? null
+ : new ArrayList<KerberosIdentityDescriptor>(identities);
+ }
+
+ /**
* Returns a List of KerberosIdentityDescriptors contained within this
* AbstractKerberosDescriptorContainer.
* <p/>
@@ -215,9 +226,9 @@
* (false)
* @return a List of the requested KerberosIdentityDescriptors
*/
- public List<KerberosIdentityDescriptor> getIdentities(boolean resolveReferences, Map<String,Object> contextForFilter) throws AmbariException {
+ public List<KerberosIdentityDescriptor> getIdentities(boolean resolveReferences, Map<String, Object> contextForFilter) throws AmbariException {
if (identities == null) {
- return Collections.emptyList();
+ return null;
} else {
List<KerberosIdentityDescriptor> list = new ArrayList<KerberosIdentityDescriptor>();
@@ -317,6 +328,18 @@
}
/**
+ * Sets the {@link Map} of {@link KerberosConfigurationDescriptor}s for this
+ * {@link AbstractKerberosDescriptorContainer}.
+ *
+ * @param configurations a {@link Map} of {@link KerberosConfigurationDescriptor}s
+ */
+ public void setConfigurations(Map<String, KerberosConfigurationDescriptor> configurations) {
+ this.configurations = (configurations == null)
+ ? null
+ : new TreeMap<String, KerberosConfigurationDescriptor>(configurations);
+ }
+
+ /**
* Returns a Map of raw KerberosConfigurationDescriptors contained within this
* AbstractKerberosDescriptorContainer.
* <p/>
@@ -353,7 +376,7 @@
*/
public Map<String, KerberosConfigurationDescriptor> getConfigurations(boolean includeInherited) {
if (includeInherited) {
- Map<String, KerberosConfigurationDescriptor> mergedConfigurations = new HashMap<String, KerberosConfigurationDescriptor>();
+ Map<String, KerberosConfigurationDescriptor> mergedConfigurations = new TreeMap<String, KerberosConfigurationDescriptor>();
List<Map<String, KerberosConfigurationDescriptor>> configurationSets = new ArrayList<Map<String, KerberosConfigurationDescriptor>>();
AbstractKerberosDescriptor currentDescriptor = this;
@@ -417,7 +440,7 @@
}
if (configurations == null) {
- configurations = new HashMap<String, KerberosConfigurationDescriptor>();
+ configurations = new TreeMap<String, KerberosConfigurationDescriptor>();
}
configurations.put(type, configuration);
@@ -448,7 +471,7 @@
public void putAuthToLocalProperty(String authToLocalProperty) {
if (authToLocalProperty != null) {
if (authToLocalProperties == null) {
- authToLocalProperties = new HashSet<String>();
+ authToLocalProperties = new TreeSet<String>();
}
authToLocalProperties.add(authToLocalProperty);
@@ -456,6 +479,17 @@
}
/**
+ * Sets the set of <code>auth_to_local</code> property names.
+ *
+ * @param authToLocalProperties a Set of String values; or null if not set
+ */
+ public void setAuthToLocalProperties(Set<String> authToLocalProperties) {
+ this.authToLocalProperties = (authToLocalProperties == null)
+ ? null
+ : new TreeSet<String>(authToLocalProperties);
+ }
+
+ /**
* Gets the set of <code>auth_to_local</code> property names.
*
* @return a Set of String values; or null if not set
@@ -582,13 +616,13 @@
KerberosIdentityDescriptor identityDescriptor = null;
if (path != null) {
- if(path.startsWith("../")) {
+ if (path.startsWith("../")) {
// Resolve parent path
AbstractKerberosDescriptor parent = getParent();
path = path.substring(2);
- while(parent != null) {
+ while (parent != null) {
String name = parent.getName();
if (name != null) {
@@ -694,24 +728,27 @@
Map<String, Object> map = super.toMap();
if (identities != null) {
- List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
+ // Use a TreeMap to force the identity definitions to be ordered by name, alphabetically.
+ // This helps with readability and comparisons.
+ Map<String, Map<String, Object>> list = new TreeMap<String, Map<String, Object>>();
for (KerberosIdentityDescriptor identity : identities) {
- list.add(identity.toMap());
+ list.put(identity.getName(), identity.toMap());
}
- map.put(Type.IDENTITY.getDescriptorPluralName(), list);
+ map.put(Type.IDENTITY.getDescriptorPluralName(), list.values());
}
if (configurations != null) {
- List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
+ // Use a TreeMap to force the configurations to be ordered by configuration type, alphabetically.
+ // This helps with readability and comparisons.
+ Map<String, Map<String, Object>> list = new TreeMap<String, Map<String, Object>>();
for (KerberosConfigurationDescriptor configuration : configurations.values()) {
- list.add(configuration.toMap());
+ list.put(configuration.getType(), configuration.toMap());
}
- map.put(Type.CONFIGURATION.getDescriptorPluralName(), list);
+ map.put(Type.CONFIGURATION.getDescriptorPluralName(), list.values());
}
if (authToLocalProperties != null) {
- List<String> list = new ArrayList<String>(authToLocalProperties);
- map.put(Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), list);
+ map.put(Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalProperties);
}
return map;
@@ -723,6 +760,9 @@
((getIdentities() == null)
? 0
: getIdentities().hashCode()) +
+ ((getAuthToLocalProperties() == null)
+ ? 0
+ : getAuthToLocalProperties().hashCode()) +
((getConfigurations() == null)
? 0
: getConfigurations().hashCode());
@@ -743,6 +783,11 @@
: getIdentities().equals(descriptor.getIdentities())
) &&
(
+ (getAuthToLocalProperties() == null)
+ ? (descriptor.getAuthToLocalProperties() == null)
+ : getAuthToLocalProperties().equals(descriptor.getAuthToLocalProperties())
+ ) &&
+ (
(getConfigurations() == null)
? (descriptor.getConfigurations() == null)
: getConfigurations().equals(descriptor.getConfigurations())
@@ -768,7 +813,7 @@
private KerberosIdentityDescriptor dereferenceIdentity(KerberosIdentityDescriptor identity) throws AmbariException {
KerberosIdentityDescriptor dereferencedIdentity = null;
- if(identity != null) {
+ if (identity != null) {
KerberosIdentityDescriptor referencedIdentity;
try {
if (identity.getReference() != null) {
@@ -790,8 +835,7 @@
dereferencedIdentity = new KerberosIdentityDescriptor(referencedIdentity.toMap());
dereferencedIdentity.update(identity);
}
- }
- else {
+ } else {
dereferencedIdentity = new KerberosIdentityDescriptor(identity.toMap());
}
}
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptor.java
index 3cdd9908..bc5f936 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptor.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -18,7 +18,7 @@
package org.apache.ambari.server.state.kerberos;
-import java.util.HashMap;
+import java.util.TreeMap;
import java.util.Map;
import java.util.Set;
@@ -105,6 +105,19 @@
}
/**
+ * Set the properties of the configuration data represented by this KerberosConfigurationDescriptor
+ *
+ * @param properties a Map of properties
+ */
+ public void setProperties(Map<String, String> properties) {
+ if (properties == null) {
+ this.properties = null;
+ } else {
+ this.properties = new TreeMap<String, String>(properties);
+ }
+ }
+
+ /**
* Gets the properties of the configuration data represented by this KerberosConfigurationDescriptor
*
* @return a Map of properties
@@ -137,7 +150,7 @@
}
if (properties == null) {
- properties = new HashMap<String, String>();
+ properties = new TreeMap<String, String>();
}
properties.put(name, value);
@@ -173,8 +186,8 @@
*/
@Override
public Map<String, Object> toMap() {
- Map<String, Object> map = new HashMap<String, Object>();
- map.put(getName(), (properties == null) ? null : new HashMap<String, Object>(properties));
+ Map<String, Object> map = new TreeMap<String, Object>();
+ map.put(getName(), (properties == null) ? null : new TreeMap<String, Object>(properties));
return map;
}
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
index 484f65c..e7be589 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -20,7 +20,7 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
-import java.util.HashMap;
+import java.util.TreeMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -147,6 +147,17 @@
}
/**
+ * Set the KerberosServiceDescriptors in this KerberosDescriptor
+ *
+ * @param services a Map of String to KerberosServiceDescriptor
+ */
+ public void setServices(Map<String, KerberosServiceDescriptor> services) {
+ this.services = (services == null)
+ ? null
+ : new TreeMap<String, KerberosServiceDescriptor>(services);
+ }
+
+ /**
* Returns a Map of the KerberosServiceDescriptors in this KerberosDescriptor
*
* @return a Map of String to KerberosServiceDescriptor
@@ -182,7 +193,7 @@
}
if (services == null) {
- services = new HashMap<String, KerberosServiceDescriptor>();
+ services = new TreeMap<String, KerberosServiceDescriptor>();
}
services.put(name, service);
@@ -193,6 +204,17 @@
}
/**
+ * Set the Map of properties for this KerberosDescriptor
+ *
+ * @param properties a Map of String to String values
+ */
+ public void setProperties(Map<String, String> properties) {
+ this.properties = (properties == null)
+ ? null
+ : new TreeMap<String, String>(properties);
+ }
+
+ /**
* Gets the Map of properties for this KerberosDescriptor
*
* @return a Map of String to String values
@@ -225,7 +247,7 @@
}
if (properties == null) {
- properties = new HashMap<String, String>();
+ properties = new TreeMap<String, String>();
}
properties.put(name, value);
@@ -304,7 +326,7 @@
}
if (properties != null) {
- map.put("properties", new HashMap<String, String>(properties));
+ map.put("properties", new TreeMap<String, String>(properties));
}
return map;
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelper.java
new file mode 100644
index 0000000..2eef4b9
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelper.java
@@ -0,0 +1,585 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.state.kerberos;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.TreeMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
+/**
+ * KerberosDescriptorUpdateHelper provides routines for upgrading the user-specified Kerberos descriptor
+ * when changing stacks.
+ * <p>
+ * This implementation should work for stack upgrades and downgrades.
+ */
+public class KerberosDescriptorUpdateHelper {
+ private static final Logger LOG = LoggerFactory.getLogger(KerberosDescriptorUpdateHelper.class);
+
+ /**
+ * The entry point into upgrading a user-specified Kerberos descriptor.
+ * <p>
+ * The supplied Kerberos descriptors will remain untouched and new Kerberos descriptor instance will
+ * created and returned with the update data.
+ *
+ * @param beginningStackKerberosDescriptor the Kerberos descriptor for the previous stack version
+ * @param endingStackKerberosDescriptor the Kerberos descriptor for the new stack version
+ * @param userKerberosDescriptor the user-specified Kerberos descriptor
+ * @return a new Kerberos descriptor containing the updated user-specified data
+ */
+ public static KerberosDescriptor updateUserKerberosDescriptor(KerberosDescriptor beginningStackKerberosDescriptor,
+ KerberosDescriptor endingStackKerberosDescriptor,
+ KerberosDescriptor userKerberosDescriptor) {
+ KerberosDescriptor updated = new KerberosDescriptor(userKerberosDescriptor.toMap());
+
+ updated.setProperties(processProperties(
+ beginningStackKerberosDescriptor.getProperties(),
+ endingStackKerberosDescriptor.getProperties(),
+ updated.getProperties()));
+
+ updated.setConfigurations(processConfigurations(
+ beginningStackKerberosDescriptor.getConfigurations(),
+ endingStackKerberosDescriptor.getConfigurations(),
+ updated.getConfigurations()));
+
+ updated.setIdentities(processIdentities(
+ beginningStackKerberosDescriptor.getIdentities(),
+ endingStackKerberosDescriptor.getIdentities(),
+ updated.getIdentities()));
+
+ updated.setAuthToLocalProperties(processAuthToLocalProperties(
+ beginningStackKerberosDescriptor.getAuthToLocalProperties(),
+ endingStackKerberosDescriptor.getAuthToLocalProperties(),
+ updated.getAuthToLocalProperties()));
+
+ updated.setServices(processServices(
+ beginningStackKerberosDescriptor.getServices(),
+ endingStackKerberosDescriptor.getServices(),
+ updated.getServices()));
+
+ return updated;
+ }
+
+ /**
+ * Processes the service-level Kerberos descriptors to add, remove, or update data in the user-specified
+ * Kerberos descriptor.
+ *
+ * @param previousStackServices a map of {@link KerberosServiceDescriptor}s from the previous stack version's Kerberos descriptor
+ * @param newStackServices a map of {@link KerberosServiceDescriptor}s from the new stack version's Kerberos descriptor
+ * @param userServices a map of {@link KerberosServiceDescriptor}s from the user-supplied Kerberos descriptor
+ * @return a map of updated {@link KerberosServiceDescriptor}s
+ */
+ private static Map<String, KerberosServiceDescriptor> processServices(Map<String, KerberosServiceDescriptor> previousStackServices,
+ Map<String, KerberosServiceDescriptor> newStackServices,
+ Map<String, KerberosServiceDescriptor> userServices) {
+ if ((userServices == null) || userServices.isEmpty() || ((previousStackServices == null) && (newStackServices == null))) {
+ return userServices;
+ }
+
+ Map<String, KerberosServiceDescriptor> updatedServices = new TreeMap<String, KerberosServiceDescriptor>();
+
+ if (previousStackServices == null) {
+ previousStackServices = Collections.emptyMap();
+ }
+
+ if (newStackServices == null) {
+ newStackServices = Collections.emptyMap();
+ }
+
+ for (Map.Entry<String, KerberosServiceDescriptor> entry : userServices.entrySet()) {
+ String name = entry.getKey();
+ KerberosServiceDescriptor userValue = entry.getValue();
+
+ if (userValue != null) {
+ if (newStackServices.containsKey(name)) {
+ KerberosServiceDescriptor oldValue = previousStackServices.get(name);
+ KerberosServiceDescriptor newValue = newStackServices.get(name);
+
+ LOG.debug("Processing service {} for modifications", name);
+ updatedServices.put(name, processService(oldValue, newValue, userValue));
+ } else if (previousStackServices.containsKey(name)) {
+ LOG.debug("Removing service {} from user-specified Kerberos Descriptor", name);
+ // Nothing to do here, just don't add it to the updated configurations map...
+ } else {
+ LOG.debug("Leaving service {} in user-specified Kerberos Descriptor unchanged since it was user-defined.", name);
+ updatedServices.put(name, userValue);
+ }
+ }
+ }
+
+ // Note: there is no need to add service definitions that do not exist since they will get
+ // added dynamically when merged with the stack default value.
+
+ return updatedServices;
+ }
+
+ /**
+ * Processes a {@link KerberosServiceDescriptor} to change the user-supplied data based on the changes
+ * observed between the previous stack version's data and the new stack version's data.
+ *
+ * @param previousStackService a {@link KerberosServiceDescriptor} from the previous stack version's Kerberos descriptor
+ * @param newStackService a {@link KerberosServiceDescriptor} from the new stack version's Kerberos descriptor
+ * @param userService a {@link KerberosServiceDescriptor} from the user-specified Kerberos descriptor
+ * @return the updated {@link KerberosServiceDescriptor}
+ */
+ private static KerberosServiceDescriptor processService(KerberosServiceDescriptor previousStackService,
+ KerberosServiceDescriptor newStackService,
+ KerberosServiceDescriptor userService) {
+
+ KerberosServiceDescriptor updatedService = new KerberosServiceDescriptor(userService.toMap());
+
+ updatedService.setAuthToLocalProperties(processAuthToLocalProperties(
+ (previousStackService == null) ? null : previousStackService.getAuthToLocalProperties(),
+ (newStackService == null) ? null : newStackService.getAuthToLocalProperties(),
+ updatedService.getAuthToLocalProperties()));
+
+ updatedService.setConfigurations(processConfigurations(
+ (previousStackService == null) ? null : previousStackService.getConfigurations(),
+ (newStackService == null) ? null : newStackService.getConfigurations(),
+ updatedService.getConfigurations()));
+
+ updatedService.setIdentities(processIdentities(
+ (previousStackService == null) ? null : previousStackService.getIdentities(),
+ (newStackService == null) ? null : newStackService.getIdentities(),
+ updatedService.getIdentities()));
+
+ Map<String, KerberosComponentDescriptor> userServiceComponents = updatedService.getComponents();
+ Map<String, KerberosComponentDescriptor> newServiceComponents = (newStackService == null) ? null : newStackService.getComponents();
+ Map<String, KerberosComponentDescriptor> oldServiceComponents = (previousStackService == null) ? null : previousStackService.getComponents();
+
+ if (newServiceComponents == null) {
+ newServiceComponents = Collections.emptyMap();
+ }
+
+ if (oldServiceComponents == null) {
+ oldServiceComponents = Collections.emptyMap();
+ }
+
+ if (userServiceComponents != null) {
+ Iterator<Map.Entry<String, KerberosComponentDescriptor>> iterator = userServiceComponents.entrySet().iterator();
+ while (iterator.hasNext()) {
+ Map.Entry<String, KerberosComponentDescriptor> entry = iterator.next();
+ String name = entry.getKey();
+ KerberosComponentDescriptor userValue = entry.getValue();
+
+ if (userValue == null) {
+ iterator.remove(); // This is a bad entry... remove it.
+ } else if (newServiceComponents.containsKey(name)) {
+ KerberosComponentDescriptor oldValue = oldServiceComponents.get(name);
+ KerberosComponentDescriptor newValue = newServiceComponents.get(name);
+
+ LOG.debug("Processing component {}/{} for modifications", updatedService.getName(), name);
+ processComponent(oldValue, newValue, userValue);
+ } else {
+ LOG.debug("Removing component {}/{} from user-specified Kerberos Descriptor", updatedService.getName(), name);
+ iterator.remove();
+ }
+ }
+ }
+
+ return updatedService;
+ }
+
+ /**
+ * Processes a {@link KerberosComponentDescriptor} to change the user-supplied data based on the changes
+ * observed between the previous stack version's data and the new stack version's data.
+ * <p>
+ * The supplied userComponent value is updated in place.
+ *
+ * @param previousStackComponent a {@link KerberosComponentDescriptor} from the previous stack version's Kerberos descriptor
+ * @param newStackComponent a {@link KerberosComponentDescriptor} from the new stack version's Kerberos descriptor
+ * @param userComponent a {@link KerberosComponentDescriptor} from the user-specified Kerberos descriptor
+ * @return the updated {@link KerberosComponentDescriptor}
+ */
+ private static KerberosComponentDescriptor processComponent(KerberosComponentDescriptor previousStackComponent,
+ KerberosComponentDescriptor newStackComponent,
+ KerberosComponentDescriptor userComponent) {
+ userComponent.setAuthToLocalProperties(processAuthToLocalProperties(
+ (previousStackComponent == null) ? null : previousStackComponent.getAuthToLocalProperties(),
+ (newStackComponent == null) ? null : newStackComponent.getAuthToLocalProperties(),
+ userComponent.getAuthToLocalProperties()));
+
+ userComponent.setConfigurations(processConfigurations(
+ (previousStackComponent == null) ? null : previousStackComponent.getConfigurations(),
+ (newStackComponent == null) ? null : newStackComponent.getConfigurations(),
+ userComponent.getConfigurations()));
+
+ userComponent.setIdentities(processIdentities(
+ (previousStackComponent == null) ? null : previousStackComponent.getIdentities(),
+ (newStackComponent == null) ? null : newStackComponent.getIdentities(),
+ userComponent.getIdentities()));
+
+ return userComponent;
+ }
+
+ /**
+ * Processes the list of configuration specifications ({@code <configuration type>/<property name>})
+ * identifying the properties that should be automatically updated with generated auth-to-local rules.
+ * <p>
+ * If no user-specified properties are set, <code>null</code> is returned.
+ * <p>
+ * Else the configuration specifications from the previous stack are removed from the user-specified
+ * data and the configuration specifications from the new stack are added to the user-specified,
+ * leaving the new list of configuration specifications as well as any user-specified changes.
+ *
+ * @param previousStackAuthToLocalProperties the auth-to-local properties from the previous stack version's Kerberos descriptor
+ * @param newStackAuthToLocalProperties the auth-to-local properties from the new stack version's Kerberos descriptor
+ * @param userAuthToLocalProperties the auth-to-local properties from the user-specified Kerberos descriptor
+ * @return an updated {@link Set} of configuration specifications
+ */
+ private static Set<String> processAuthToLocalProperties(Set<String> previousStackAuthToLocalProperties,
+ Set<String> newStackAuthToLocalProperties,
+ Set<String> userAuthToLocalProperties) {
+ if (userAuthToLocalProperties == null) {
+ return null;
+ }
+
+ TreeSet<String> updatedAuthToLocalProperties = new TreeSet<String>(userAuthToLocalProperties);
+
+ // Remove old configuration specifications, leaving the user-specified ones.
+ if (previousStackAuthToLocalProperties != null) {
+ updatedAuthToLocalProperties.removeAll(previousStackAuthToLocalProperties);
+ }
+
+ // Add the new configuration specifications
+ if (newStackAuthToLocalProperties != null) {
+ updatedAuthToLocalProperties.addAll(newStackAuthToLocalProperties);
+ }
+
+ return updatedAuthToLocalProperties;
+ }
+
+ /**
+ * Processes the identity-level Kerberos descriptors to add, remove, or update data in the user-specified
+ * Kerberos descriptor.
+ *
+ * @param previousStackIdentities a map of {@link KerberosIdentityDescriptor}s from the previous stack version's Kerberos descriptor
+ * @param newStackIdentities a map of {@link KerberosIdentityDescriptor}s from the new stack version's Kerberos descriptor
+ * @param userIdentities a map of {@link KerberosIdentityDescriptor}s from the user-supplied Kerberos descriptor
+ * @return a list of updated {@link KerberosIdentityDescriptor}s
+ */
+ private static List<KerberosIdentityDescriptor> processIdentities(List<KerberosIdentityDescriptor> previousStackIdentities,
+ List<KerberosIdentityDescriptor> newStackIdentities,
+ List<KerberosIdentityDescriptor> userIdentities) {
+
+ if ((userIdentities == null) || userIdentities.isEmpty() || ((previousStackIdentities == null) && (newStackIdentities == null))) {
+ return userIdentities;
+ }
+
+ // Create maps to make processing easier....
+ Map<String, KerberosIdentityDescriptor> previousStackIdentityMap = toMap(previousStackIdentities);
+ Map<String, KerberosIdentityDescriptor> newStackIdentityMap = toMap(newStackIdentities);
+ Map<String, KerberosIdentityDescriptor> userStackIdentityMap = toMap(userIdentities);
+
+ Map<String, KerberosIdentityDescriptor> updatedIdentities = new TreeMap<String, KerberosIdentityDescriptor>();
+
+ if (previousStackIdentityMap == null) {
+ previousStackIdentityMap = Collections.emptyMap();
+ }
+
+ if (newStackIdentityMap == null) {
+ newStackIdentityMap = Collections.emptyMap();
+ }
+
+ // Find identities to modify or remove
+ for (Map.Entry<String, KerberosIdentityDescriptor> entry : userStackIdentityMap.entrySet()) {
+ String name = entry.getKey();
+ KerberosIdentityDescriptor userValue = entry.getValue();
+
+ if (userValue != null) {
+ if (newStackIdentityMap.containsKey(name)) {
+ // Modify the new stack identity value by changing on the principal value and/or keytab
+ // file value since they are the only fields in this structure that should be changed
+ // by a user. However, the new stack identity may have been converted to a pure reference
+ // where the user changes will then be ignored.
+ KerberosIdentityDescriptor newValue = newStackIdentityMap.get(name);
+ KerberosIdentityDescriptor previousValue = previousStackIdentityMap.get(name);
+
+ updatedIdentities.put(name, processIdentity(previousValue, newValue, userValue));
+
+ } else if (previousStackIdentityMap.containsKey(name)) {
+ LOG.debug("Removing identity named {} from user-specified Kerberos Descriptor", name);
+ // Nothing to do here, just don't add it to the updated identity map...
+ } else {
+ LOG.debug("Leaving identity named {} in user-specified Kerberos Descriptor unchanged since it was user-defined.", name);
+ updatedIdentities.put(name, userValue);
+ }
+ }
+ }
+
+ // Note: there is no need to add identity definitions that do not exist since they will get
+ // added dynamically when merged with the stack default value.
+
+ return new ArrayList<KerberosIdentityDescriptor>(updatedIdentities.values());
+ }
+
+
+ /**
+ * Processes a {@link KerberosIdentityDescriptor} to change the user-supplied data based on the changes
+ * observed between the previous stack version's data and the new stack version's data.
+ *
+ * @param previousStackIdentity a {@link KerberosIdentityDescriptor} from the previous stack version's Kerberos descriptor
+ * @param newStackIdentity a {@link KerberosIdentityDescriptor} from the new stack version's Kerberos descriptor
+ * @param userIdentity a {@link KerberosIdentityDescriptor} from the user-specified Kerberos descriptor
+ * @return a new, updated, {@link KerberosIdentityDescriptor}
+ */
+ private static KerberosIdentityDescriptor processIdentity(KerberosIdentityDescriptor previousStackIdentity,
+ KerberosIdentityDescriptor newStackIdentity,
+ KerberosIdentityDescriptor userIdentity) {
+
+ KerberosIdentityDescriptor updatedValue = new KerberosIdentityDescriptor(newStackIdentity.toMap());
+ KerberosPrincipalDescriptor updatedValuePrincipal = updatedValue.getPrincipalDescriptor();
+ KerberosKeytabDescriptor updatedValueKeytab = updatedValue.getKeytabDescriptor();
+
+ // If the new identity definition is a reference and no longer has a principal definition,
+ // Ignore any user changes to the old principal definition.
+ if (updatedValuePrincipal != null) {
+ KerberosPrincipalDescriptor oldValuePrincipal = (previousStackIdentity == null) ? null : previousStackIdentity.getPrincipalDescriptor();
+ String previousValuePrincipalValue = null;
+ KerberosPrincipalDescriptor userValuePrincipal = userIdentity.getPrincipalDescriptor();
+ String userValuePrincipalValue = null;
+
+ if (oldValuePrincipal != null) {
+ previousValuePrincipalValue = oldValuePrincipal.getValue();
+ }
+
+ if (userValuePrincipal != null) {
+ userValuePrincipalValue = userValuePrincipal.getValue();
+ }
+
+ // If the user changed the stack default, replace the new stack default value with the user's
+ // changed value
+ if ((userValuePrincipalValue != null) && !userValuePrincipalValue.equals(previousValuePrincipalValue)) {
+ updatedValuePrincipal.setValue(userValuePrincipalValue);
+ }
+ }
+
+ // If the new identity definition is a reference and no longer has a keytab definition,
+ // Ignore any user changes to the old keytab definition.
+ if (updatedValueKeytab != null) {
+ KerberosKeytabDescriptor oldValueKeytab = (previousStackIdentity == null) ? null : previousStackIdentity.getKeytabDescriptor();
+ String previousValueKeytabFile = null;
+ KerberosKeytabDescriptor userValueKeytab = userIdentity.getKeytabDescriptor();
+ String userValueKeytabFile = null;
+
+ if (oldValueKeytab != null) {
+ previousValueKeytabFile = oldValueKeytab.getFile();
+ }
+
+ if (userValueKeytab != null) {
+ userValueKeytabFile = userValueKeytab.getFile();
+ }
+
+ // If the user changed the stack default, replace the new stack default value with the user's
+ // changed value
+ if ((userValueKeytabFile != null) && !userValueKeytabFile.equals(previousValueKeytabFile)) {
+ updatedValueKeytab.setFile(userValueKeytabFile);
+ }
+ }
+
+ // Remove the when clause
+ updatedValue.setWhen(null);
+
+ return updatedValue;
+ }
+
+ /**
+ * Processes the configuration-level Kerberos descriptors to add, remove, or update data in the user-specified
+ * Kerberos descriptor.
+ *
+ * @param previousStackConfigurations a map of {@link KerberosConfigurationDescriptor}s from the previous stack version's Kerberos descriptor
+ * @param newStackConfigurations a map of {@link KerberosConfigurationDescriptor}s from the new stack version's Kerberos descriptor
+ * @param userConfigurations a map of {@link KerberosConfigurationDescriptor}s from the user-supplied Kerberos descriptor
+ * @return a map of updated {@link KerberosConfigurationDescriptor}s
+ */
+ private static Map<String, KerberosConfigurationDescriptor> processConfigurations(Map<String, KerberosConfigurationDescriptor> previousStackConfigurations,
+ Map<String, KerberosConfigurationDescriptor> newStackConfigurations,
+ Map<String, KerberosConfigurationDescriptor> userConfigurations) {
+
+ if ((userConfigurations == null) || ((previousStackConfigurations == null) && (newStackConfigurations == null))) {
+ return userConfigurations;
+ }
+
+ Map<String, KerberosConfigurationDescriptor> updatedConfigurations = new TreeMap<String, KerberosConfigurationDescriptor>();
+
+ if (previousStackConfigurations == null) {
+ previousStackConfigurations = Collections.emptyMap();
+ }
+
+ if (newStackConfigurations == null) {
+ newStackConfigurations = Collections.emptyMap();
+ }
+
+ // Find configurations to modify or remove
+ for (Map.Entry<String, KerberosConfigurationDescriptor> entry : userConfigurations.entrySet()) {
+ String name = entry.getKey();
+ KerberosConfigurationDescriptor userValue = entry.getValue();
+
+ if (userValue != null) {
+ if (newStackConfigurations.containsKey(name)) {
+ KerberosConfigurationDescriptor oldValue = previousStackConfigurations.get(name);
+ KerberosConfigurationDescriptor newValue = newStackConfigurations.get(name);
+
+ LOG.debug("Processing configuration type {} for modifications", name);
+ updatedConfigurations.put(name, processConfiguration(oldValue, newValue, userValue));
+ } else if (previousStackConfigurations.containsKey(name)) {
+ LOG.debug("Removing configuration type {} from user-specified Kerberos Descriptor", name);
+ // Nothing to do here, just don't add it to the updated configurations map...
+ } else {
+ LOG.debug("Leaving configuration type {} in user-specified Kerberos Descriptor unchanged since it was user-defined.", name);
+ updatedConfigurations.put(name, userValue);
+ }
+ }
+ }
+
+ // Note: there is no need to add configuration definitions that do not exist in the user-specified
+ // descriptor since they will get added dynamically when merged with the stack default value.
+
+ return updatedConfigurations;
+ }
+
+ /**
+ * Processes a {@link KerberosConfigurationDescriptor} to change the user-supplied data based on the changes
+ * observed between the previous stack version's data and the new stack version's data.
+ *
+ * @param previousStackConfiguration a {@link KerberosConfigurationDescriptor} from the previous stack version's Kerberos descriptor
+ * @param newStackConfiguration a {@link KerberosConfigurationDescriptor} from the new stack version's Kerberos descriptor
+ * @param userConfiguration a {@link KerberosConfigurationDescriptor} from the user-specified Kerberos descriptor
+ * @return an updated {@link KerberosConfigurationDescriptor}
+ */
+ private static KerberosConfigurationDescriptor processConfiguration(KerberosConfigurationDescriptor previousStackConfiguration,
+ KerberosConfigurationDescriptor newStackConfiguration,
+ KerberosConfigurationDescriptor userConfiguration) {
+
+ KerberosConfigurationDescriptor updatedValue = new KerberosConfigurationDescriptor((userConfiguration == null) ? null : userConfiguration.toMap());
+
+ Map<String, String> previousValue = (previousStackConfiguration == null) ? null : previousStackConfiguration.getProperties();
+ Map<String, String> newValue = (newStackConfiguration == null) ? null : newStackConfiguration.getProperties();
+ Map<String, String> userValue = updatedValue.getProperties();
+
+ updatedValue.setProperties(processProperties(previousValue, newValue, userValue));
+
+ return updatedValue;
+ }
+
+ /**
+ * Processes a map of global or configuration properties to change the user-supplied data based on
+ * the changes observed between the previous stack version's data and the new stack version's data.
+ * <p>
+ * If a property exists in both the previous and new stacks, and the user has not changed it; then
+ * the value of the property will be updated to the new stack version's value. Else, if the user
+ * changed the value, the changed value will be left as-is.
+ * <p>
+ * If a property exists only in the previous stack, then it will be removed.
+ * <p>
+ * If a property exists only in the new stack, then it will be added.
+ *
+ * @param previousStackProperties properties from the previous stack version's Kerberos descriptor
+ * @param newStackProperties properties from the new stack version's Kerberos descriptor
+ * @param userProperties properties from the user-specified Kerberos descriptor
+ * @return a new map of updated properties
+ */
+ private static Map<String, String> processProperties(Map<String, String> previousStackProperties,
+ Map<String, String> newStackProperties,
+ Map<String, String> userProperties) {
+
+ if ((previousStackProperties == null) && (newStackProperties == null)) {
+ return userProperties;
+ } else {
+ Map<String, String> updatedProperties = new TreeMap<String, String>();
+ if (userProperties != null) {
+ updatedProperties.putAll(userProperties);
+ }
+
+ if (previousStackProperties == null) {
+ previousStackProperties = Collections.emptyMap();
+ }
+
+ if (newStackProperties == null) {
+ newStackProperties = Collections.emptyMap();
+ }
+
+ // Find properties to modify and remove
+ for (Map.Entry<String, String> entry : previousStackProperties.entrySet()) {
+ String name = entry.getKey();
+
+ if (newStackProperties.containsKey(name)) {
+ String previousValue = entry.getValue();
+ String newValue = newStackProperties.get(name);
+ String userValue = updatedProperties.get(name);
+
+ // See if the user property should be modified...
+ // Test if the old property value is different than the new property value and that the user did
+ // not update the value from the old default. If the user updated the value, then it would be
+ // risky to change it to the new stack default value.
+ if (((previousValue == null) ? (newValue != null) : !previousValue.equals(newValue)) &&
+ ((previousValue == null) ? (userValue == null) : previousValue.equals(userValue))) {
+ LOG.debug("Modifying property named {} from user-specified Kerberos Descriptor", name);
+ updatedProperties.put(name, newValue);
+ }
+ } else {
+ LOG.debug("Removing property named {} from user-specified Kerberos Descriptor", name);
+ updatedProperties.remove(name);
+ }
+ }
+
+ // Find properties to add
+ for (Map.Entry<String, String> entry : newStackProperties.entrySet()) {
+ String name = entry.getKey();
+
+ if (!previousStackProperties.containsKey(name) && !updatedProperties.containsKey(name)) {
+ // A new property was found, add it...
+ LOG.debug("Adding property named {} to user-specified Kerberos Descriptor", name);
+ updatedProperties.put(name, entry.getValue());
+ }
+ }
+
+ return updatedProperties;
+ }
+ }
+
+ /**
+ * A convenience method used to change a list of {@link KerberosIdentityDescriptor} items into a map
+ * of identity names to {@link KerberosIdentityDescriptor} items.
+ *
+ * @param identities a list of {@link KerberosIdentityDescriptor}s
+ * @return a map of identity names to {@link KerberosIdentityDescriptor} items, or <code>null</code>
+ * if the supplied list is <code>null</code>
+ */
+ private static Map<String, KerberosIdentityDescriptor> toMap(List<KerberosIdentityDescriptor> identities) {
+ if (identities == null) {
+ return null;
+ } else {
+ Map<String, KerberosIdentityDescriptor> map = new TreeMap<String, KerberosIdentityDescriptor>();
+
+ for (KerberosIdentityDescriptor identity : identities) {
+ map.put(identity.getName(), identity);
+ }
+
+ return map;
+ }
+ }
+}
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
index 7ce1c9f..ccb4efe 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptor.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -17,8 +17,8 @@
*/
package org.apache.ambari.server.state.kerberos;
-import java.util.HashMap;
import java.util.Map;
+import java.util.TreeMap;
/**
* KerberosKeytabDescriptor is an implementation of an AbstractKerberosDescriptor that
@@ -157,13 +157,13 @@
/**
* Creates a new KerberosKeytabDescriptor
*
- * @param file the path to the keytab file
- * @param ownerName the local username of the file owner
- * @param ownerAccess the file access privileges for the file owner ("r", "rw", "")
- * @param groupName the local group name with privileges to access the file
- * @param groupAccess the file access privileges for the group ("r", "rw", "")
+ * @param file the path to the keytab file
+ * @param ownerName the local username of the file owner
+ * @param ownerAccess the file access privileges for the file owner ("r", "rw", "")
+ * @param groupName the local group name with privileges to access the file
+ * @param groupAccess the file access privileges for the group ("r", "rw", "")
* @param configuration the configuration used to store the principal name
- * @param cachable true if the keytab may be cached by Ambari; otherwise false
+ * @param cachable true if the keytab may be cached by Ambari; otherwise false
*/
public KerberosKeytabDescriptor(String file, String ownerName, String ownerAccess, String groupName,
String groupAccess, String configuration, boolean cachable) {
@@ -175,6 +175,7 @@
setConfiguration(configuration);
setCachable(cachable);
}
+
/**
* Creates a new KerberosKeytabDescriptor
* <p/>
@@ -416,21 +417,53 @@
*/
@Override
public Map<String, Object> toMap() {
- Map<String, Object> map = new HashMap<String, Object>();
+ Map<String, Object> map = new TreeMap<String, Object>();
- map.put("file", getFile());
+ String data;
- map.put("owner", new HashMap<String, Object>() {{
- put("name", getOwnerName());
- put("access", getOwnerAccess());
- }});
+ data = getFile();
+ map.put("file", data);
- map.put("group", new HashMap<String, Object>() {{
- put("name", getGroupName());
- put("access", getGroupAccess());
- }});
+ // Build file owner map
+ Map<String, String> owner = new TreeMap<String, String>();
- map.put("configuration", getConfiguration());
+ data = getOwnerName();
+ if (data != null) {
+ owner.put("name", data);
+ }
+
+ data = getOwnerAccess();
+ if (data != null) {
+ owner.put("access", data);
+ }
+
+ if (!owner.isEmpty()) {
+ map.put("owner", owner);
+ }
+ // Build file owner map (end)
+
+ // Build file group map
+ Map<String, String> group = new TreeMap<String, String>();
+
+ data = getGroupName();
+ if (data != null) {
+ group.put("name", data);
+ }
+
+ data = getGroupAccess();
+ if (data != null) {
+ group.put("access", data);
+ }
+
+ if (!group.isEmpty()) {
+ map.put("group", group);
+ }
+ // Build file group map (end)
+
+ data = getConfiguration();
+ if (data != null) {
+ map.put("configuration", data);
+ }
return map;
}
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptor.java
index 0156e4a..83dd953 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptor.java
@@ -17,7 +17,7 @@
*/
package org.apache.ambari.server.state.kerberos;
-import java.util.HashMap;
+import java.util.TreeMap;
import java.util.Map;
/**
@@ -267,7 +267,7 @@
*/
@Override
public Map<String, Object> toMap() {
- Map<String, Object> map = new HashMap<String, Object>();
+ Map<String, Object> map = new TreeMap<String, Object>();
map.put("value", getValue());
map.put("type", KerberosPrincipalType.translate(getType()));
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptor.java
index 72dbcfe..0f14ca6 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptor.java
@@ -20,7 +20,7 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
-import java.util.HashMap;
+import java.util.TreeMap;
import java.util.List;
import java.util.Map;
@@ -196,7 +196,7 @@
}
if (components == null) {
- components = new HashMap<String, KerberosComponentDescriptor>();
+ components = new TreeMap<String, KerberosComponentDescriptor>();
}
components.put(name, component);
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/kerberos.json b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/kerberos.json
index d569447..636d36e 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/kerberos.json
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/kerberos.json
@@ -104,7 +104,8 @@
"name": "PHOENIX_QUERY_SERVER",
"identities": [
{
- "name": "/spnego",
+ "name": "phoenix_spnego",
+ "reference": "/spnego",
"principal": {
"configuration": "hbase-site/phoenix.queryserver.kerberos.principal"
},
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
index f5f06c0..ff4ed18 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml
@@ -449,6 +449,16 @@
</group>
<!--
+ After processing this group, the user-specified Kerberos descriptor will be updated to work with
+ the new stack-level Kerberos descriptor.
+ -->
+ <group xsi:type="cluster" name="UPDATE_KERBEROS_DESCRIPTORS" title="Update Kerberos Descriptors">
+ <execute-stage title="Update the user-specified Kerberos descriptor" service="" component="">
+ <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.UpgradeUserKerberosDescriptor"/>
+ </execute-stage>
+ </group>
+
+ <!--
Invoke "hdp-select set all" to change any components we may have missed
that are installed on the hosts but not known by Ambari.
-->
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
index 09bd2ac..46e11e4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/upgrades/upgrade-2.5.xml
@@ -197,6 +197,16 @@
</execute-stage>
</group>
+ <!--
+ After processing this group, the user-specified Kerberos descriptor will be updated to work with
+ the new stack-level Kerberos descriptor.
+ -->
+ <group xsi:type="cluster" name="UPDATE_KERBEROS_DESCRIPTORS" title="Update Kerberos Descriptors">
+ <execute-stage title="Update the user-specified Kerberos descriptor" service="" component="">
+ <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.UpgradeUserKerberosDescriptor"/>
+ </execute-stage>
+ </group>
+
<group name="CORE_MASTER" title="Core Masters">
<service-check>false</service-check>
<service name="HDFS">
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
index 1cc9529..3478603 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml
@@ -405,6 +405,16 @@
</group>
<!--
+ After processing this group, the user-specified Kerberos descriptor will be updated to work with
+ the new stack-level Kerberos descriptor.
+ -->
+ <group xsi:type="cluster" name="UPDATE_KERBEROS_DESCRIPTORS" title="Update Kerberos Descriptors">
+ <execute-stage title="Update the user-specified Kerberos descriptor" service="" component="">
+ <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.UpgradeUserKerberosDescriptor"/>
+ </execute-stage>
+ </group>
+
+ <!--
Invoke "hdp-select set all" to change any components we may have missed
that are installed on the hosts but not known by Ambari.
-->
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
index f20ad06..8a7f0fa 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/upgrades/upgrade-2.5.xml
@@ -183,6 +183,16 @@
</execute-stage>
</group>
+ <!--
+ After processing this group, the user-specified Kerberos descriptor will be updated to work with
+ the new stack-level Kerberos descriptor.
+ -->
+ <group xsi:type="cluster" name="UPDATE_KERBEROS_DESCRIPTORS" title="Update Kerberos Descriptors">
+ <execute-stage title="Update the user-specified Kerberos descriptor" service="" component="">
+ <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.UpgradeUserKerberosDescriptor"/>
+ </execute-stage>
+ </group>
+
<group name="CORE_MASTER" title="Core Masters">
<service-check>false</service-check>
<service name="HDFS">
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HBASE/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HBASE/kerberos.json
index 501bcd3..9ed40ef 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/HBASE/kerberos.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/HBASE/kerberos.json
@@ -142,7 +142,8 @@
"name": "PHOENIX_QUERY_SERVER",
"identities": [
{
- "name": "/spnego",
+ "name": "phoenix_spnego",
+ "reference": "/spnego",
"principal": {
"configuration": "hbase-site/phoenix.queryserver.kerberos.principal"
},
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
index cbab386..a54c830 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml
@@ -295,6 +295,16 @@
</group>
<!--
+ After processing this group, the user-specified Kerberos descriptor will be updated to work with
+ the new stack-level Kerberos descriptor.
+ -->
+ <group xsi:type="cluster" name="UPDATE_KERBEROS_DESCRIPTORS" title="Update Kerberos Descriptors">
+ <execute-stage title="Update the user-specified Kerberos descriptor" service="" component="">
+ <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.UpgradeUserKerberosDescriptor"/>
+ </execute-stage>
+ </group>
+
+ <!--
Invoke "hdp-select set all" to change any components we may have missed
that are installed on the hosts but not known by Ambari.
-->
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml
index 60e81c0..e3e632b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.5/upgrades/upgrade-2.5.xml
@@ -148,6 +148,16 @@
</service>
</group>
+ <!--
+ After processing this group, the user-specified Kerberos descriptor will be updated to work with
+ the new stack-level Kerberos descriptor.
+ -->
+ <group xsi:type="cluster" name="UPDATE_KERBEROS_DESCRIPTORS" title="Update Kerberos Descriptors">
+ <execute-stage title="Update the user-specified Kerberos descriptor" service="" component="">
+ <task xsi:type="server_action" class="org.apache.ambari.server.serveraction.upgrades.UpgradeUserKerberosDescriptor"/>
+ </execute-stage>
+ </group>
+
<group name="CORE_MASTER" title="Core Masters">
<service-check>false</service-check>
<service name="HDFS">
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterKerberosDescriptorResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterKerberosDescriptorResourceProviderTest.java
index f551b42..1f91915 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterKerberosDescriptorResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ClusterKerberosDescriptorResourceProviderTest.java
@@ -18,6 +18,7 @@
package org.apache.ambari.server.controller.internal;
+import com.google.gson.Gson;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
@@ -65,6 +66,7 @@
import java.util.Map;
import java.util.Properties;
import java.util.Set;
+import java.util.TreeMap;
import static org.easymock.EasyMock.*;
@@ -74,111 +76,105 @@
@SuppressWarnings("unchecked")
public class ClusterKerberosDescriptorResourceProviderTest extends EasyMockSupport {
- private static final Map<String, Object> STACK_MAP =
- new HashMap<String, Object>() {
- {
- put("properties", new HashMap<String, Object>() {{
- put("realm", "EXAMPLE.COM");
- put("some.property", "Hello World");
- }});
+ private static final Gson GSON = new Gson();
- put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), new ArrayList<String>() {{
- add("global.name.rules");
- }});
+ private static final Map<String, Object> STACK_MAP;
- put(AbstractKerberosDescriptor.Type.SERVICE.getDescriptorPluralName(), new ArrayList<Object>() {{
- add(KerberosServiceDescriptorTest.MAP_VALUE);
- }});
- put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), new ArrayList<Map<String, Object>>() {{
- add(new HashMap<String, Object>() {
- {
- put("cluster-conf", new HashMap<String, String>() {
- {
- put("property1", "red");
- }
- });
- }
- });
- }});
- put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), new ArrayList<Object>() {{
- add(new HashMap<String, Object>() {
- {
- put("name", "shared");
- put("principal", new HashMap<String, Object>(KerberosPrincipalDescriptorTest.MAP_VALUE));
- put("keytab", new HashMap<String, Object>() {
- {
- put("file", "/etc/security/keytabs/subject.service.keytab");
+ private static final Map<String, Object> USER_MAP;
- put("owner", new HashMap<String, Object>() {{
- put("name", "root");
- put("access", "rw");
- }});
-
- put("group", new HashMap<String, Object>() {{
- put("name", "hadoop");
- put("access", "r");
- }});
-
- put("configuration", "service-site/service2.component.keytab.file");
- }
- });
- }
- });
- }});
- }
- };
-
- private static final Map<String, Object> USER_MAP =
- new HashMap<String, Object>() {
- {
- put("properties", new HashMap<String, Object>() {{
- put("realm", "HWX.COM");
- put("some.property", "Hello World");
- }});
-
- put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), new ArrayList<Map<String, Object>>() {{
- add(new HashMap<String, Object>() {
- {
- put("cluster-conf", new HashMap<String, String>() {
- {
- put("property1", "blue");
- put("property2", "orange");
- }
- });
- }
- });
- }});
- put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), new ArrayList<Object>() {{
- add(new HashMap<String, Object>() {
- {
- put("name", "shared");
- put("principal", new HashMap<String, Object>(KerberosPrincipalDescriptorTest.MAP_VALUE));
- put("keytab", new HashMap<String, Object>() {
- {
- put("file", "/etc/security/keytabs/subject.service.keytab");
-
- put("owner", new HashMap<String, Object>() {{
- put("name", "root");
- put("access", "rw");
- }});
-
- put("group", new HashMap<String, Object>() {{
- put("name", "hadoop");
- put("access", "r");
- }});
-
- put("configuration", "service-site/service2.component.keytab.file");
- }
- });
- }
- });
- }});
- }
- };
-
- private static final Map<String, Object> COMPOSITE_MAP = new HashMap<String, Object>();
+ private static final Map<String, Object> COMPOSITE_MAP;
static {
+ TreeMap<String, Object> stackProperties = new TreeMap<String, Object>();
+ stackProperties.put("realm", "EXAMPLE.COM");
+ stackProperties.put("some.property", "Hello World");
+
+ Collection<String> authToLocalRules = new ArrayList<String>();
+ authToLocalRules.add("global.name.rules");
+
+ TreeMap<String, Object> stackServices = new TreeMap<String, Object>();
+ stackServices.put((String) KerberosServiceDescriptorTest.MAP_VALUE.get("name"), KerberosServiceDescriptorTest.MAP_VALUE);
+
+ TreeMap<String, Object> stackClusterConfProperties = new TreeMap<String, Object>();
+ stackClusterConfProperties.put("property1", "red");
+
+ TreeMap<String, Object> stackClusterConf = new TreeMap<String, Object>();
+ stackClusterConf.put("cluster-conf", stackClusterConfProperties);
+
+ TreeMap<String, Object> stackConfigurations = new TreeMap<String, Object>();
+ stackConfigurations.put("cluster-conf", stackClusterConf);
+
+ TreeMap<String, Object> stackSharedIdentityKeytabOwner = new TreeMap<String, Object>();
+ stackSharedIdentityKeytabOwner.put("name", "root");
+ stackSharedIdentityKeytabOwner.put("access", "rw");
+
+ TreeMap<String, Object> sharedIdentityKeytabGroup = new TreeMap<String, Object>();
+ sharedIdentityKeytabGroup.put("name", "hadoop");
+ sharedIdentityKeytabGroup.put("access", "r");
+
+ TreeMap<String, Object> stackSharedIdentityKeytab = new TreeMap<String, Object>();
+ stackSharedIdentityKeytab.put("file", "/etc/security/keytabs/subject.service.keytab");
+ stackSharedIdentityKeytab.put("owner", stackSharedIdentityKeytabOwner);
+ stackSharedIdentityKeytab.put("group", sharedIdentityKeytabGroup);
+ stackSharedIdentityKeytab.put("configuration", "service-site/service2.component.keytab.file");
+
+ TreeMap<String, Object> stackSharedIdentity = new TreeMap<String, Object>();
+ stackSharedIdentity.put("name", "shared");
+ stackSharedIdentity.put("principal", new TreeMap<String, Object>(KerberosPrincipalDescriptorTest.MAP_VALUE));
+ stackSharedIdentity.put("keytab", stackSharedIdentityKeytab);
+
+ TreeMap<String, Object> stackIdentities = new TreeMap<String, Object>();
+ stackIdentities.put("shared", stackSharedIdentity);
+
+ STACK_MAP = new TreeMap<String, Object>();
+ STACK_MAP.put("properties", stackProperties);
+ STACK_MAP.put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalRules);
+ STACK_MAP.put(AbstractKerberosDescriptor.Type.SERVICE.getDescriptorPluralName(), stackServices.values());
+ STACK_MAP.put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), stackConfigurations.values());
+ STACK_MAP.put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), stackIdentities.values());
+
+ TreeMap<String, Object> userProperties = new TreeMap<String, Object>();
+ userProperties.put("realm", "HWX.COM");
+ userProperties.put("some.property", "Hello World");
+
+ TreeMap<String, Object> userClusterConfProperties = new TreeMap<String, Object>();
+ userClusterConfProperties.put("property1", "blue");
+ userClusterConfProperties.put("property2", "orange");
+
+ TreeMap<String, Object> userClusterConf = new TreeMap<String, Object>();
+ userClusterConf.put("cluster-conf", userClusterConfProperties);
+
+ TreeMap<String, Object> userConfigurations = new TreeMap<String, Object>();
+ userConfigurations.put("cluster-conf", userClusterConf);
+
+ TreeMap<String, Object> userSharedIdentityKeytabOwner = new TreeMap<String, Object>();
+ userSharedIdentityKeytabOwner.put("name", "root");
+ userSharedIdentityKeytabOwner.put("access", "rw");
+
+ TreeMap<String, Object> userSharedIdentityKeytabGroup = new TreeMap<String, Object>();
+ userSharedIdentityKeytabGroup.put("name", "hadoop");
+ userSharedIdentityKeytabGroup.put("access", "r");
+
+ TreeMap<String, Object> userSharedIdentityKeytab = new TreeMap<String, Object>();
+ userSharedIdentityKeytab.put("file", "/etc/security/keytabs/subject.service.keytab");
+ userSharedIdentityKeytab.put("owner", userSharedIdentityKeytabOwner);
+ userSharedIdentityKeytab.put("group", userSharedIdentityKeytabGroup);
+ userSharedIdentityKeytab.put("configuration", "service-site/service2.component.keytab.file");
+
+ TreeMap<String, Object> userSharedIdentity = new TreeMap<String, Object>();
+ userSharedIdentity.put("name", "shared");
+ userSharedIdentity.put("principal", new TreeMap<String, Object>(KerberosPrincipalDescriptorTest.MAP_VALUE));
+ userSharedIdentity.put("keytab", userSharedIdentityKeytab);
+
+ TreeMap<String, Object> userIdentities = new TreeMap<String, Object>();
+ userIdentities.put("shared", userSharedIdentity);
+
+ USER_MAP = new TreeMap<String, Object>();
+ USER_MAP.put("properties", userProperties);
+ USER_MAP.put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), userConfigurations.values());
+ USER_MAP.put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), userIdentities.values());
+
+ COMPOSITE_MAP = new TreeMap<String, Object>();
COMPOSITE_MAP.putAll(STACK_MAP);
COMPOSITE_MAP.putAll(USER_MAP);
}
@@ -396,17 +392,7 @@
results = provider.getResources(request, new AndPredicate(clusterPredicate, typePredicate));
Assert.assertEquals(1, results.size());
- for (Resource result : results) {
- Assert.assertEquals("c1", result.getPropertyValue(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_CLUSTER_NAME_PROPERTY_ID));
- Assert.assertEquals("STACK", result.getPropertyValue(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_TYPE_PROPERTY_ID));
-
- // Reconstruct the deconstructed Kerberos Descriptor
- Map partial1 = result.getPropertiesMap().get(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_DESCRIPTOR_PROPERTY_ID);
- Map partial2 = result.getPropertiesMap().get("KerberosDescriptor/kerberos_descriptor/properties");
- partial1.put("properties", partial2);
-
- Assert.assertEquals(STACK_MAP, partial1);
- }
+ testResults("STACK", STACK_MAP, results);
// --------------
// Get the USER Kerberos Descriptor
@@ -417,17 +403,7 @@
results = provider.getResources(request, new AndPredicate(clusterPredicate, typePredicate));
Assert.assertEquals(1, results.size());
- for (Resource result : results) {
- Assert.assertEquals("c1", result.getPropertyValue(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_CLUSTER_NAME_PROPERTY_ID));
- Assert.assertEquals("USER", result.getPropertyValue(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_TYPE_PROPERTY_ID));
-
- // Reconstruct the deconstructed Kerberos Descriptor
- Map partial1 = result.getPropertiesMap().get(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_DESCRIPTOR_PROPERTY_ID);
- Map partial2 = result.getPropertiesMap().get("KerberosDescriptor/kerberos_descriptor/properties");
- partial1.put("properties", partial2);
-
- Assert.assertEquals(USER_MAP, partial1);
- }
+ testResults("USER", USER_MAP, results);
// --------------
// Get the COMPOSITE Kerberos Descriptor
@@ -438,17 +414,7 @@
results = provider.getResources(request, new AndPredicate(clusterPredicate, typePredicate));
Assert.assertEquals(1, results.size());
- for (Resource result : results) {
- Assert.assertEquals("c1", result.getPropertyValue(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_CLUSTER_NAME_PROPERTY_ID));
- Assert.assertEquals("COMPOSITE", result.getPropertyValue(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_TYPE_PROPERTY_ID));
-
- // Reconstruct the deconstructed Kerberos Descriptor
- Map partial1 = result.getPropertiesMap().get(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_DESCRIPTOR_PROPERTY_ID);
- Map partial2 = result.getPropertiesMap().get("KerberosDescriptor/kerberos_descriptor/properties");
- partial1.put("properties", partial2);
-
- Assert.assertEquals(COMPOSITE_MAP, partial1);
- }
+ testResults("COMPOSITE", COMPOSITE_MAP, results);
verifyAll();
}
@@ -553,7 +519,7 @@
Map partial2 = result.getPropertiesMap().get("KerberosDescriptor/kerberos_descriptor/properties");
partial1.put("properties", partial2);
- Assert.assertEquals(STACK_MAP, partial1);
+ Assert.assertEquals(GSON.toJson(STACK_MAP), GSON.toJson(partial1));
}
// --------------
@@ -574,7 +540,7 @@
Map partial2 = result.getPropertiesMap().get("KerberosDescriptor/kerberos_descriptor/properties");
partial1.put("properties", partial2);
- Assert.assertEquals(USER_MAP, partial1);
+ Assert.assertEquals(GSON.toJson(USER_MAP), GSON.toJson(partial1));
}
// --------------
@@ -586,24 +552,14 @@
results = provider.getResources(request, new AndPredicate(clusterPredicate, typePredicate));
Assert.assertEquals(1, results.size());
- for (Resource result : results) {
- Assert.assertEquals("c1", result.getPropertyValue(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_CLUSTER_NAME_PROPERTY_ID));
- Assert.assertEquals("COMPOSITE", result.getPropertyValue(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_TYPE_PROPERTY_ID));
-
- // Reconstruct the deconstructed Kerberos Descriptor
- Map partial1 = result.getPropertiesMap().get(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_DESCRIPTOR_PROPERTY_ID);
- Map partial2 = result.getPropertiesMap().get("KerberosDescriptor/kerberos_descriptor/properties");
- partial1.put("properties", partial2);
-
- Assert.assertEquals(COMPOSITE_MAP, partial1);
- }
+ testResults("COMPOSITE", COMPOSITE_MAP, results);
verifyAll();
List<? extends Collection<String>> capturedValues = captureAdditionalServices.getValues();
Assert.assertEquals(3, capturedValues.size());
- for(Collection<String> capturedValue: capturedValues) {
+ for (Collection<String> capturedValue : capturedValues) {
Assert.assertEquals(3, capturedValue.size());
Assert.assertTrue(capturedValue.contains("HIVE"));
Assert.assertTrue(capturedValue.contains("PIG"));
@@ -821,5 +777,19 @@
verifyAll();
}
+
+ private void testResults(String type, Map<String, Object> expectedData, Set<Resource> results) {
+ for (Resource result : results) {
+ Assert.assertEquals("c1", result.getPropertyValue(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_CLUSTER_NAME_PROPERTY_ID));
+ Assert.assertEquals(type, result.getPropertyValue(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_TYPE_PROPERTY_ID));
+
+ // Reconstruct the deconstructed Kerberos Descriptor
+ Map partial1 = result.getPropertiesMap().get(ClusterKerberosDescriptorResourceProvider.CLUSTER_KERBEROS_DESCRIPTOR_DESCRIPTOR_PROPERTY_ID);
+ Map partial2 = result.getPropertiesMap().get("KerberosDescriptor/kerberos_descriptor/properties");
+ partial1.put("properties", partial2);
+
+ Assert.assertEquals(GSON.toJson(expectedData), GSON.toJson(partial1));
+ }
+ }
}
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java
index b74f417..201d84e 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosComponentDescriptorTest.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -24,11 +24,12 @@
import org.junit.Test;
import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.TreeMap;
import java.util.TreeSet;
public class KerberosComponentDescriptorTest {
@@ -51,34 +52,35 @@
" ]" +
"}";
- public static final Map<String, Object> MAP_VALUE =
- new HashMap<String, Object>() {
- {
- put("name", "A_DIFFERENT_COMPONENT_NAME");
- put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), new ArrayList<Object>() {{
- add(KerberosIdentityDescriptorTest.MAP_VALUE);
- add(KerberosIdentityDescriptorTest.MAP_VALUE_ALT);
- add(KerberosIdentityDescriptorTest.MAP_VALUE_REFERENCE);
- }});
- put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), new ArrayList<Map<String, Object>>() {{
- add(new HashMap<String, Object>() {
- {
- put("service-site", new HashMap<String, String>() {
- {
- put("service.component.property1", "red");
- put("service.component.property", "green");
- }
- });
- }
- });
- }});
- put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), new ArrayList<String>() {{
- add("component.name.rules2");
- }});
- }
- };
+ static final Map<String, Object> MAP_VALUE;
- public static void validateFromJSON(KerberosComponentDescriptor componentDescriptor) {
+ static {
+ Map<String, Object> identitiesMap = new TreeMap<String, Object>();
+ identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE.get("name"), KerberosIdentityDescriptorTest.MAP_VALUE);
+ identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE_ALT.get("name"), KerberosIdentityDescriptorTest.MAP_VALUE_ALT);
+ identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE_REFERENCE.get("name"), KerberosIdentityDescriptorTest.MAP_VALUE_REFERENCE);
+
+ Map<String, Object> serviceSiteProperties = new TreeMap<String, Object>();
+ serviceSiteProperties.put("service.component.property1", "red");
+ serviceSiteProperties.put("service.component.property", "green");
+
+ Map<String, Map<String, Object>> serviceSiteMap = new TreeMap<String, Map<String, Object>>();
+ serviceSiteMap.put("service-site", serviceSiteProperties);
+
+ TreeMap<String, Map<String, Map<String, Object>>> configurationsMap = new TreeMap<String, Map<String, Map<String, Object>>>();
+ configurationsMap.put("service-site", serviceSiteMap);
+
+ Collection<String> authToLocalRules = new ArrayList<String>();
+ authToLocalRules.add("component.name.rules2");
+
+ MAP_VALUE = new TreeMap<String, Object>();
+ MAP_VALUE.put("name", "A_DIFFERENT_COMPONENT_NAME");
+ MAP_VALUE.put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), new ArrayList<>(identitiesMap.values()));
+ MAP_VALUE.put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), configurationsMap.values());
+ MAP_VALUE.put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalRules);
+ }
+
+ static void validateFromJSON(KerberosComponentDescriptor componentDescriptor) {
Assert.assertNotNull(componentDescriptor);
Assert.assertTrue(componentDescriptor.isContainer());
@@ -112,7 +114,7 @@
Assert.assertEquals("component.name.rules1", authToLocalProperties.iterator().next());
}
- public static void validateFromMap(KerberosComponentDescriptor componentDescriptor) {
+ static void validateFromMap(KerberosComponentDescriptor componentDescriptor) {
Assert.assertNotNull(componentDescriptor);
Assert.assertTrue(componentDescriptor.isContainer());
@@ -146,7 +148,7 @@
Assert.assertEquals("component.name.rules2", authToLocalProperties.iterator().next());
}
- public static void validateUpdatedData(KerberosComponentDescriptor componentDescriptor) {
+ private static void validateUpdatedData(KerberosComponentDescriptor componentDescriptor) {
Assert.assertNotNull(componentDescriptor);
Assert.assertEquals("A_DIFFERENT_COMPONENT_NAME", componentDescriptor.getName());
@@ -213,9 +215,10 @@
@Test
public void testToMap() throws AmbariException {
+ Gson gson = new Gson();
KerberosComponentDescriptor descriptor = createFromMap();
Assert.assertNotNull(descriptor);
- Assert.assertEquals(MAP_VALUE, descriptor.toMap());
+ Assert.assertEquals(gson.toJson(MAP_VALUE), gson.toJson(descriptor.toMap()));
}
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java
index 6017fae..4f2a2f5 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosConfigurationDescriptorTest.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -45,36 +45,32 @@
"}}" +
"]";
- private static final Map<String, Map<String, Object>> MAP_SINGLE_VALUE =
- new HashMap<String, Map<String, Object>>() {
- {
- put("configuration-type", new HashMap<String, Object>() {
- {
- put("property1", "black");
- put("property2", "white");
- }
- });
- }
- };
+ private static final Map<String, Map<String, Object>> MAP_SINGLE_VALUE;
+ private static final Collection<Map<String, Map<String, Object>>> MAP_MULTIPLE_VALUES;
- private static final Collection<Map<String, Map<String, Object>>> MAP_MULTIPLE_VALUES =
- new ArrayList<Map<String, Map<String, Object>>>() {
- {
- add(MAP_SINGLE_VALUE);
- add(new HashMap<String, Map<String, Object>>() {
- {
- put("configuration-type2", new HashMap<String, Object>() {
- {
- put("property1", "red");
- put("property2", "yellow");
- put("property3", "green");
- }
- });
- }
- });
- }
- };
+ static {
+ TreeMap<String, Object> configuration_data = new TreeMap<String, Object>();
+ configuration_data.put("property1", "black");
+ configuration_data.put("property2", "white");
+
+ MAP_SINGLE_VALUE = new TreeMap<String, Map<String, Object>>();
+ MAP_SINGLE_VALUE.put("configuration-type", configuration_data);
+
+ TreeMap<String, Object> configurationType2Properties = new TreeMap<String, Object>();
+ configurationType2Properties.put("property1", "red");
+ configurationType2Properties.put("property2", "yellow");
+ configurationType2Properties.put("property3", "green");
+
+ Map<String, Map<String, Object>> configurationType2 = new TreeMap<String, Map<String, Object>>();
+ configurationType2.put("configuration-type2", configurationType2Properties);
+
+ TreeMap<String, Map<String, Map<String, Object>>> multipleValuesMap = new TreeMap<String, Map<String, Map<String, Object>>>();
+ multipleValuesMap.put("configuration-type", MAP_SINGLE_VALUE);
+ multipleValuesMap.put("configuration-type2", configurationType2);
+
+ MAP_MULTIPLE_VALUES = multipleValuesMap.values();
+ }
@Test
public void testJSONDeserialize() {
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
index 9463749..0070e6d 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -26,11 +26,12 @@
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.TreeMap;
import java.util.TreeSet;
public class KerberosDescriptorTest {
@@ -51,59 +52,57 @@
" ]" +
"}";
- public static final Map<String, Object> MAP_VALUE =
- new HashMap<String, Object>() {
- {
- put("properties", new HashMap<String, Object>() {{
- put("realm", "EXAMPLE.COM");
- put("some.property", "Hello World");
- }});
+ public static final Map<String, Object> MAP_VALUE;
- put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), new ArrayList<String>() {{
- add("global.name.rules");
- }});
+ static {
+ Map<String, Object> keytabOwnerMap = new TreeMap<String, Object>();
+ keytabOwnerMap.put("name", "root");
+ keytabOwnerMap.put("access", "rw");
- put(AbstractKerberosDescriptor.Type.SERVICE.getDescriptorPluralName(), new ArrayList<Object>() {{
- add(KerberosServiceDescriptorTest.MAP_VALUE);
- }});
- put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), new ArrayList<Map<String, Object>>() {{
- add(new HashMap<String, Object>() {
- {
- put("cluster-conf", new HashMap<String, String>() {
- {
- put("property1", "red");
- }
- });
- }
- });
- }});
- put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), new ArrayList<Object>() {{
- add(new HashMap<String, Object>() {
- {
- put("name", "shared");
- put("principal", new HashMap<String, Object>(KerberosPrincipalDescriptorTest.MAP_VALUE));
- put("keytab", new HashMap<String, Object>() {
- {
- put("file", "/etc/security/keytabs/subject.service.keytab");
+ Map<String, Object> keytabGroupMap = new TreeMap<String, Object>();
+ keytabGroupMap.put("name", "hadoop");
+ keytabGroupMap.put("access", "r");
- put("owner", new HashMap<String, Object>() {{
- put("name", "root");
- put("access", "rw");
- }});
+ Map<String, Object> keytabMap = new TreeMap<String, Object>();
+ keytabMap.put("file", "/etc/security/keytabs/subject.service.keytab");
+ keytabMap.put("owner", keytabOwnerMap);
+ keytabMap.put("group", keytabGroupMap);
+ keytabMap.put("configuration", "service-site/service2.component.keytab.file");
- put("group", new HashMap<String, Object>() {{
- put("name", "hadoop");
- put("access", "r");
- }});
+ Map<String, Object> sharedIdentityMap = new TreeMap<String, Object>();
+ sharedIdentityMap.put("name", "shared");
+ sharedIdentityMap.put("principal", KerberosPrincipalDescriptorTest.MAP_VALUE);
+ sharedIdentityMap.put("keytab", keytabMap);
- put("configuration", "service-site/service2.component.keytab.file");
- }
- });
- }
- });
- }});
- }
- };
+ Map<String, Object> servicesMap = new TreeMap<String, Object>();
+ servicesMap.put((String) KerberosServiceDescriptorTest.MAP_VALUE.get("name"), KerberosServiceDescriptorTest.MAP_VALUE);
+
+ Map<String, Object> identitiesMap = new TreeMap<String, Object>();
+ identitiesMap.put("shared", sharedIdentityMap);
+
+ Map<String, Object> clusterConfigProperties = new TreeMap<String, Object>();
+ clusterConfigProperties.put("property1", "red");
+
+ Map<String, Map<String, Object>> clusterConfigMap = new TreeMap<String, Map<String, Object>>();
+ clusterConfigMap.put("cluster-conf", clusterConfigProperties);
+
+ TreeMap<String, Map<String, Map<String, Object>>> configurationsMap = new TreeMap<String, Map<String, Map<String, Object>>>();
+ configurationsMap.put("cluster-conf", clusterConfigMap);
+
+ Collection<String> authToLocalRules = new ArrayList<String>();
+ authToLocalRules.add("global.name.rules");
+
+ TreeMap<String, Object> properties = new TreeMap<String, Object>();
+ properties.put("realm", "EXAMPLE.COM");
+ properties.put("some.property", "Hello World");
+
+ MAP_VALUE = new TreeMap<String, Object>();
+ MAP_VALUE.put("properties", properties);
+ MAP_VALUE.put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalRules);
+ MAP_VALUE.put(AbstractKerberosDescriptor.Type.SERVICE.getDescriptorPluralName(), servicesMap.values());
+ MAP_VALUE.put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), configurationsMap.values());
+ MAP_VALUE.put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), identitiesMap.values());
+ }
public static void validateFromJSON(KerberosDescriptor descriptor) {
Assert.assertNotNull(descriptor);
@@ -307,9 +306,10 @@
@Test
public void testToMap() throws AmbariException {
+ Gson gson = new Gson();
KerberosDescriptor descriptor = createFromMap();
Assert.assertNotNull(descriptor);
- Assert.assertEquals(MAP_VALUE, descriptor.toMap());
+ Assert.assertEquals(gson.toJson(MAP_VALUE), gson.toJson(descriptor.toMap()));
}
@Test
@@ -325,7 +325,7 @@
validateUpdatedData(descriptor);
}
- @Test
+ @Test
public void testGetReferencedIdentityDescriptor() throws IOException {
URL systemResourceURL = ClassLoader.getSystemResource("kerberos/test_get_referenced_identity_descriptor.json");
Assert.assertNotNull(systemResourceURL);
@@ -398,7 +398,7 @@
KerberosServiceDescriptor serviceDescriptor = descriptor.getService("SERVICE2");
Assert.assertNotNull(serviceDescriptor);
- KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent("SERVICE2_COMPONENT1");
+ KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent("SERVICE2_COMPONENT1");
Assert.assertNotNull(componentDescriptor);
KerberosIdentityDescriptor identity;
@@ -429,8 +429,8 @@
Assert.assertNotNull(identities);
identityFound = false;
- for(KerberosIdentityDescriptor identity : identities) {
- if("service2_stack_reference".equals(identity.getName())) {
+ for (KerberosIdentityDescriptor identity : identities) {
+ if ("service2_stack_reference".equals(identity.getName())) {
// From base identity
Assert.assertEquals("stack@${realm}", identity.getPrincipalDescriptor().getValue());
@@ -445,15 +445,15 @@
}
Assert.assertTrue(identityFound);
- KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent("SERVICE2_COMPONENT1");
+ KerberosComponentDescriptor componentDescriptor = serviceDescriptor.getComponent("SERVICE2_COMPONENT1");
Assert.assertNotNull(componentDescriptor);
identities = componentDescriptor.getIdentities(true, null);
Assert.assertNotNull(identities);
identityFound = false;
- for(KerberosIdentityDescriptor identity : identities) {
- if("component1_service2_stack_reference".equals(identity.getName())) {
+ for (KerberosIdentityDescriptor identity : identities) {
+ if ("component1_service2_stack_reference".equals(identity.getName())) {
// From base identity
Assert.assertEquals("stack@${realm}", identity.getPrincipalDescriptor().getValue());
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java
new file mode 100644
index 0000000..fca2f1f
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorUpdateHelperTest.java
@@ -0,0 +1,2272 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.state.kerberos;
+
+import com.google.gson.Gson;
+import com.google.inject.AbstractModule;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.assistedinject.FactoryModuleBuilder;
+import junit.framework.Assert;
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
+import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.orm.DBAccessor;
+import org.apache.ambari.server.orm.dao.ExtensionLinkDAO;
+import org.apache.ambari.server.orm.entities.ExtensionLinkEntity;
+import org.apache.ambari.server.orm.entities.MetainfoEntity;
+import org.apache.ambari.server.orm.entities.StackEntity;
+import org.apache.ambari.server.stack.StackManagerFactory;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.stack.OsFamily;
+import org.easymock.EasyMock;
+import org.easymock.EasyMockSupport;
+import org.junit.Test;
+
+import javax.persistence.EntityManager;
+import javax.persistence.TypedQuery;
+import java.util.Collections;
+import java.util.Properties;
+
+import static org.easymock.EasyMock.anyString;
+import static org.easymock.EasyMock.expect;
+
+public class KerberosDescriptorUpdateHelperTest extends EasyMockSupport {
+ private static final KerberosDescriptorFactory KERBEROS_DESCRIPTOR_FACTORY = new KerberosDescriptorFactory();
+ private static final Gson GSON = new Gson();
+
+ @Test
+ public void updateDefaultUserKerberosDescriptor() throws Exception {
+ Injector injector = Guice.createInjector(new AbstractModule() {
+
+ @Override
+ protected void configure() {
+ Properties properties = new Properties();
+ properties.put("metadata.path", "src/main/resources/stacks");
+ properties.put("common.services.path", "src/main/resources/common-services");
+ properties.put("server.version.file", "target/version");
+ properties.put("custom.action.definitions", "/tmp/nofile");
+ Configuration configuration = new Configuration(properties);
+
+ install(new FactoryModuleBuilder().build(StackManagerFactory.class));
+
+ bind(Clusters.class).toInstance(createNiceMock(Clusters.class));
+ bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+ bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
+ bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+ bind(Configuration.class).toInstance(configuration);
+ bind(ExtensionLinkDAO.class).toInstance(createNiceMock(ExtensionLinkDAO.class));
+ }
+ });
+
+ OsFamily osFamily = injector.getInstance(OsFamily.class);
+ expect(osFamily.os_list()).andReturn(Collections.singleton("centos6")).anyTimes();
+
+ ExtensionLinkDAO linkDao = injector.getInstance(ExtensionLinkDAO.class);
+ expect(linkDao.findByStack(anyString(), anyString())).andReturn(Collections.<ExtensionLinkEntity>emptyList()).anyTimes();
+
+ TypedQuery<StackEntity> query = createNiceMock(TypedQuery.class);
+ expect(query.setMaxResults(1)).andReturn(query).anyTimes();
+ expect(query.getSingleResult()).andReturn(null).anyTimes();
+
+ EntityManager entityManager = injector.getInstance(EntityManager.class);
+ expect(entityManager.createNamedQuery("StackEntity.findByNameAndVersion", StackEntity.class)).andReturn(query).anyTimes();
+ expect(entityManager.find(EasyMock.eq(MetainfoEntity.class), anyString())).andReturn(createNiceMock(MetainfoEntity.class)).anyTimes();
+
+ AmbariMetaInfo metaInfo = new AmbariMetaInfo(injector.getInstance(Configuration.class));
+
+ replayAll();
+
+ injector.injectMembers(metaInfo);
+ metaInfo.init();
+
+ KerberosDescriptor hdp24 = metaInfo.getKerberosDescriptor("HDP", "2.4");
+ KerberosDescriptor hdp25 = metaInfo.getKerberosDescriptor("HDP", "2.5");
+ KerberosDescriptor user = new KerberosDescriptor(hdp24.toMap());
+
+ KerberosDescriptor updated = KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(hdp24, hdp25, user);
+
+ KerberosDescriptor composite = new KerberosDescriptor(hdp25.toMap());
+ composite.update(updated);
+ Assert.assertEquals(GSON.toJson(hdp25.toMap()), GSON.toJson(composite.toMap()));
+ }
+
+ @Test
+ public void testUpdateProperties() throws AmbariException {
+ KerberosDescriptor oldValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance("{" +
+ " \"properties\": {" +
+ " \"realm\": \"${kerberos-env/realm}\"," +
+ " \"keytab_dir\": \"/etc/security/keytabs\"," +
+ " \"additional_realms\": \"\"," +
+ " \"old_property\": \"old_value\"" +
+ " }" +
+ "}");
+
+ KerberosDescriptor newValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance("{" +
+ " \"properties\": {" +
+ " \"realm\": \"${kerberos-env/realm}\"," +
+ " \"keytab_dir\": \"/etc/security/keytabs\"," +
+ " \"additional_realms\": \"\"," +
+ " \"new_property\": \"new_value\"" +
+ " }" +
+ "}");
+
+ KerberosDescriptor userValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance("{" +
+ " \"properties\": {" +
+ " \"realm\": \"EXAMPLE.COM\"," +
+ " \"keytab_dir\": \"/etc/security/keytabs\"," +
+ " \"additional_realms\": \"\"," +
+ " \"old_property\": \"old_value\"" +
+ " }" +
+ "}");
+
+ KerberosDescriptor updatedUserValue = KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(
+ oldValue,
+ newValue,
+ userValue);
+
+ // "old_property" is removed
+ // "new_property" is added
+ // "realm" retains user set value
+ Assert.assertEquals(
+ KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"properties\": {\n" +
+ " \"new_property\": \"new_value\",\n" +
+ " \"realm\": \"EXAMPLE.COM\",\n" +
+ " \"additional_realms\": \"\",\n" +
+ " \"keytab_dir\": \"/etc/security/keytabs\"\n" +
+ " }\n" +
+ "}"),
+ updatedUserValue);
+ }
+
+ @Test
+ public void testUpdateIdentities() throws AmbariException {
+ KerberosDescriptor oldValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{" +
+ " \"identities\": [" +
+ " {" +
+ " \"name\": \"spnego\"," +
+ " \"principal\": {" +
+ " \"value\": \"HTTP/_HOST@${realm}\"," +
+ " \"type\": \"service\"" +
+ " }," +
+ " \"keytab\": {" +
+ " \"file\": \"${keytab_dir}/spnego.service.keytab\"," +
+ " \"owner\": {" +
+ " \"name\": \"root\"," +
+ " \"access\": \"r\"" +
+ " }," +
+ " \"group\": {" +
+ " \"name\": \"${cluster-env/user_group}\"," +
+ " \"access\": \"r\"" +
+ " }" +
+ " }" +
+ " }," +
+ " {" +
+ " \"name\": \"smokeuser\"," +
+ " \"principal\": {" +
+ " \"value\": \"old_value@${realm}\"," +
+ " \"type\": \"user\"," +
+ " \"configuration\": \"cluster-env/smokeuser_principal_name\"," +
+ " \"local_username\": \"${cluster-env/smokeuser}\"" +
+ " }," +
+ " \"keytab\": {" +
+ " \"file\": \"${keytab_dir}/smokeuser.headless.keytab\"," +
+ " \"owner\": {" +
+ " \"name\": \"${cluster-env/smokeuser}\"," +
+ " \"access\": \"r\"" +
+ " }," +
+ " \"group\": {" +
+ " \"name\": \"${cluster-env/user_group}\"," +
+ " \"access\": \"r\"" +
+ " }," +
+ " \"configuration\": \"cluster-env/smokeuser_keytab\"" +
+ " }" +
+ " }," +
+ " {" +
+ " \"name\": \"old_identity\"," +
+ " \"principal\": {" +
+ " \"value\": \"foobar-${cluster_name|toLower()}@${realm}\"," +
+ " \"type\": \"user\"," +
+ " \"configuration\": \"cluster-env/ambari_principal_name\"" +
+ " }," +
+ " \"keytab\": {" +
+ " \"file\": \"${keytab_dir}/ambari.server.keytab\"" +
+ " }" +
+ " }" +
+ " ]" +
+ "}");
+
+ KerberosDescriptor newValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{" +
+ " \"identities\": [" +
+ " {" +
+ " \"name\": \"spnego\"," +
+ " \"principal\": {" +
+ " \"value\": \"HTTP/_HOST@${realm}\"," +
+ " \"type\": \"service\"" +
+ " }," +
+ " \"keytab\": {" +
+ " \"file\": \"${keytab_dir}/spnego.service.keytab\"," +
+ " \"owner\": {" +
+ " \"name\": \"root\"," +
+ " \"access\": \"r\"" +
+ " }," +
+ " \"group\": {" +
+ " \"name\": \"${cluster-env/user_group}\"," +
+ " \"access\": \"r\"" +
+ " }" +
+ " }" +
+ " }," +
+ " {" +
+ " \"name\": \"smokeuser\"," +
+ " \"principal\": {" +
+ " \"value\": \"${cluster-env/smokeuser}-${cluster_name|toLower()}@${realm}\"," +
+ " \"type\": \"user\"," +
+ " \"configuration\": \"cluster-env/smokeuser_principal_name\"," +
+ " \"local_username\": \"${cluster-env/smokeuser}\"" +
+ " }," +
+ " \"keytab\": {" +
+ " \"file\": \"updated_dir/smokeuser.headless.keytab\"," +
+ " \"owner\": {" +
+ " \"name\": \"${cluster-env/smokeuser}\"," +
+ " \"access\": \"r\"" +
+ " }," +
+ " \"group\": {" +
+ " \"name\": \"${cluster-env/user_group}\"," +
+ " \"access\": \"r\"" +
+ " }," +
+ " \"configuration\": \"cluster-env/smokeuser_keytab\"" +
+ " }" +
+ " }," +
+ " {" +
+ " \"name\": \"ambari-server\"," +
+ " \"principal\": {" +
+ " \"value\": \"ambari-server-${cluster_name|toLower()}@${realm}\"," +
+ " \"type\": \"user\"," +
+ " \"configuration\": \"cluster-env/ambari_principal_name\"" +
+ " }," +
+ " \"keytab\": {" +
+ " \"file\": \"${keytab_dir}/ambari.server.keytab\"" +
+ " }" +
+ " }" +
+ " ]" +
+ "}");
+
+ KerberosDescriptor userValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{" +
+ " \"identities\": [" +
+ " {" +
+ " \"name\": \"spnego\"," +
+ " \"principal\": {" +
+ " \"value\": \"CHANGED_HTTP/_HOST@${realm}\"," +
+ " \"type\": \"service\"" +
+ " }," +
+ " \"keytab\": {" +
+ " \"file\": \"${keytab_dir}/spnego.service.keytab\"," +
+ " \"owner\": {" +
+ " \"name\": \"root\"," +
+ " \"access\": \"r\"" +
+ " }," +
+ " \"group\": {" +
+ " \"name\": \"${cluster-env/user_group}\"," +
+ " \"access\": \"r\"" +
+ " }" +
+ " }" +
+ " }," +
+ " {" +
+ " \"name\": \"smokeuser\"," +
+ " \"principal\": {" +
+ " \"value\": \"old_value@${realm}\"," +
+ " \"type\": \"user\"," +
+ " \"configuration\": \"cluster-env/smokeuser_principal_name\"," +
+ " \"local_username\": \"${cluster-env/smokeuser}\"" +
+ " }," +
+ " \"keytab\": {" +
+ " \"file\": \"custom_dir/smokeuser.headless.keytab\"," +
+ " \"owner\": {" +
+ " \"name\": \"${cluster-env/smokeuser}\"," +
+ " \"access\": \"r\"" +
+ " }," +
+ " \"group\": {" +
+ " \"name\": \"${cluster-env/user_group}\"," +
+ " \"access\": \"r\"" +
+ " }," +
+ " \"configuration\": \"cluster-env/smokeuser_keytab\"" +
+ " }" +
+ " }," +
+ " {" +
+ " \"name\": \"old_identity\"," +
+ " \"principal\": {" +
+ " \"value\": \"foobar-${cluster_name|toLower()}@${realm}\"," +
+ " \"type\": \"user\"," +
+ " \"configuration\": \"cluster-env/ambari_principal_name\"" +
+ " }," +
+ " \"keytab\": {" +
+ " \"file\": \"${keytab_dir}/ambari.server.keytab\"" +
+ " }" +
+ " }" +
+ " ]" +
+ "}");
+
+
+ KerberosDescriptor updatedUserValue = KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(
+ oldValue,
+ newValue,
+ userValue);
+
+ Assert.assertEquals(
+ GSON.toJson(KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"spnego\",\n" +
+ " \"principal\": {\n" +
+ " \"value\": \"CHANGED_HTTP/_HOST@${realm}\",\n" +
+ " \"type\": \"service\"\n" +
+ " },\n" +
+ " \"keytab\": {\n" +
+ " \"file\": \"${keytab_dir}/spnego.service.keytab\",\n" +
+ " \"owner\": {\n" +
+ " \"name\": \"root\",\n" +
+ " \"access\": \"r\"\n" +
+ " },\n" +
+ " \"group\": {\n" +
+ " \"name\": \"${cluster-env/user_group}\",\n" +
+ " \"access\": \"r\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"smokeuser\",\n" +
+ " \"principal\": {\n" +
+ " \"value\": \"${cluster-env/smokeuser}-${cluster_name|toLower()}@${realm}\",\n" +
+ " \"local_username\": \"${cluster-env/smokeuser}\",\n" +
+ " \"configuration\": \"cluster-env/smokeuser_principal_name\",\n" +
+ " \"type\": \"user\"\n" +
+ " },\n" +
+ " \"keytab\": {\n" +
+ " \"file\": \"custom_dir/smokeuser.headless.keytab\",\n" +
+ " \"owner\": {\n" +
+ " \"name\": \"${cluster-env/smokeuser}\",\n" +
+ " \"access\": \"r\"\n" +
+ " },\n" +
+ " \"group\": {\n" +
+ " \"name\": \"${cluster-env/user_group}\",\n" +
+ " \"access\": \"r\"\n" +
+ " },\n" +
+ " \"configuration\": \"cluster-env/smokeuser_keytab\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ "}").toMap()),
+ GSON.toJson(updatedUserValue.toMap()));
+
+ // Test that the merge of the default (stack) Kerberos descriptor and the updated user-specified
+ // Kerberos descriptor yield the expected composite Kerberos descriptor.
+ newValue.update(updatedUserValue);
+
+ Assert.assertEquals(
+ GSON.toJson(KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"ambari-server\",\n" +
+ " \"principal\": {\n" +
+ " \"value\": \"ambari-server-${cluster_name|toLower()}@${realm}\",\n" +
+ " \"configuration\": \"cluster-env/ambari_principal_name\",\n" +
+ " \"type\": \"user\"\n" +
+ " },\n" +
+ " \"keytab\": {\n" +
+ " \"file\": \"${keytab_dir}/ambari.server.keytab\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"spnego\",\n" +
+ " \"principal\": {\n" +
+ " \"value\": \"CHANGED_HTTP/_HOST@${realm}\",\n" +
+ " \"type\": \"service\"\n" +
+ " },\n" +
+ " \"keytab\": {\n" +
+ " \"file\": \"${keytab_dir}/spnego.service.keytab\",\n" +
+ " \"owner\": {\n" +
+ " \"name\": \"root\",\n" +
+ " \"access\": \"r\"\n" +
+ " },\n" +
+ " \"group\": {\n" +
+ " \"name\": \"${cluster-env/user_group}\",\n" +
+ " \"access\": \"r\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"smokeuser\",\n" +
+ " \"principal\": {\n" +
+ " \"value\": \"${cluster-env/smokeuser}-${cluster_name|toLower()}@${realm}\",\n" +
+ " \"local_username\": \"${cluster-env/smokeuser}\",\n" +
+ " \"configuration\": \"cluster-env/smokeuser_principal_name\",\n" +
+ " \"type\": \"user\"\n" +
+ " },\n" +
+ " \"keytab\": {\n" +
+ " \"file\": \"custom_dir/smokeuser.headless.keytab\",\n" +
+ " \"owner\": {\n" +
+ " \"name\": \"${cluster-env/smokeuser}\",\n" +
+ " \"access\": \"r\"\n" +
+ " },\n" +
+ " \"group\": {\n" +
+ " \"name\": \"${cluster-env/user_group}\",\n" +
+ " \"access\": \"r\"\n" +
+ " },\n" +
+ " \"configuration\": \"cluster-env/smokeuser_keytab\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ "}").toMap()),
+ GSON.toJson(newValue.toMap()));
+ }
+
+ @Test
+ public void testUpdateConfigurations() throws AmbariException {
+ KerberosDescriptor oldValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"core-site\": {\n" +
+ " \"hadoop.security.authentication\": \"kerberos\",\n" +
+ " \"hadoop.security.authorization\": \"true\",\n" +
+ " \"hadoop.proxyuser.HTTP.groups\": \"${hadoop-env/proxyuser_group}\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"some-site\": {\n" +
+ " \"property.unchanged\": \"value 1\",\n" +
+ " \"property.removed\": \"removed value\",\n" +
+ "      \"property.altered\": \"old value\",\n" +
+ " \"property.property.changed.in.new\": \"orig value\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"old-site\": {\n" +
+ " \"property\": \"value\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ "}\n");
+
+ KerberosDescriptor newValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"core-site\": {\n" +
+ " \"hadoop.security.authentication\": \"kerberos\",\n" +
+ " \"hadoop.security.authorization\": \"true\",\n" +
+ " \"hadoop.proxyuser.HTTP.groups\": \"${hadoop-env/proxyuser_group}\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"some-site\": {\n" +
+ " \"property.unchanged\": \"value 1\",\n" +
+ " \"property.added\": \"added value\",\n" +
+ " \"property.altered\": \"new value\",\n" +
+ " \"property.changed.in.new\": \"new value\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"new-site\": {\n" +
+ " \"property.for.new.site\": \"value\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ "}\n");
+
+ KerberosDescriptor userValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"core-site\": {\n" +
+ " \"hadoop.security.authentication\": \"kerberos\",\n" +
+ " \"hadoop.security.authorization\": \"true\",\n" +
+ " \"hadoop.proxyuser.HTTP.groups\": \"${hadoop-env/proxyuser_group}\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"some-site\": {\n" +
+ " \"property.unchanged\": \"value 1\",\n" +
+ " \"property.removed\": \"changed removed value\",\n" +
+ "      \"property.altered\": \"custom value\",\n" +
+ " \"property.property.changed.in.new\": \"orig value\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"old-site\": {\n" +
+ " \"property\": \"value\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ "}\n");
+
+ KerberosDescriptor updatedUserValue = KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(
+ oldValue,
+ newValue,
+ userValue);
+
+ Assert.assertEquals(
+ GSON.toJson(KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"core-site\": {\n" +
+ " \"hadoop.security.authentication\": \"kerberos\",\n" +
+ " \"hadoop.security.authorization\": \"true\",\n" +
+ " \"hadoop.proxyuser.HTTP.groups\": \"${hadoop-env/proxyuser_group}\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"some-site\": {\n" +
+ " \"property.unchanged\": \"value 1\",\n" +
+ " \"property.added\": \"added value\",\n" +
+ " \"property.altered\": \"custom value\",\n" +
+ " \"property.changed.in.new\": \"new value\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ "}\n").toMap()),
+ GSON.toJson(updatedUserValue.toMap()));
+ }
+
+ @Test
+ public void testUpdateAuthToLocalRules() throws AmbariException {
+ KerberosDescriptor oldValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"auth_to_local_properties\" : [\n" +
+ " \"core-site/hadoop.security.auth_to_local\",\n" +
+ " \"some-site/to.be.removed\"\n" +
+ " ]\n" +
+ "}\n");
+
+ KerberosDescriptor newValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"auth_to_local_properties\" : [\n" +
+ " \"core-site/hadoop.security.auth_to_local\",\n" +
+ " \"some-site/to.be.added\"\n" +
+ " ]\n" +
+ "}\n");
+
+ KerberosDescriptor userValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"auth_to_local_properties\" : [\n" +
+ " \"core-site/hadoop.security.auth_to_local\",\n" +
+ " \"some-site/added.by.user\"\n" +
+ " ]\n" +
+ "}\n");
+
+ KerberosDescriptor updatedUserValue = KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(
+ oldValue,
+ newValue,
+ userValue);
+
+ Assert.assertEquals(
+ KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"auth_to_local_properties\" : [\n" +
+ " \"core-site/hadoop.security.auth_to_local\",\n" +
+ " \"some-site/to.be.added\",\n" +
+ " \"some-site/added.by.user\"\n" +
+ " ]\n" +
+ "}\n"),
+ updatedUserValue);
+ }
+
+ @Test
+ public void testUpdateServices() throws AmbariException {
+ KerberosDescriptor oldValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"services\": [\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"secondary_namenode_nn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"SECONDARY_NAMENODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"HDFS_CLIENT\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.datanode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"dn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"datanode_dn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/dn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.datanode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"hdfs-site\": {\n" +
+ " \"dfs.datanode.address\": \"0.0.0.0:1019\",\n" +
+ " \"dfs.datanode.http.address\": \"0.0.0.0:1022\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"DATANODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/nfs.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nfs/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"nfsgateway\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nfs.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/nfs.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NFS_GATEWAY\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"jn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"journalnode_jn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/jn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"JOURNALNODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hadoop-env/hdfs_principal_name\",\n" +
+ " \"type\": \"user\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"${hadoop-env/hdfs_user}-${cluster_name|toLower()}@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"hdfs\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/hdfs.headless.keytab\",\n" +
+ " \"configuration\": \"hadoop-env/hdfs_user_keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"namenode_nn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"hdfs-site\": {\n" +
+ " \"dfs.block.access.token.enable\": \"true\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NAMENODE\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.web.authentication.kerberos.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"hdfs-site/dfs.web.authentication.kerberos.keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"/smokeuser\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"auth_to_local_properties\": [\n" +
+ " \"core-site/hadoop.security.auth_to_local\"\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"core-site\": {\n" +
+ " \"hadoop.security.authorization\": \"true\",\n" +
+ " \"hadoop.security.authentication\": \"kerberos\",\n" +
+ " \"hadoop.proxyuser.HTTP.groups\": \"${hadoop-env/proxyuser_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"HDFS\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"components\" : [\n" +
+ " {\n" +
+ " \"name\" : \"OLD_SERVICE_CLIENT\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\" : [\n" +
+ " {\n" +
+ " \"name\" : \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\" : \"OLD_SERVICE_FOOBAR\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\" : [\n" +
+ " {\n" +
+ " \"name\" : \"/HDFS/NAMENODE/hdfs\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\" : \"/HIVE/HIVE_SERVER/hive_server_hive\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\" : \"OLD_SERVICE_SERVER\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"identities\" : [\n" +
+ " {\n" +
+ " \"name\" : \"/smokeuser\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\" : \"OLD_SERVICE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${atlas-env/metadata_user}\",\n" +
+ " \"value\": \"atlas/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"atlas\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${atlas-env/metadata_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/atlas.service.keytab\",\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"application-properties/atlas.http.authentication.kerberos.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": \"HTTP/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"/spnego\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"application-properties/atlas.http.authentication.kerberos.keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"ATLAS_SERVER\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"auth_to_local_properties\": [\n" +
+ " \"application-properties/atlas.http.authentication.kerberos.name.rules|new_lines_escaped\"\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"application-properties\": {\n" +
+ " \"atlas.authentication.method\": \"kerberos\",\n" +
+ " \"atlas.http.authentication.enabled\": \"true\",\n" +
+ " \"atlas.http.authentication.type\": \"kerberos\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"ATLAS\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"name\": \"EXISTING_SERVICE_CLIENT\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"/HIVE/HIVE_SERVER/hive_server_hive\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"EXISTING_SERVICE_SERVER\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"EXISTING_SERVICE_ORIG_SERVER\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/smokeuser\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"EXISTING_SERVICE\"\n" +
+ " }\n" +
+ " ]\n" +
+ "}\n");
+
+ KerberosDescriptor newValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"services\": [\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"application-properties/atlas.jaas.KafkaClient.option.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${atlas-env/metadata_user}\",\n" +
+ " \"value\": \"atlas/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"atlas\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${atlas-env/metadata_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/atlas.service.keytab\",\n" +
+ " \"configuration\": \"application-properties/atlas.jaas.KafkaClient.option.keyTab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"reference\": \"/ATLAS/ATLAS_SERVER/atlas\",\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"atlas_auth\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.method.kerberos.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": \"HTTP/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"/spnego\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.method.kerberos.keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"reference\": \"/ATLAS/ATLAS_SERVER/atlas\",\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"ranger-atlas-audit/xasecure.audit.jaas.Client.option.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"ranger_atlas_audit\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"ranger-atlas-audit/xasecure.audit.jaas.Client.option.keyTab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"ATLAS_SERVER\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"auth_to_local_properties\": [\n" +
+ " \"application-properties/atlas.authentication.method.kerberos.name.rules|new_lines_escaped\"\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"ranger-atlas-audit\": {\n" +
+ " \"xasecure.audit.jaas.Client.loginModuleControlFlag\": \"required\",\n" +
+ " \"xasecure.audit.jaas.Client.option.serviceName\": \"solr\",\n" +
+ " \"xasecure.audit.jaas.Client.loginModuleName\": \"com.sun.security.auth.module.Krb5LoginModule\",\n" +
+ " \"xasecure.audit.jaas.Client.option.useKeyTab\": \"true\",\n" +
+ " \"xasecure.audit.jaas.Client.option.storeKey\": \"false\",\n" +
+ " \"xasecure.audit.destination.solr.force.use.inmemory.jaas.config\": \"true\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"application-properties\": {\n" +
+ " \"atlas.kafka.security.protocol\": \"PLAINTEXTSASL\",\n" +
+ " \"atlas.jaas.KafkaClient.option.storeKey\": \"true\",\n" +
+ " \"atlas.solr.kerberos.enable\": \"true\",\n" +
+ " \"atlas.jaas.KafkaClient.loginModuleControlFlag\": \"required\",\n" +
+ " \"atlas.authentication.method.kerberos\": \"true\",\n" +
+ " \"atlas.jaas.KafkaClient.option.useKeyTab\": \"true\",\n" +
+ " \"atlas.kafka.sasl.kerberos.service.name\": \"${kafka-env/kafka_user}\",\n" +
+ " \"atlas.jaas.KafkaClient.loginModuleName\": \"com.sun.security.auth.module.Krb5LoginModule\",\n" +
+ " \"atlas.jaas.KafkaClient.option.serviceName\": \"${kafka-env/kafka_user}\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"ATLAS\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"secondary_namenode_nn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"SECONDARY_NAMENODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"HDFS_CLIENT\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.datanode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"dn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"datanode_dn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/dn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.datanode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"hdfs-site\": {\n" +
+ " \"dfs.datanode.address\": \"0.0.0.0:1019\",\n" +
+ " \"dfs.datanode.http.address\": \"0.0.0.0:1022\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"DATANODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/nfs.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nfs/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"nfsgateway\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nfs.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/nfs.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NFS_GATEWAY\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"jn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"journalnode_jn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/jn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"JOURNALNODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hadoop-env/hdfs_principal_name\",\n" +
+ " \"type\": \"user\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"${hadoop-env/hdfs_user}-${cluster_name|toLower()}@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"hdfs\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/hdfs.headless.keytab\",\n" +
+ " \"configuration\": \"hadoop-env/hdfs_user_keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"namenode_nn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"ranger-hdfs-audit/xasecure.audit.jaas.Client.option.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/HDFS/NAMENODE/namenode_nn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"ranger-hdfs-audit/xasecure.audit.jaas.Client.option.keyTab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"hdfs-site\": {\n" +
+ " \"dfs.block.access.token.enable\": \"true\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NAMENODE\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.web.authentication.kerberos.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"hdfs-site/dfs.web.authentication.kerberos.keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"/smokeuser\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"auth_to_local_properties\": [\n" +
+ " \"core-site/hadoop.security.auth_to_local\"\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"ranger-hdfs-audit\": {\n" +
+ " \"xasecure.audit.jaas.Client.loginModuleControlFlag\": \"required\",\n" +
+ " \"xasecure.audit.jaas.Client.option.serviceName\": \"solr\",\n" +
+ " \"xasecure.audit.jaas.Client.loginModuleName\": \"com.sun.security.auth.module.Krb5LoginModule\",\n" +
+ " \"xasecure.audit.jaas.Client.option.useKeyTab\": \"true\",\n" +
+ " \"xasecure.audit.jaas.Client.option.storeKey\": \"false\",\n" +
+ " \"xasecure.audit.destination.solr.force.use.inmemory.jaas.config\": \"true\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"core-site\": {\n" +
+ " \"hadoop.security.authorization\": \"true\",\n" +
+ " \"hadoop.security.authentication\": \"kerberos\",\n" +
+ " \"hadoop.proxyuser.HTTP.groups\": \"${hadoop-env/proxyuser_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"HDFS\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"name\": \"NEW_SERVICE_CLIENT\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"/HIVE/HIVE_SERVER/hive_server_hive\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NEW_SERVICE_FOO_BAR\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NEW_SERVICE_SERVER\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/smokeuser\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NEW_SERVICE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"name\": \"EXISTING_SERVICE_CLIENT\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"EXISTING_SERVICE_SERVER\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"EXISTING_SERVICE_NEW_SERVER\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/smokeuser\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"/HIVE/HIVE_SERVER/hive_server_hive\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"EXISTING_SERVICE\"\n" +
+ " }\n" +
+ " ]\n" +
+ "}\n");
+
+ KerberosDescriptor userValue = KERBEROS_DESCRIPTOR_FACTORY.createInstance(oldValue.toMap());
+
+ KerberosDescriptor updatedUserValue = KerberosDescriptorUpdateHelper.updateUserKerberosDescriptor(
+ oldValue,
+ newValue,
+ userValue);
+
+ Assert.assertEquals(
+ GSON.toJson(KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"services\": [\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"application-properties/atlas.jaas.KafkaClient.option.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${atlas-env/metadata_user}\",\n" +
+ " \"value\": \"atlas/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"atlas\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${atlas-env/metadata_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/atlas.service.keytab\",\n" +
+ " \"configuration\": \"application-properties/atlas.jaas.KafkaClient.option.keyTab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.method.kerberos.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": \"HTTP/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"/spnego\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.method.kerberos.keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"ATLAS_SERVER\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"auth_to_local_properties\": [\n" +
+ " \"application-properties/atlas.authentication.method.kerberos.name.rules|new_lines_escaped\"\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"application-properties\": {\n" +
+ " \"atlas.kafka.security.protocol\": \"PLAINTEXTSASL\",\n" +
+ " \"atlas.jaas.KafkaClient.option.storeKey\": \"true\",\n" +
+ " \"atlas.solr.kerberos.enable\": \"true\",\n" +
+ " \"atlas.jaas.KafkaClient.loginModuleControlFlag\": \"required\",\n" +
+ " \"atlas.authentication.method.kerberos\": \"true\",\n" +
+ " \"atlas.jaas.KafkaClient.option.useKeyTab\": \"true\",\n" +
+ " \"atlas.kafka.sasl.kerberos.service.name\": \"${kafka-env/kafka_user}\",\n" +
+ " \"atlas.jaas.KafkaClient.loginModuleName\": \"com.sun.security.auth.module.Krb5LoginModule\",\n" +
+ " \"atlas.jaas.KafkaClient.option.serviceName\": \"${kafka-env/kafka_user}\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"ATLAS\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"secondary_namenode_nn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"SECONDARY_NAMENODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"HDFS_CLIENT\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.datanode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"dn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"datanode_dn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/dn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.datanode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"hdfs-site\": {\n" +
+ " \"dfs.datanode.address\": \"0.0.0.0:1019\",\n" +
+ " \"dfs.datanode.http.address\": \"0.0.0.0:1022\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"DATANODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/nfs.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nfs/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"nfsgateway\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nfs.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/nfs.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NFS_GATEWAY\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"jn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"journalnode_jn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/jn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"JOURNALNODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hadoop-env/hdfs_principal_name\",\n" +
+ " \"type\": \"user\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"${hadoop-env/hdfs_user}-${cluster_name|toLower()}@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"hdfs\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/hdfs.headless.keytab\",\n" +
+ " \"configuration\": \"hadoop-env/hdfs_user_keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"namenode_nn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"hdfs-site\": {\n" +
+ " \"dfs.block.access.token.enable\": \"true\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NAMENODE\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.web.authentication.kerberos.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"hdfs-site/dfs.web.authentication.kerberos.keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"/smokeuser\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"auth_to_local_properties\": [\n" +
+ " \"core-site/hadoop.security.auth_to_local\"\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"core-site\": {\n" +
+ " \"hadoop.security.authorization\": \"true\",\n" +
+ " \"hadoop.security.authentication\": \"kerberos\",\n" +
+ " \"hadoop.proxyuser.HTTP.groups\": \"${hadoop-env/proxyuser_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"HDFS\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"name\": \"EXISTING_SERVICE_CLIENT\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"EXISTING_SERVICE_SERVER\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/smokeuser\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"EXISTING_SERVICE\"\n" +
+ " }\n" +
+ " ]\n" +
+ "}\n").toMap()),
+ GSON.toJson(updatedUserValue.toMap()));
+
+ // Test that merging the default (stack) Kerberos descriptor with the updated user-specified
+ // Kerberos descriptor yields the expected composite Kerberos descriptor.
+ newValue.update(updatedUserValue);
+
+ Assert.assertEquals(
+ GSON.toJson(KERBEROS_DESCRIPTOR_FACTORY.createInstance(
+ "{\n" +
+ " \"services\": [\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"application-properties/atlas.jaas.KafkaClient.option.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${atlas-env/metadata_user}\",\n" +
+ " \"value\": \"atlas/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"atlas\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${atlas-env/metadata_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/atlas.service.keytab\",\n" +
+ " \"configuration\": \"application-properties/atlas.jaas.KafkaClient.option.keyTab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"reference\": \"/ATLAS/ATLAS_SERVER/atlas\",\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"atlas_auth\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.method.kerberos.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": \"HTTP/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"/spnego\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"application-properties/atlas.authentication.method.kerberos.keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"reference\": \"/ATLAS/ATLAS_SERVER/atlas\",\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"ranger-atlas-audit/xasecure.audit.jaas.Client.option.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"ranger_atlas_audit\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"ranger-atlas-audit/xasecure.audit.jaas.Client.option.keyTab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"ATLAS_SERVER\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"auth_to_local_properties\": [\n" +
+ " \"application-properties/atlas.authentication.method.kerberos.name.rules|new_lines_escaped\"\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"ranger-atlas-audit\": {\n" +
+ " \"xasecure.audit.jaas.Client.loginModuleControlFlag\": \"required\",\n" +
+ " \"xasecure.audit.jaas.Client.option.serviceName\": \"solr\",\n" +
+ " \"xasecure.audit.jaas.Client.loginModuleName\": \"com.sun.security.auth.module.Krb5LoginModule\",\n" +
+ " \"xasecure.audit.jaas.Client.option.useKeyTab\": \"true\",\n" +
+ " \"xasecure.audit.jaas.Client.option.storeKey\": \"false\",\n" +
+ " \"xasecure.audit.destination.solr.force.use.inmemory.jaas.config\": \"true\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"application-properties\": {\n" +
+ " \"atlas.kafka.security.protocol\": \"PLAINTEXTSASL\",\n" +
+ " \"atlas.jaas.KafkaClient.option.storeKey\": \"true\",\n" +
+ " \"atlas.solr.kerberos.enable\": \"true\",\n" +
+ " \"atlas.jaas.KafkaClient.loginModuleControlFlag\": \"required\",\n" +
+ " \"atlas.authentication.method.kerberos\": \"true\",\n" +
+ " \"atlas.jaas.KafkaClient.option.useKeyTab\": \"true\",\n" +
+ " \"atlas.kafka.sasl.kerberos.service.name\": \"${kafka-env/kafka_user}\",\n" +
+ " \"atlas.jaas.KafkaClient.loginModuleName\": \"com.sun.security.auth.module.Krb5LoginModule\",\n" +
+ " \"atlas.jaas.KafkaClient.option.serviceName\": \"${kafka-env/kafka_user}\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"ATLAS\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"secondary_namenode_nn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.secondary.namenode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"SECONDARY_NAMENODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"HDFS_CLIENT\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.datanode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"dn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"datanode_dn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/dn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.datanode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"hdfs-site\": {\n" +
+ " \"dfs.datanode.address\": \"0.0.0.0:1019\",\n" +
+ " \"dfs.datanode.http.address\": \"0.0.0.0:1022\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"DATANODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/nfs.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nfs/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"nfsgateway\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nfs.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/nfs.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NFS_GATEWAY\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"jn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"journalnode_jn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/jn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.journalnode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"JOURNALNODE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hadoop-env/hdfs_principal_name\",\n" +
+ " \"type\": \"user\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"${hadoop-env/hdfs_user}-${cluster_name|toLower()}@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"hdfs\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/hdfs.headless.keytab\",\n" +
+ " \"configuration\": \"hadoop-env/hdfs_user_keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.kerberos.principal\",\n" +
+ " \"type\": \"service\",\n" +
+ " \"local_username\": \"${hadoop-env/hdfs_user}\",\n" +
+ " \"value\": \"nn/_HOST@${realm}\"\n" +
+ " },\n" +
+ " \"name\": \"namenode_nn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": \"r\",\n" +
+ " \"name\": \"${hadoop-env/hdfs_user}\"\n" +
+ " },\n" +
+ " \"file\": \"${keytab_dir}/nn.service.keytab\",\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.keytab.file\",\n" +
+ " \"group\": {\n" +
+ " \"access\": \"\",\n" +
+ " \"name\": \"${cluster-env/user_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.namenode.kerberos.internal.spnego.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"ranger-hdfs-audit/xasecure.audit.jaas.Client.option.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/HDFS/NAMENODE/namenode_nn\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"ranger-hdfs-audit/xasecure.audit.jaas.Client.option.keyTab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"hdfs-site\": {\n" +
+ " \"dfs.block.access.token.enable\": \"true\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NAMENODE\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"principal\": {\n" +
+ " \"configuration\": \"hdfs-site/dfs.web.authentication.kerberos.principal\",\n" +
+ " \"type\": null,\n" +
+ " \"local_username\": null,\n" +
+ " \"value\": null\n" +
+ " },\n" +
+ " \"name\": \"/spnego\",\n" +
+ " \"keytab\": {\n" +
+ " \"owner\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " },\n" +
+ " \"file\": null,\n" +
+ " \"configuration\": \"hdfs-site/dfs.web.authentication.kerberos.keytab\",\n" +
+ " \"group\": {\n" +
+ " \"access\": null,\n" +
+ " \"name\": null\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"/smokeuser\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"auth_to_local_properties\": [\n" +
+ " \"core-site/hadoop.security.auth_to_local\"\n" +
+ " ],\n" +
+ " \"configurations\": [\n" +
+ " {\n" +
+ " \"ranger-hdfs-audit\": {\n" +
+ " \"xasecure.audit.jaas.Client.loginModuleControlFlag\": \"required\",\n" +
+ " \"xasecure.audit.jaas.Client.option.serviceName\": \"solr\",\n" +
+ " \"xasecure.audit.jaas.Client.loginModuleName\": \"com.sun.security.auth.module.Krb5LoginModule\",\n" +
+ " \"xasecure.audit.jaas.Client.option.useKeyTab\": \"true\",\n" +
+ " \"xasecure.audit.jaas.Client.option.storeKey\": \"false\",\n" +
+ " \"xasecure.audit.destination.solr.force.use.inmemory.jaas.config\": \"true\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"core-site\": {\n" +
+ " \"hadoop.security.authorization\": \"true\",\n" +
+ " \"hadoop.security.authentication\": \"kerberos\",\n" +
+ " \"hadoop.proxyuser.HTTP.groups\": \"${hadoop-env/proxyuser_group}\"\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"HDFS\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"name\": \"NEW_SERVICE_CLIENT\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"/HIVE/HIVE_SERVER/hive_server_hive\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NEW_SERVICE_FOO_BAR\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NEW_SERVICE_SERVER\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/smokeuser\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"NEW_SERVICE\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"components\": [\n" +
+ " {\n" +
+ " \"name\": \"EXISTING_SERVICE_CLIENT\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"EXISTING_SERVICE_SERVER\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/HDFS/NAMENODE/hdfs\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"EXISTING_SERVICE_NEW_SERVER\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"identities\": [\n" +
+ " {\n" +
+ " \"name\": \"/smokeuser\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"name\": \"/HIVE/HIVE_SERVER/hive_server_hive\"\n" +
+ " }\n" +
+ " ],\n" +
+ " \"name\": \"EXISTING_SERVICE\"\n" +
+ " }\n" +
+ " ]\n" +
+ "}\n").toMap()),
+ GSON.toJson(newValue.toMap()));
+ }
+}
\ No newline at end of file
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java
index 874da31..ef1c7bb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosIdentityDescriptorTest.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -24,9 +24,9 @@
import org.junit.Test;
import java.util.Arrays;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
+import java.util.TreeMap;
public class KerberosIdentityDescriptorTest {
public static final String JSON_VALUE =
@@ -40,50 +40,44 @@
" \"when\": {\"contains\" : [\"services\", \"HIVE\"]}" +
"}";
- public static final Map<String, Object> MAP_VALUE =
- new HashMap<String, Object>() {
- {
- put("name", "identity_1");
- put("principal", KerberosPrincipalDescriptorTest.MAP_VALUE);
- put("keytab", KerberosKeytabDescriptorTest.MAP_VALUE);
- put("password", "secret");
- }
- };
+ static final Map<String, Object> MAP_VALUE;
+ static final Map<String, Object> MAP_VALUE_ALT;
+ static final Map<String, Object> MAP_VALUE_REFERENCE;
- public static final Map<String, Object> MAP_VALUE_ALT =
- new HashMap<String, Object>() {
- {
- put("name", "identity_2");
- put("principal", KerberosPrincipalDescriptorTest.MAP_VALUE);
- put("keytab", KerberosKeytabDescriptorTest.MAP_VALUE);
- put("password", "secret2");
- }
- };
+ static {
+ MAP_VALUE = new TreeMap<String, Object>();
+ MAP_VALUE.put("name", "identity_1");
+ MAP_VALUE.put("principal", KerberosPrincipalDescriptorTest.MAP_VALUE);
+ MAP_VALUE.put("keytab", KerberosKeytabDescriptorTest.MAP_VALUE);
+ MAP_VALUE.put("password", "secret");
- public static final Map<String, Object> MAP_VALUE_REFERENCE =
- new HashMap<String, Object>() {
- {
- put("name", "shared_identity");
- put("reference", "/shared");
- put("keytab", new HashMap<String, Object>() {
- {
- put("file", "/home/user/me/subject.service.keytab");
+ MAP_VALUE_ALT = new TreeMap<String, Object>();
+ MAP_VALUE_ALT.put("name", "identity_2");
+ MAP_VALUE_ALT.put("principal", KerberosPrincipalDescriptorTest.MAP_VALUE);
+ MAP_VALUE_ALT.put("keytab", KerberosKeytabDescriptorTest.MAP_VALUE);
+ MAP_VALUE_ALT.put("password", "secret2");
- put("owner", new HashMap<String, Object>() {{
- put("name", "me");
- put("access", "rw");
- }});
+ TreeMap<String, Object> ownerMap = new TreeMap<String, Object>();
+ ownerMap.put("name", "me");
+ ownerMap.put("access", "rw");
- put("group", new HashMap<String, Object>() {{
- put("name", "nobody");
- put("access", "");
- }});
+ TreeMap<String, Object> groupMap = new TreeMap<String, Object>();
+ groupMap.put("name", "nobody");
+ groupMap.put("access", "");
- put("configuration", "service-site/me.component.keytab.file");
- }
- });
- }
- };
+
+ TreeMap<String, Object> keytabMap = new TreeMap<String, Object>();
+ keytabMap.put("file", "/home/user/me/subject.service.keytab");
+ keytabMap.put("owner", ownerMap);
+ keytabMap.put("group", groupMap);
+ keytabMap.put("configuration", "service-site/me.component.keytab.file");
+
+ MAP_VALUE_REFERENCE = new TreeMap<String, Object>();
+ MAP_VALUE_REFERENCE.put("name", "shared_identity");
+ MAP_VALUE_REFERENCE.put("reference", "/shared");
+ MAP_VALUE_REFERENCE.put("keytab", keytabMap);
+ }
+
public static void validateFromJSON(KerberosIdentityDescriptor identityDescriptor) {
Assert.assertNotNull(identityDescriptor);
@@ -161,7 +155,7 @@
public void testShouldInclude() {
KerberosIdentityDescriptor identityDescriptor = createFromJSON();
- Map<String, Object> context = new HashMap<String, Object>();
+ Map<String, Object> context = new TreeMap<String, Object>();
context.put("services", new HashSet<String>(Arrays.asList("HIVE", "HDFS", "ZOOKEEPER")));
Assert.assertTrue(identityDescriptor.shouldInclude(context));
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java
index c10d106..79350eb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosKeytabDescriptorTest.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -23,8 +23,8 @@
import org.apache.ambari.server.AmbariException;
import org.junit.Test;
-import java.util.HashMap;
import java.util.Map;
+import java.util.TreeMap;
public class KerberosKeytabDescriptorTest {
public static final String JSON_VALUE =
@@ -41,24 +41,23 @@
" \"configuration\": \"service-site/service.component.keytab.file\"" +
"}";
- public static final Map<String, Object> MAP_VALUE =
- new HashMap<String, Object>() {
- {
- put("file", "/etc/security/keytabs/subject.service.keytab");
+ public static final Map<String, Object> MAP_VALUE;
- put("owner", new HashMap<String, Object>() {{
- put("name", "root");
- put("access", "rw");
- }});
+ static {
+ TreeMap<String, Object> ownerMap = new TreeMap<String, Object>();
+ ownerMap.put("name", "root");
+ ownerMap.put("access", "rw");
- put("group", new HashMap<String, Object>() {{
- put("name", "hadoop");
- put("access", "r");
- }});
+ TreeMap<String, Object> groupMap = new TreeMap<String, Object>();
+ groupMap.put("name", "hadoop");
+ groupMap.put("access", "r");
- put("configuration", "service-site/service2.component.keytab.file");
- }
- };
+ MAP_VALUE = new TreeMap<String, Object>();
+ MAP_VALUE.put("file", "/etc/security/keytabs/subject.service.keytab");
+ MAP_VALUE.put("owner", ownerMap);
+ MAP_VALUE.put("group", groupMap);
+ MAP_VALUE.put("configuration", "service-site/service2.component.keytab.file");
+ }
public static void validateFromJSON(KerberosKeytabDescriptor keytabDescriptor) {
Assert.assertNotNull(keytabDescriptor);
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java
index 5c249e2..635cc30 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosPrincipalDescriptorTest.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -39,22 +39,20 @@
"\"value\": \"serviceOther/_HOST@_REALM\"" +
"}";
- public static final Map<String, Object> MAP_VALUE =
- new HashMap<String, Object>() {
- {
- put("value", "user@_REALM");
- put("configuration", "service-site/service.component.kerberos.https.principal");
- put("type", "user");
- put("local_username", null);
- }
- };
+ public static final Map<String, Object> MAP_VALUE;
+ public static final Map<String, Object> MAP_VALUE_SPARSE;
- public static final Map<String, Object> MAP_VALUE_SPARSE =
- new HashMap<String, Object>() {
- {
- put("value", "userOther@_REALM");
- }
- };
+ static {
+ MAP_VALUE = new TreeMap<String, Object>();
+ MAP_VALUE.put("value", "user@_REALM");
+ MAP_VALUE.put("configuration", "service-site/service.component.kerberos.https.principal");
+ MAP_VALUE.put("type", "user");
+ MAP_VALUE.put("local_username", null);
+
+ MAP_VALUE_SPARSE = new TreeMap<String, Object>();
+ MAP_VALUE_SPARSE.put("value", "userOther@_REALM");
+ }
+
public static void validateFromJSON(KerberosPrincipalDescriptor principalDescriptor) {
Assert.assertNotNull(principalDescriptor);
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java
index e1af515..9896317 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosServiceDescriptorTest.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -17,6 +17,7 @@
*/
package org.apache.ambari.server.state.kerberos;
+import com.google.gson.Gson;
import junit.framework.Assert;
import org.apache.ambari.server.AmbariException;
import org.junit.Test;
@@ -25,11 +26,12 @@
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
-import java.util.HashMap;
+import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.TreeMap;
import java.util.TreeSet;
public class KerberosServiceDescriptorTest {
@@ -98,33 +100,35 @@
"]" +
"}";
- public static final Map<String, Object> MAP_VALUE =
- new HashMap<String, Object>() {
- {
- put("name", "A_DIFFERENT_SERVICE_NAME");
- put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), new ArrayList<Object>() {{
- add(KerberosIdentityDescriptorTest.MAP_VALUE);
- }});
- put(AbstractKerberosDescriptor.Type.COMPONENT.getDescriptorPluralName(), new ArrayList<Object>() {{
- add(KerberosComponentDescriptorTest.MAP_VALUE);
- }});
- put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), new ArrayList<Map<String, Object>>() {{
- add(new HashMap<String, Object>() {
- {
- put("service-site", new HashMap<String, String>() {
- {
- put("service.property1", "red");
- put("service.property", "green");
- }
- });
- }
- });
- }});
- put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), new ArrayList<String>() {{
- add("service.name.rules2");
- }});
- }
- };
+ public static final Map<String, Object> MAP_VALUE;
+
+ static {
+ Map<String, Object> identitiesMap = new TreeMap<String, Object>();
+ identitiesMap.put((String) KerberosIdentityDescriptorTest.MAP_VALUE.get("name"), KerberosIdentityDescriptorTest.MAP_VALUE);
+
+ Map<String, Object> componentsMap = new TreeMap<String, Object>();
+ componentsMap.put((String) KerberosComponentDescriptorTest.MAP_VALUE.get("name"), KerberosComponentDescriptorTest.MAP_VALUE);
+
+ Map<String, Object> serviceSiteProperties = new TreeMap<String, Object>();
+ serviceSiteProperties.put("service.property1", "red");
+ serviceSiteProperties.put("service.property", "green");
+
+ Map<String, Map<String, Object>> serviceSiteMap = new TreeMap<String, Map<String, Object>>();
+ serviceSiteMap.put("service-site", serviceSiteProperties);
+
+ TreeMap<String, Map<String, Map<String, Object>>> configurationsMap = new TreeMap<String, Map<String, Map<String, Object>>>();
+ configurationsMap.put("service-site", serviceSiteMap);
+
+ Collection<String> authToLocalRules = new ArrayList<String>();
+ authToLocalRules.add("service.name.rules2");
+
+ MAP_VALUE = new TreeMap<String, Object>();
+ MAP_VALUE.put("name", "A_DIFFERENT_SERVICE_NAME");
+ MAP_VALUE.put(AbstractKerberosDescriptor.Type.IDENTITY.getDescriptorPluralName(), identitiesMap.values());
+ MAP_VALUE.put(AbstractKerberosDescriptor.Type.COMPONENT.getDescriptorPluralName(), componentsMap.values());
+ MAP_VALUE.put(AbstractKerberosDescriptor.Type.CONFIGURATION.getDescriptorPluralName(), configurationsMap.values());
+ MAP_VALUE.put(AbstractKerberosDescriptor.Type.AUTH_TO_LOCAL_PROPERTY.getDescriptorPluralName(), authToLocalRules);
+ }
private static final KerberosServiceDescriptorFactory KERBEROS_SERVICE_DESCRIPTOR_FACTORY = new KerberosServiceDescriptorFactory();
@@ -356,9 +360,10 @@
@Test
public void testToMap() throws AmbariException {
+ Gson gson = new Gson();
KerberosServiceDescriptor descriptor = createFromMap();
Assert.assertNotNull(descriptor);
- Assert.assertEquals(MAP_VALUE, descriptor.toMap());
+ Assert.assertEquals(gson.toJson(MAP_VALUE), gson.toJson(descriptor.toMap()));
}
@Test
@@ -376,6 +381,7 @@
/**
* Test a JSON object in which only only a Service and configs are defined, but no Components.
+ *
* @throws AmbariException
*/
@Test
@@ -393,7 +399,9 @@
" ]" +
"}";
- HashMap<String, Object> CHANGE_NAME = new HashMap<String, Object>() {{ put("name", "A_DIFFERENT_SERVICE_NAME"); }};
+ TreeMap<String, Object> CHANGE_NAME = new TreeMap<String, Object>() {{
+ put("name", "A_DIFFERENT_SERVICE_NAME");
+ }};
KerberosServiceDescriptor serviceDescriptor = KERBEROS_SERVICE_DESCRIPTOR_FACTORY.createInstance("SERVICE_NAME", JSON_VALUE_ONLY_NAME_AND_CONFIGS);
KerberosServiceDescriptor updatedServiceDescriptor = new KerberosServiceDescriptor(CHANGE_NAME);