LENS-1506: Kerberos authentication in lens
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
index 2eb94aa..3fd5d2c 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/HiveDriver.java
@@ -57,6 +57,8 @@
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.*;
import org.apache.hive.service.rpc.thrift.TOperationHandle;
import org.apache.hive.service.rpc.thrift.TProtocolVersion;
@@ -934,7 +936,13 @@
SessionHandle hiveSession;
if (!lensToHiveSession.containsKey(sessionDbKey)) {
try {
- hiveSession = getClient().openSession(ctx.getClusterUser(), "", SESSION_CONF);
+ if (ctx.getHiveConf().getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION)
+ .equals(HiveAuthFactory.AuthTypes.KERBEROS.toString())) {
+ String user = UserGroupInformation.getLoginUser().getUserName();
+ hiveSession = getClient().openSession(user, "", SESSION_CONF);
+ } else {
+ hiveSession = getClient().openSession(ctx.getClusterUser(), "", SESSION_CONF);
+ }
lensToHiveSession.put(sessionDbKey, hiveSession);
log.info("New hive session for user: {} , lens session: {} , hive session handle: {} , driver : {}",
ctx.getClusterUser(), sessionDbKey, hiveSession.getHandleIdentifier(), getFullyQualifiedName());
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java
index 54885f7..d47cb89 100644
--- a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RemoteThriftConnection.java
@@ -18,15 +18,17 @@
*/
package org.apache.lens.driver.hive;
+
import org.apache.lens.server.api.error.LensException;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.CLIServiceClient;
-import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.thrift.RetryingThriftCLIServiceClient;
import lombok.extern.slf4j.Slf4j;
+
/**
* Connect to a remote Hive Server 2 service to run driver queries.
*/
@@ -66,12 +68,18 @@
log.info("HiveDriver connecting to HiveServer @ {}:{}",
conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST),
conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT));
- hs2Client = RetryingThriftCLIServiceClient.newRetryingCLIServiceClient(conf);
+
+ if (conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION)
+ .equals(HiveAuthFactory.AuthTypes.KERBEROS.toString())) {
+ hs2Client = RetryingThriftCLIServiceClientSasl.newRetryingCLIServiceClient(conf);
+ } else {
+ hs2Client = RetryingThriftCLIServiceClient.newRetryingCLIServiceClient(conf);
+ }
log.info("HiveDriver connected to HiveServer @ {}:{}",
conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST),
conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT));
- } catch (HiveSQLException e) {
+ } catch (Exception e) {
throw new LensException(e);
}
connected = true;
diff --git a/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RetryingThriftCLIServiceClientSasl.java b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RetryingThriftCLIServiceClientSasl.java
new file mode 100644
index 0000000..dfb0a95
--- /dev/null
+++ b/lens-driver-hive/src/main/java/org/apache/lens/driver/hive/RetryingThriftCLIServiceClientSasl.java
@@ -0,0 +1,237 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lens.driver.hive;
+
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.net.SocketException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import javax.security.sasl.Sasl;
+import javax.security.sasl.SaslException;
+
+import org.apache.lens.server.api.util.LensUtil;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.service.auth.KerberosSaslHelper;
+import org.apache.hive.service.cli.CLIServiceClient;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.ICLIService;
+import org.apache.hive.service.cli.thrift.RetryingThriftCLIServiceClient;
+import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
+import org.apache.hive.service.rpc.thrift.TCLIService;
+import org.apache.thrift.TApplicationException;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * The Class RetryingThriftCLIServiceClientSasl.
+ *
+ * This class can be used to connect to hive server using ThriftCLIServiceClient
+ * with sasl transport protocol.
+ *
+ */
+public class RetryingThriftCLIServiceClientSasl implements InvocationHandler {
+
+ public static final Logger LOG = LoggerFactory.getLogger(RetryingThriftCLIServiceClientSasl.class);
+
+ // base client to open thrift connection with client
+ private ThriftCLIServiceClient base;
+
+ // auto retry on connection failure.
+ private final int retryLimit;
+
+ // delay in retry post failure
+ private final int retryDelaySeconds;
+
+ // Hive client conf
+ private HiveConf conf;
+
+ // transport protocol to use
+ private TTransport transport;
+
+ public static class CLIServiceClientWrapperSasl extends RetryingThriftCLIServiceClient.CLIServiceClientWrapper {
+
+ public CLIServiceClientWrapperSasl(ICLIService icliService, TTransport tTransport) {
+ super(icliService, tTransport);
+ }
+ }
+
+ private RetryingThriftCLIServiceClientSasl(HiveConf conf) {
+ this.conf = conf;
+ this.retryLimit = conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_RETRY_LIMIT);
+ this.retryDelaySeconds = (int) conf.getTimeVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_RETRY_DELAY_SECONDS,
+ TimeUnit.SECONDS);
+ }
+
+ public static RetryingThriftCLIServiceClient.CLIServiceClientWrapper newRetryingCLIServiceClient(HiveConf conf)
+ throws Exception {
+ RetryingThriftCLIServiceClientSasl retryClient = new RetryingThriftCLIServiceClientSasl(conf);
+ TTransport tTransport = retryClient
+ .connectWithRetry(conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_CONNECTION_RETRY_LIMIT));
+ ICLIService cliService =
+ (ICLIService) Proxy.newProxyInstance(RetryingThriftCLIServiceClientSasl.class.getClassLoader(),
+ CLIServiceClient.class.getInterfaces(), retryClient);
+ return new RetryingThriftCLIServiceClient.CLIServiceClientWrapper(cliService, tTransport);
+ }
+
+ protected TTransport connectWithRetry(int retries) throws Exception {
+
+ TTransportException exception = null;
+
+ for (int i = 0; i < retries; i++) {
+ try {
+ return connect(conf);
+ } catch (TTransportException e) {
+ exception = e;
+ LOG.warn("Connection attempt " + i, e);
+ }
+ try {
+ Thread.sleep(retryDelaySeconds * 1000);
+ } catch (InterruptedException e) {
+ LOG.warn("Interrupted", e);
+ }
+ }
+ throw new HiveSQLException("Unable to connect after " + retries + " retries", exception);
+ }
+
+ protected synchronized TTransport connect(HiveConf conf) throws Exception {
+
+ /*
+ Can not get a renewed ugi in current thread spawned for hive client.
+ Need to explicitly refresh token with keytab.
+ This needs further investigation.
+ */
+ LensUtil.refreshLensTGT(conf);
+
+ if (transport != null && transport.isOpen()) {
+ transport.close();
+ }
+
+ String host = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST);
+ int port = conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT);
+ LOG.info("Connecting to " + host + ":" + port);
+
+ transport = new TSocket(host, port);
+ ((TSocket) transport).setTimeout((int) conf.getTimeVar(HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT,
+ TimeUnit.SECONDS) * 1000);
+
+ try {
+ ((TSocket) transport).getSocket().setKeepAlive(conf.getBoolVar(HiveConf.ConfVars.SERVER_TCP_KEEP_ALIVE));
+ } catch (SocketException e) {
+ LOG.error("Error setting keep alive to " + conf.getBoolVar(HiveConf.ConfVars.SERVER_TCP_KEEP_ALIVE), e);
+ }
+
+ try {
+ Map<String, String> saslProps = new HashMap<String, String>();
+ saslProps.put(Sasl.QOP, "auth-conf,auth-int,auth");
+ saslProps.put(Sasl.SERVER_AUTH, "true");
+
+ transport = KerberosSaslHelper.getKerberosTransport(
+ conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL), host,
+ transport, saslProps, false);
+ } catch (SaslException e) {
+ LOG.error("Error creating SASL transport", e);
+ }
+
+ TProtocol protocol = new TBinaryProtocol(transport);
+ transport.open();
+
+ LOG.info("Connected to " + host + ":" + port);
+
+ base = new ThriftCLIServiceClient(new TCLIService.Client(protocol));
+ return transport;
+ }
+
+ protected class InvocationResult {
+ final boolean success;
+ final Object result;
+ final Throwable exception;
+
+ InvocationResult(boolean success, Object result, Throwable exception) {
+ this.success = success;
+ this.result = result;
+ this.exception = exception;
+ }
+ }
+
+ protected InvocationResult invokeInternal(Method method, Object[] args) throws Throwable {
+ InvocationResult result;
+ try {
+ Object methodResult = method.invoke(base, args);
+ result = new InvocationResult(true, methodResult, null);
+ } catch (UndeclaredThrowableException e) {
+ throw e.getCause();
+ } catch (InvocationTargetException e) {
+ if (e.getCause() instanceof HiveSQLException) {
+ HiveSQLException hiveExc = (HiveSQLException) e.getCause();
+ Throwable cause = hiveExc.getCause();
+ if ((cause instanceof TApplicationException)
+ || (cause instanceof TProtocolException)
+ || (cause instanceof TTransportException)) {
+ result = new InvocationResult(false, null, hiveExc);
+ } else {
+ throw hiveExc;
+ }
+ } else {
+ throw e.getCause();
+ }
+ }
+ return result;
+ }
+
+
+ @Override
+ public Object invoke(Object o, Method method, Object[] args) throws Throwable {
+ int attempts = 0;
+
+ while (true) {
+ attempts++;
+ InvocationResult invokeResult = invokeInternal(method, args);
+ if (invokeResult.success) {
+ return invokeResult.result;
+ }
+
+ // Error because of thrift client, we have to recreate base object
+ connectWithRetry(conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_CLIENT_CONNECTION_RETRY_LIMIT));
+
+ if (attempts >= retryLimit) {
+ LOG.error(method.getName() + " failed after " + attempts + " retries.", invokeResult.exception);
+ throw invokeResult.exception;
+ }
+
+ LOG.warn("Last call ThriftCLIServiceClient." + method.getName() + " failed, attempts = " + attempts,
+ invokeResult.exception);
+ Thread.sleep(retryDelaySeconds * 1000);
+ }
+ }
+}
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java b/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
index d5273be..0e05d28 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/LensConfConstants.java
@@ -718,8 +718,23 @@
public static final String SESSION_EXPIRY_SERVICE_INTERVAL_IN_SECS = SERVER_PFX
+ "session.expiry.service.interval.secs";
+ /**
+ * Interval at which KDC login runs
+ */
+ public static final String KDC_LOGIN_SERVICE_INTERVAL_IN_MINUTES = SERVER_PFX
+ + "kdc.login.service.interval.minutes";
+
public static final int DEFAULT_SESSION_EXPIRY_SERVICE_INTERVAL_IN_SECS = 3600;
+ public static final int DEFAULT_KDC_LOGIN_SERVICE_INTERVAL_IN_MINUTES = 360;
+
+
+ /**
+ * Lens principal for kerberos authentication
+ */
+ public static final String LENS_PRINCIPAL = SERVER_PFX
+ + "principal";
+
// Statistics Store configuration keys
/**
* The Constant STATS_STORE_CLASS.
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/util/LensUtil.java b/lens-server-api/src/main/java/org/apache/lens/server/api/util/LensUtil.java
index 9d732c1..1060114 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/util/LensUtil.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/util/LensUtil.java
@@ -18,25 +18,35 @@
*/
package org.apache.lens.server.api.util;
+import java.io.File;
+import java.io.IOException;
+import java.net.InetAddress;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.util.HashMap;
import java.util.Set;
+import org.apache.lens.server.api.LensConfConstants;
import org.apache.lens.server.api.common.ConfigBasedObjectCreationFactory;
+import org.apache.commons.lang.Validate;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import lombok.NonNull;
+import lombok.extern.slf4j.Slf4j;
/**
* Utility methods for Lens
*/
+@Slf4j
public final class LensUtil {
private LensUtil() {
@@ -113,4 +123,48 @@
}
return map;
}
+
+ private static void checkIsReadable(String keytabFilePath) {
+ File keytabFile = new File(keytabFilePath);
+ if (!keytabFile.exists()) {
+ throw new IllegalArgumentException("The keytab file does not exist! " + keytabFilePath);
+ }
+
+ if (!keytabFile.isFile()) {
+ throw new IllegalArgumentException("The keytab file cannot be a directory! " + keytabFilePath);
+ }
+
+ if (!keytabFile.canRead()) {
+ throw new IllegalArgumentException("The keytab file is not readable! " + keytabFilePath);
+ }
+ }
+
+ public static void refreshLensTGT(HiveConf conf) throws IOException, IllegalArgumentException {
+
+ String principalString = conf.get(LensConfConstants.LENS_PRINCIPAL);
+
+ Validate.notEmpty(principalString,
+ "Missing required configuration property: " + LensConfConstants.LENS_PRINCIPAL);
+
+ String principal = SecurityUtil.getServerPrincipal(
+ principalString,
+ InetAddress.getLocalHost().getCanonicalHostName());
+
+ String keytabFilePath = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
+
+ Validate.notEmpty(keytabFilePath, "Missing required configuration property: "
+ + HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB.toString());
+ checkIsReadable(keytabFilePath);
+
+ Configuration hadoopConf = new Configuration();
+ hadoopConf.set("hadoop.security.authentication", "kerberos");
+
+ UserGroupInformation.setConfiguration(hadoopConf);
+
+ UserGroupInformation.loginUserFromKeytab(principal, keytabFilePath);
+
+ log.info("LensUtil : Got Kerberos ticket, keytab: {}, Lens principal: {}",
+ keytabFilePath, principal);
+
+ }
}
diff --git a/lens-server/src/main/java/org/apache/lens/server/BaseLensService.java b/lens-server/src/main/java/org/apache/lens/server/BaseLensService.java
index c30a2d7..b5248f3 100644
--- a/lens-server/src/main/java/org/apache/lens/server/BaseLensService.java
+++ b/lens-server/src/main/java/org/apache/lens/server/BaseLensService.java
@@ -172,7 +172,12 @@
SessionHandle sessionHandle;
username = UtilityMethods.removeDomain(username);
if (auth) {
- doPasswdAuth(username, password);
+ if (cliService.getHiveConf().getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)
+ .equals(HiveAuthFactory.AuthTypes.KERBEROS.toString())) {
+ log.warn("Kerberos is not yet implemented, skipping authentication....");
+ } else {
+ doPasswdAuth(username, password);
+ }
}
SessionUser sessionUser = SESSION_USER_INSTANCE_MAP.get(username);
if (sessionUser == null) {
@@ -202,20 +207,7 @@
}
String clusterUser = sessionConf.get(LensConfConstants.SESSION_CLUSTER_USER);
password = "useless";
- if (cliService.getHiveConf().getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)
- .equals(HiveAuthFactory.AuthTypes.KERBEROS.toString())
- && cliService.getHiveConf().getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
- String delegationTokenStr = null;
- try {
- delegationTokenStr = cliService.getDelegationTokenFromMetaStore(username);
- } catch (UnsupportedOperationException e) {
- // The delegation token is not applicable in the given deployment mode
- }
- sessionHandle = cliService.openSessionWithImpersonation(clusterUser, password, sessionConf,
- delegationTokenStr);
- } else {
- sessionHandle = cliService.openSession(clusterUser, password, sessionConf);
- }
+ sessionHandle = cliService.openSession(clusterUser, password, sessionConf);
} catch (Exception e) {
throw new LensException(e);
}
diff --git a/lens-server/src/main/java/org/apache/lens/server/LensServices.java b/lens-server/src/main/java/org/apache/lens/server/LensServices.java
index 903b19f..5f4a699 100644
--- a/lens-server/src/main/java/org/apache/lens/server/LensServices.java
+++ b/lens-server/src/main/java/org/apache/lens/server/LensServices.java
@@ -30,10 +30,12 @@
import org.apache.lens.api.error.ErrorCollection;
import org.apache.lens.api.error.ErrorCollectionFactory;
+import org.apache.lens.server.api.LensConfConstants;
import org.apache.lens.server.api.ServiceProvider;
import org.apache.lens.server.api.error.LensException;
import org.apache.lens.server.api.events.LensEventService;
import org.apache.lens.server.api.metrics.MetricsService;
+import org.apache.lens.server.api.util.LensUtil;
import org.apache.lens.server.metrics.MetricsServiceImpl;
import org.apache.lens.server.model.LogSegregationContext;
import org.apache.lens.server.model.MappedDiagnosticLogSegregationContext;
@@ -49,6 +51,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.CompositeService;
import org.apache.hive.service.Service;
+import org.apache.hive.service.auth.HiveAuthFactory;
import org.apache.hive.service.cli.CLIService;
import lombok.Getter;
@@ -104,6 +107,11 @@
*/
public static final String SERVER_STATE_PERSISTENCE_ERRORS = "total-server-state-persistence-errors";
+ /**
+ * The Constant KDC_LOGIN_ERRORS.
+ */
+ public static final String KDC_LOGIN_ERRORS = "total-kdc-login-errors";
+
/** The service mode. */
@Getter
@Setter
@@ -112,6 +120,9 @@
/** Scheduled Executor which persists the server state periodically*/
private ScheduledExecutorService serverSnapshotScheduler;
+ /** Scheduled Executor to refresh kerberos tgt*/
+ private ScheduledExecutorService kerberosTgtScheduler;
+
/* Lock for synchronizing persistence of LensServices state */
private final Object statePersistenceLock = new Object();
@@ -122,6 +133,7 @@
private long serverStatePersistenceInterval;
+ private long serverKdcLoginInterval;
@Getter
private final LogSegregationContext logSegregationContext;
@@ -277,12 +289,58 @@
}
}
+ /**
+ * Set up KDC login thread.
+ *
+ */
+ private void enableKDCLoginThread() {
+
+ try {
+ LensUtil.refreshLensTGT(conf);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+
+ serverKdcLoginInterval = conf.getInt(LensConfConstants.KDC_LOGIN_SERVICE_INTERVAL_IN_MINUTES,
+ LensConfConstants.DEFAULT_KDC_LOGIN_SERVICE_INTERVAL_IN_MINUTES);
+
+ ThreadFactory factory = new BasicThreadFactory.Builder()
+ .namingPattern("Lens-server-refresh-tgt-Thread-%d")
+ .daemon(true)
+ .priority(Thread.NORM_PRIORITY)
+ .build();
+ kerberosTgtScheduler = Executors.newSingleThreadScheduledExecutor(factory);
+ kerberosTgtScheduler.scheduleWithFixedDelay(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ final String runId = UUID.randomUUID().toString();
+ logSegregationContext.setLogSegregationId(runId);
+ LensUtil.refreshLensTGT(conf);
+ log.info("KDC login successful for lens.");
+ } catch (Exception e) {
+ incrCounter(KDC_LOGIN_ERRORS);
+ log.error("Unable to login to KDC...", e);
+ }
+ }
+ }, 0, serverKdcLoginInterval, TimeUnit.MINUTES);
+ }
+
+
/*
* (non-Javadoc)
*
* @see org.apache.hive.service.CompositeService#start()
*/
public synchronized void start() {
+
+ if (cliService.getHiveConf().getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION)
+ .equals(HiveAuthFactory.AuthTypes.KERBEROS.toString())) {
+
+ enableKDCLoginThread();
+ log.info("Enabled kerberos tgt login at {} minutes interval", serverKdcLoginInterval);
+ }
+
if (getServiceState() != STATE.STARTED) {
super.start();
}
@@ -310,9 +368,12 @@
}
}
}, serverStatePersistenceInterval, serverStatePersistenceInterval, TimeUnit.MILLISECONDS);
+
log.info("Enabled periodic persistence of lens server state at {} millis interval",
- serverStatePersistenceInterval);
+ serverStatePersistenceInterval);
+
}
+
}
/**
diff --git a/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java b/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
index 31ac358..d1ef716 100644
--- a/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
+++ b/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
@@ -335,6 +335,7 @@
this.databaseResourceService = new DatabaseResourceService(DatabaseResourceService.NAME);
addService(this.databaseResourceService);
this.conf = hiveConf;
+
super.init(hiveConf);
}
diff --git a/lens-server/src/main/resources/lensserver-default.xml b/lens-server/src/main/resources/lensserver-default.xml
index 28b1db2..bef8251 100644
--- a/lens-server/src/main/resources/lensserver-default.xml
+++ b/lens-server/src/main/resources/lensserver-default.xml
@@ -965,4 +965,17 @@
"lens.cube.metastore.enable.datacompleteness.check" is set.
</description>
</property>
+
+ <property>
+ <name>lens.server.kdc.login.service.interval.minutes</name>
+ <value>360</value>
+ <description>Interval in minutes at which the Kerberos auth token (TGT) is refreshed, used when Kerberos is enabled on HDFS and/or the Hive metastore.</description>
+ </property>
+
+ <property>
+ <name>lens.server.principal</name>
+ <value>lens/_HOST@APACHE.COM</value>
+ <description>lens server principal name, must be in format lens/_HOST@KDC_REALM</description>
+ </property>
+
</configuration>
diff --git a/lens-server/src/test/resources/lens-site.xml b/lens-server/src/test/resources/lens-site.xml
index 7e5f522..f5a0237 100644
--- a/lens-server/src/test/resources/lens-site.xml
+++ b/lens-server/src/test/resources/lens-site.xml
@@ -207,4 +207,16 @@
<value>3</value>
</property>
+ <property>
+ <name>lens.server.kdc.login.service.interval.minutes</name>
+ <value>360</value>
+ <description>Interval in minutes at which the Kerberos auth token (TGT) is refreshed, used when Kerberos is enabled on HDFS and/or the Hive metastore.</description>
+ </property>
+
+ <property>
+ <name>lens.server.principal</name>
+ <value>lens/_HOST@APACHE.COM</value>
+ <description>lens server principal name, must be in format lens/_HOST@KDC_REALM</description>
+ </property>
+
</configuration>
diff --git a/tools/scripts/lens-ctl b/tools/scripts/lens-ctl
index 304b4f5..073ecd8 100755
--- a/tools/scripts/lens-ctl
+++ b/tools/scripts/lens-ctl
@@ -155,7 +155,8 @@
pushd ${BASEDIR} > /dev/null
TIME=`date +%Y%m%d%H%M%s`
- nohup ${JAVA_BIN} ${JAVA_PROPERTIES} -cp ${LENSCPPATH} org.apache.lens.server.LensServer $* > "${LENS_LOG_DIR}/lensserver.out.$TIME" 2>&1 < /dev/null &
+ JAVA_OPTS="-Djavax.security.auth.useSubjectCredsOnly=false"
+ nohup ${JAVA_BIN} ${JAVA_PROPERTIES} ${JAVA_OPTS} -cp ${LENSCPPATH} org.apache.lens.server.LensServer $* > "${LENS_LOG_DIR}/lensserver.out.$TIME" 2>&1 < /dev/null &
echo $! > $LENS_PID_FILE
popd > /dev/null
@@ -353,5 +354,4 @@
else
run $args
fi
-exit 0
-
+exit 0
\ No newline at end of file