blob: 0975c395fac202703626c9031be41a741a8d625d [file] [log] [blame]
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ambari.logsearch.config.zookeeper;
import com.google.gson.Gson;

import org.apache.ambari.logsearch.config.api.LogLevelFilterManager;
import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter;
import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilterMap;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.cache.ChildData;
import org.apache.curator.framework.recipes.cache.TreeCache;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.ACL;

import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
/**
 * Managing log level filters in ZooKeeper (store them in ZNodes per cluster). Operations: create / get / update log level filters.
 * <p>
 * Filter znodes live at {@code /<cluster>/loglevelfilter/<logId>} and hold the filter serialized as JSON
 * (always UTF-8 encoded, so the stored bytes do not depend on the JVM's platform default charset).
 * Reads are served from the local {@link TreeCache}; writes go directly through the {@link CuratorFramework} client.
 */
public class LogLevelFilterManagerZK implements LogLevelFilterManager {
  private static final Logger logger = LogManager.getLogger(LogLevelFilterManagerZK.class);

  private final CuratorFramework client;
  private final TreeCache serverCache;
  private final Gson gson;
  private final List<ACL> aclList;

  /**
   * Creates its own ZK client from the given properties, then starts both the client and the cache.
   *
   * @param properties ZK connection / ACL configuration, interpreted by {@link LogSearchConfigZKHelper}
   * @throws Exception if the ZK client or the tree cache cannot be started
   */
  public LogLevelFilterManagerZK(Map<String, String> properties) throws Exception {
    this.client = LogSearchConfigZKHelper.createZKClient(properties);
    this.client.start();
    this.serverCache = new TreeCache(client, "/");
    this.aclList = LogSearchConfigZKHelper.getAcls(properties);
    this.gson = LogSearchConfigZKHelper.createGson();
    this.serverCache.start();
  }

  /**
   * Reuses an already started ZK client and starts a new tree cache on top of it.
   *
   * @param properties ZK ACL configuration
   * @param client already started Curator client; not started (or stopped) by this class
   * @throws Exception if the tree cache cannot be started
   */
  public LogLevelFilterManagerZK(Map<String, String> properties, CuratorFramework client) throws Exception {
    this.client = client;
    this.serverCache = new TreeCache(client, "/");
    this.aclList = LogSearchConfigZKHelper.getAcls(properties);
    this.gson = LogSearchConfigZKHelper.createGson();
    this.serverCache.start();
  }

  /**
   * Takes fully constructed collaborators; nothing is started here — the caller owns the lifecycle
   * of both the client and the cache.
   */
  public LogLevelFilterManagerZK(CuratorFramework client, TreeCache serverCache, List<ACL> aclList, Gson gson) {
    this.client = client;
    this.serverCache = serverCache;
    this.aclList = aclList;
    this.gson = gson;
  }

  /** Builds the znode path of a single log level filter: {@code /<cluster>/loglevelfilter/<logId>}. */
  private static String filterNodePath(String clusterName, String logId) {
    return String.format("/%s/loglevelfilter/%s", clusterName, logId);
  }

  /**
   * Creates the filter znode for the given log if it does not exist yet; an already existing node
   * (e.g. seeded concurrently by another Log Feeder) is treated as a benign race and only logged at debug.
   *
   * @throws Exception on any ZK failure other than {@link KeeperException.NodeExistsException}
   */
  @Override
  public void createLogLevelFilter(String clusterName, String logId, LogLevelFilter filter) throws Exception {
    String nodePath = filterNodePath(clusterName, logId);
    String logLevelFilterJson = gson.toJson(filter);
    try {
      // Explicit UTF-8: a bare getBytes() would use the platform default charset.
      client.create().creatingParentContainersIfNeeded().withACL(aclList)
          .forPath(nodePath, logLevelFilterJson.getBytes(StandardCharsets.UTF_8));
      logger.info("Uploaded log level filter for the log {} for cluster {}", logId, clusterName);
    } catch (KeeperException.NodeExistsException e) {
      logger.debug("Did not upload log level filters for log {} as it was already uploaded by another Log Feeder", logId);
    }
  }

  /**
   * Writes every filter in the map whose serialized form differs from the value currently cached,
   * skipping unchanged filters to avoid needless ZK writes and watch churn.
   *
   * @throws Exception on any ZK write failure (e.g. the node does not exist on the server)
   */
  @Override
  public void setLogLevelFilters(String clusterName, LogLevelFilterMap filters) throws Exception {
    for (Map.Entry<String, LogLevelFilter> e : filters.getFilter().entrySet()) {
      String logId = e.getKey();
      String nodePath = filterNodePath(clusterName, logId);
      String logLevelFilterJson = gson.toJson(e.getValue());
      // getCurrentData() returns null when the node is not (yet) in the cache — guard against NPE
      // and fall through to the write, letting ZK report a missing node instead of crashing here.
      ChildData cachedData = serverCache.getCurrentData(nodePath);
      String currentLogLevelFilterJson = cachedData == null
          ? null
          : new String(cachedData.getData(), StandardCharsets.UTF_8);
      if (!logLevelFilterJson.equals(currentLogLevelFilterJson)) {
        client.setData().forPath(nodePath, logLevelFilterJson.getBytes(StandardCharsets.UTF_8));
        logger.info("Set log level filter for the log {} for cluster {}", logId, clusterName);
      }
    }
  }

  /**
   * Returns all filters of a cluster from the local cache, keyed and sorted by log id.
   * A cluster with no filter nodes yields a map with an empty (never null) filter map.
   */
  @Override
  public LogLevelFilterMap getLogLevelFilters(String clusterName) {
    String parentPath = String.format("/%s/loglevelfilter", clusterName);
    // TreeMap keeps the filters deterministically ordered by log id.
    TreeMap<String, LogLevelFilter> filters = new TreeMap<>();
    Map<String, ChildData> logLevelFilterNodes = serverCache.getCurrentChildren(parentPath);
    if (logLevelFilterNodes != null && !logLevelFilterNodes.isEmpty()) {
      for (Map.Entry<String, ChildData> e : logLevelFilterNodes.entrySet()) {
        String json = new String(e.getValue().getData(), StandardCharsets.UTF_8);
        filters.put(e.getKey(), gson.fromJson(json, LogLevelFilter.class));
      }
    }
    LogLevelFilterMap logLevelFilters = new LogLevelFilterMap();
    logLevelFilters.setFilter(filters);
    return logLevelFilters;
  }

  public CuratorFramework getClient() {
    return client;
  }

  public TreeCache getServerCache() {
    return serverCache;
  }

  public Gson getGson() {
    return gson;
  }
}