HBASE-27276 Reduce reflection overhead in Filter deserialization (#5488)
Signed-off-by: Nick Dimiduk <ndimiduk@apache.org>
Signed-off-by: Duo Zhang <zhangduo@apache.org>
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index d2e14df..598ad93 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -127,6 +127,7 @@
import org.apache.hadoop.hbase.util.DynamicClassLoader;
import org.apache.hadoop.hbase.util.ExceptionUtil;
import org.apache.hadoop.hbase.util.Methods;
+import org.apache.hadoop.hbase.util.ReflectedFunctionCache;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
@@ -306,6 +307,23 @@
return classLoaderLoaded;
}
+ private static final String PARSE_FROM = "parseFrom";
+
+ // We don't bother using the dynamic CLASS_LOADER above, because we currently can't optimize
+ // dynamically loaded classes. We can do that once we build for Java 9+; see the TODO in
+ // ReflectedFunctionCache.
+ private static final ReflectedFunctionCache<byte[], Filter> FILTERS =
+ new ReflectedFunctionCache<>(Filter.class, byte[].class, PARSE_FROM);
+ private static final ReflectedFunctionCache<byte[], ByteArrayComparable> COMPARATORS =
+ new ReflectedFunctionCache<>(ByteArrayComparable.class, byte[].class, PARSE_FROM);
+
+ private static volatile boolean ALLOW_FAST_REFLECTION_FALLTHROUGH = true;
+
+ // Visible for tests
+ public static void setAllowFastReflectionFallthrough(boolean val) {
+ ALLOW_FAST_REFLECTION_FALLTHROUGH = val;
+ }
+
/**
* Prepend the passed bytes with four bytes of magic, {@link ProtobufMagic#PB_MAGIC}, to flag what
* follows as a protobuf in hbase. Prepend these bytes to all content written to znodes, etc.
@@ -1554,13 +1572,23 @@
public static ByteArrayComparable toComparator(ComparatorProtos.Comparator proto)
throws IOException {
String type = proto.getName();
- String funcName = "parseFrom";
byte[] value = proto.getSerializedComparator().toByteArray();
+
try {
+ ByteArrayComparable result = COMPARATORS.getAndCallByName(type, value);
+ if (result != null) {
+ return result;
+ }
+
+ if (!ALLOW_FAST_REFLECTION_FALLTHROUGH) {
+ throw new IllegalStateException("Failed to deserialize comparator " + type
+ + " because fast reflection returned null and fallthrough is disabled");
+ }
+
Class<?> c = Class.forName(type, true, ClassLoaderHolder.CLASS_LOADER);
- Method parseFrom = c.getMethod(funcName, byte[].class);
+ Method parseFrom = c.getMethod(PARSE_FROM, byte[].class);
if (parseFrom == null) {
- throw new IOException("Unable to locate function: " + funcName + " in type: " + type);
+ throw new IOException("Unable to locate function: " + PARSE_FROM + " in type: " + type);
}
return (ByteArrayComparable) parseFrom.invoke(null, value);
} catch (Exception e) {
@@ -1577,12 +1605,22 @@
public static Filter toFilter(FilterProtos.Filter proto) throws IOException {
String type = proto.getName();
final byte[] value = proto.getSerializedFilter().toByteArray();
- String funcName = "parseFrom";
+
try {
+ Filter result = FILTERS.getAndCallByName(type, value);
+ if (result != null) {
+ return result;
+ }
+
+ if (!ALLOW_FAST_REFLECTION_FALLTHROUGH) {
+ throw new IllegalStateException("Failed to deserialize comparator " + type
+ + " because fast reflection returned null and fallthrough is disabled");
+ }
+
Class<?> c = Class.forName(type, true, ClassLoaderHolder.CLASS_LOADER);
- Method parseFrom = c.getMethod(funcName, byte[].class);
+ Method parseFrom = c.getMethod(PARSE_FROM, byte[].class);
if (parseFrom == null) {
- throw new IOException("Unable to locate function: " + funcName + " in type: " + type);
+ throw new IOException("Unable to locate function: " + PARSE_FROM + " in type: " + type);
}
return (Filter) parseFrom.invoke(c, value);
} catch (Exception e) {
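For context, the call path that benefits from the cached parseFrom functions is the ordinary filter round trip. A minimal usage sketch (not part of the patch; the class name FilterRoundTripExample is illustrative only):

import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterRoundTripExample {
  public static void main(String[] args) throws Exception {
    Filter original = new PrefixFilter(Bytes.toBytes("row-"));
    // Serialize, then deserialize. Built-in filters are on ProtobufUtil's classloader, so FILTERS
    // resolves and caches their parseFrom method; the second call below avoids per-call
    // Class.forName/Method.invoke reflection.
    FilterProtos.Filter proto = ProtobufUtil.toFilter(original);
    Filter restored = ProtobufUtil.toFilter(proto);
    System.out.println(restored.getClass().getSimpleName()); // PrefixFilter
  }
}

A custom filter shipped via "hbase.dynamic.jars.dir" misses the cache instead and, while ALLOW_FAST_REFLECTION_FALLTHROUGH is true, falls back to the classic reflection path through ClassLoaderHolder.CLASS_LOADER.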
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
index 69c33c8..ca1a708 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
@@ -17,6 +17,8 @@
*/
package org.apache.hadoop.hbase.client;
+import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -25,7 +27,6 @@
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Base64;
@@ -34,7 +35,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
@@ -48,6 +48,8 @@
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
+
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
@@ -226,9 +228,9 @@
ProtobufUtil.toGet(getProto2);
fail("Should not be able to load the filter class");
} catch (IOException ioe) {
- assertTrue(ioe.getCause() instanceof InvocationTargetException);
- InvocationTargetException ite = (InvocationTargetException) ioe.getCause();
- assertTrue(ite.getTargetException() instanceof DeserializationException);
+ // This test is deserializing a FilterList, and one of the sub-filters is not found.
+ // So the actual cause is buried a few levels deep.
+ assertThat(Throwables.getRootCause(ioe), instanceOf(ClassNotFoundException.class));
}
FileOutputStream fos = new FileOutputStream(jarFile);
fos.write(Base64.getDecoder().decode(MOCK_FILTER_JAR));
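Throwables.getRootCause (from the shaded hbase-thirdparty Guava) walks the getCause() chain to its end, which is why the assertion above can target ClassNotFoundException directly even though it is wrapped a few levels deep. A small illustrative sketch of that behavior (the nesting only mirrors the real failure, and the missing class name is made up):

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import org.apache.hbase.thirdparty.com.google.common.base.Throwables;

public class RootCauseExample {
  public static void main(String[] args) {
    Exception nested = new IOException(
      new InvocationTargetException(new ClassNotFoundException("org.example.MissingFilter")));
    // getRootCause follows cause links until it reaches the innermost throwable.
    System.out.println(Throwables.getRootCause(nested).getClass().getSimpleName());
    // prints: ClassNotFoundException
  }
}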
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectedFunctionCache.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectedFunctionCache.java
new file mode 100644
index 0000000..61b6086
--- /dev/null
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectedFunctionCache.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.util;
+
+import edu.umd.cs.findbugs.annotations.Nullable;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Function;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Cache of Functions for a specific static method signature, resolved through reflection. The
+ * functions can be (relatively) costly to create, but are much faster to execute than typical
+ * Method.invoke calls. The cache is built up on demand as calls are made for new classes, and
+ * the functions are cached for the lifetime of the process. If a function cannot be created
+ * (security restrictions, method not found, etc.), a fallback function which always returns null
+ * is cached instead. Callers of {@link #getAndCallByName(String, Object)} should handle null
+ * return values.
+ * <p>
+ * An instance is created for a specified baseClass (e.g. Filter), argClass (e.g. byte[]), and
+ * static methodName to call. These are used to resolve a Function which delegates to that
+ * static method, if it is found.
+ * @param <I> the input argument type for the resolved functions
+ * @param <R> the return type for the resolved functions
+ */
+@InterfaceAudience.Private
+public final class ReflectedFunctionCache<I, R> {
+
+ private static final Logger LOG = LoggerFactory.getLogger(ReflectedFunctionCache.class);
+
+ private final ConcurrentMap<String, Function<I, ? extends R>> lambdasByClass =
+ new ConcurrentHashMap<>();
+ private final Class<R> baseClass;
+ private final Class<I> argClass;
+ private final String methodName;
+ private final ClassLoader classLoader;
+
+ public ReflectedFunctionCache(Class<R> baseClass, Class<I> argClass, String staticMethodName) {
+ this.classLoader = getClass().getClassLoader();
+ this.baseClass = baseClass;
+ this.argClass = argClass;
+ this.methodName = staticMethodName;
+ }
+
+ /**
+ * Get and execute the Function for the given className, passing the argument to the function and
+ * returning the result.
+ * @param className the full name of the class to look up
+ * @param argument the argument to pass to the function, if found
+ * @return null if no function is found for className, otherwise the result of the function
+ */
+ @Nullable
+ public R getAndCallByName(String className, I argument) {
+ // TODO: if we ever make Java 9+ our lowest supported JDK version, we can
+ // handle generating these for newly loaded classes from our DynamicClassLoader using
+ // MethodHandles.privateLookupIn(). For now this is not possible, because we can't easily
+ // create a privileged lookup in a non-default ClassLoader. So while this cache loads
+ // over time, it will never load a custom filter from "hbase.dynamic.jars.dir".
+ Function<I, ? extends R> lambda =
+ ConcurrentMapUtils.computeIfAbsent(lambdasByClass, className, () -> loadFunction(className));
+
+ return lambda.apply(argument);
+ }
+
+ private Function<I, ? extends R> loadFunction(String className) {
+ long startTime = System.nanoTime();
+ try {
+ Class<?> clazz = Class.forName(className, false, classLoader);
+ if (!baseClass.isAssignableFrom(clazz)) {
+ LOG.debug("Requested class {} is not assignable to {}, skipping creation of function",
+ className, baseClass.getName());
+ return this::notFound;
+ }
+ return ReflectionUtils.getOneArgStaticMethodAsFunction(clazz, methodName, argClass,
+ (Class<? extends R>) clazz);
+ } catch (Throwable t) {
+ LOG.debug("Failed to create function for {}", className, t);
+ return this::notFound;
+ } finally {
+ LOG.debug("Populated cache for {} in {}ms", className,
+ TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime));
+ }
+ }
+
+ /**
+ * In order to use computeIfAbsent, we can't store nulls in our cache. So we store a lambda which
+ * resolves to null. The contract is that getAndCallByName returns null in this case.
+ */
+ private R notFound(I argument) {
+ return null;
+ }
+
+}
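A usage sketch of the class above, mirroring how ProtobufUtil wires it up (the example class and the missing class name are illustrative only):

import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ReflectedFunctionCache;

public class ReflectedFunctionCacheExample {
  // One cache per (base class, argument type, static method name) combination.
  private static final ReflectedFunctionCache<byte[], Filter> FILTERS =
    new ReflectedFunctionCache<>(Filter.class, byte[].class, "parseFrom");

  public static void main(String[] args) throws Exception {
    PrefixFilter prefix = new PrefixFilter(Bytes.toBytes("row-"));
    // Hit: PrefixFilter is on the classpath, so its parseFrom function is resolved and cached.
    Filter parsed = FILTERS.getAndCallByName(PrefixFilter.class.getName(), prefix.toByteArray());
    // Miss: an unknown class caches the notFound fallback and returns null; callers handle that.
    Filter missing = FILTERS.getAndCallByName("org.example.NoSuchFilter", prefix.toByteArray());
    System.out.println(parsed.getClass().getSimpleName() + " / " + missing); // PrefixFilter / null
  }
}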
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
index 2d893e5..304358e 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/ReflectionUtils.java
@@ -21,6 +21,11 @@
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
+import java.lang.invoke.CallSite;
+import java.lang.invoke.LambdaMetafactory;
+import java.lang.invoke.MethodHandle;
+import java.lang.invoke.MethodHandles;
+import java.lang.invoke.MethodType;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
@@ -29,6 +34,7 @@
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.charset.Charset;
+import java.util.function.Function;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
@@ -209,6 +215,30 @@
}
/**
+ * Creates a Function which can be called to efficiently execute a reflected static method. The
+ * Function itself may not be fast to create, but executing it thereafter should be much faster
+ * than {@link #invokeMethod(Object, String, Object...)}.
+ * @param lookupClazz the class to find the static method in
+ * @param methodName the method name
+ * @param argumentClazz the type of the argument
+ * @param returnValueClass the type of the return value
+ * @return a function which when called executes the requested static method.
+ * @throws Throwable exception types from the underlying reflection
+ */
+ public static <I, R> Function<I, R> getOneArgStaticMethodAsFunction(Class<?> lookupClazz,
+ String methodName, Class<I> argumentClazz, Class<R> returnValueClass) throws Throwable {
+ MethodHandles.Lookup lookup = MethodHandles.lookup();
+ MethodHandle methodHandle = lookup.findStatic(lookupClazz, methodName,
+ MethodType.methodType(returnValueClass, argumentClazz));
+ CallSite site =
+ LambdaMetafactory.metafactory(lookup, "apply", MethodType.methodType(Function.class),
+ methodHandle.type().generic(), methodHandle, methodHandle.type());
+
+ return (Function<I, R>) site.getTarget().invokeExact();
+ }
+
+ /**
* Get and invoke the target method from the given object with given parameters
* @param obj the object to get and invoke method from
* @param methodName the name of the method to invoke
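A minimal sketch of the new helper in isolation, using Integer.valueOf(String) as a stand-in for a parseFrom-style static factory (the example class name is illustrative only):

import java.util.function.Function;
import org.apache.hadoop.hbase.util.ReflectionUtils;

public class StaticMethodFunctionExample {
  public static void main(String[] args) throws Throwable {
    // Resolve the one-argument static method once; the returned Function delegates to it
    // directly via LambdaMetafactory, avoiding per-call Method.invoke overhead.
    Function<String, Integer> valueOf = ReflectionUtils.getOneArgStaticMethodAsFunction(
      Integer.class, "valueOf", String.class, Integer.class);
    System.out.println(valueOf.apply("42")); // prints 42
  }
}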
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
index da11879..2bfce99 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/ClassLoaderTestHelper.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hbase.util;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -202,4 +203,20 @@
public static String localDirPath(Configuration conf) {
return conf.get(ClassLoaderBase.LOCAL_DIR_KEY) + File.separator + "jars" + File.separator;
}
+
+ public static void deleteClass(String className, String testDir, Configuration conf)
+ throws Exception {
+ String jarFileName = className + ".jar";
+ File file = new File(testDir, jarFileName);
+ file.delete();
+ assertFalse("Should be deleted: " + file.getPath(), file.exists());
+
+ file = new File(conf.get("hbase.dynamic.jars.dir"), jarFileName);
+ file.delete();
+ assertFalse("Should be deleted: " + file.getPath(), file.exists());
+
+ file = new File(ClassLoaderTestHelper.localDirPath(conf), jarFileName);
+ file.delete();
+ assertFalse("Should be deleted: " + file.getPath(), file.exists());
+ }
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java
index 74fc546..b99538e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java
@@ -19,19 +19,35 @@
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import java.io.IOException;
import java.math.BigDecimal;
+import java.nio.charset.Charset;
+import java.util.Collections;
import java.util.regex.Pattern;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.text.StringSubstitutor;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.testclassification.FilterTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassLoaderTestHelper;
+import org.junit.AfterClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos;
+@RunWith(Parameterized.class)
@Category({ FilterTests.class, SmallTests.class })
public class TestComparatorSerialization {
@@ -39,6 +55,20 @@
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestComparatorSerialization.class);
+ @Parameterized.Parameter(0)
+ public boolean allowFastReflectionFallthrough;
+
+ @Parameterized.Parameters(name = "{index}: allowFastReflectionFallthrough={0}")
+ public static Iterable<Object[]> data() {
+ return HBaseCommonTestingUtil.BOOLEAN_PARAMETERIZED;
+ }
+
+ @AfterClass
+ public static void afterClass() throws Exception {
+ // set back to true so that it doesn't affect any other tests
+ ProtobufUtil.setAllowFastReflectionFallthrough(true);
+ }
+
@Test
public void testBinaryComparator() throws Exception {
BinaryComparator binaryComparator = new BinaryComparator(Bytes.toBytes("binaryComparator"));
@@ -99,4 +129,55 @@
ProtobufUtil.toComparator(ProtobufUtil.toComparator(bigDecimalComparator))));
}
+ /**
+ * Test that we can load and deserialize custom comparators. Good to have generally, but also
+ * proves that this still works after HBASE-27276 despite not going through our fast function
+ * caches.
+ */
+ @Test
+ public void testCustomComparator() throws Exception {
+ ByteArrayComparable baseComparator = new BinaryComparator(Bytes.toBytes("foo"));
+ ComparatorProtos.Comparator proto = ProtobufUtil.toComparator(baseComparator);
+ String suffix = "" + System.currentTimeMillis() + allowFastReflectionFallthrough;
+ String className = "CustomLoadedComparator" + suffix;
+ proto = proto.toBuilder().setName(className).build();
+
+ Configuration conf = HBaseConfiguration.create();
+ HBaseTestingUtil testUtil = new HBaseTestingUtil();
+ String dataTestDir = testUtil.getDataTestDir().toString();
+
+ // First make sure the test bed is clean; delete any pre-existing class.
+ // The toComparator call below is expected to fail because the comparator is not loadable yet.
+ ClassLoaderTestHelper.deleteClass(className, dataTestDir, conf);
+ try {
+ ProtobufUtil.toComparator(proto);
+ fail("expected to fail");
+ } catch (IOException e) {
+ // do nothing, this is expected
+ }
+
+ // Write a jar to be loaded into the classloader
+ String code = StringSubstitutor.replace(
+ IOUtils.toString(getClass().getResourceAsStream("/CustomLoadedComparator.java.template"),
+ Charset.defaultCharset()),
+ Collections.singletonMap("suffix", suffix));
+ ClassLoaderTestHelper.buildJar(dataTestDir, className, code,
+ ClassLoaderTestHelper.localDirPath(conf));
+
+ // Disallow fallthrough at first. We expect the call below to fail because the custom comparator
+ // was not available at initialization, so it is not in the cache.
+ ProtobufUtil.setAllowFastReflectionFallthrough(false);
+ try {
+ ProtobufUtil.toComparator(proto);
+ fail("expected to fail");
+ } catch (IOException e) {
+ // do nothing, this is expected
+ }
+
+ // Now the deserialization should pass with fallthrough enabled. This proves that custom
+ // comparators still work even though they are not supported by the cache.
+ ProtobufUtil.setAllowFastReflectionFallthrough(true);
+ ProtobufUtil.toComparator(proto);
+ }
+
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
index d580528..e3a13e5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java
@@ -18,24 +18,40 @@
package org.apache.hadoop.hbase.filter;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import java.nio.charset.Charset;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.text.StringSubstitutor;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompareOperator;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
import org.apache.hadoop.hbase.testclassification.FilterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ClassLoaderTestHelper;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
+import org.junit.AfterClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
+@RunWith(Parameterized.class)
@Category({ FilterTests.class, MediumTests.class })
public class TestFilterSerialization {
@@ -43,6 +59,20 @@
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestFilterSerialization.class);
+ @Parameterized.Parameter(0)
+ public boolean allowFastReflectionFallthrough;
+
+ @Parameterized.Parameters(name = "{index}: allowFastReflectionFallthrough={0}")
+ public static Iterable<Object[]> data() {
+ return HBaseCommonTestingUtil.BOOLEAN_PARAMETERIZED;
+ }
+
+ @AfterClass
+ public static void afterClass() throws Exception {
+ // set back to true so that it doesn't affect any other tests
+ ProtobufUtil.setAllowFastReflectionFallthrough(true);
+ }
+
@Test
public void testColumnCountGetFilter() throws Exception {
ColumnCountGetFilter columnCountGetFilter = new ColumnCountGetFilter(1);
@@ -322,4 +352,55 @@
assertTrue(columnValueFilter
.areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnValueFilter))));
}
+
+ /**
+ * Test that we can load and deserialize custom filters. Good to have generally, but also proves
+ * that this still works after HBASE-27276 despite not going through our fast function caches.
+ */
+ @Test
+ public void testCustomFilter() throws Exception {
+ Filter baseFilter = new PrefixFilter(Bytes.toBytes("foo"));
+ FilterProtos.Filter filterProto = ProtobufUtil.toFilter(baseFilter);
+ String suffix = "" + System.currentTimeMillis() + allowFastReflectionFallthrough;
+ String className = "CustomLoadedFilter" + suffix;
+ filterProto = filterProto.toBuilder().setName(className).build();
+
+ Configuration conf = HBaseConfiguration.create();
+ HBaseTestingUtil testUtil = new HBaseTestingUtil();
+ String dataTestDir = testUtil.getDataTestDir().toString();
+
+ // First make sure the test bed is clean; delete any pre-existing class.
+ // The toFilter call below is expected to fail because the filter is not loadable yet.
+ ClassLoaderTestHelper.deleteClass(className, dataTestDir, conf);
+ try {
+ ProtobufUtil.toFilter(filterProto);
+ fail("expected to fail");
+ } catch (DoNotRetryIOException e) {
+ // do nothing, this is expected
+ }
+
+ // Write a jar to be loaded into the classloader
+ String code = StringSubstitutor
+ .replace(IOUtils.toString(getClass().getResourceAsStream("/CustomLoadedFilter.java.template"),
+ Charset.defaultCharset()), Collections.singletonMap("suffix", suffix));
+ ClassLoaderTestHelper.buildJar(dataTestDir, className, code,
+ ClassLoaderTestHelper.localDirPath(conf));
+
+ // Disallow fallthrough at first. We expect the call below to fail because the custom filter
+ // was not available at initialization, so it is not in the cache.
+ ProtobufUtil.setAllowFastReflectionFallthrough(false);
+ try {
+ ProtobufUtil.toFilter(filterProto);
+ fail("expected to fail");
+ } catch (DoNotRetryIOException e) {
+ // do nothing, this is expected
+ }
+
+ // Now the deserialization should pass with fallthrough enabled. This proves that custom
+ // filters still work even though they are not supported by the cache.
+ ProtobufUtil.setAllowFastReflectionFallthrough(true);
+ ProtobufUtil.toFilter(filterProto);
+ }
+
}
diff --git a/hbase-server/src/test/resources/CustomLoadedComparator.java.template b/hbase-server/src/test/resources/CustomLoadedComparator.java.template
new file mode 100644
index 0000000..38572f6
--- /dev/null
+++ b/hbase-server/src/test/resources/CustomLoadedComparator.java.template
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.ByteArrayComparable;
+
+/**
+ * Just wraps around a delegate; the only goal here is to create a comparator which doesn't exist
+ * in org.apache.hadoop.hbase.filter, so it doesn't get automatically loaded at startup. We can
+ * load it through the DynamicClassLoader to prove that (de)serialization works.
+ */
+public class CustomLoadedComparator${suffix} extends ByteArrayComparable {
+
+ private final BinaryComparator delegate;
+
+ public CustomLoadedComparator${suffix}(BinaryComparator delegate) {
+ super(delegate.getValue());
+ this.delegate = delegate;
+ }
+
+ @Override
+ public byte[] toByteArray() {
+ return delegate.toByteArray();
+ }
+
+ public static CustomLoadedComparator${suffix} parseFrom(final byte[] pbBytes) throws
+ DeserializationException {
+ return new CustomLoadedComparator${suffix}(BinaryComparator.parseFrom(pbBytes));
+ }
+
+ @Override public int compareTo(byte[] value, int offset, int length) {
+ return delegate.compareTo(value, offset, length);
+ }
+
+ @Override public byte[] getValue() {
+ return delegate.getValue();
+ }
+
+ @Override public int compareTo(byte[] value) {
+ return delegate.compareTo(value);
+ }
+
+ @Override public int hashCode() {
+ return delegate.hashCode();
+ }
+
+ @Override public boolean equals(Object obj) {
+ return super.equals(obj);
+ }
+}
diff --git a/hbase-server/src/test/resources/CustomLoadedFilter.java.template b/hbase-server/src/test/resources/CustomLoadedFilter.java.template
new file mode 100644
index 0000000..84ef99f
--- /dev/null
+++ b/hbase-server/src/test/resources/CustomLoadedFilter.java.template
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.util.Objects;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.filter.FilterBase;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
+import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+
+/**
+ * Just wraps around a delegate; the only goal here is to create a filter which doesn't exist
+ * in org.apache.hadoop.hbase.filter, so it doesn't get automatically loaded at startup. We can
+ * load it through the DynamicClassLoader to prove that (de)serialization works.
+ */
+public class CustomLoadedFilter${suffix} extends FilterBase {
+
+ private final PrefixFilter delegate;
+
+ public CustomLoadedFilter${suffix}(PrefixFilter delegate) {
+ this.delegate = delegate;
+ }
+
+ @Override
+ public byte[] toByteArray() {
+ FilterProtos.PrefixFilter.Builder builder = FilterProtos.PrefixFilter.newBuilder();
+ if (this.delegate.getPrefix() != null) {
+ builder.setPrefix(UnsafeByteOperations.unsafeWrap(this.delegate.getPrefix()));
+ }
+ return builder.build().toByteArray();
+ }
+
+ public static CustomLoadedFilter${suffix} parseFrom(final byte[] pbBytes) throws
+ DeserializationException {
+ FilterProtos.PrefixFilter proto;
+ try {
+ proto = FilterProtos.PrefixFilter.parseFrom(pbBytes);
+ } catch (InvalidProtocolBufferException e) {
+ throw new DeserializationException(e);
+ }
+ return new CustomLoadedFilter${suffix}(new PrefixFilter(proto.hasPrefix() ? proto.getPrefix().toByteArray() : null));
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ CustomLoadedFilter${suffix} that = (CustomLoadedFilter${suffix}) o;
+ return Objects.equals(delegate, that.delegate);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(delegate);
+ }
+}