package org.apache.lucene.search;

/**
 * Copyright 2004 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.index.IndexReader;

import java.util.BitSet;
import java.util.WeakHashMap;
import java.util.Map;
import java.io.IOException;

/**
 * Wraps another filter's result and caches it. The caching
 * behavior is like {@link QueryFilter}. The purpose is to allow
 * filters to simply filter, and then wrap with this class to add
 * caching, keeping the two concerns decoupled yet composable.
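 *
 * <p>A minimal usage sketch (illustrative only; <code>dateQuery</code>,
 * <code>query</code> and <code>searcher</code> are assumed to already
 * exist, and any {@link Filter} could be wrapped the same way):
 *
 * <pre>
 *   Filter dateFilter = new QueryFilter(dateQuery);
 *   Filter cachedDateFilter = new CachingWrapperFilter(dateFilter);
 *   Hits hits = searcher.search(query, cachedDateFilter);
 * </pre>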
 */
public class CachingWrapperFilter extends Filter {
  private Filter filter;

  /**
   * @todo What about serialization in RemoteSearchable? Caching won't work.
   *       Should transient be removed?
   */
  private transient Map cache;

  /**
   * @param filter Filter to cache results of
   */
  public CachingWrapperFilter(Filter filter) {
    this.filter = filter;
  }
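
  /**
   * Returns the cached BitSet for the given reader, computing it from the
   * wrapped filter and caching it the first time that reader is seen.
   */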
  public BitSet bits(IndexReader reader) throws IOException {
    if (cache == null) {
      // a WeakHashMap keyed by IndexReader lets an entry be reclaimed
      // once its reader is no longer referenced elsewhere
      cache = new WeakHashMap();
    }

    synchronized (cache) {  // check cache
      BitSet cached = (BitSet) cache.get(reader);
      if (cached != null) {
        return cached;
      }
    }

    // compute the bits outside the lock; concurrent callers may duplicate
    // the work, but each gets a consistent result
    final BitSet bits = filter.bits(reader);

    synchronized (cache) {  // update cache
      cache.put(reader, bits);
    }

    return bits;
  }

  public String toString() {
    return "CachingWrapperFilter("+filter+")";
  }
}