merged with trunk
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/LUCENE2793@1144189 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 815cc0a..7d76af0 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -462,6 +462,15 @@
IndexSearcher. SortFields can have SortField.REWRITEABLE type which
requires they are rewritten before they are used. (Chris Male)
+* LUCENE-3203: FSDirectory can now limit the max allowed write rate
+  (MB/sec) of all running merges, to reduce the impact that ongoing
+  merging has on searching, NRT reopen time, etc. (Mike McCandless)
+
+* LUCENE-2793: Directory#createOutput & Directory#openInput now accept an
+  IOContext instead of a buffer size to allow low-level optimizations for
+  different use cases like merging, flushing and reading.
+ (Simon Willnauer, Mike McCandless, Varun Thacker)
+
Optimizations
* LUCENE-2588: Don't store unnecessary suffixes when writing the terms
diff --git a/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingCodec.java b/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingCodec.java
index 996293d..f39ce3d 100644
--- a/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingCodec.java
+++ b/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingCodec.java
@@ -93,7 +93,7 @@
@Override
public FieldsProducer fieldsProducer(SegmentReadState state)
throws IOException {
- PostingsReaderBase docsReader = new StandardPostingsReader(state.dir, state.segmentInfo, state.readBufferSize, state.codecId);
+ PostingsReaderBase docsReader = new StandardPostingsReader(state.dir, state.segmentInfo, state.context, state.codecId);
TermsIndexReaderBase indexReader;
boolean success = false;
@@ -103,7 +103,7 @@
state.segmentInfo.name,
state.termsIndexDivisor,
BytesRef.getUTF8SortedAsUnicodeComparator(),
- state.codecId);
+ state.codecId, state.context);
success = true;
} finally {
if (!success) {
@@ -115,7 +115,7 @@
FieldsProducer ret = new AppendingTermsDictReader(indexReader,
state.dir, state.fieldInfos, state.segmentInfo.name,
docsReader,
- state.readBufferSize,
+ state.context,
StandardCodec.TERMS_CACHE_SIZE,
state.codecId);
success = true;
@@ -153,6 +153,6 @@
@Override
public PerDocValues docsProducer(SegmentReadState state) throws IOException {
- return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator());
+ return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator(), state.context);
}
}
diff --git a/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingSegmentInfosReader.java b/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingSegmentInfosReader.java
index bd4b26c..aac8be1 100644
--- a/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingSegmentInfosReader.java
+++ b/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingSegmentInfosReader.java
@@ -22,6 +22,7 @@
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.codecs.DefaultSegmentInfosReader;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
public class AppendingSegmentInfosReader extends DefaultSegmentInfosReader {
@@ -33,9 +34,9 @@
}
@Override
- public IndexInput openInput(Directory dir, String segmentsFileName)
+ public IndexInput openInput(Directory dir, String segmentsFileName, IOContext context)
throws IOException {
- return dir.openInput(segmentsFileName);
+ return dir.openInput(segmentsFileName, context);
}
}
diff --git a/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingSegmentInfosWriter.java b/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingSegmentInfosWriter.java
index 45d53e0..2850037 100644
--- a/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingSegmentInfosWriter.java
+++ b/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingSegmentInfosWriter.java
@@ -21,14 +21,15 @@
import org.apache.lucene.index.codecs.DefaultSegmentInfosWriter;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
public class AppendingSegmentInfosWriter extends DefaultSegmentInfosWriter {
@Override
- protected IndexOutput createOutput(Directory dir, String segmentsFileName)
+ protected IndexOutput createOutput(Directory dir, String segmentsFileName, IOContext context)
throws IOException {
- return dir.createOutput(segmentsFileName);
+ return dir.createOutput(segmentsFileName, context);
}
@Override
diff --git a/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingTermsDictReader.java b/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingTermsDictReader.java
index b12c4f8..7f885ee 100644
--- a/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingTermsDictReader.java
+++ b/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingTermsDictReader.java
@@ -25,6 +25,7 @@
import org.apache.lucene.index.codecs.BlockTermsWriter;
import org.apache.lucene.index.codecs.TermsIndexReaderBase;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.CodecUtil;
@@ -32,9 +33,9 @@
public AppendingTermsDictReader(TermsIndexReaderBase indexReader,
Directory dir, FieldInfos fieldInfos, String segment,
- PostingsReaderBase postingsReader, int readBufferSize,
+ PostingsReaderBase postingsReader, IOContext context,
int termsCacheSize, int codecId) throws IOException {
- super(indexReader, dir, fieldInfos, segment, postingsReader, readBufferSize,
+ super(indexReader, dir, fieldInfos, segment, postingsReader, context,
termsCacheSize, codecId);
}
diff --git a/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingTermsIndexReader.java b/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingTermsIndexReader.java
index 0a44970..205dc15 100644
--- a/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingTermsIndexReader.java
+++ b/lucene/contrib/misc/src/java/org/apache/lucene/index/codecs/appending/AppendingTermsIndexReader.java
@@ -23,6 +23,7 @@
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.codecs.FixedGapTermsIndexReader;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CodecUtil;
@@ -30,9 +31,9 @@
public class AppendingTermsIndexReader extends FixedGapTermsIndexReader {
public AppendingTermsIndexReader(Directory dir, FieldInfos fieldInfos,
- String segment, int indexDivisor, Comparator<BytesRef> termComp, int codecId)
+ String segment, int indexDivisor, Comparator<BytesRef> termComp, int codecId, IOContext context)
throws IOException {
- super(dir, fieldInfos, segment, indexDivisor, termComp, codecId);
+ super(dir, fieldInfos, segment, indexDivisor, termComp, codecId, context);
}
@Override
diff --git a/lucene/contrib/misc/src/java/org/apache/lucene/store/DirectIOLinuxDirectory.java b/lucene/contrib/misc/src/java/org/apache/lucene/store/DirectIOLinuxDirectory.java
index 93ace13..7fc0456 100644
--- a/lucene/contrib/misc/src/java/org/apache/lucene/store/DirectIOLinuxDirectory.java
+++ b/lucene/contrib/misc/src/java/org/apache/lucene/store/DirectIOLinuxDirectory.java
@@ -69,16 +69,22 @@
}
@Override
- public IndexInput openInput(String name, int bufferSize) throws IOException {
+ public IndexInput openInput(String name, IOContext context) throws IOException {
ensureOpen();
- return new DirectIOLinuxIndexInput(new File(getDirectory(), name), forcedBufferSize == 0 ? bufferSize : forcedBufferSize);
+ return new DirectIOLinuxIndexInput(new File(getDirectory(), name),
+ bufferSize(context));
}
@Override
- public IndexOutput createOutput(String name) throws IOException {
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
ensureOpen();
ensureCanWrite(name);
- return new DirectIOLinuxIndexOutput(new File(getDirectory(), name), forcedBufferSize == 0 ? BufferedIndexOutput.BUFFER_SIZE : forcedBufferSize);
+ return new DirectIOLinuxIndexOutput(new File(getDirectory(), name), bufferSize(context));
+ }
+
+ private int bufferSize(IOContext context) {
+ return forcedBufferSize != 0 ? forcedBufferSize : BufferedIndexInput
+ .bufferSize(context);
}
private final static class DirectIOLinuxIndexOutput extends IndexOutput {
@@ -238,6 +244,7 @@
private int bufferPos;
public DirectIOLinuxIndexInput(File path, int bufferSize) throws IOException {
+ // TODO make use of IOContext
FileDescriptor fd = NativePosixUtil.open_direct(path.toString(), true);
fis = new FileInputStream(fd);
channel = fis.getChannel();
diff --git a/lucene/contrib/misc/src/java/org/apache/lucene/store/NRTCachingDirectory.java b/lucene/contrib/misc/src/java/org/apache/lucene/store/NRTCachingDirectory.java
index 738397b..98356b4 100644
--- a/lucene/contrib/misc/src/java/org/apache/lucene/store/NRTCachingDirectory.java
+++ b/lucene/contrib/misc/src/java/org/apache/lucene/store/NRTCachingDirectory.java
@@ -21,13 +21,8 @@
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.IndexFileNames;
-import org.apache.lucene.index.IndexWriter; // javadocs
-import org.apache.lucene.index.MergePolicy;
-import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.store.RAMDirectory; // javadocs
import org.apache.lucene.util.IOUtils;
@@ -38,11 +33,7 @@
/**
* Wraps a {@link RAMDirectory}
* around any provided delegate directory, to
- * be used during NRT search. Make sure you pull the merge
- * scheduler using {@link #getMergeScheduler} and pass that to your
- * {@link IndexWriter}; this class uses that to keep track of which
- * merges are being done by which threads, to decide when to
- * cache each written file.
+ * be used during NRT search.
*
* <p>This class is likely only useful in a near-real-time
* context, where indexing rate is lowish but reopen
@@ -54,20 +45,12 @@
* <p>This is safe to use: when your app calls {IndexWriter#commit},
* all cached files will be flushed from the cached and sync'd.</p>
*
- * <p><b>NOTE</b>: this class is somewhat sneaky in its
- * approach for spying on merges to determine the size of a
- * merge: it records which threads are running which merges
- * by watching ConcurrentMergeScheduler's doMerge method.
- * While this works correctly, likely future versions of
- * this class will take a more general approach.
- *
* <p>Here's a simple example usage:
*
* <pre>
* Directory fsDir = FSDirectory.open(new File("/path/to/index"));
* NRTCachingDirectory cachedFSDir = new NRTCachingDirectory(fsDir, 5.0, 60.0);
* IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_32, analyzer);
- * conf.setMergeScheduler(cachedFSDir.getMergeScheduler());
* IndexWriter writer = new IndexWriter(cachedFSDir, conf);
* </pre>
*
@@ -193,17 +176,17 @@
}
@Override
- public IndexOutput createOutput(String name) throws IOException {
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
if (VERBOSE) {
System.out.println("nrtdir.createOutput name=" + name);
}
- if (doCacheWrite(name)) {
+ if (doCacheWrite(name, context)) {
if (VERBOSE) {
System.out.println(" to cache");
}
- return cache.createOutput(name);
+ return cache.createOutput(name, context);
} else {
- return delegate.createOutput(name);
+ return delegate.createOutput(name, context);
}
}
@@ -219,7 +202,7 @@
}
@Override
- public synchronized IndexInput openInput(String name) throws IOException {
+ public synchronized IndexInput openInput(String name, IOContext context) throws IOException {
if (VERBOSE) {
System.out.println("nrtdir.openInput name=" + name);
}
@@ -227,39 +210,31 @@
if (VERBOSE) {
System.out.println(" from cache");
}
- return cache.openInput(name);
+ return cache.openInput(name, context);
} else {
- return delegate.openInput(name);
+ return delegate.openInput(name, context);
}
}
@Override
- public synchronized CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
+ public synchronized CompoundFileDirectory openCompoundInput(String name, IOContext context) throws IOException {
if (cache.fileExists(name)) {
- return cache.openCompoundInput(name, bufferSize);
+ return cache.openCompoundInput(name, context);
} else {
- return delegate.openCompoundInput(name, bufferSize);
+ return delegate.openCompoundInput(name, context);
}
}
@Override
- public synchronized CompoundFileDirectory createCompoundOutput(String name)
+ public synchronized CompoundFileDirectory createCompoundOutput(String name, IOContext context)
throws IOException {
if (cache.fileExists(name)) {
throw new IOException("File " + name + "already exists");
} else {
- return delegate.createCompoundOutput(name);
+ return delegate.createCompoundOutput(name, context);
}
}
- @Override
- public synchronized IndexInput openInput(String name, int bufferSize) throws IOException {
- if (cache.fileExists(name)) {
- return cache.openInput(name, bufferSize);
- } else {
- return delegate.openInput(name, bufferSize);
- }
- }
/** Close this directory, which flushes any cached files
* to the delegate and then closes the delegate. */
@@ -272,36 +247,21 @@
delegate.close();
}
- private final ConcurrentHashMap<Thread,MergePolicy.OneMerge> merges = new ConcurrentHashMap<Thread,MergePolicy.OneMerge>();
-
- public MergeScheduler getMergeScheduler() {
- return new ConcurrentMergeScheduler() {
- @Override
- protected void doMerge(MergePolicy.OneMerge merge) throws IOException {
- try {
- merges.put(Thread.currentThread(), merge);
- super.doMerge(merge);
- } finally {
- merges.remove(Thread.currentThread());
- }
- }
- };
- }
-
/** Subclass can override this to customize logic; return
* true if this file should be written to the RAMDirectory. */
- protected boolean doCacheWrite(String name) {
- final MergePolicy.OneMerge merge = merges.get(Thread.currentThread());
+ protected boolean doCacheWrite(String name, IOContext context) {
+ final MergeInfo merge = context.mergeInfo;
//System.out.println(Thread.currentThread().getName() + ": CACHE check merge=" + merge + " size=" + (merge==null ? 0 : merge.estimatedMergeBytes));
return !name.equals(IndexFileNames.SEGMENTS_GEN) && (merge == null || merge.estimatedMergeBytes <= maxMergeSizeBytes) && cache.sizeInBytes() <= maxCachedBytes;
}
private void unCache(String fileName) throws IOException {
final IndexOutput out;
+ IOContext context = IOContext.DEFAULT;
synchronized(this) {
if (!delegate.fileExists(fileName)) {
assert cache.fileExists(fileName);
- out = delegate.createOutput(fileName);
+ out = delegate.createOutput(fileName, context);
} else {
out = null;
}
@@ -310,7 +270,7 @@
if (out != null) {
IndexInput in = null;
try {
- in = cache.openInput(fileName);
+ in = cache.openInput(fileName, context);
in.copyBytes(out, in.length());
} finally {
IOUtils.closeSafely(false, in, out);
diff --git a/lucene/contrib/misc/src/java/org/apache/lucene/store/WindowsDirectory.java b/lucene/contrib/misc/src/java/org/apache/lucene/store/WindowsDirectory.java
index a4c6301..29d8998 100644
--- a/lucene/contrib/misc/src/java/org/apache/lucene/store/WindowsDirectory.java
+++ b/lucene/contrib/misc/src/java/org/apache/lucene/store/WindowsDirectory.java
@@ -19,6 +19,7 @@
import java.io.File;
import java.io.IOException;
+
import org.apache.lucene.store.Directory; // javadoc
import org.apache.lucene.store.NativeFSLockFactory; // javadoc
@@ -67,9 +68,9 @@
}
@Override
- public IndexInput openInput(String name, int bufferSize) throws IOException {
+ public IndexInput openInput(String name, IOContext context) throws IOException {
ensureOpen();
- return new WindowsIndexInput(new File(getDirectory(), name), Math.max(bufferSize, DEFAULT_BUFFERSIZE));
+ return new WindowsIndexInput(new File(getDirectory(), name), Math.max(BufferedIndexInput.bufferSize(context), DEFAULT_BUFFERSIZE));
}
protected static class WindowsIndexInput extends BufferedIndexInput {
diff --git a/lucene/contrib/misc/src/test/org/apache/lucene/index/TestNRTManager.java b/lucene/contrib/misc/src/test/org/apache/lucene/index/TestNRTManager.java
index cb6acda..7a7ece4 100644
--- a/lucene/contrib/misc/src/test/org/apache/lucene/index/TestNRTManager.java
+++ b/lucene/contrib/misc/src/test/org/apache/lucene/index/TestNRTManager.java
@@ -159,9 +159,7 @@
System.out.println("TEST: wrap NRTCachingDir");
}
- NRTCachingDirectory nrtDir = new NRTCachingDirectory(dir, 5.0, 60.0);
- conf.setMergeScheduler(nrtDir.getMergeScheduler());
- dir = nrtDir;
+ dir = new NRTCachingDirectory(dir, 5.0, 60.0);
}
final IndexWriter writer = new IndexWriter(dir, conf);
diff --git a/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java b/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java
index c7b55f1..53ee338 100644
--- a/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java
+++ b/lucene/contrib/misc/src/test/org/apache/lucene/index/codecs/appending/TestAppendingCodec.java
@@ -40,6 +40,7 @@
import org.apache.lucene.index.codecs.SegmentInfosReader;
import org.apache.lucene.index.codecs.SegmentInfosWriter;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
@@ -124,8 +125,8 @@
}
@Override
- public IndexOutput createOutput(String name) throws IOException {
- return new AppendingIndexOutputWrapper(super.createOutput(name));
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
+ return new AppendingIndexOutputWrapper(super.createOutput(name, context));
}
}
diff --git a/lucene/contrib/misc/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java b/lucene/contrib/misc/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java
index 9001d13..ed27e1c 100644
--- a/lucene/contrib/misc/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java
+++ b/lucene/contrib/misc/src/test/org/apache/lucene/store/TestNRTCachingDirectory.java
@@ -44,7 +44,6 @@
Directory dir = newDirectory();
NRTCachingDirectory cachedDir = new NRTCachingDirectory(dir, 2.0, 25.0);
IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
- conf.setMergeScheduler(cachedDir.getMergeScheduler());
RandomIndexWriter w = new RandomIndexWriter(random, cachedDir, conf);
w.w.setInfoStream(VERBOSE ? System.out : null);
final LineFileDocs docs = new LineFileDocs(random);
@@ -108,13 +107,12 @@
Directory fsDir = FSDirectory.open(new File("/path/to/index"));
NRTCachingDirectory cachedFSDir = new NRTCachingDirectory(fsDir, 2.0, 25.0);
IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_32, analyzer);
- conf.setMergeScheduler(cachedFSDir.getMergeScheduler());
IndexWriter writer = new IndexWriter(cachedFSDir, conf);
}
public void testDeleteFile() throws Exception {
Directory dir = new NRTCachingDirectory(newDirectory(), 2.0, 25.0);
- dir.createOutput("foo.txt").close();
+ dir.createOutput("foo.txt", IOContext.DEFAULT).close();
dir.deleteFile("foo.txt");
assertEquals(0, dir.listAll().length);
dir.close();
diff --git a/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java b/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java
index 01b4a0c..c8d703c 100644
--- a/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java
+++ b/lucene/src/java/org/apache/lucene/index/BufferedDeletesStream.java
@@ -32,6 +32,7 @@
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
+import org.apache.lucene.store.IOContext;
/* Tracks the stream of {@link BufferedDeletes}.
* When DocumentsWriterPerThread flushes, its buffered
@@ -224,7 +225,7 @@
// Lock order: IW -> BD -> RP
assert readerPool.infoIsLive(info);
- final SegmentReader reader = readerPool.get(info, false);
+ final SegmentReader reader = readerPool.get(info, false, IOContext.READ);
int delCount = 0;
final boolean segAllDeletes;
try {
@@ -273,7 +274,7 @@
if (coalescedDeletes != null) {
// Lock order: IW -> BD -> RP
assert readerPool.infoIsLive(info);
- SegmentReader reader = readerPool.get(info, false);
+ SegmentReader reader = readerPool.get(info, false, IOContext.READ);
int delCount = 0;
final boolean segAllDeletes;
try {
diff --git a/lucene/src/java/org/apache/lucene/index/CheckIndex.java b/lucene/src/java/org/apache/lucene/index/CheckIndex.java
index 0754512..a3fec4d 100644
--- a/lucene/src/java/org/apache/lucene/index/CheckIndex.java
+++ b/lucene/src/java/org/apache/lucene/index/CheckIndex.java
@@ -17,6 +17,13 @@
* limitations under the License.
*/
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
@@ -34,12 +41,6 @@
import org.apache.lucene.index.codecs.PerDocValues;
import org.apache.lucene.index.values.IndexDocValues;
import org.apache.lucene.index.values.ValuesEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.StringHelper;
@@ -364,7 +365,7 @@
final String segmentsFileName = sis.getCurrentSegmentFileName();
IndexInput input = null;
try {
- input = dir.openInput(segmentsFileName);
+ input = dir.openInput(segmentsFileName, IOContext.DEFAULT);
} catch (Throwable t) {
msg("ERROR: could not open segments file in directory");
if (infoStream != null)
@@ -513,7 +514,7 @@
}
if (infoStream != null)
infoStream.print(" test: open reader.........");
- reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, IOContext.DEFAULT);
segInfoStat.openReaderPassed = true;
diff --git a/lucene/src/java/org/apache/lucene/index/DirectoryReader.java b/lucene/src/java/org/apache/lucene/index/DirectoryReader.java
index 426935b..59e39bc 100644
--- a/lucene/src/java/org/apache/lucene/index/DirectoryReader.java
+++ b/lucene/src/java/org/apache/lucene/index/DirectoryReader.java
@@ -32,6 +32,7 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.index.codecs.CodecProvider;
@@ -121,7 +122,7 @@
for (int i = sis.size()-1; i >= 0; i--) {
boolean success = false;
try {
- readers[i] = SegmentReader.get(readOnly, sis.info(i), termInfosIndexDivisor);
+ readers[i] = SegmentReader.get(readOnly, sis.info(i), termInfosIndexDivisor, IOContext.READ);
readers[i].readerFinishedListeners = readerFinishedListeners;
success = true;
} finally {
@@ -170,7 +171,8 @@
try {
final SegmentInfo info = infos.info(i);
assert info.dir == dir;
- final SegmentReader reader = writer.readerPool.getReadOnlyClone(info, true, termInfosIndexDivisor);
+ final SegmentReader reader = writer.readerPool.getReadOnlyClone(info, true, termInfosIndexDivisor,
+ IOContext.READ);
if (reader.numDocs() > 0 || writer.getKeepFullyDeletedSegments()) {
reader.readerFinishedListeners = readerFinishedListeners;
readers.add(reader);
@@ -254,7 +256,7 @@
assert !doClone;
// this is a new reader; in case we hit an exception we can close it safely
- newReader = SegmentReader.get(readOnly, infos.info(i), termInfosIndexDivisor);
+ newReader = SegmentReader.get(readOnly, infos.info(i), termInfosIndexDivisor, IOContext.READ);
newReader.readerFinishedListeners = readerFinishedListeners;
} else {
newReader = newReaders[i].reopenSegment(infos.info(i), doClone, readOnly);
diff --git a/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java b/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
index d2a3e7a..1cd79da 100644
--- a/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
+++ b/lucene/src/java/org/apache/lucene/index/DocumentsWriterPerThread.java
@@ -30,6 +30,8 @@
import org.apache.lucene.index.DocumentsWriterDeleteQueue.DeleteSlice;
import org.apache.lucene.search.SimilarityProvider;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FlushInfo;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.BitVector;
import org.apache.lucene.util.ByteBlockPool.Allocator;
import org.apache.lucene.util.ByteBlockPool.DirectTrackingAllocator;
@@ -428,7 +430,7 @@
assert deleteSlice == null : "all deletes must be applied in prepareFlush";
flushState = new SegmentWriteState(infoStream, directory, segment, fieldInfos,
numDocsInRAM, writer.getConfig().getTermIndexInterval(),
- fieldInfos.buildSegmentCodecs(true), pendingDeletes);
+ fieldInfos.buildSegmentCodecs(true), pendingDeletes, new IOContext(new FlushInfo(numDocsInRAM, bytesUsed())));
final double startMBUsed = parent.flushControl.netBytes() / 1024. / 1024.;
// Apply delete-by-docID now (delete-byDocID only
// happens when an exception is hit processing that
@@ -543,7 +545,7 @@
PerDocWriteState newPerDocWriteState(int codecId) {
assert segment != null;
- return new PerDocWriteState(infoStream, directory, segment, fieldInfos, bytesUsed, codecId);
+ return new PerDocWriteState(infoStream, directory, segment, fieldInfos, bytesUsed, codecId, IOContext.DEFAULT);
}
void setInfoStream(PrintStream infoStream) {
diff --git a/lucene/src/java/org/apache/lucene/index/FieldInfos.java b/lucene/src/java/org/apache/lucene/index/FieldInfos.java
index 5e1ddea..389d472 100644
--- a/lucene/src/java/org/apache/lucene/index/FieldInfos.java
+++ b/lucene/src/java/org/apache/lucene/index/FieldInfos.java
@@ -33,6 +33,7 @@
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.CodecUtil;
@@ -270,7 +271,7 @@
*/
public FieldInfos(Directory d, String name) throws IOException {
this((FieldNumberBiMap)null, null); // use null here to make this FIs Read-Only
- final IndexInput input = d.openInput(name);
+ final IndexInput input = d.openInput(name, IOContext.READONCE);
try {
read(input, name);
} finally {
@@ -562,7 +563,7 @@
}
public void write(Directory d, String name) throws IOException {
- IndexOutput output = d.createOutput(name);
+ IndexOutput output = d.createOutput(name, IOContext.READONCE);
try {
write(output);
} finally {
diff --git a/lucene/src/java/org/apache/lucene/index/FieldsReader.java b/lucene/src/java/org/apache/lucene/index/FieldsReader.java
index 6ac0d43..f56769b 100644
--- a/lucene/src/java/org/apache/lucene/index/FieldsReader.java
+++ b/lucene/src/java/org/apache/lucene/index/FieldsReader.java
@@ -28,6 +28,7 @@
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.CloseableThreadLocal;
import org.apache.lucene.util.IOUtils;
@@ -84,7 +85,7 @@
/** Verifies that the code version which wrote the segment is supported. */
public static void checkCodeVersion(Directory dir, String segment) throws IOException {
final String indexStreamFN = IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_INDEX_EXTENSION);
- IndexInput idxStream = dir.openInput(indexStreamFN, 1024);
+ IndexInput idxStream = dir.openInput(indexStreamFN, IOContext.DEFAULT);
try {
int format = idxStream.readInt();
@@ -113,18 +114,18 @@
}
public FieldsReader(Directory d, String segment, FieldInfos fn) throws IOException {
- this(d, segment, fn, BufferedIndexInput.BUFFER_SIZE, -1, 0);
+ this(d, segment, fn, IOContext.DEFAULT, -1, 0);
}
- public FieldsReader(Directory d, String segment, FieldInfos fn, int readBufferSize, int docStoreOffset, int size) throws IOException {
+ public FieldsReader(Directory d, String segment, FieldInfos fn, IOContext context, int docStoreOffset, int size) throws IOException {
boolean success = false;
isOriginal = true;
try {
fieldInfos = fn;
- cloneableFieldsStream = d.openInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_EXTENSION), readBufferSize);
+ cloneableFieldsStream = d.openInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_EXTENSION), context);
final String indexStreamFN = IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_INDEX_EXTENSION);
- cloneableIndexStream = d.openInput(indexStreamFN, readBufferSize);
+ cloneableIndexStream = d.openInput(indexStreamFN, context);
format = cloneableIndexStream.readInt();
diff --git a/lucene/src/java/org/apache/lucene/index/FieldsWriter.java b/lucene/src/java/org/apache/lucene/index/FieldsWriter.java
index 5542acf..e44cfd1 100644
--- a/lucene/src/java/org/apache/lucene/index/FieldsWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/FieldsWriter.java
@@ -23,6 +23,7 @@
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.IOUtils;
@@ -65,14 +66,14 @@
private IndexOutput fieldsStream;
private IndexOutput indexStream;
- FieldsWriter(Directory directory, String segment) throws IOException {
+ FieldsWriter(Directory directory, String segment, IOContext context) throws IOException {
this.directory = directory;
this.segment = segment;
boolean success = false;
try {
- fieldsStream = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_EXTENSION));
- indexStream = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_INDEX_EXTENSION));
+ fieldsStream = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_EXTENSION), context);
+ indexStream = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.FIELDS_INDEX_EXTENSION), context);
fieldsStream.writeInt(FORMAT_CURRENT);
indexStream.writeInt(FORMAT_CURRENT);
diff --git a/lucene/src/java/org/apache/lucene/index/IndexReader.java b/lucene/src/java/org/apache/lucene/index/IndexReader.java
index 1ee84b1..6d94250 100644
--- a/lucene/src/java/org/apache/lucene/index/IndexReader.java
+++ b/lucene/src/java/org/apache/lucene/index/IndexReader.java
@@ -1436,13 +1436,14 @@
Directory dir = null;
CompoundFileDirectory cfr = null;
+ IOContext context = IOContext.READ;
try {
File file = new File(filename);
String dirname = file.getAbsoluteFile().getParent();
filename = file.getName();
dir = FSDirectory.open(new File(dirname));
- cfr = dir.openCompoundInput(filename, BufferedIndexInput.BUFFER_SIZE);
+ cfr = dir.openCompoundInput(filename, IOContext.DEFAULT);
String [] files = cfr.listAll();
ArrayUtil.mergeSort(files); // sort the array of filename so that the output is more readable
@@ -1452,7 +1453,7 @@
if (extract) {
System.out.println("extract " + files[i] + " with " + len + " bytes to local directory...");
- IndexInput ii = cfr.openInput(files[i]);
+ IndexInput ii = cfr.openInput(files[i], context);
FileOutputStream f = new FileOutputStream(files[i]);
diff --git a/lucene/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/src/java/org/apache/lucene/index/IndexWriter.java
index 8efa46c..32c8c97 100644
--- a/lucene/src/java/org/apache/lucene/index/IndexWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/IndexWriter.java
@@ -45,11 +45,13 @@
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.AlreadyClosedException;
-import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FlushInfo;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.store.MergeInfo;
import org.apache.lucene.util.BitVector;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.Constants;
@@ -207,15 +209,6 @@
* #setInfoStream}).
*/
public final static int MAX_TERM_LENGTH = DocumentsWriterPerThread.MAX_TERM_LENGTH_UTF8;
-
- // The normal read buffer size defaults to 1024, but
- // increasing this during merging seems to yield
- // performance gains. However we don't want to increase
- // it too much because there are quite a few
- // BufferedIndexInputs created during merging. See
- // LUCENE-888 for details.
- private final static int MERGE_READ_BUFFER_SIZE = 4096;
-
// Used for printing messages
private static final AtomicInteger MESSAGE_ID = new AtomicInteger();
private int messageID = MESSAGE_ID.getAndIncrement();
@@ -594,8 +587,8 @@
* enrolled in the pool, so you should simply close()
* it when you're done (ie, do not call release()).
*/
- public synchronized SegmentReader getReadOnlyClone(SegmentInfo info, boolean doOpenStores, int termInfosIndexDivisor) throws IOException {
- SegmentReader sr = get(info, doOpenStores, BufferedIndexInput.BUFFER_SIZE, termInfosIndexDivisor);
+ public synchronized SegmentReader getReadOnlyClone(SegmentInfo info, boolean doOpenStores, int termInfosIndexDivisor, IOContext context) throws IOException {
+ SegmentReader sr = get(info, doOpenStores, context, termInfosIndexDivisor);
try {
return (SegmentReader) sr.clone(true);
} finally {
@@ -611,8 +604,8 @@
* @param doOpenStores
* @throws IOException
*/
- public synchronized SegmentReader get(SegmentInfo info, boolean doOpenStores) throws IOException {
- return get(info, doOpenStores, BufferedIndexInput.BUFFER_SIZE, config.getReaderTermsIndexDivisor());
+ public synchronized SegmentReader get(SegmentInfo info, boolean doOpenStores, IOContext context) throws IOException {
+ return get(info, doOpenStores, context, config.getReaderTermsIndexDivisor());
}
/**
@@ -626,18 +619,20 @@
* @param termsIndexDivisor
* @throws IOException
*/
- public synchronized SegmentReader get(SegmentInfo info, boolean doOpenStores, int readBufferSize, int termsIndexDivisor) throws IOException {
+ public synchronized SegmentReader get(SegmentInfo info, boolean doOpenStores, IOContext context, int termsIndexDivisor) throws IOException {
- if (poolReaders) {
- readBufferSize = BufferedIndexInput.BUFFER_SIZE;
- }
+ // if (poolReaders) {
+ // readBufferSize = BufferedIndexInput.BUFFER_SIZE;
+ // }
+
+ // TODO: context should be part of the key used to cache that reader in the pool.
SegmentReader sr = readerMap.get(info);
if (sr == null) {
// TODO: we may want to avoid doing this while
// synchronized
// Returns a ref, which we xfer to readerMap:
- sr = SegmentReader.get(false, info.dir, info, readBufferSize, doOpenStores, termsIndexDivisor);
+ sr = SegmentReader.get(false, info.dir, info, doOpenStores, termsIndexDivisor, context);
sr.readerFinishedListeners = readerFinishedListeners;
if (info.dir == directory) {
@@ -2185,6 +2180,8 @@
SegmentInfo newSegment = flushedSegment.segmentInfo;
setDiagnostics(newSegment, "flush");
+
+ IOContext context = new IOContext(new FlushInfo(newSegment.docCount, newSegment.sizeInBytes(true)));
boolean success = false;
try {
@@ -2192,11 +2189,11 @@
String compoundFileName = IndexFileNames.segmentFileName(newSegment.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION);
message("creating compound file " + compoundFileName);
// Now build compound file
- final Directory cfsDir = directory.createCompoundOutput(compoundFileName);
+ final Directory cfsDir = directory.createCompoundOutput(compoundFileName, context);
IOException prior = null;
try {
for(String fileName : newSegment.files()) {
- directory.copy(cfsDir, fileName, fileName);
+ directory.copy(cfsDir, fileName, fileName, context);
}
} catch(IOException ex) {
prior = ex;
@@ -2230,7 +2227,7 @@
// shortly-to-be-opened SegmentReader and let it
// carry the changes; there's no reason to use
// filesystem as intermediary here.
- flushedSegment.liveDocs.write(directory, delFileName);
+ flushedSegment.liveDocs.write(directory, delFileName, context);
success2 = true;
} finally {
if (!success2) {
@@ -2399,11 +2396,13 @@
// optimize case only for segments that don't share doc stores
&& versionComparator.compare(info.getVersion(), "3.1") >= 0;
}
-
+
+ IOContext context = new IOContext(new MergeInfo(info.docCount, info.sizeInBytes(true), true, false));
+
if (createCFS) {
- copySegmentIntoCFS(info, newSegName);
+ copySegmentIntoCFS(info, newSegName, context);
} else {
- copySegmentAsIs(info, newSegName, dsNames, dsFilesCopied);
+ copySegmentAsIs(info, newSegName, dsNames, dsFilesCopied, context);
}
infos.add(info);
@@ -2447,6 +2446,7 @@
*/
public void addIndexes(IndexReader... readers) throws CorruptIndexException, IOException {
ensureOpen();
+ int numDocs = 0;
try {
if (infoStream != null)
@@ -2454,15 +2454,19 @@
flush(false, true);
String mergedName = newSegmentName();
+ for (IndexReader indexReader : readers) {
+ numDocs += indexReader.numDocs();
+ }
+ final IOContext context = new IOContext(new MergeInfo(numDocs, -1, true, false));
+
// TODO: somehow we should fix this merge so it's
// abortable so that IW.close(false) is able to stop it
SegmentMerger merger = new SegmentMerger(directory, config.getTermIndexInterval(),
mergedName, null, payloadProcessorProvider,
- globalFieldNumberMap.newFieldInfos(SegmentCodecsBuilder.create(codecs)));
+ globalFieldNumberMap.newFieldInfos(SegmentCodecsBuilder.create(codecs)), context);
for (IndexReader reader : readers) // add new indexes
merger.add(reader);
-
int docCount = merger.merge(); // merge 'em
final FieldInfos fieldInfos = merger.fieldInfos();
@@ -2483,7 +2487,7 @@
// Now create the compound file if needed
if (useCompoundFile) {
- merger.createCompoundFile(mergedName + ".cfs", info);
+ merger.createCompoundFile(mergedName + ".cfs", info, context);
// delete new non cfs files directly: they were never
// registered with IFD
@@ -2507,19 +2511,19 @@
}
/** Copies the segment into the IndexWriter's directory, as a compound segment. */
- private void copySegmentIntoCFS(SegmentInfo info, String segName) throws IOException {
+ private void copySegmentIntoCFS(SegmentInfo info, String segName, IOContext context) throws IOException {
String segFileName = IndexFileNames.segmentFileName(segName, "", IndexFileNames.COMPOUND_FILE_EXTENSION);
Collection<String> files = info.files();
- final CompoundFileDirectory cfsdir = directory.createCompoundOutput(segFileName);
+ final CompoundFileDirectory cfsdir = directory.createCompoundOutput(segFileName, context);
try {
for (String file : files) {
String newFileName = segName + IndexFileNames.stripSegmentName(file);
if (!IndexFileNames.matchesExtension(file, IndexFileNames.DELETES_EXTENSION)
&& !IndexFileNames.isSeparateNormsFile(file)) {
- info.dir.copy(cfsdir, file, file);
+ info.dir.copy(cfsdir, file, file, context);
} else {
assert !directory.fileExists(newFileName): "file \"" + newFileName + "\" already exists";
- info.dir.copy(directory, file, newFileName);
+ info.dir.copy(directory, file, newFileName, context);
}
}
} finally {
@@ -2533,7 +2537,7 @@
/** Copies the segment files as-is into the IndexWriter's directory. */
private void copySegmentAsIs(SegmentInfo info, String segName,
- Map<String, String> dsNames, Set<String> dsFilesCopied)
+ Map<String, String> dsNames, Set<String> dsFilesCopied, IOContext context)
throws IOException {
// Determine if the doc store of this segment needs to be copied. It's
// only relevant for segments that share doc store with others,
@@ -2569,7 +2573,7 @@
}
assert !directory.fileExists(newFileName): "file \"" + newFileName + "\" already exists";
- info.dir.copy(directory, file, newFileName);
+ info.dir.copy(directory, file, newFileName, context);
}
info.setDocStore(info.getDocStoreOffset(), newDsName, info.getDocStoreIsCompoundFile());
@@ -3425,9 +3429,11 @@
int mergedDocCount = 0;
List<SegmentInfo> sourceSegments = merge.segments;
+
+ IOContext context = new IOContext(merge.getMergeInfo());
SegmentMerger merger = new SegmentMerger(directory, config.getTermIndexInterval(), mergedName, merge,
- payloadProcessorProvider, merge.info.getFieldInfos());
+ payloadProcessorProvider, merge.info.getFieldInfos(), context);
if (infoStream != null) {
message("merging " + merge.segString(directory) + " mergeVectors=" + merge.info.getFieldInfos().hasVectors());
@@ -3448,7 +3454,7 @@
// Hold onto the "live" reader; we will use this to
// commit merged deletes
final SegmentReader reader = readerPool.get(info, true,
- MERGE_READ_BUFFER_SIZE,
+ context,
-config.getReaderTermsIndexDivisor());
merge.readers.add(reader);
@@ -3502,7 +3508,7 @@
if (infoStream != null) {
message("create compound file " + compoundFileName);
}
- merger.createCompoundFile(compoundFileName, merge.info);
+ merger.createCompoundFile(compoundFileName, merge.info, new IOContext(merge.getMergeInfo()));
success = true;
} catch (IOException ioe) {
synchronized(this) {
@@ -3574,7 +3580,7 @@
// keep deletes (it's costly to open entire reader
// when we just need deletes)
- final SegmentReader mergedReader = readerPool.get(merge.info, loadDocStores, BufferedIndexInput.BUFFER_SIZE, termsIndexDivisor);
+ final SegmentReader mergedReader = readerPool.get(merge.info, loadDocStores, context, termsIndexDivisor);
try {
if (poolReaders && mergedSegmentWarmer != null) {
mergedSegmentWarmer.warm(mergedReader);
diff --git a/lucene/src/java/org/apache/lucene/index/MergePolicy.java b/lucene/src/java/org/apache/lucene/index/MergePolicy.java
index 093ac8c..7298ecd 100644
--- a/lucene/src/java/org/apache/lucene/index/MergePolicy.java
+++ b/lucene/src/java/org/apache/lucene/index/MergePolicy.java
@@ -18,6 +18,7 @@
*/
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MergeInfo;
import org.apache.lucene.util.SetOnce;
import org.apache.lucene.util.SetOnce.AlreadySetException;
@@ -189,6 +190,10 @@
}
return total;
}
+
+ public MergeInfo getMergeInfo() {
+ return new MergeInfo(totalDocCount, estimatedMergeBytes, isExternal, optimize);
+ }
}
/**
diff --git a/lucene/src/java/org/apache/lucene/index/NormsWriter.java b/lucene/src/java/org/apache/lucene/index/NormsWriter.java
index 91c7eed..e435382 100644
--- a/lucene/src/java/org/apache/lucene/index/NormsWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/NormsWriter.java
@@ -22,6 +22,7 @@
import java.util.Map;
import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.IOContext.Context;
import org.apache.lucene.util.IOUtils;
// TODO FI: norms could actually be stored as doc store
@@ -49,7 +50,7 @@
}
final String normsFileName = IndexFileNames.segmentFileName(state.segmentName, "", IndexFileNames.NORMS_EXTENSION);
- IndexOutput normsOut = state.directory.createOutput(normsFileName);
+ IndexOutput normsOut = state.directory.createOutput(normsFileName, state.context);
boolean success = false;
try {
normsOut.writeBytes(SegmentNorms.NORMS_HEADER, 0, SegmentNorms.NORMS_HEADER.length);
diff --git a/lucene/src/java/org/apache/lucene/index/PerDocWriteState.java b/lucene/src/java/org/apache/lucene/index/PerDocWriteState.java
index e7b1d93..8bf08f3 100644
--- a/lucene/src/java/org/apache/lucene/index/PerDocWriteState.java
+++ b/lucene/src/java/org/apache/lucene/index/PerDocWriteState.java
@@ -20,6 +20,7 @@
import org.apache.lucene.index.codecs.PerDocConsumer;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
/**
* Encapsulates all necessary state to initiate a {@link PerDocConsumer} and
@@ -35,10 +36,11 @@
public final AtomicLong bytesUsed;
public final SegmentCodecs segmentCodecs;
public final int codecId;
+ public final IOContext context;
PerDocWriteState(PrintStream infoStream, Directory directory,
String segmentName, FieldInfos fieldInfos, AtomicLong bytesUsed,
- int codecId) {
+ int codecId, IOContext context) {
this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
@@ -46,6 +48,7 @@
this.segmentCodecs = fieldInfos.buildSegmentCodecs(false);
this.codecId = codecId;
this.bytesUsed = bytesUsed;
+ this.context = context;
}
PerDocWriteState(SegmentWriteState state) {
@@ -56,6 +59,7 @@
fieldInfos = state.fieldInfos;
codecId = state.codecId;
bytesUsed = new AtomicLong(0);
+ context = state.context;
}
PerDocWriteState(PerDocWriteState state, int codecId) {
@@ -66,5 +70,6 @@
this.segmentCodecs = state.segmentCodecs;
this.codecId = codecId;
this.bytesUsed = state.bytesUsed;
+ this.context = state.context;
}
}
diff --git a/lucene/src/java/org/apache/lucene/index/PerFieldCodecWrapper.java b/lucene/src/java/org/apache/lucene/index/PerFieldCodecWrapper.java
index a8e3c99..8159230 100644
--- a/lucene/src/java/org/apache/lucene/index/PerFieldCodecWrapper.java
+++ b/lucene/src/java/org/apache/lucene/index/PerFieldCodecWrapper.java
@@ -36,6 +36,7 @@
import org.apache.lucene.index.codecs.DocValuesConsumer;
import org.apache.lucene.index.values.IndexDocValues;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.IOUtils;
/**
@@ -99,7 +100,7 @@
private final Map<String, FieldsProducer> codecs = new HashMap<String, FieldsProducer>();
public FieldsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo si,
- int readBufferSize, int indexDivisor) throws IOException {
+ IOContext context, int indexDivisor) throws IOException {
final Map<Codec, FieldsProducer> producers = new HashMap<Codec, FieldsProducer>();
boolean success = false;
@@ -111,7 +112,7 @@
Codec codec = segmentCodecs.codecs[fi.getCodecId()];
if (!producers.containsKey(codec)) {
producers.put(codec, codec.fieldsProducer(new SegmentReadState(dir,
- si, fieldInfos, readBufferSize, indexDivisor, fi.getCodecId())));
+ si, fieldInfos, context, indexDivisor, fi.getCodecId())));
}
codecs.put(fi.name, producers.get(codec));
}
@@ -187,7 +188,7 @@
public FieldsProducer fieldsProducer(SegmentReadState state)
throws IOException {
return new FieldsReader(state.dir, state.fieldInfos, state.segmentInfo,
- state.readBufferSize, state.termsIndexDivisor);
+ state.context, state.termsIndexDivisor);
}
@Override
@@ -212,14 +213,14 @@
@Override
public PerDocValues docsProducer(SegmentReadState state) throws IOException {
return new PerDocProducers(state.dir, state.fieldInfos, state.segmentInfo,
- state.readBufferSize, state.termsIndexDivisor);
+ state.context, state.termsIndexDivisor);
}
private final class PerDocProducers extends PerDocValues {
private final TreeMap<String, PerDocValues> codecs = new TreeMap<String, PerDocValues>();
public PerDocProducers(Directory dir, FieldInfos fieldInfos, SegmentInfo si,
- int readBufferSize, int indexDivisor) throws IOException {
+ IOContext context, int indexDivisor) throws IOException {
final Map<Codec, PerDocValues> producers = new HashMap<Codec, PerDocValues>();
boolean success = false;
try {
@@ -229,7 +230,7 @@
Codec codec = segmentCodecs.codecs[fi.getCodecId()];
if (!producers.containsKey(codec)) {
producers.put(codec, codec.docsProducer(new SegmentReadState(dir,
- si, fieldInfos, readBufferSize, indexDivisor, fi.getCodecId())));
+ si, fieldInfos, context, indexDivisor, fi.getCodecId())));
}
codecs.put(fi.name, producers.get(codec));
}
diff --git a/lucene/src/java/org/apache/lucene/index/SegmentCoreReaders.java b/lucene/src/java/org/apache/lucene/index/SegmentCoreReaders.java
index 8534156..2e392e5 100644
--- a/lucene/src/java/org/apache/lucene/index/SegmentCoreReaders.java
+++ b/lucene/src/java/org/apache/lucene/index/SegmentCoreReaders.java
@@ -25,6 +25,7 @@
import org.apache.lucene.index.codecs.PerDocValues;
import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.IOUtils;
/** Holds core readers that are shared (unchanged) when
@@ -47,7 +48,7 @@
final Directory dir;
final Directory cfsDir;
- final int readBufferSize;
+ final IOContext context;
final int termsIndexDivisor;
private final SegmentReader owner;
@@ -59,7 +60,7 @@
- SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentInfo si, int readBufferSize, int termsIndexDivisor) throws IOException {
+ SegmentCoreReaders(SegmentReader owner, Directory dir, SegmentInfo si, IOContext context, int termsIndexDivisor) throws IOException {
if (termsIndexDivisor == 0) {
throw new IllegalArgumentException("indexDivisor must be < 0 (don't load terms index) or greater than 0 (got 0)");
@@ -67,7 +68,7 @@
segment = si.name;
final SegmentCodecs segmentCodecs = si.getSegmentCodecs();
- this.readBufferSize = readBufferSize;
+ this.context = context;
this.dir = dir;
boolean success = false;
@@ -75,7 +76,7 @@
try {
Directory dir0 = dir;
if (si.getUseCompoundFile()) {
- cfsReader = dir.openCompoundInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
+ cfsReader = dir.openCompoundInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), context);
dir0 = cfsReader;
}
cfsDir = dir0;
@@ -84,7 +85,7 @@
this.termsIndexDivisor = termsIndexDivisor;
final Codec codec = segmentCodecs.codec();
- final SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si, fieldInfos, readBufferSize, termsIndexDivisor);
+ final SegmentReadState segmentReadState = new SegmentReadState(cfsDir, si, fieldInfos, context, termsIndexDivisor);
// Ask codec for its Fields
fields = codec.fieldsProducer(segmentReadState);
assert fields != null;
@@ -141,7 +142,7 @@
assert storeCFSReader == null;
storeCFSReader = dir.openCompoundInput(
IndexFileNames.segmentFileName(si.getDocStoreSegment(), "", IndexFileNames.COMPOUND_FILE_STORE_EXTENSION),
- readBufferSize);
+ context);
storeDir = storeCFSReader;
assert storeDir != null;
} else {
@@ -153,7 +154,7 @@
// was not used, but then we are asked to open doc
// stores after the segment has switched to CFS
if (cfsReader == null) {
- cfsReader = dir.openCompoundInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
+ cfsReader = dir.openCompoundInput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.COMPOUND_FILE_EXTENSION), context);
}
storeDir = cfsReader;
assert storeDir != null;
@@ -163,7 +164,7 @@
}
final String storesSegment = si.getDocStoreSegment();
- fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize,
+ fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, context,
si.getDocStoreOffset(), si.docCount);
// Verify two sources of "maxDoc" agree:
@@ -172,7 +173,7 @@
}
if (si.getHasVectors()) { // open term vector files only as needed
- termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
+ termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, context, si.getDocStoreOffset(), si.docCount);
}
}
}
diff --git a/lucene/src/java/org/apache/lucene/index/SegmentInfo.java b/lucene/src/java/org/apache/lucene/index/SegmentInfo.java
index e25f080..f2180b0 100644
--- a/lucene/src/java/org/apache/lucene/index/SegmentInfo.java
+++ b/lucene/src/java/org/apache/lucene/index/SegmentInfo.java
@@ -30,8 +30,8 @@
import org.apache.lucene.index.codecs.Codec;
import org.apache.lucene.index.codecs.CodecProvider;
import org.apache.lucene.index.codecs.DefaultSegmentInfosWriter;
-import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.Constants;
@@ -247,7 +247,7 @@
}
final Directory dirToTest;
if (isCompoundFile) {
- dirToTest = dir.openCompoundInput(IndexFileNames.segmentFileName(storesSegment, "", ext), BufferedIndexInput.BUFFER_SIZE);
+ dirToTest = dir.openCompoundInput(IndexFileNames.segmentFileName(storesSegment, "", ext), IOContext.READONCE);
} else {
dirToTest = dir;
}
@@ -266,7 +266,7 @@
Directory dir0 = dir;
if (isCompoundFile && checkCompoundFile) {
dir0 = dir.openCompoundInput(IndexFileNames.segmentFileName(name,
- "", IndexFileNames.COMPOUND_FILE_EXTENSION), BufferedIndexInput.BUFFER_SIZE);
+ "", IndexFileNames.COMPOUND_FILE_EXTENSION), IOContext.READONCE);
}
try {
fieldInfos = new FieldInfos(dir0, IndexFileNames.segmentFileName(name,
diff --git a/lucene/src/java/org/apache/lucene/index/SegmentInfos.java b/lucene/src/java/org/apache/lucene/index/SegmentInfos.java
index 4a5e784..26e71cf 100644
--- a/lucene/src/java/org/apache/lucene/index/SegmentInfos.java
+++ b/lucene/src/java/org/apache/lucene/index/SegmentInfos.java
@@ -37,6 +37,7 @@
import org.apache.lucene.index.codecs.SegmentInfosReader;
import org.apache.lucene.index.codecs.SegmentInfosWriter;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.NoSuchDirectoryException;
@@ -254,7 +255,7 @@
try {
SegmentInfosReader infosReader = codecs.getSegmentInfosReader();
- infosReader.read(directory, segmentFileName, codecs, this);
+ infosReader.read(directory, segmentFileName, codecs, this, IOContext.READ);
success = true;
}
finally {
@@ -322,7 +323,7 @@
try {
SegmentInfosWriter infosWriter = codecs.getSegmentInfosWriter();
- segnOutput = infosWriter.writeInfos(directory, segmentFileName, this);
+ segnOutput = infosWriter.writeInfos(directory, segmentFileName, this, IOContext.DEFAULT);
infosWriter.prepareCommit(segnOutput);
pendingSegnOutput = segnOutput;
success = true;
@@ -597,7 +598,7 @@
for(int i=0;i<defaultGenFileRetryCount;i++) {
IndexInput genInput = null;
try {
- genInput = directory.openInput(IndexFileNames.SEGMENTS_GEN);
+ genInput = directory.openInput(IndexFileNames.SEGMENTS_GEN, IOContext.READONCE);
} catch (FileNotFoundException e) {
if (infoStream != null) {
message("segments.gen open: FileNotFoundException " + e);
@@ -814,7 +815,7 @@
}
private final long writeGlobalFieldMap(FieldNumberBiMap map, Directory dir, String name) throws IOException {
- final IndexOutput output = dir.createOutput(name);
+ final IndexOutput output = dir.createOutput(name, IOContext.DEFAULT);
boolean success = false;
long version;
try {
@@ -843,7 +844,7 @@
private void readGlobalFieldMap(FieldNumberBiMap map, Directory dir) throws IOException {
final String name = getGlobalFieldNumberName(lastGlobalFieldMapVersion);
- final IndexInput input = dir.openInput(name);
+ final IndexInput input = dir.openInput(name, IOContext.READONCE);
try {
map.read(input);
} finally {
@@ -934,7 +935,7 @@
}
try {
- IndexOutput genOutput = dir.createOutput(IndexFileNames.SEGMENTS_GEN);
+ IndexOutput genOutput = dir.createOutput(IndexFileNames.SEGMENTS_GEN, IOContext.DEFAULT);
try {
genOutput.writeInt(FORMAT_SEGMENTS_GEN_CURRENT);
genOutput.writeLong(generation);
diff --git a/lucene/src/java/org/apache/lucene/index/SegmentMerger.java b/lucene/src/java/org/apache/lucene/index/SegmentMerger.java
index fac20c4..7f75463 100644
--- a/lucene/src/java/org/apache/lucene/index/SegmentMerger.java
+++ b/lucene/src/java/org/apache/lucene/index/SegmentMerger.java
@@ -33,6 +33,7 @@
import org.apache.lucene.index.codecs.PerDocValues;
import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.Bits;
@@ -68,8 +69,10 @@
private SegmentWriteState segmentWriteState;
private PayloadProcessorProvider payloadProcessorProvider;
+
+ private IOContext context;
- SegmentMerger(Directory dir, int termIndexInterval, String name, MergePolicy.OneMerge merge, PayloadProcessorProvider payloadProcessorProvider, FieldInfos fieldInfos) {
+ SegmentMerger(Directory dir, int termIndexInterval, String name, MergePolicy.OneMerge merge, PayloadProcessorProvider payloadProcessorProvider, FieldInfos fieldInfos, IOContext context) {
this.payloadProcessorProvider = payloadProcessorProvider;
directory = dir;
segment = name;
@@ -85,6 +88,7 @@
};
}
this.termIndexInterval = termIndexInterval;
+ this.context = context;
}
public FieldInfos fieldInfos() {
@@ -129,19 +133,19 @@
* deletion files, this SegmentInfo must not reference such files when this
* method is called, because they are not allowed within a compound file.
*/
- final Collection<String> createCompoundFile(String fileName, final SegmentInfo info)
+ final Collection<String> createCompoundFile(String fileName, final SegmentInfo info, IOContext context)
throws IOException {
// Now merge all added files
Collection<String> files = info.files();
- CompoundFileDirectory cfsDir = directory.createCompoundOutput(fileName);
+ CompoundFileDirectory cfsDir = directory.createCompoundOutput(fileName, context);
try {
for (String file : files) {
assert !IndexFileNames.matchesExtension(file, IndexFileNames.DELETES_EXTENSION)
: ".del file is not allowed in .cfs: " + file;
assert !IndexFileNames.isSeparateNormsFile(file)
: "separate norms file (.s[0-9]+) is not allowed in .cfs: " + file;
- directory.copy(cfsDir, file, file);
+ directory.copy(cfsDir, file, file, context);
checkAbort.work(directory.fileLength(file));
}
} finally {
@@ -236,9 +240,7 @@
int docCount = 0;
setMatchingSegmentReaders();
-
- final FieldsWriter fieldsWriter = new FieldsWriter(directory, segment);
-
+ final FieldsWriter fieldsWriter = new FieldsWriter(directory, segment, context);
try {
int idx = 0;
for (IndexReader reader : readers) {
@@ -272,8 +274,7 @@
// entering the index. See LUCENE-1282 for
// details.
throw new RuntimeException("mergeFields produced an invalid result: docCount is " + docCount + " but fdx file size is " + fdxFileLength + " file=" + fileName + " file exists?=" + directory.fileExists(fileName) + "; now aborting this merge to prevent index corruption");
-
- segmentWriteState = new SegmentWriteState(null, directory, segment, fieldInfos, docCount, termIndexInterval, codecInfo, null);
+ segmentWriteState = new SegmentWriteState(null, directory, segment, fieldInfos, docCount, termIndexInterval, codecInfo, null, context);
return docCount;
}
@@ -360,7 +361,7 @@
*/
private final void mergeVectors() throws IOException {
TermVectorsWriter termVectorsWriter =
- new TermVectorsWriter(directory, segment, fieldInfos);
+ new TermVectorsWriter(directory, segment, fieldInfos, context);
try {
int idx = 0;
@@ -629,7 +630,7 @@
for (FieldInfo fi : fieldInfos) {
if (fi.isIndexed && !fi.omitNorms) {
if (output == null) {
- output = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.NORMS_EXTENSION));
+ output = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.NORMS_EXTENSION), context);
output.writeBytes(SegmentNorms.NORMS_HEADER, SegmentNorms.NORMS_HEADER.length);
}
for (IndexReader reader : readers) {
diff --git a/lucene/src/java/org/apache/lucene/index/SegmentNorms.java b/lucene/src/java/org/apache/lucene/index/SegmentNorms.java
index df8bf9a..75d32af 100644
--- a/lucene/src/java/org/apache/lucene/index/SegmentNorms.java
+++ b/lucene/src/java/org/apache/lucene/index/SegmentNorms.java
@@ -20,9 +20,10 @@
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
+import org.apache.lucene.store.FlushInfo;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
-
/**
* Byte[] referencing is used because a new norm object needs
* to be created for each clone, and the byte array is all
@@ -219,7 +220,7 @@
// NOTE: norms are re-written in regular directory, not cfs
si.advanceNormGen(this.number);
final String normFileName = si.getNormFileName(this.number);
- IndexOutput out = owner.directory().createOutput(normFileName);
+ IndexOutput out = owner.directory().createOutput(normFileName, new IOContext(new FlushInfo(si.docCount, 0)));
boolean success = false;
try {
try {
diff --git a/lucene/src/java/org/apache/lucene/index/SegmentReadState.java b/lucene/src/java/org/apache/lucene/index/SegmentReadState.java
index d2159d9..e7e717f 100644
--- a/lucene/src/java/org/apache/lucene/index/SegmentReadState.java
+++ b/lucene/src/java/org/apache/lucene/index/SegmentReadState.java
@@ -18,6 +18,7 @@
*/
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
/**
* @lucene.experimental
@@ -26,7 +27,7 @@
public final Directory dir;
public final SegmentInfo segmentInfo;
public final FieldInfos fieldInfos;
- public final int readBufferSize;
+ public final IOContext context;
// NOTE: if this is < 0, that means "defer terms index
// load until needed". But if the codec must load the
@@ -37,20 +38,20 @@
public final int codecId;
public SegmentReadState(Directory dir, SegmentInfo info,
- FieldInfos fieldInfos, int readBufferSize, int termsIndexDivisor) {
- this(dir, info, fieldInfos, readBufferSize, termsIndexDivisor, -1);
+ FieldInfos fieldInfos, IOContext context, int termsIndexDivisor) {
+ this(dir, info, fieldInfos, context, termsIndexDivisor, -1);
}
public SegmentReadState(Directory dir,
SegmentInfo info,
FieldInfos fieldInfos,
- int readBufferSize,
+ IOContext context,
int termsIndexDivisor,
int codecId) {
this.dir = dir;
this.segmentInfo = info;
this.fieldInfos = fieldInfos;
- this.readBufferSize = readBufferSize;
+ this.context = context;
this.termsIndexDivisor = termsIndexDivisor;
this.codecId = codecId;
}
diff --git a/lucene/src/java/org/apache/lucene/index/SegmentReader.java b/lucene/src/java/org/apache/lucene/index/SegmentReader.java
index 456d46f..e1aa5da 100644
--- a/lucene/src/java/org/apache/lucene/index/SegmentReader.java
+++ b/lucene/src/java/org/apache/lucene/index/SegmentReader.java
@@ -32,6 +32,7 @@
import org.apache.lucene.index.codecs.PerDocValues;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.BitVector;
import org.apache.lucene.util.Bits;
@@ -46,7 +47,6 @@
protected boolean readOnly;
private SegmentInfo si;
- private int readBufferSize;
private final ReaderContext readerContext = new AtomicReaderContext(this);
CloseableThreadLocal<FieldsReader> fieldsReaderLocal = new FieldsReaderLocal();
CloseableThreadLocal<TermVectorsReader> termVectorsLocal = new CloseableThreadLocal<TermVectorsReader>();
@@ -88,8 +88,9 @@
* @throws CorruptIndexException if the index is corrupt
* @throws IOException if there is a low-level IO error
*/
- public static SegmentReader get(boolean readOnly, SegmentInfo si, int termInfosIndexDivisor) throws CorruptIndexException, IOException {
- return get(readOnly, si.dir, si, BufferedIndexInput.BUFFER_SIZE, true, termInfosIndexDivisor);
+ public static SegmentReader get(boolean readOnly, SegmentInfo si, int termInfosIndexDivisor, IOContext context) throws CorruptIndexException, IOException {
+ // TODO should we check if readOnly and context combination makes sense like asserting that if read only we don't get a default?
+ return get(readOnly, si.dir, si, true, termInfosIndexDivisor, context);
}
/**
@@ -99,25 +100,23 @@
public static SegmentReader get(boolean readOnly,
Directory dir,
SegmentInfo si,
- int readBufferSize,
boolean doOpenStores,
- int termInfosIndexDivisor)
+ int termInfosIndexDivisor,
+ IOContext context)
throws CorruptIndexException, IOException {
SegmentReader instance = new SegmentReader();
instance.readOnly = readOnly;
instance.si = si;
- instance.readBufferSize = readBufferSize;
-
boolean success = false;
try {
- instance.core = new SegmentCoreReaders(instance, dir, si, readBufferSize, termInfosIndexDivisor);
+ instance.core = new SegmentCoreReaders(instance, dir, si, context, termInfosIndexDivisor);
if (doOpenStores) {
instance.core.openDocStores(si);
}
instance.loadLiveDocs();
- instance.openNorms(instance.core.cfsDir, readBufferSize);
+ instance.openNorms(instance.core.cfsDir, context);
success = true;
} finally {
@@ -161,7 +160,7 @@
private void loadLiveDocs() throws IOException {
// NOTE: the bitvector is stored using the regular directory, not cfs
if (hasDeletions(si)) {
- liveDocs = new BitVector(directory(), si.getDelFileName());
+ liveDocs = new BitVector(directory(), si.getDelFileName(), IOContext.DEFAULT);
if (liveDocs.getVersion() < BitVector.VERSION_DGAPS_CLEARED) {
liveDocs.invertAll();
}
@@ -253,7 +252,6 @@
clone.core = core;
clone.readOnly = openReadOnly;
clone.si = si;
- clone.readBufferSize = readBufferSize;
clone.pendingDeleteCount = pendingDeleteCount;
clone.readerFinishedListeners = readerFinishedListeners;
@@ -298,7 +296,7 @@
// If we are not cloning, then this will open anew
// any norms that have changed:
- clone.openNorms(si.getUseCompoundFile() ? core.getCFSReader() : directory(), readBufferSize);
+ clone.openNorms(si.getUseCompoundFile() ? core.getCFSReader() : directory(), IOContext.DEFAULT);
success = true;
} finally {
@@ -340,7 +338,7 @@
final String delFileName = si.getDelFileName();
boolean success = false;
try {
- liveDocs.write(directory(), delFileName);
+ liveDocs.write(directory(), delFileName, IOContext.DEFAULT);
success = true;
} finally {
if (!success) {
@@ -580,7 +578,7 @@
norm.copyOnWrite()[doc] = value; // set the value
}
- private void openNorms(Directory cfsDir, int readBufferSize) throws IOException {
+ private void openNorms(Directory cfsDir, IOContext context) throws IOException {
long nextNormSeek = SegmentNorms.NORMS_HEADER.length; //skip header (header unused for now)
int maxDoc = maxDoc();
for (FieldInfo fi : core.fieldInfos) {
@@ -604,7 +602,7 @@
if (singleNormFile) {
normSeek = nextNormSeek;
if (singleNormStream == null) {
- singleNormStream = d.openInput(fileName, readBufferSize);
+ singleNormStream = d.openInput(fileName, context);
singleNormRef = new AtomicInteger(1);
} else {
singleNormRef.incrementAndGet();
@@ -614,7 +612,7 @@
// If this were to change in the future, a clone could be done here.
normInput = singleNormStream;
} else {
- normInput = d.openInput(fileName);
+ normInput = d.openInput(fileName, context);
// if the segment was created in 3.2 or after, we wrote the header for sure,
// and don't need to do the sketchy file size check. otherwise, we check
// if the size is exactly equal to maxDoc to detect a headerless file.
diff --git a/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java b/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java
index f9f4cc0..270c084 100644
--- a/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java
+++ b/lucene/src/java/org/apache/lucene/index/SegmentWriteState.java
@@ -20,6 +20,7 @@
import java.io.PrintStream;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.BitVector;
/**
@@ -51,9 +52,11 @@
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval; // TODO: this should be private to the codec, not settable here or in IWC
+
+ public final IOContext context;
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
- int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes) {
+ int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes, IOContext context) {
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
@@ -63,6 +66,7 @@
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
codecId = -1;
+ this.context = context;
}
/**
@@ -76,6 +80,7 @@
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
+ context = state.context;
this.codecId = codecId;
segDeletes = state.segDeletes;
}
diff --git a/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java b/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java
index c3aa5c8..de441d2 100644
--- a/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/StoredFieldsWriter.java
@@ -20,6 +20,7 @@
import java.io.IOException;
import org.apache.lucene.document.Fieldable;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
@@ -59,7 +60,7 @@
// It's possible that all documents seen in this segment
// hit non-aborting exceptions, in which case we will
// not have yet init'd the FieldsWriter:
- initFieldsWriter();
+ initFieldsWriter(state.context);
fill(state.numDocs);
}
@@ -75,9 +76,9 @@
}
}
- private synchronized void initFieldsWriter() throws IOException {
+ private synchronized void initFieldsWriter(IOContext context) throws IOException {
if (fieldsWriter == null) {
- fieldsWriter = new FieldsWriter(docWriter.directory, docWriter.getSegment());
+ fieldsWriter = new FieldsWriter(docWriter.directory, docWriter.getSegment(), context);
lastDocID = 0;
}
}
@@ -107,7 +108,7 @@
void finishDocument() throws IOException {
assert docWriter.writer.testPoint("StoredFieldsWriter.finishDocument start");
- initFieldsWriter();
+ initFieldsWriter(IOContext.DEFAULT);
fill(docState.docID);
if (fieldsWriter != null && numStoredFields > 0) {
diff --git a/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java b/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java
index 0d6b7dc..b034637 100644
--- a/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java
+++ b/lucene/src/java/org/apache/lucene/index/TermVectorsReader.java
@@ -17,9 +17,12 @@
* limitations under the License.
*/
+import org.apache.lucene.index.MergePolicy.OneMerge;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IOContext.Context;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
@@ -63,29 +66,24 @@
private final int format;
- TermVectorsReader(Directory d, String segment, FieldInfos fieldInfos)
+ TermVectorsReader(Directory d, String segment, FieldInfos fieldInfos, IOContext context)
throws CorruptIndexException, IOException {
- this(d, segment, fieldInfos, BufferedIndexInput.BUFFER_SIZE);
- }
-
- TermVectorsReader(Directory d, String segment, FieldInfos fieldInfos, int readBufferSize)
- throws CorruptIndexException, IOException {
- this(d, segment, fieldInfos, readBufferSize, -1, 0);
+ this(d, segment, fieldInfos, context, -1, 0);
}
- TermVectorsReader(Directory d, String segment, FieldInfos fieldInfos, int readBufferSize, int docStoreOffset, int size)
+ TermVectorsReader(Directory d, String segment, FieldInfos fieldInfos, IOContext context, int docStoreOffset, int size)
throws CorruptIndexException, IOException {
boolean success = false;
try {
String idxName = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_INDEX_EXTENSION);
- tvx = d.openInput(idxName, readBufferSize);
+ tvx = d.openInput(idxName, context);
format = checkValidFormat(tvx, idxName);
String fn = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
- tvd = d.openInput(fn, readBufferSize);
+ tvd = d.openInput(fn, context);
final int tvdFormat = checkValidFormat(tvd, fn);
fn = IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION);
- tvf = d.openInput(fn, readBufferSize);
+ tvf = d.openInput(fn, context);
final int tvfFormat = checkValidFormat(tvf, fn);
assert format == tvdFormat;
diff --git a/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java b/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java
index 4fbc6be..686c355 100644
--- a/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/TermVectorsTermsWriter.java
@@ -20,7 +20,10 @@
import java.io.IOException;
import java.util.Map;
+import org.apache.lucene.store.FlushInfo;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.IOContext.Context;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
@@ -89,13 +92,14 @@
if (tvx == null) {
boolean success = false;
try {
+ IOContext context = new IOContext(new FlushInfo(docWriter.getNumDocsInRAM(), docWriter.bytesUsed()));
// If we hit an exception while init'ing the term
// vector output files, we must abort this segment
// because those files will be in an unknown
// state:
- tvx = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_INDEX_EXTENSION));
- tvd = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
- tvf = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+ tvx = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_INDEX_EXTENSION), context);
+ tvd = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION), context);
+ tvf = docWriter.directory.createOutput(IndexFileNames.segmentFileName(docWriter.getSegment(), "", IndexFileNames.VECTORS_FIELDS_EXTENSION), context);
tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
diff --git a/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java b/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java
index 44d1b5b..f94365a 100644
--- a/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/TermVectorsWriter.java
@@ -18,6 +18,7 @@
*/
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
@@ -31,15 +32,15 @@
private FieldInfos fieldInfos;
public TermVectorsWriter(Directory directory, String segment,
- FieldInfos fieldInfos) throws IOException {
+ FieldInfos fieldInfos, IOContext context) throws IOException {
boolean success = false;
try {
// Open files for TermVector storage
- tvx = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_INDEX_EXTENSION));
+ tvx = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_INDEX_EXTENSION), context);
tvx.writeInt(TermVectorsReader.FORMAT_CURRENT);
- tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION));
+ tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_DOCUMENTS_EXTENSION), context);
tvd.writeInt(TermVectorsReader.FORMAT_CURRENT);
- tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION));
+ tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, "", IndexFileNames.VECTORS_FIELDS_EXTENSION), context);
tvf.writeInt(TermVectorsReader.FORMAT_CURRENT);
success = true;
} finally {
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsReader.java b/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsReader.java
index 6e61e7e..03b6bca 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsReader.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsReader.java
@@ -37,6 +37,7 @@
import org.apache.lucene.index.codecs.standard.StandardPostingsReader; // javadocs
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
@@ -107,7 +108,7 @@
//private String segment;
- public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldInfos fieldInfos, String segment, PostingsReaderBase postingsReader, int readBufferSize,
+ public BlockTermsReader(TermsIndexReaderBase indexReader, Directory dir, FieldInfos fieldInfos, String segment, PostingsReaderBase postingsReader, IOContext context,
int termsCacheSize, int codecId)
throws IOException {
@@ -116,7 +117,7 @@
//this.segment = segment;
in = dir.openInput(IndexFileNames.segmentFileName(segment, codecId, BlockTermsWriter.TERMS_EXTENSION),
- readBufferSize);
+ context);
boolean success = false;
try {
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsWriter.java b/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsWriter.java
index 9cb9d4c..926a6af 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/BlockTermsWriter.java
@@ -72,7 +72,7 @@
throws IOException {
final String termsFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, TERMS_EXTENSION);
this.termsIndexWriter = termsIndexWriter;
- out = state.directory.createOutput(termsFileName);
+ out = state.directory.createOutput(termsFileName, state.context);
boolean success = false;
try {
fieldInfos = state.fieldInfos;
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesConsumer.java b/lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesConsumer.java
index af9c416..d1749fb 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesConsumer.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesConsumer.java
@@ -29,6 +29,7 @@
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.index.values.Writer;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.BytesRef;
/**
@@ -42,13 +43,17 @@
private final AtomicLong bytesUsed;
private final Comparator<BytesRef> comparator;
private boolean useCompoundFile;
+ private final IOContext context;
public DefaultDocValuesConsumer(PerDocWriteState state, Comparator<BytesRef> comparator, boolean useCompoundFile) throws IOException {
this.segmentName = state.segmentName;
this.codecId = state.codecId;
this.bytesUsed = state.bytesUsed;
+ this.context = state.context;
//TODO maybe we should enable a global CFS that all codecs can pull on demand to further reduce the number of files?
- this.directory = useCompoundFile ? state.directory.createCompoundOutput(IndexFileNames.segmentFileName(segmentName, state.codecId, IndexFileNames.COMPOUND_FILE_EXTENSION)) : state.directory;
+ this.directory = useCompoundFile ? state.directory.createCompoundOutput(
+ IndexFileNames.segmentFileName(segmentName, codecId,
+ IndexFileNames.COMPOUND_FILE_EXTENSION), context) : state.directory;
this.comparator = comparator;
this.useCompoundFile = useCompoundFile;
}
@@ -63,7 +68,7 @@
public DocValuesConsumer addValuesField(FieldInfo field) throws IOException {
return Writer.create(field.getDocValues(),
docValuesId(segmentName, codecId, field.number),
- directory, comparator, bytesUsed);
+ directory, comparator, bytesUsed, context);
}
@SuppressWarnings("fallthrough")
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesProducer.java b/lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesProducer.java
index 0747708..6a3207d 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesProducer.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/DefaultDocValuesProducer.java
@@ -33,6 +33,7 @@
import org.apache.lucene.index.values.Ints;
import org.apache.lucene.index.values.ValueType;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
@@ -72,17 +73,17 @@
* if an {@link IOException} occurs
*/
public DefaultDocValuesProducer(SegmentInfo si, Directory dir,
- FieldInfos fieldInfo, int codecId, boolean useCompoundFile, Comparator<BytesRef> sortComparator) throws IOException {
+ FieldInfos fieldInfo, int codecId, boolean useCompoundFile, Comparator<BytesRef> sortComparator, IOContext context) throws IOException {
this.useCompoundFile = useCompoundFile;
this.sortComparator = sortComparator;
final Directory directory;
if (useCompoundFile) {
- cfs = directory = dir.openCompoundInput(IndexFileNames.segmentFileName(si.name, codecId, IndexFileNames.COMPOUND_FILE_EXTENSION), 1024);
+ cfs = directory = dir.openCompoundInput(IndexFileNames.segmentFileName(si.name, codecId, IndexFileNames.COMPOUND_FILE_EXTENSION), context);
} else {
cfs = null;
directory = dir;
}
- docValues = load(fieldInfo, si.name, si.docCount, directory, codecId);
+ docValues = load(fieldInfo, si.name, si.docCount, directory, codecId, context);
}
/**
@@ -96,7 +97,7 @@
// Only opens files... doesn't actually load any values
protected TreeMap<String, IndexDocValues> load(FieldInfos fieldInfos,
- String segment, int docCount, Directory dir, int codecId)
+ String segment, int docCount, Directory dir, int codecId, IOContext context)
throws IOException {
TreeMap<String, IndexDocValues> values = new TreeMap<String, IndexDocValues>();
boolean success = false;
@@ -110,7 +111,7 @@
final String id = DefaultDocValuesConsumer.docValuesId(segment,
codecId, fieldInfo.number);
values.put(field,
- loadDocValues(docCount, dir, id, fieldInfo.getDocValues(), sortComparator));
+ loadDocValues(docCount, dir, id, fieldInfo.getDocValues(), sortComparator, context));
}
}
success = true;
@@ -145,30 +146,30 @@
* if the given {@link ValueType} is not supported
*/
protected IndexDocValues loadDocValues(int docCount, Directory dir, String id,
- ValueType type, Comparator<BytesRef> sortComparator) throws IOException {
+ ValueType type, Comparator<BytesRef> sortComparator, IOContext context) throws IOException {
switch (type) {
case FIXED_INTS_16:
case FIXED_INTS_32:
case FIXED_INTS_64:
case FIXED_INTS_8:
case VAR_INTS:
- return Ints.getValues(dir, id, docCount);
+ return Ints.getValues(dir, id, docCount, context);
case FLOAT_32:
- return Floats.getValues(dir, id, docCount);
+ return Floats.getValues(dir, id, docCount, context);
case FLOAT_64:
- return Floats.getValues(dir, id, docCount);
+ return Floats.getValues(dir, id, docCount, context);
case BYTES_FIXED_STRAIGHT:
- return Bytes.getValues(dir, id, Bytes.Mode.STRAIGHT, true, docCount, sortComparator);
+ return Bytes.getValues(dir, id, Bytes.Mode.STRAIGHT, true, docCount, sortComparator, context);
case BYTES_FIXED_DEREF:
- return Bytes.getValues(dir, id, Bytes.Mode.DEREF, true, docCount, sortComparator);
+ return Bytes.getValues(dir, id, Bytes.Mode.DEREF, true, docCount, sortComparator, context);
case BYTES_FIXED_SORTED:
- return Bytes.getValues(dir, id, Bytes.Mode.SORTED, true, docCount, sortComparator);
+ return Bytes.getValues(dir, id, Bytes.Mode.SORTED, true, docCount, sortComparator, context);
case BYTES_VAR_STRAIGHT:
- return Bytes.getValues(dir, id, Bytes.Mode.STRAIGHT, false, docCount, sortComparator);
+ return Bytes.getValues(dir, id, Bytes.Mode.STRAIGHT, false, docCount, sortComparator, context);
case BYTES_VAR_DEREF:
- return Bytes.getValues(dir, id, Bytes.Mode.DEREF, false, docCount, sortComparator);
+ return Bytes.getValues(dir, id, Bytes.Mode.DEREF, false, docCount, sortComparator, context);
case BYTES_VAR_SORTED:
- return Bytes.getValues(dir, id, Bytes.Mode.SORTED, false, docCount, sortComparator);
+ return Bytes.getValues(dir, id, Bytes.Mode.SORTED, false, docCount, sortComparator, context);
default:
throw new IllegalStateException("unrecognized index values mode " + type);
}
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosReader.java b/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosReader.java
index b21fb78..443d41d 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosReader.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosReader.java
@@ -28,6 +28,7 @@
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
/**
@@ -38,10 +39,10 @@
@Override
public void read(Directory directory, String segmentsFileName, CodecProvider codecs,
- SegmentInfos infos) throws IOException {
+ SegmentInfos infos, IOContext context) throws IOException {
IndexInput input = null;
try {
- input = openInput(directory, segmentsFileName);
+ input = openInput(directory, segmentsFileName, context);
final int format = input.readInt();
infos.setFormat(format);
@@ -69,11 +70,11 @@
if (si.getDocStoreIsCompoundFile()) {
dir = dir.openCompoundInput(IndexFileNames.segmentFileName(
si.getDocStoreSegment(), "",
- IndexFileNames.COMPOUND_FILE_STORE_EXTENSION), 1024);
+ IndexFileNames.COMPOUND_FILE_STORE_EXTENSION), context);
}
} else if (si.getUseCompoundFile()) {
dir = dir.openCompoundInput(IndexFileNames.segmentFileName(
- si.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), 1024);
+ si.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), context);
}
try {
@@ -107,8 +108,8 @@
}
- public IndexInput openInput(Directory dir, String segmentsFileName) throws IOException {
- IndexInput in = dir.openInput(segmentsFileName);
+ public IndexInput openInput(Directory dir, String segmentsFileName, IOContext context) throws IOException {
+ IndexInput in = dir.openInput(segmentsFileName, context);
return new ChecksumIndexInput(in);
}
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java b/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java
index 7a1b61b..8e53aaf 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/DefaultSegmentInfosWriter.java
@@ -23,6 +23,8 @@
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.ChecksumIndexOutput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FlushInfo;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.IOUtils;
@@ -54,9 +56,9 @@
public static final int FORMAT_MINIMUM = FORMAT_DIAGNOSTICS;
@Override
- public IndexOutput writeInfos(Directory dir, String segmentFileName, SegmentInfos infos)
+ public IndexOutput writeInfos(Directory dir, String segmentFileName, SegmentInfos infos, IOContext context)
throws IOException {
- IndexOutput out = createOutput(dir, segmentFileName);
+ IndexOutput out = createOutput(dir, segmentFileName, new IOContext(new FlushInfo(infos.size(), infos.totalDocCount())));
boolean success = false;
try {
out.writeInt(FORMAT_CURRENT); // write FORMAT
@@ -77,9 +79,9 @@
}
}
- protected IndexOutput createOutput(Directory dir, String segmentFileName)
+ protected IndexOutput createOutput(Directory dir, String segmentFileName, IOContext context)
throws IOException {
- IndexOutput plainOut = dir.createOutput(segmentFileName);
+ IndexOutput plainOut = dir.createOutput(segmentFileName, context);
ChecksumIndexOutput out = new ChecksumIndexOutput(plainOut);
return out;
}
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexReader.java b/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexReader.java
index ad48f03..fa2880c 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexReader.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexReader.java
@@ -18,7 +18,9 @@
*/
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IOContext.Context;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.SegmentInfo;
@@ -68,12 +70,12 @@
// start of the field info data
protected long dirOffset;
- public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor, Comparator<BytesRef> termComp, int codecId)
+ public FixedGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor, Comparator<BytesRef> termComp, int codecId, IOContext context)
throws IOException {
this.termComp = termComp;
- in = dir.openInput(IndexFileNames.segmentFileName(segment, codecId, FixedGapTermsIndexWriter.TERMS_INDEX_EXTENSION));
+ in = dir.openInput(IndexFileNames.segmentFileName(segment, codecId, FixedGapTermsIndexWriter.TERMS_INDEX_EXTENSION), context);
boolean success = false;
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexWriter.java b/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexWriter.java
index 38f094d..28149ee 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/FixedGapTermsIndexWriter.java
@@ -58,7 +58,7 @@
public FixedGapTermsIndexWriter(SegmentWriteState state) throws IOException {
final String indexFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, TERMS_INDEX_EXTENSION);
termIndexInterval = state.termIndexInterval;
- out = state.directory.createOutput(indexFileName);
+ out = state.directory.createOutput(indexFileName, state.context);
boolean success = false;
try {
fieldInfos = state.fieldInfos;
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/SegmentInfosReader.java b/lucene/src/java/org/apache/lucene/index/codecs/SegmentInfosReader.java
index 4a90fb9..69b6804 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/SegmentInfosReader.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/SegmentInfosReader.java
@@ -21,6 +21,7 @@
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
/**
* Specifies an API for classes that can read {@link SegmentInfos} information.
@@ -36,5 +37,5 @@
* @param infos empty instance to be populated with data
* @throws IOException
*/
- public abstract void read(Directory directory, String segmentsFileName, CodecProvider codecs, SegmentInfos infos) throws IOException;
+ public abstract void read(Directory directory, String segmentsFileName, CodecProvider codecs, SegmentInfos infos, IOContext context) throws IOException;
}
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/SegmentInfosWriter.java b/lucene/src/java/org/apache/lucene/index/codecs/SegmentInfosWriter.java
index 19f2e5d..42a3d81 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/SegmentInfosWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/SegmentInfosWriter.java
@@ -21,6 +21,7 @@
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
/**
@@ -41,7 +42,7 @@
* phase commit" operations as described above.
* @throws IOException
*/
- public abstract IndexOutput writeInfos(Directory dir, String segmentsFileName, SegmentInfos infos) throws IOException;
+ public abstract IndexOutput writeInfos(Directory dir, String segmentsFileName, SegmentInfos infos, IOContext context) throws IOException;
/**
* First phase of the two-phase commit - ensure that all output can be
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java b/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java
index e66f413..0befc75 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexReader.java
@@ -30,7 +30,9 @@
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IOContext.Context;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CodecUtil;
import org.apache.lucene.util.fst.Builder;
@@ -57,10 +59,10 @@
protected long dirOffset;
final String segment;
- public VariableGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor, int codecId)
+ public VariableGapTermsIndexReader(Directory dir, FieldInfos fieldInfos, String segment, int indexDivisor, int codecId, IOContext context)
throws IOException {
- in = dir.openInput(IndexFileNames.segmentFileName(segment, codecId, VariableGapTermsIndexWriter.TERMS_INDEX_EXTENSION));
+ in = dir.openInput(IndexFileNames.segmentFileName(segment, codecId, VariableGapTermsIndexWriter.TERMS_INDEX_EXTENSION), context);
this.segment = segment;
boolean success = false;
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexWriter.java b/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexWriter.java
index d106088..c8d3d3e 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/VariableGapTermsIndexWriter.java
@@ -159,7 +159,7 @@
public VariableGapTermsIndexWriter(SegmentWriteState state, IndexTermSelector policy) throws IOException {
final String indexFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, TERMS_INDEX_EXTENSION);
- out = state.directory.createOutput(indexFileName);
+ out = state.directory.createOutput(indexFileName, state.context);
boolean success = false;
try {
fieldInfos = state.fieldInfos;
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/memory/MemoryCodec.java b/lucene/src/java/org/apache/lucene/index/codecs/memory/MemoryCodec.java
index b165f62..b4fed2d 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/memory/MemoryCodec.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/memory/MemoryCodec.java
@@ -48,6 +48,7 @@
import org.apache.lucene.index.codecs.TermsConsumer;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMOutputStream;
@@ -242,7 +243,7 @@
public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
final String fileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, EXTENSION);
- final IndexOutput out = state.directory.createOutput(fileName);
+ final IndexOutput out = state.directory.createOutput(fileName, state.context);
return new FieldsConsumer() {
@Override
@@ -717,7 +718,7 @@
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
final String fileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.codecId, EXTENSION);
- final IndexInput in = state.dir.openInput(fileName);
+ final IndexInput in = state.dir.openInput(fileName, IOContext.READONCE);
final SortedMap<String,TermsReader> fields = new TreeMap<String,TermsReader>();
@@ -794,6 +795,6 @@
@Override
public PerDocValues docsProducer(SegmentReadState state) throws IOException {
- return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator());
+ return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator(), IOContext.READONCE);
}
}
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexCodec.java b/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexCodec.java
index 49c2a15..e5ce0b6 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexCodec.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexCodec.java
@@ -65,7 +65,7 @@
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
- return new PreFlexFields(state.dir, state.fieldInfos, state.segmentInfo, state.readBufferSize, state.termsIndexDivisor);
+ return new PreFlexFields(state.dir, state.fieldInfos, state.segmentInfo, state.context, state.termsIndexDivisor);
}
@Override
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java b/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java
index 78253ac..ac3962d 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/preflex/PreFlexFields.java
@@ -38,6 +38,7 @@
import org.apache.lucene.index.codecs.FieldsProducer;
import org.apache.lucene.store.CompoundFileDirectory;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
@@ -62,10 +63,10 @@
final TreeMap<String,FieldInfo> fields = new TreeMap<String,FieldInfo>();
final Map<String,Terms> preTerms = new HashMap<String,Terms>();
private final Directory dir;
- private final int readBufferSize;
+ private final IOContext context;
private Directory cfsReader;
- public PreFlexFields(Directory dir, FieldInfos fieldInfos, SegmentInfo info, int readBufferSize, int indexDivisor)
+ public PreFlexFields(Directory dir, FieldInfos fieldInfos, SegmentInfo info, IOContext context, int indexDivisor)
throws IOException {
si = info;
@@ -80,19 +81,19 @@
boolean success = false;
try {
- TermInfosReader r = new TermInfosReader(dir, info.name, fieldInfos, readBufferSize, indexDivisor);
+ TermInfosReader r = new TermInfosReader(dir, info.name, fieldInfos, context, indexDivisor);
if (indexDivisor == -1) {
tisNoIndex = r;
} else {
tisNoIndex = null;
tis = r;
}
- this.readBufferSize = readBufferSize;
+ this.context = context;
this.fieldInfos = fieldInfos;
// make sure that all index files have been read or are kept open
// so that if an index update removes them we'll still have them
- freqStream = dir.openInput(IndexFileNames.segmentFileName(info.name, "", PreFlexCodec.FREQ_EXTENSION), readBufferSize);
+ freqStream = dir.openInput(IndexFileNames.segmentFileName(info.name, "", PreFlexCodec.FREQ_EXTENSION), context);
boolean anyProx = false;
for (FieldInfo fi : fieldInfos) {
if (fi.isIndexed) {
@@ -105,7 +106,7 @@
}
if (anyProx) {
- proxStream = dir.openInput(IndexFileNames.segmentFileName(info.name, "", PreFlexCodec.PROX_EXTENSION), readBufferSize);
+ proxStream = dir.openInput(IndexFileNames.segmentFileName(info.name, "", PreFlexCodec.PROX_EXTENSION), context);
} else {
proxStream = null;
}
@@ -178,7 +179,7 @@
// to CFS
if (!(dir instanceof CompoundFileDirectory)) {
- dir0 = cfsReader = dir.openCompoundInput(IndexFileNames.segmentFileName(si.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), readBufferSize);
+ dir0 = cfsReader = dir.openCompoundInput(IndexFileNames.segmentFileName(si.name, "", IndexFileNames.COMPOUND_FILE_EXTENSION), context);
} else {
dir0 = dir;
}
@@ -187,7 +188,7 @@
dir0 = dir;
}
- tis = new TermInfosReader(dir0, si.name, fieldInfos, readBufferSize, indexDivisor);
+ tis = new TermInfosReader(dir0, si.name, fieldInfos, context, indexDivisor);
}
}
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java b/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java
index ab99c16..3ca8ca6 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/preflex/TermInfosReader.java
@@ -25,6 +25,7 @@
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CloseableThreadLocal;
import org.apache.lucene.util.DoubleBarrelLRUCache;
@@ -96,7 +97,7 @@
SegmentTermEnum termEnum;
}
- TermInfosReader(Directory dir, String seg, FieldInfos fis, int readBufferSize, int indexDivisor)
+ TermInfosReader(Directory dir, String seg, FieldInfos fis, IOContext context, int indexDivisor)
throws CorruptIndexException, IOException {
boolean success = false;
@@ -110,7 +111,7 @@
fieldInfos = fis;
origEnum = new SegmentTermEnum(directory.openInput(IndexFileNames.segmentFileName(segment, "", PreFlexCodec.TERMS_EXTENSION),
- readBufferSize), fieldInfos, false);
+ context), fieldInfos, false);
size = origEnum.size;
@@ -118,7 +119,7 @@
// Load terms index
totalIndexInterval = origEnum.indexInterval * indexDivisor;
final SegmentTermEnum indexEnum = new SegmentTermEnum(directory.openInput(IndexFileNames.segmentFileName(segment, "", PreFlexCodec.TERMS_INDEX_EXTENSION),
- readBufferSize), fieldInfos, true);
+ context), fieldInfos, true);
try {
int indexSize = 1+((int)indexEnum.size-1)/indexDivisor; // otherwise read index
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java b/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java
index 1badc6f..2bb6d97 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/pulsing/PulsingCodec.java
@@ -118,7 +118,7 @@
// We wrap StandardPostingsReader, but any StandardPostingsReader
// will work:
- PostingsReaderBase docsReader = new StandardPostingsReader(state.dir, state.segmentInfo, state.readBufferSize, state.codecId);
+ PostingsReaderBase docsReader = new StandardPostingsReader(state.dir, state.segmentInfo, state.context, state.codecId);
PostingsReaderBase pulsingReader = new PulsingPostingsReaderImpl(docsReader);
// Terms dict index reader
@@ -130,7 +130,7 @@
state.fieldInfos,
state.segmentInfo.name,
state.termsIndexDivisor,
- state.codecId);
+ state.codecId, state.context);
success = true;
} finally {
if (!success) {
@@ -144,7 +144,7 @@
FieldsProducer ret = new BlockTermsReader(indexReader,
state.dir, state.fieldInfos, state.segmentInfo.name,
pulsingReader,
- state.readBufferSize,
+ state.context,
StandardCodec.TERMS_CACHE_SIZE,
state.codecId);
success = true;
@@ -181,6 +181,6 @@
@Override
public PerDocValues docsProducer(SegmentReadState state) throws IOException {
- return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator());
+ return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator(), state.context);
}
}
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/sep/IntStreamFactory.java b/lucene/src/java/org/apache/lucene/index/codecs/sep/IntStreamFactory.java
index da91f2b..2d11059 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/sep/IntStreamFactory.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/sep/IntStreamFactory.java
@@ -18,16 +18,16 @@
*/
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.BufferedIndexInput;
+import org.apache.lucene.store.IOContext;
import java.io.IOException;
/** @lucene.experimental */
public abstract class IntStreamFactory {
- public IntIndexInput openInput(Directory dir, String fileName) throws IOException {
- return openInput(dir, fileName, BufferedIndexInput.BUFFER_SIZE);
+ public IntIndexInput openInput(Directory dir, String fileName) throws IOException {
+ return openInput(dir, fileName, IOContext.DEFAULT);
}
- public abstract IntIndexInput openInput(Directory dir, String fileName, int readBufferSize) throws IOException;
- public abstract IntIndexOutput createOutput(Directory dir, String fileName) throws IOException;
+ public abstract IntIndexInput openInput(Directory dir, String fileName, IOContext context) throws IOException;
+ public abstract IntIndexOutput createOutput(Directory dir, String fileName, IOContext context) throws IOException;
}
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsReaderImpl.java b/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsReaderImpl.java
index febb756..62831dc 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsReaderImpl.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsReaderImpl.java
@@ -30,6 +30,7 @@
import org.apache.lucene.index.codecs.PostingsReaderBase;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
@@ -58,19 +59,19 @@
int maxSkipLevels;
int skipMinimum;
- public SepPostingsReaderImpl(Directory dir, SegmentInfo segmentInfo, int readBufferSize, IntStreamFactory intFactory, int codecId) throws IOException {
+ public SepPostingsReaderImpl(Directory dir, SegmentInfo segmentInfo, IOContext context, IntStreamFactory intFactory, int codecId) throws IOException {
boolean success = false;
try {
final String docFileName = IndexFileNames.segmentFileName(segmentInfo.name, codecId, SepPostingsWriterImpl.DOC_EXTENSION);
- docIn = intFactory.openInput(dir, docFileName);
+ docIn = intFactory.openInput(dir, docFileName, context);
- skipIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, codecId, SepPostingsWriterImpl.SKIP_EXTENSION), readBufferSize);
+ skipIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, codecId, SepPostingsWriterImpl.SKIP_EXTENSION), context);
if (segmentInfo.getHasProx()) {
- freqIn = intFactory.openInput(dir, IndexFileNames.segmentFileName(segmentInfo.name, codecId, SepPostingsWriterImpl.FREQ_EXTENSION));
- posIn = intFactory.openInput(dir, IndexFileNames.segmentFileName(segmentInfo.name, codecId, SepPostingsWriterImpl.POS_EXTENSION), readBufferSize);
- payloadIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, codecId, SepPostingsWriterImpl.PAYLOAD_EXTENSION), readBufferSize);
+ freqIn = intFactory.openInput(dir, IndexFileNames.segmentFileName(segmentInfo.name, codecId, SepPostingsWriterImpl.FREQ_EXTENSION), context);
+ posIn = intFactory.openInput(dir, IndexFileNames.segmentFileName(segmentInfo.name, codecId, SepPostingsWriterImpl.POS_EXTENSION), context);
+ payloadIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, codecId, SepPostingsWriterImpl.PAYLOAD_EXTENSION), context);
} else {
posIn = null;
payloadIn = null;
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsWriterImpl.java b/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsWriterImpl.java
index ac72d5a..5ede651 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsWriterImpl.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/sep/SepPostingsWriterImpl.java
@@ -118,25 +118,25 @@
this.skipInterval = skipInterval;
this.skipMinimum = skipInterval; /* set to the same for now */
final String docFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, DOC_EXTENSION);
- docOut = factory.createOutput(state.directory, docFileName);
+ docOut = factory.createOutput(state.directory, docFileName, state.context);
docIndex = docOut.index();
if (state.fieldInfos.hasProx()) {
final String frqFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, FREQ_EXTENSION);
- freqOut = factory.createOutput(state.directory, frqFileName);
+ freqOut = factory.createOutput(state.directory, frqFileName, state.context);
freqIndex = freqOut.index();
final String posFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, POS_EXTENSION);
- posOut = factory.createOutput(state.directory, posFileName);
+ posOut = factory.createOutput(state.directory, posFileName, state.context);
posIndex = posOut.index();
// TODO: -- only if at least one field stores payloads?
final String payloadFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, PAYLOAD_EXTENSION);
- payloadOut = state.directory.createOutput(payloadFileName);
+ payloadOut = state.directory.createOutput(payloadFileName, state.context);
}
final String skipFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, SKIP_EXTENSION);
- skipOut = state.directory.createOutput(skipFileName);
+ skipOut = state.directory.createOutput(skipFileName, state.context);
totalNumDocs = state.numDocs;
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextCodec.java b/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextCodec.java
index d33cd1a..1bdb88f 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextCodec.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextCodec.java
@@ -86,6 +86,6 @@
@Override
public PerDocValues docsProducer(SegmentReadState state) throws IOException {
- return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator());
+ return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator(), state.context);
}
}
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextFieldsReader.java b/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextFieldsReader.java
index 9d34b03..3445193 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextFieldsReader.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextFieldsReader.java
@@ -57,7 +57,7 @@
final static BytesRef PAYLOAD = SimpleTextFieldsWriter.PAYLOAD;
public SimpleTextFieldsReader(SegmentReadState state) throws IOException {
- in = state.dir.openInput(SimpleTextCodec.getPostingsFileName(state.segmentInfo.name, state.codecId));
+ in = state.dir.openInput(SimpleTextCodec.getPostingsFileName(state.segmentInfo.name, state.codecId), state.context);
fieldInfos = state.fieldInfos;
}
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextFieldsWriter.java b/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextFieldsWriter.java
index d1d5f33..f822ec6 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextFieldsWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/simpletext/SimpleTextFieldsWriter.java
@@ -46,7 +46,7 @@
public SimpleTextFieldsWriter(SegmentWriteState state) throws IOException {
final String fileName = SimpleTextCodec.getPostingsFileName(state.segmentName, state.codecId);
- out = state.directory.createOutput(fileName);
+ out = state.directory.createOutput(fileName, state.context);
}
private void write(String s) throws IOException {
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardCodec.java b/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardCodec.java
index ffe1813..eed2648 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardCodec.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardCodec.java
@@ -88,7 +88,7 @@
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
- PostingsReaderBase postings = new StandardPostingsReader(state.dir, state.segmentInfo, state.readBufferSize, state.codecId);
+ PostingsReaderBase postings = new StandardPostingsReader(state.dir, state.segmentInfo, state.context, state.codecId);
TermsIndexReaderBase indexReader;
boolean success = false;
@@ -97,7 +97,7 @@
state.fieldInfos,
state.segmentInfo.name,
state.termsIndexDivisor,
- state.codecId);
+ state.codecId, state.context);
success = true;
} finally {
if (!success) {
@@ -112,7 +112,7 @@
state.fieldInfos,
state.segmentInfo.name,
postings,
- state.readBufferSize,
+ state.context,
TERMS_CACHE_SIZE,
state.codecId);
success = true;
@@ -162,6 +162,6 @@
@Override
public PerDocValues docsProducer(SegmentReadState state) throws IOException {
- return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator());
+ return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator(), state.context);
}
}
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardPostingsReader.java b/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardPostingsReader.java
index 51243c9..f1c640a 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardPostingsReader.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardPostingsReader.java
@@ -30,6 +30,7 @@
import org.apache.lucene.index.codecs.PostingsReaderBase;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
@@ -51,15 +52,15 @@
//private String segment;
- public StandardPostingsReader(Directory dir, SegmentInfo segmentInfo, int readBufferSize, int codecId) throws IOException {
+ public StandardPostingsReader(Directory dir, SegmentInfo segmentInfo, IOContext context, int codecId) throws IOException {
freqIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, codecId, StandardCodec.FREQ_EXTENSION),
- readBufferSize);
+ context);
//this.segment = segmentInfo.name;
if (segmentInfo.getHasProx()) {
boolean success = false;
try {
proxIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, codecId, StandardCodec.PROX_EXTENSION),
- readBufferSize);
+ context);
success = true;
} finally {
if (!success) {
diff --git a/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardPostingsWriter.java b/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardPostingsWriter.java
index 474485b..0549cc6 100644
--- a/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardPostingsWriter.java
+++ b/lucene/src/java/org/apache/lucene/index/codecs/standard/StandardPostingsWriter.java
@@ -92,14 +92,14 @@
this.skipMinimum = skipInterval; /* set to the same for now */
//this.segment = state.segmentName;
String fileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, StandardCodec.FREQ_EXTENSION);
- freqOut = state.directory.createOutput(fileName);
+ freqOut = state.directory.createOutput(fileName, state.context);
boolean success = false;
try {
if (state.fieldInfos.hasProx()) {
// At least one field does not omit TF, so create the
// prox file
fileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, StandardCodec.PROX_EXTENSION);
- proxOut = state.directory.createOutput(fileName);
+ proxOut = state.directory.createOutput(fileName, state.context);
} else {
// Every field omits TF so we will write no prox file
proxOut = null;
diff --git a/lucene/src/java/org/apache/lucene/index/values/Bytes.java b/lucene/src/java/org/apache/lucene/index/values/Bytes.java
index cd4b06b..f92e657 100644
--- a/lucene/src/java/org/apache/lucene/index/values/Bytes.java
+++ b/lucene/src/java/org/apache/lucene/index/values/Bytes.java
@@ -28,6 +28,7 @@
import org.apache.lucene.index.values.IndexDocValues.Source;
import org.apache.lucene.index.values.IndexDocValues.SourceEnum;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.AttributeSource;
@@ -100,12 +101,13 @@
* {@link Writer}. A call to {@link Writer#finish(int)} will release
* all internally used resources and frees the memeory tracking
* reference.
+ * @param context the {@link IOContext} to use when creating the underlying files
* @return a new {@link Writer} instance
* @throws IOException
* if the files for the writer can not be created.
*/
public static Writer getWriter(Directory dir, String id, Mode mode,
- Comparator<BytesRef> comp, boolean fixedSize, AtomicLong bytesUsed)
+ Comparator<BytesRef> comp, boolean fixedSize, AtomicLong bytesUsed, IOContext context)
throws IOException {
// TODO -- i shouldn't have to specify fixed? can
// track itself & do the write thing at write time?
@@ -115,19 +117,19 @@
if (fixedSize) {
if (mode == Mode.STRAIGHT) {
- return new FixedStraightBytesImpl.Writer(dir, id, bytesUsed);
+ return new FixedStraightBytesImpl.Writer(dir, id, bytesUsed, context);
} else if (mode == Mode.DEREF) {
- return new FixedDerefBytesImpl.Writer(dir, id, bytesUsed);
+ return new FixedDerefBytesImpl.Writer(dir, id, bytesUsed, context);
} else if (mode == Mode.SORTED) {
- return new FixedSortedBytesImpl.Writer(dir, id, comp, bytesUsed);
+ return new FixedSortedBytesImpl.Writer(dir, id, comp, bytesUsed, context);
}
} else {
if (mode == Mode.STRAIGHT) {
- return new VarStraightBytesImpl.Writer(dir, id, bytesUsed);
+ return new VarStraightBytesImpl.Writer(dir, id, bytesUsed, context);
} else if (mode == Mode.DEREF) {
- return new VarDerefBytesImpl.Writer(dir, id, bytesUsed);
+ return new VarDerefBytesImpl.Writer(dir, id, bytesUsed, context);
} else if (mode == Mode.SORTED) {
- return new VarSortedBytesImpl.Writer(dir, id, comp, bytesUsed);
+ return new VarSortedBytesImpl.Writer(dir, id, comp, bytesUsed, context);
}
}
@@ -157,23 +159,24 @@
* if an {@link IOException} occurs
*/
public static IndexDocValues getValues(Directory dir, String id, Mode mode,
- boolean fixedSize, int maxDoc, Comparator<BytesRef> sortComparator) throws IOException {
+ boolean fixedSize, int maxDoc, Comparator<BytesRef> sortComparator, IOContext context) throws IOException {
+
// TODO -- I can peek @ header to determing fixed/mode?
if (fixedSize) {
if (mode == Mode.STRAIGHT) {
- return new FixedStraightBytesImpl.Reader(dir, id, maxDoc);
+ return new FixedStraightBytesImpl.Reader(dir, id, maxDoc, context);
} else if (mode == Mode.DEREF) {
- return new FixedDerefBytesImpl.Reader(dir, id, maxDoc);
+ return new FixedDerefBytesImpl.Reader(dir, id, maxDoc, context);
} else if (mode == Mode.SORTED) {
- return new FixedSortedBytesImpl.Reader(dir, id, maxDoc);
+ return new FixedSortedBytesImpl.Reader(dir, id, maxDoc, context);
}
} else {
if (mode == Mode.STRAIGHT) {
- return new VarStraightBytesImpl.Reader(dir, id, maxDoc);
+ return new VarStraightBytesImpl.Reader(dir, id, maxDoc, context);
} else if (mode == Mode.DEREF) {
- return new VarDerefBytesImpl.Reader(dir, id, maxDoc);
+ return new VarDerefBytesImpl.Reader(dir, id, maxDoc, context);
} else if (mode == Mode.SORTED) {
- return new VarSortedBytesImpl.Reader(dir, id, maxDoc, sortComparator);
+ return new VarSortedBytesImpl.Reader(dir, id, maxDoc, sortComparator, context);
}
}
@@ -343,15 +346,16 @@
private final Directory dir;
private final String codecName;
private final int version;
+ private final IOContext context;
protected BytesWriterBase(Directory dir, String id, String codecName,
- int version,
- AtomicLong bytesUsed) throws IOException {
+ int version, AtomicLong bytesUsed, IOContext context) throws IOException {
super(bytesUsed);
this.id = id;
this.dir = dir;
this.codecName = codecName;
this.version = version;
+ this.context = context;
}
protected IndexOutput getDataOut() throws IOException {
@@ -359,7 +363,7 @@
boolean success = false;
try {
datOut = dir.createOutput(IndexFileNames.segmentFileName(id, "",
- DATA_EXTENSION));
+ DATA_EXTENSION), context);
CodecUtil.writeHeader(datOut, codecName, version);
success = true;
} finally {
@@ -376,7 +380,7 @@
try {
if (idxOut == null) {
idxOut = dir.createOutput(IndexFileNames.segmentFileName(id, "",
- INDEX_EXTENSION));
+ INDEX_EXTENSION), context);
CodecUtil.writeHeader(idxOut, codecName, version);
}
success = true;
@@ -439,16 +443,16 @@
protected final String id;
protected BytesReaderBase(Directory dir, String id, String codecName,
- int maxVersion, boolean doIndex) throws IOException {
+ int maxVersion, boolean doIndex, IOContext context) throws IOException {
this.id = id;
datIn = dir.openInput(IndexFileNames.segmentFileName(id, "",
- Writer.DATA_EXTENSION));
+ Writer.DATA_EXTENSION), context);
boolean success = false;
try {
version = CodecUtil.checkHeader(datIn, codecName, maxVersion, maxVersion);
if (doIndex) {
idxIn = dir.openInput(IndexFileNames.segmentFileName(id, "",
- Writer.INDEX_EXTENSION));
+ Writer.INDEX_EXTENSION), context);
final int version2 = CodecUtil.checkHeader(idxIn, codecName,
maxVersion, maxVersion);
assert version == version2;
diff --git a/lucene/src/java/org/apache/lucene/index/values/FixedDerefBytesImpl.java b/lucene/src/java/org/apache/lucene/index/values/FixedDerefBytesImpl.java
index f11186f..704c7f0 100644
--- a/lucene/src/java/org/apache/lucene/index/values/FixedDerefBytesImpl.java
+++ b/lucene/src/java/org/apache/lucene/index/values/FixedDerefBytesImpl.java
@@ -24,6 +24,7 @@
import org.apache.lucene.index.values.Bytes.BytesReaderBase;
import org.apache.lucene.index.values.Bytes.BytesWriterBase;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ArrayUtil;
@@ -54,15 +55,15 @@
private int size = -1;
private int[] docToID;
private final BytesRefHash hash;
- public Writer(Directory dir, String id, AtomicLong bytesUsed)
+ public Writer(Directory dir, String id, AtomicLong bytesUsed, IOContext context)
throws IOException {
this(dir, id, new DirectTrackingAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, bytesUsed),
- bytesUsed);
+ bytesUsed, context);
}
public Writer(Directory dir, String id, Allocator allocator,
- AtomicLong bytesUsed) throws IOException {
- super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
+ AtomicLong bytesUsed, IOContext context) throws IOException {
+ super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed, context);
hash = new BytesRefHash(new ByteBlockPool(allocator),
BytesRefHash.DEFAULT_CAPACITY, new TrackingDirectBytesStartArray(
BytesRefHash.DEFAULT_CAPACITY, bytesUsed));
@@ -144,8 +145,8 @@
public static class Reader extends BytesReaderBase {
private final int size;
- Reader(Directory dir, String id, int maxDoc) throws IOException {
- super(dir, id, CODEC_NAME, VERSION_START, true);
+ Reader(Directory dir, String id, int maxDoc, IOContext context) throws IOException {
+ super(dir, id, CODEC_NAME, VERSION_START, true, context);
size = datIn.readInt();
}
diff --git a/lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java b/lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java
index d1dc0f8..afaf533 100644
--- a/lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java
+++ b/lucene/src/java/org/apache/lucene/index/values/FixedSortedBytesImpl.java
@@ -26,6 +26,7 @@
import org.apache.lucene.index.values.Bytes.BytesWriterBase;
import org.apache.lucene.index.values.FixedDerefBytesImpl.Reader.DerefBytesEnum;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ArrayUtil;
@@ -61,14 +62,14 @@
private final BytesRefHash hash;
public Writer(Directory dir, String id, Comparator<BytesRef> comp,
- AtomicLong bytesUsed) throws IOException {
+ AtomicLong bytesUsed, IOContext context) throws IOException {
this(dir, id, comp, new DirectTrackingAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, bytesUsed),
- bytesUsed);
+ bytesUsed, context);
}
public Writer(Directory dir, String id, Comparator<BytesRef> comp,
- Allocator allocator, AtomicLong bytesUsed) throws IOException {
- super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
+ Allocator allocator, AtomicLong bytesUsed, IOContext context) throws IOException {
+ super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed, context);
ByteBlockPool pool = new ByteBlockPool(allocator);
hash = new BytesRefHash(pool, BytesRefHash.DEFAULT_CAPACITY,
new TrackingDirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY,
@@ -169,8 +170,8 @@
public static class Reader extends BytesReaderBase {
private final int size;
- public Reader(Directory dir, String id, int maxDoc) throws IOException {
- super(dir, id, CODEC_NAME, VERSION_START, true);
+ public Reader(Directory dir, String id, int maxDoc, IOContext context) throws IOException {
+ super(dir, id, CODEC_NAME, VERSION_START, true, context);
size = datIn.readInt();
}
diff --git a/lucene/src/java/org/apache/lucene/index/values/FixedStraightBytesImpl.java b/lucene/src/java/org/apache/lucene/index/values/FixedStraightBytesImpl.java
index a500027..1e93d77 100644
--- a/lucene/src/java/org/apache/lucene/index/values/FixedStraightBytesImpl.java
+++ b/lucene/src/java/org/apache/lucene/index/values/FixedStraightBytesImpl.java
@@ -26,6 +26,7 @@
import org.apache.lucene.index.values.Bytes.BytesReaderBase;
import org.apache.lucene.index.values.Bytes.BytesWriterBase;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.AttributeSource;
@@ -55,8 +56,8 @@
private final int byteBlockSize;
private IndexOutput datOut;
- public Writer(Directory dir, String id, AtomicLong bytesUsed) throws IOException {
- super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
+ public Writer(Directory dir, String id, AtomicLong bytesUsed, IOContext context) throws IOException {
+ super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed, context);
pool = new ByteBlockPool(new DirectTrackingAllocator(bytesUsed));
byteBlockSize = BYTE_BLOCK_SIZE;
}
@@ -204,8 +205,8 @@
private final int size;
private final int maxDoc;
- Reader(Directory dir, String id, int maxDoc) throws IOException {
- super(dir, id, CODEC_NAME, VERSION_START, false);
+ Reader(Directory dir, String id, int maxDoc, IOContext context) throws IOException {
+ super(dir, id, CODEC_NAME, VERSION_START, false, context);
size = datIn.readInt();
this.maxDoc = maxDoc;
}
diff --git a/lucene/src/java/org/apache/lucene/index/values/Floats.java b/lucene/src/java/org/apache/lucene/index/values/Floats.java
index 6a35a85..a8c1e2e 100644
--- a/lucene/src/java/org/apache/lucene/index/values/Floats.java
+++ b/lucene/src/java/org/apache/lucene/index/values/Floats.java
@@ -23,6 +23,7 @@
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.values.IndexDocValues.Source;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ArrayUtil;
@@ -49,21 +50,21 @@
private static final byte[] DEFAULTS = new byte[] {0,0,0,0,0,0,0,0};
public static Writer getWriter(Directory dir, String id, int precisionBytes,
- AtomicLong bytesUsed) throws IOException {
+ AtomicLong bytesUsed, IOContext context) throws IOException {
if (precisionBytes != 4 && precisionBytes != 8) {
throw new IllegalArgumentException("precisionBytes must be 4 or 8; got "
+ precisionBytes);
}
if (precisionBytes == 4) {
- return new Float4Writer(dir, id, bytesUsed);
+ return new Float4Writer(dir, id, bytesUsed, context);
} else {
- return new Float8Writer(dir, id, bytesUsed);
+ return new Float8Writer(dir, id, bytesUsed, context);
}
}
- public static IndexDocValues getValues(Directory dir, String id, int maxDoc)
+ public static IndexDocValues getValues(Directory dir, String id, int maxDoc, IOContext context)
throws IOException {
- return new FloatsReader(dir, id, maxDoc);
+ return new FloatsReader(dir, id, maxDoc, context);
}
abstract static class FloatsWriter extends Writer {
@@ -73,13 +74,15 @@
protected IndexOutput datOut;
private final byte precision;
private final Directory dir;
+ private final IOContext context;
protected FloatsWriter(Directory dir, String id, int precision,
- AtomicLong bytesUsed) throws IOException {
+ AtomicLong bytesUsed, IOContext context) throws IOException {
super(bytesUsed);
this.id = id;
this.precision = (byte) precision;
this.dir = dir;
+ this.context = context;
}
@@ -90,7 +93,7 @@
final void initDataOut() throws IOException {
assert datOut == null;
datOut = dir.createOutput(IndexFileNames.segmentFileName(id, "",
- Writer.DATA_EXTENSION));
+ Writer.DATA_EXTENSION), context);
boolean success = false;
try {
CodecUtil.writeHeader(datOut, CODEC_NAME, VERSION_CURRENT);
@@ -158,9 +161,9 @@
// Writes 4 bytes (float) per value
static final class Float4Writer extends FloatsWriter {
private int[] values;
- protected Float4Writer(Directory dir, String id, AtomicLong bytesUsed)
+ protected Float4Writer(Directory dir, String id, AtomicLong bytesUsed, IOContext context)
throws IOException {
- super(dir, id, 4, bytesUsed);
+ super(dir, id, 4, bytesUsed, context);
values = new int[1];
bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT);
}
@@ -221,9 +224,9 @@
// Writes 8 bytes (double) per value
static final class Float8Writer extends FloatsWriter {
private long[] values;
- protected Float8Writer(Directory dir, String id, AtomicLong bytesUsed)
+ protected Float8Writer(Directory dir, String id, AtomicLong bytesUsed, IOContext context)
throws IOException {
- super(dir, id, 8, bytesUsed);
+ super(dir, id, 8, bytesUsed, context);
values = new long[1];
bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_LONG);
}
@@ -288,10 +291,10 @@
// TODO(simonw) is ByteBuffer the way to go here?
private final int maxDoc;
- protected FloatsReader(Directory dir, String id, int maxDoc)
+ protected FloatsReader(Directory dir, String id, int maxDoc, IOContext context)
throws IOException {
datIn = dir.openInput(IndexFileNames.segmentFileName(id, "",
- Writer.DATA_EXTENSION));
+ Writer.DATA_EXTENSION), context);
CodecUtil.checkHeader(datIn, CODEC_NAME, VERSION_START, VERSION_START);
precisionBytes = datIn.readByte();
assert precisionBytes == 4 || precisionBytes == 8;
diff --git a/lucene/src/java/org/apache/lucene/index/values/Ints.java b/lucene/src/java/org/apache/lucene/index/values/Ints.java
index 8431657..ba57640 100644
--- a/lucene/src/java/org/apache/lucene/index/values/Ints.java
+++ b/lucene/src/java/org/apache/lucene/index/values/Ints.java
@@ -23,6 +23,7 @@
import org.apache.lucene.index.values.IntsImpl.IntsReader;
import org.apache.lucene.index.values.IntsImpl.IntsWriter;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
/**
* @lucene.experimental
@@ -33,11 +34,13 @@
private Ints() {
}
- public static Writer getWriter(Directory dir, String id, AtomicLong bytesUsed, ValueType type) throws IOException {
- return new IntsWriter(dir, id, bytesUsed, type);
+ public static Writer getWriter(Directory dir, String id,
+ AtomicLong bytesUsed, ValueType type, IOContext context) throws IOException {
+ return new IntsWriter(dir, id, bytesUsed, type, context);
}
- public static IndexDocValues getValues(Directory dir, String id, int numDocs) throws IOException {
- return new IntsReader(dir, id, numDocs);
+ public static IndexDocValues getValues(Directory dir, String id,
+ int numDocs, IOContext context) throws IOException {
+ return new IntsReader(dir, id, numDocs, context);
}
}
diff --git a/lucene/src/java/org/apache/lucene/index/values/IntsImpl.java b/lucene/src/java/org/apache/lucene/index/values/IntsImpl.java
index 0e64e25..56e9ad9 100644
--- a/lucene/src/java/org/apache/lucene/index/values/IntsImpl.java
+++ b/lucene/src/java/org/apache/lucene/index/values/IntsImpl.java
@@ -29,6 +29,7 @@
import org.apache.lucene.index.values.IndexDocValuesArray.LongValues;
import org.apache.lucene.index.values.IndexDocValuesArray.ShortValues;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.AttributeSource;
@@ -67,11 +68,13 @@
private final byte typeOrd;
private IndexOutput datOut;
private boolean merging;
+ private final IOContext context;
protected IntsWriter(Directory dir, String id, AtomicLong bytesUsed,
- ValueType valueType) throws IOException {
+ ValueType valueType, IOContext context) throws IOException {
super(bytesUsed);
+ this.context = context;
this.dir = dir;
this.id = id;
switch (valueType) {
@@ -122,7 +125,7 @@
boolean success = false;
try {
datOut = dir.createOutput(IndexFileNames.segmentFileName(id, "",
- DATA_EXTENSION));
+ DATA_EXTENSION), context);
CodecUtil.writeHeader(datOut, CODEC_NAME, VERSION_CURRENT);
datOut.writeByte(typeOrd);
success = true;
@@ -273,9 +276,9 @@
private final byte type;
private final int numDocs;
- protected IntsReader(Directory dir, String id, int numDocs) throws IOException {
+ protected IntsReader(Directory dir, String id, int numDocs, IOContext context) throws IOException {
datIn = dir.openInput(IndexFileNames.segmentFileName(id, "",
- Writer.DATA_EXTENSION));
+ Writer.DATA_EXTENSION), context);
this.numDocs = numDocs;
boolean success = false;
try {
diff --git a/lucene/src/java/org/apache/lucene/index/values/VarDerefBytesImpl.java b/lucene/src/java/org/apache/lucene/index/values/VarDerefBytesImpl.java
index ced6ebe..c862dd1 100644
--- a/lucene/src/java/org/apache/lucene/index/values/VarDerefBytesImpl.java
+++ b/lucene/src/java/org/apache/lucene/index/values/VarDerefBytesImpl.java
@@ -26,6 +26,7 @@
import org.apache.lucene.index.values.FixedDerefBytesImpl.Reader.DerefBytesEnum;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ArrayUtil;
@@ -117,15 +118,15 @@
bytesUsed);
private final BytesRefHash hash;
- public Writer(Directory dir, String id, AtomicLong bytesUsed)
+ public Writer(Directory dir, String id, AtomicLong bytesUsed, IOContext context)
throws IOException {
this(dir, id, new DirectTrackingAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, bytesUsed),
- bytesUsed);
+ bytesUsed, context);
}
public Writer(Directory dir, String id, Allocator allocator,
- AtomicLong bytesUsed) throws IOException {
- super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
+ AtomicLong bytesUsed, IOContext context) throws IOException {
+ super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed, context);
hash = new BytesRefHash(new ByteBlockPool(allocator), 16, array);
docToAddress = new int[1];
bytesUsed.addAndGet(RamUsageEstimator.NUM_BYTES_INT);
@@ -220,8 +221,8 @@
public static class Reader extends BytesReaderBase {
- Reader(Directory dir, String id, int maxDoc) throws IOException {
- super(dir, id, CODEC_NAME, VERSION_START, true);
+ Reader(Directory dir, String id, int maxDoc, IOContext context) throws IOException {
+ super(dir, id, CODEC_NAME, VERSION_START, true, context);
}
@Override
diff --git a/lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java b/lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java
index e8e8500..3e884b3 100644
--- a/lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java
+++ b/lucene/src/java/org/apache/lucene/index/values/VarSortedBytesImpl.java
@@ -26,6 +26,7 @@
import org.apache.lucene.index.values.Bytes.BytesReaderBase;
import org.apache.lucene.index.values.Bytes.BytesWriterBase;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ArrayUtil;
@@ -61,14 +62,14 @@
private final BytesRefHash hash;
public Writer(Directory dir, String id, Comparator<BytesRef> comp,
- AtomicLong bytesUsed) throws IOException {
+ AtomicLong bytesUsed, IOContext context) throws IOException {
this(dir, id, comp, new DirectTrackingAllocator(ByteBlockPool.BYTE_BLOCK_SIZE, bytesUsed),
- bytesUsed);
+ bytesUsed, context);
}
public Writer(Directory dir, String id, Comparator<BytesRef> comp,
- Allocator allocator, AtomicLong bytesUsed) throws IOException {
- super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
+ Allocator allocator, AtomicLong bytesUsed, IOContext context) throws IOException {
+ super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed, context);
this.hash = new BytesRefHash(new ByteBlockPool(allocator),
BytesRefHash.DEFAULT_CAPACITY, new TrackingDirectBytesStartArray(
BytesRefHash.DEFAULT_CAPACITY, bytesUsed));
@@ -168,8 +169,9 @@
public static class Reader extends BytesReaderBase {
private final Comparator<BytesRef> defaultComp;
- Reader(Directory dir, String id, int maxDoc, Comparator<BytesRef> comparator) throws IOException {
- super(dir, id, CODEC_NAME, VERSION_START, true);
+
+ Reader(Directory dir, String id, int maxDoc, Comparator<BytesRef> comparator, IOContext context) throws IOException {
+ super(dir, id, CODEC_NAME, VERSION_START, true, context);
this.defaultComp = comparator;
}
diff --git a/lucene/src/java/org/apache/lucene/index/values/VarStraightBytesImpl.java b/lucene/src/java/org/apache/lucene/index/values/VarStraightBytesImpl.java
index 6128100..5cb90ff 100644
--- a/lucene/src/java/org/apache/lucene/index/values/VarStraightBytesImpl.java
+++ b/lucene/src/java/org/apache/lucene/index/values/VarStraightBytesImpl.java
@@ -24,6 +24,7 @@
import org.apache.lucene.index.values.Bytes.BytesReaderBase;
import org.apache.lucene.index.values.Bytes.BytesWriterBase;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ArrayUtil;
@@ -56,9 +57,9 @@
private final ByteBlockPool pool;
private IndexOutput datOut;
private boolean merge = false;
- public Writer(Directory dir, String id, AtomicLong bytesUsed)
+ public Writer(Directory dir, String id, AtomicLong bytesUsed, IOContext context)
throws IOException {
- super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed);
+ super(dir, id, CODEC_NAME, VERSION_CURRENT, bytesUsed, context);
pool = new ByteBlockPool(new DirectTrackingAllocator(bytesUsed));
docToAddress = new long[1];
pool.nextBuffer(); // init
@@ -215,8 +216,8 @@
public static class Reader extends BytesReaderBase {
private final int maxDoc;
- Reader(Directory dir, String id, int maxDoc) throws IOException {
- super(dir, id, CODEC_NAME, VERSION_START, true);
+ Reader(Directory dir, String id, int maxDoc, IOContext context) throws IOException {
+ super(dir, id, CODEC_NAME, VERSION_START, true, context);
this.maxDoc = maxDoc;
}
diff --git a/lucene/src/java/org/apache/lucene/index/values/Writer.java b/lucene/src/java/org/apache/lucene/index/values/Writer.java
index abcbbce..5a7217c 100644
--- a/lucene/src/java/org/apache/lucene/index/values/Writer.java
+++ b/lucene/src/java/org/apache/lucene/index/values/Writer.java
@@ -22,6 +22,7 @@
import org.apache.lucene.index.codecs.DocValuesConsumer;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
@@ -192,7 +193,7 @@
* @throws IOException
*/
public static Writer create(ValueType type, String id, Directory directory,
- Comparator<BytesRef> comp, AtomicLong bytesUsed) throws IOException {
+ Comparator<BytesRef> comp, AtomicLong bytesUsed, IOContext context) throws IOException {
if (comp == null) {
comp = BytesRef.getUTF8SortedAsUnicodeComparator();
}
@@ -202,29 +203,29 @@
case FIXED_INTS_64:
case FIXED_INTS_8:
case VAR_INTS:
- return Ints.getWriter(directory, id, bytesUsed, type);
+ return Ints.getWriter(directory, id, bytesUsed, type, context);
case FLOAT_32:
- return Floats.getWriter(directory, id, 4, bytesUsed);
+ return Floats.getWriter(directory, id, 4, bytesUsed, context);
case FLOAT_64:
- return Floats.getWriter(directory, id, 8, bytesUsed);
+ return Floats.getWriter(directory, id, 8, bytesUsed, context);
case BYTES_FIXED_STRAIGHT:
return Bytes.getWriter(directory, id, Bytes.Mode.STRAIGHT, comp, true,
- bytesUsed);
+ bytesUsed, context);
case BYTES_FIXED_DEREF:
return Bytes.getWriter(directory, id, Bytes.Mode.DEREF, comp, true,
- bytesUsed);
+ bytesUsed, context);
case BYTES_FIXED_SORTED:
return Bytes.getWriter(directory, id, Bytes.Mode.SORTED, comp, true,
- bytesUsed);
+ bytesUsed, context);
case BYTES_VAR_STRAIGHT:
return Bytes.getWriter(directory, id, Bytes.Mode.STRAIGHT, comp, false,
- bytesUsed);
+ bytesUsed, context);
case BYTES_VAR_DEREF:
return Bytes.getWriter(directory, id, Bytes.Mode.DEREF, comp, false,
- bytesUsed);
+ bytesUsed, context);
case BYTES_VAR_SORTED:
return Bytes.getWriter(directory, id, Bytes.Mode.SORTED, comp, false,
- bytesUsed);
+ bytesUsed, context);
default:
throw new IllegalArgumentException("Unknown Values: " + type);
diff --git a/lucene/src/java/org/apache/lucene/store/BufferedIndexInput.java b/lucene/src/java/org/apache/lucene/store/BufferedIndexInput.java
index d8ed2c7..9d4e5a2 100644
--- a/lucene/src/java/org/apache/lucene/store/BufferedIndexInput.java
+++ b/lucene/src/java/org/apache/lucene/store/BufferedIndexInput.java
@@ -22,8 +22,19 @@
/** Base implementation class for buffered {@link IndexInput}. */
public abstract class BufferedIndexInput extends IndexInput {
- /** Default buffer size */
+  /** Default buffer size, set to {@value #BUFFER_SIZE}. */
public static final int BUFFER_SIZE = 1024;
+
+ // The normal read buffer size defaults to 1024, but
+ // increasing this during merging seems to yield
+ // performance gains. However we don't want to increase
+ // it too much because there are quite a few
+ // BufferedIndexInputs created during merging. See
+ // LUCENE-888 for details.
+ /**
+ * A buffer size for merges set to 4096
+ */
+ public static final int MERGE_BUFFER_SIZE = 4096;
private int bufferSize = BUFFER_SIZE;
@@ -41,6 +52,10 @@
}
public BufferedIndexInput() {}
+
+ public BufferedIndexInput(IOContext context) {
+ this(bufferSize(context));
+ }
/** Inits BufferedIndexInput with a specific bufferSize */
public BufferedIndexInput(int bufferSize) {
@@ -300,4 +315,21 @@
}
}
+ /**
+ * Returns default buffer sizes for the given {@link IOContext}
+ */
+ public static int bufferSize(IOContext context) {
+ switch (context.context) {
+ case DEFAULT:
+ case FLUSH:
+ case READ:
+ return BUFFER_SIZE;
+ case MERGE:
+ return MERGE_BUFFER_SIZE;
+ default:
+ assert false : "unknown IOContext " + context.context;
+ return BUFFER_SIZE;
+ }
+ }
+
}
diff --git a/lucene/src/java/org/apache/lucene/store/CompoundFileDirectory.java b/lucene/src/java/org/apache/lucene/store/CompoundFileDirectory.java
index 40ec565..92c5256 100644
--- a/lucene/src/java/org/apache/lucene/store/CompoundFileDirectory.java
+++ b/lucene/src/java/org/apache/lucene/store/CompoundFileDirectory.java
@@ -48,7 +48,7 @@
private final Directory directory;
private final String fileName;
- private final int readBufferSize;
+ protected final int readBufferSize;
private Map<String,FileEntry> entries;
private boolean openForWrite;
private static final Map<String,FileEntry> SENTINEL = Collections.emptyMap();
@@ -59,11 +59,11 @@
* <p>
* NOTE: subclasses must call {@link #initForRead(Map)} before the directory can be used.
*/
- public CompoundFileDirectory(Directory directory, String fileName, int readBufferSize) throws IOException {
+ public CompoundFileDirectory(Directory directory, String fileName, IOContext context) throws IOException {
this.directory = directory;
this.fileName = fileName;
- this.readBufferSize = readBufferSize;
+ this.readBufferSize = BufferedIndexInput.bufferSize(context);
this.isOpen = false;
}
@@ -91,7 +91,7 @@
IndexInput input = null;
try {
input = dir.openInput(IndexFileNames.segmentFileName(IndexFileNames.stripExtension(name), "",
- IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION));
+ IndexFileNames.COMPOUND_FILE_ENTRIES_EXTENSION), IOContext.READONCE);
final int readInt = input.readInt(); // unused right now
assert readInt == CompoundFileWriter.ENTRY_FORMAT_CURRENT;
final int numEntries = input.readVInt();
@@ -189,13 +189,7 @@
}
@Override
- public synchronized IndexInput openInput(String id) throws IOException {
- // Default to readBufferSize passed in when we were opened
- return openInput(id, readBufferSize);
- }
-
- @Override
- public synchronized IndexInput openInput(String id, int readBufferSize) throws IOException {
+ public synchronized IndexInput openInput(String id, IOContext context) throws IOException {
ensureOpen();
assert !openForWrite;
id = IndexFileNames.stripSegmentName(id);
@@ -273,9 +267,9 @@
}
@Override
- public IndexOutput createOutput(String name) throws IOException {
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
ensureOpen();
- return writer.createOutput(name);
+ return writer.createOutput(name, context);
}
@Override
@@ -291,18 +285,18 @@
}
@Override
- public CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
+ public CompoundFileDirectory openCompoundInput(String name, IOContext context) throws IOException {
FileEntry fileEntry = this.entries.get(IndexFileNames.stripSegmentName(name));
if (fileEntry == null) {
throw new FileNotFoundException("file " + name + " does not exists in this CFS");
}
- return new NestedCompoundFileDirectory(name, bufferSize, fileEntry.offset, fileEntry.length);
+ return new NestedCompoundFileDirectory(name, context, fileEntry.offset, fileEntry.length);
}
/** Not implemented
* @throws UnsupportedOperationException */
@Override
- public CompoundFileDirectory createCompoundOutput(String name)
+ public CompoundFileDirectory createCompoundOutput(String name, IOContext context)
throws IOException {
throw new UnsupportedOperationException("can not create nested CFS, create seperately and use Directory.copy instead");
}
@@ -312,14 +306,14 @@
private final long cfsOffset;
private final long cfsLength;
- public NestedCompoundFileDirectory(String fileName, int readBufferSize, long offset, long length)
+ public NestedCompoundFileDirectory(String fileName, IOContext context, long offset, long length)
throws IOException {
- super(directory, fileName, readBufferSize);
+ super(directory, fileName, context);
this.cfsOffset = offset;
this.cfsLength = length;
IndexInput input = null;
try {
- input = CompoundFileDirectory.this.openInput(fileName, 128);
+ input = CompoundFileDirectory.this.openInput(fileName, IOContext.READONCE);
initForRead(CompoundFileDirectory.readEntries(input,
CompoundFileDirectory.this, fileName));
} finally {
diff --git a/lucene/src/java/org/apache/lucene/store/CompoundFileWriter.java b/lucene/src/java/org/apache/lucene/store/CompoundFileWriter.java
index 19e1c77..cd56d8f 100644
--- a/lucene/src/java/org/apache/lucene/store/CompoundFileWriter.java
+++ b/lucene/src/java/org/apache/lucene/store/CompoundFileWriter.java
@@ -136,7 +136,7 @@
IOException priorException = null;
IndexOutput entryTableOut = null;
try {
- initDataOut();
+ initDataOut(IOContext.DEFAULT);
if (!pendingEntries.isEmpty() || outputTaken.get()) {
throw new IllegalStateException("CFS has pending open files");
}
@@ -151,7 +151,7 @@
IOUtils.closeSafely(priorException, dataOut);
}
try {
- entryTableOut = directory.createOutput(entryTableName);
+ entryTableOut = directory.createOutput(entryTableName, IOContext.DEFAULT);
writeEntryTable(entries.values(), entryTableOut);
} catch (IOException e) {
priorException = e;
@@ -180,7 +180,7 @@
*/
private final long copyFileEntry(IndexOutput dataOut, FileEntry fileEntry)
throws IOException, MergeAbortedException {
- final IndexInput is = fileEntry.dir.openInput(fileEntry.file);
+ final IndexInput is = fileEntry.dir.openInput(fileEntry.file, IOContext.READONCE);
try {
final long startPtr = dataOut.getFilePointer();
final long length = fileEntry.length;
@@ -212,7 +212,7 @@
}
}
- IndexOutput createOutput(String name) throws IOException {
+ IndexOutput createOutput(String name, IOContext context) throws IOException {
ensureOpen();
boolean success = false;
try {
@@ -225,7 +225,7 @@
entries.put(name, entry);
final DirectCFSIndexOutput out;
if (outputTaken.compareAndSet(false, true)) {
- initDataOut();
+ initDataOut(context);
success = true;
out = new DirectCFSIndexOutput(dataOut, entry, false);
} else {
@@ -233,7 +233,7 @@
if (directory.fileExists(name)) {
throw new IOException("File already exists");
}
- out = new DirectCFSIndexOutput(directory.createOutput(name), entry,
+ out = new DirectCFSIndexOutput(directory.createOutput(name, context), entry,
true);
}
success = true;
@@ -249,11 +249,11 @@
outputTaken.compareAndSet(true, false);
}
- private synchronized final void initDataOut() throws IOException {
+ private synchronized final void initDataOut(IOContext context) throws IOException {
if (dataOut == null) {
boolean success = false;
try {
- dataOut = directory.createOutput(dataFileName);
+ dataOut = directory.createOutput(dataFileName, context);
dataOut.writeVInt(FORMAT_CURRENT);
success = true;
} finally {
diff --git a/lucene/src/java/org/apache/lucene/store/DefaultCompoundFileDirectory.java b/lucene/src/java/org/apache/lucene/store/DefaultCompoundFileDirectory.java
index e1c35cb..35c036b 100644
--- a/lucene/src/java/org/apache/lucene/store/DefaultCompoundFileDirectory.java
+++ b/lucene/src/java/org/apache/lucene/store/DefaultCompoundFileDirectory.java
@@ -31,11 +31,11 @@
public class DefaultCompoundFileDirectory extends CompoundFileDirectory {
protected IndexInput stream;
- public DefaultCompoundFileDirectory(Directory directory, String fileName, int readBufferSize, boolean writeable) throws IOException {
- super(directory, fileName, readBufferSize);
+ public DefaultCompoundFileDirectory(Directory directory, String fileName, IOContext context, boolean writeable) throws IOException {
+ super(directory, fileName, context);
if (!writeable) {
try {
- stream = directory.openInput(fileName, readBufferSize);
+ stream = directory.openInput(fileName, context);
initForRead(CompoundFileDirectory.readEntries(stream, directory, fileName));
} catch (IOException e) {
IOUtils.closeSafely(e, stream);
diff --git a/lucene/src/java/org/apache/lucene/store/Directory.java b/lucene/src/java/org/apache/lucene/store/Directory.java
index 964f502..cd82f3f 100644
--- a/lucene/src/java/org/apache/lucene/store/Directory.java
+++ b/lucene/src/java/org/apache/lucene/store/Directory.java
@@ -87,7 +87,7 @@
/** Creates a new, empty file in the directory with the given name.
Returns a stream writing this file. */
- public abstract IndexOutput createOutput(String name)
+ public abstract IndexOutput createOutput(String name, IOContext context)
throws IOException;
/**
@@ -103,10 +103,6 @@
*/
public abstract void sync(Collection<String> names) throws IOException;
- /** Returns a stream reading an existing file. */
- public abstract IndexInput openInput(String name)
- throws IOException;
-
/** Returns a stream reading an existing file, with the
* specified read buffer size. The particular Directory
* implementation may ignore the buffer size. Currently
@@ -114,9 +110,7 @@
* parameter are {@link FSDirectory} and {@link
* CompoundFileDirectory}.
*/
- public IndexInput openInput(String name, int bufferSize) throws IOException {
- return openInput(name);
- }
+ public abstract IndexInput openInput(String name, IOContext context) throws IOException;
/**
* Returns a {@link CompoundFileDirectory} capable of
@@ -126,8 +120,8 @@
* {@link DefaultCompoundFileDirectory}.
* @lucene.experimental
*/
- public CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
- return new DefaultCompoundFileDirectory(this, name, bufferSize, false);
+ public CompoundFileDirectory openCompoundInput(String name, IOContext context) throws IOException {
+ return new DefaultCompoundFileDirectory(this, name, context, false);
}
/**
@@ -138,8 +132,8 @@
* {@link DefaultCompoundFileDirectory}.
* @lucene.experimental
*/
- public CompoundFileDirectory createCompoundOutput(String name) throws IOException {
- return new DefaultCompoundFileDirectory(this, name, 1024, true);
+ public CompoundFileDirectory createCompoundOutput(String name, IOContext context) throws IOException {
+ return new DefaultCompoundFileDirectory(this, name, context, true);
}
/** Construct a {@link Lock}.
@@ -223,13 +217,13 @@
* <b>NOTE:</b> this method does not check whether <i>dest<i> exist and will
* overwrite it if it does.
*/
- public void copy(Directory to, String src, String dest) throws IOException {
+ public void copy(Directory to, String src, String dest, IOContext context) throws IOException {
IndexOutput os = null;
IndexInput is = null;
IOException priorException = null;
try {
- os = to.createOutput(dest);
- is = openInput(src);
+ os = to.createOutput(dest, context);
+ is = openInput(src, context);
is.copyBytes(os, is.length());
} catch (IOException ioe) {
priorException = ioe;
diff --git a/lucene/src/java/org/apache/lucene/store/FSDirectory.java b/lucene/src/java/org/apache/lucene/store/FSDirectory.java
index 6498954..e3a36b5 100644
--- a/lucene/src/java/org/apache/lucene/store/FSDirectory.java
+++ b/lucene/src/java/org/apache/lucene/store/FSDirectory.java
@@ -122,6 +122,10 @@
protected final Set<String> staleFiles = synchronizedSet(new HashSet<String>()); // Files written, but not yet sync'ed
private int chunkSize = DEFAULT_READ_CHUNK_SIZE; // LUCENE-1566
+  // null means no limit
+ private Double maxMergeWriteMBPerSec;
+ private RateLimiter mergeWriteRateLimiter;
+
// returns the canonical version of the directory, creating it if it doesn't exist.
private static File getCanonicalPath(File file) throws IOException {
return new File(file.getCanonicalPath());
@@ -286,11 +290,40 @@
/** Creates an IndexOutput for the file with the given name. */
@Override
- public IndexOutput createOutput(String name) throws IOException {
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
ensureOpen();
ensureCanWrite(name);
- return new FSIndexOutput(this, name);
+ return new FSIndexOutput(this, name, context.context == IOContext.Context.MERGE ? mergeWriteRateLimiter : null);
+ }
+
+ /** Sets the maximum (approx) MB/sec allowed by all write
+ * IO performed by merging. Pass null to have no limit.
+ *
+ * <p><b>NOTE</b>: if merges are already running there is
+ * no guarantee this new rate will apply to them; it will
+ * only apply for certain to new merges.
+ *
+ * @lucene.experimental */
+ public synchronized void setMaxMergeWriteMBPerSec(Double mbPerSec) {
+ maxMergeWriteMBPerSec = mbPerSec;
+ if (mbPerSec == null) {
+ if (mergeWriteRateLimiter != null) {
+ mergeWriteRateLimiter.setMaxRate(Double.MAX_VALUE);
+ mergeWriteRateLimiter = null;
+ }
+ } else if (mergeWriteRateLimiter != null) {
+ mergeWriteRateLimiter.setMaxRate(mbPerSec);
+ } else {
+ mergeWriteRateLimiter = new RateLimiter(mbPerSec);
+ }
+ }
+
+ /** See {@link #setMaxMergeWriteMBPerSec}.
+ *
+ * @lucene.experimental */
+ public Double getMaxMergeWriteMBPerSec() {
+ return maxMergeWriteMBPerSec;
}
protected void ensureCanWrite(String name) throws IOException {
@@ -319,13 +352,6 @@
staleFiles.removeAll(toSync);
}
- // Inherit javadoc
- @Override
- public IndexInput openInput(String name) throws IOException {
- ensureOpen();
- return openInput(name, BufferedIndexInput.BUFFER_SIZE);
- }
-
@Override
public String getLockID() {
ensureOpen();
@@ -409,17 +435,22 @@
private final String name;
private final RandomAccessFile file;
private volatile boolean isOpen; // remember if the file is open, so that we don't try to close it more than once
-
- public FSIndexOutput(FSDirectory parent, String name) throws IOException {
+ private final RateLimiter rateLimiter;
+
+ public FSIndexOutput(FSDirectory parent, String name, RateLimiter rateLimiter) throws IOException {
this.parent = parent;
this.name = name;
file = new RandomAccessFile(new File(parent.directory, name), "rw");
isOpen = true;
+ this.rateLimiter = rateLimiter;
}
/** output methods: */
@Override
public void flushBuffer(byte[] b, int offset, int size) throws IOException {
+ if (rateLimiter != null) {
+ rateLimiter.pause(size);
+ }
file.write(b, offset, size);
}
diff --git a/lucene/src/java/org/apache/lucene/store/FileSwitchDirectory.java b/lucene/src/java/org/apache/lucene/store/FileSwitchDirectory.java
index c170163..5b76e46 100644
--- a/lucene/src/java/org/apache/lucene/store/FileSwitchDirectory.java
+++ b/lucene/src/java/org/apache/lucene/store/FileSwitchDirectory.java
@@ -25,6 +25,7 @@
import java.util.Set;
import java.util.HashSet;
+
/**
* Expert: A Directory instance that switches files between
* two other Directory instances.
@@ -125,8 +126,8 @@
}
@Override
- public IndexOutput createOutput(String name) throws IOException {
- return getDirectory(name).createOutput(name);
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
+ return getDirectory(name).createOutput(name, context);
}
@Override
@@ -145,17 +146,17 @@
}
@Override
- public IndexInput openInput(String name) throws IOException {
- return getDirectory(name).openInput(name);
+ public IndexInput openInput(String name, IOContext context) throws IOException {
+ return getDirectory(name).openInput(name, context);
}
@Override
- public CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
- return getDirectory(name).openCompoundInput(name, bufferSize);
+ public CompoundFileDirectory openCompoundInput(String name, IOContext context) throws IOException {
+ return getDirectory(name).openCompoundInput(name, context);
}
@Override
- public CompoundFileDirectory createCompoundOutput(String name) throws IOException {
- return getDirectory(name).createCompoundOutput(name);
+ public CompoundFileDirectory createCompoundOutput(String name, IOContext context) throws IOException {
+ return getDirectory(name).createCompoundOutput(name, context);
}
}
diff --git a/lucene/src/java/org/apache/lucene/store/FlushInfo.java b/lucene/src/java/org/apache/lucene/store/FlushInfo.java
new file mode 100644
index 0000000..3bde34f
--- /dev/null
+++ b/lucene/src/java/org/apache/lucene/store/FlushInfo.java
@@ -0,0 +1,77 @@
+package org.apache.lucene.store;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <p>A FlushInfo provides information required for a FLUSH context and other optimization operations.
+ * It is used as part of an {@link IOContext} in case of FLUSH context.</p>
+ */
+
+
+public class FlushInfo {
+
+ public final int numDocs;
+
+ public final long estimatedSegmentSize;
+
+ /**
+ * <p>Creates a new {@link FlushInfo} instance from
+ * the values required for a FLUSH {@link IOContext} context.
+ *
+ * These values are only estimates and are not the actual values.
+ *
+ */
+
+ public FlushInfo(int numDocs, long estimatedSegmentSize) {
+ this.numDocs = numDocs;
+ this.estimatedSegmentSize = estimatedSegmentSize;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result
+ + (int) (estimatedSegmentSize ^ (estimatedSegmentSize >>> 32));
+ result = prime * result + numDocs;
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ FlushInfo other = (FlushInfo) obj;
+ if (estimatedSegmentSize != other.estimatedSegmentSize)
+ return false;
+ if (numDocs != other.numDocs)
+ return false;
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return "FlushInfo [numDocs=" + numDocs + ", estimatedSegmentSize="
+ + estimatedSegmentSize + "]";
+ }
+
+}
diff --git a/lucene/src/java/org/apache/lucene/store/IOContext.java b/lucene/src/java/org/apache/lucene/store/IOContext.java
new file mode 100644
index 0000000..7f253a2
--- /dev/null
+++ b/lucene/src/java/org/apache/lucene/store/IOContext.java
@@ -0,0 +1,132 @@
+package org.apache.lucene.store;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * IOContext holds additional details on the merge/search context. An IOContext
+ * object must never be null when passed as a parameter to either
+ * {@link org.apache.lucene.store.Directory#openInput(String, IOContext)} or
+ * {@link org.apache.lucene.store.Directory#createOutput(String, IOContext)}.
+ */
+public class IOContext {
+
+ /**
+ * Context is an enum specifying the context in which the Directory
+ * is being used.
+ */
+ public enum Context {
+ MERGE, READ, FLUSH, DEFAULT
+ };
+
+ /**
+ * The {@link Context} in which this IOContext is being used.
+ */
+ public final Context context;
+
+ public final MergeInfo mergeInfo;
+
+ public final FlushInfo flushInfo;
+
+ public final boolean readOnce;
+
+ public static final IOContext DEFAULT = new IOContext(Context.DEFAULT);
+
+ public static final IOContext READONCE = new IOContext(true);
+
+ public static final IOContext READ = new IOContext(false);
+
+ public IOContext() {
+ this(false);
+ }
+
+ public IOContext(FlushInfo flushInfo) {
+ assert flushInfo != null;
+ this.context = Context.FLUSH;
+ this.mergeInfo = null;
+ this.readOnce = false;
+ this.flushInfo = flushInfo;
+ }
+
+ public IOContext(Context context) {
+ this(context, null);
+ }
+
+ private IOContext(boolean readOnce) {
+ this.context = Context.READ;
+ this.mergeInfo = null;
+ this.readOnce = readOnce;
+ this.flushInfo = null;
+ }
+
+ public IOContext(MergeInfo mergeInfo) {
+ this(Context.MERGE, mergeInfo);
+ }
+
+ private IOContext(Context context, MergeInfo mergeInfo) {
+ assert context != Context.MERGE || mergeInfo != null : "MergeInfo must not be null if context is MERGE";
+ assert context != Context.FLUSH : "Use IOContext(FlushInfo) to create a FLUSH IOContext";
+ this.context = context;
+ this.readOnce = false;
+ this.mergeInfo = mergeInfo;
+ this.flushInfo = null;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + ((context == null) ? 0 : context.hashCode());
+ result = prime * result + ((flushInfo == null) ? 0 : flushInfo.hashCode());
+ result = prime * result + ((mergeInfo == null) ? 0 : mergeInfo.hashCode());
+ result = prime * result + (readOnce ? 1231 : 1237);
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ IOContext other = (IOContext) obj;
+ if (context != other.context)
+ return false;
+ if (flushInfo == null) {
+ if (other.flushInfo != null)
+ return false;
+ } else if (!flushInfo.equals(other.flushInfo))
+ return false;
+ if (mergeInfo == null) {
+ if (other.mergeInfo != null)
+ return false;
+ } else if (!mergeInfo.equals(other.mergeInfo))
+ return false;
+ if (readOnce != other.readOnce)
+ return false;
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return "IOContext [context=" + context + ", mergeInfo=" + mergeInfo
+ + ", flushInfo=" + flushInfo + ", readOnce=" + readOnce + "]";
+ }
+
+}
\ No newline at end of file
diff --git a/lucene/src/java/org/apache/lucene/store/MMapDirectory.java b/lucene/src/java/org/apache/lucene/store/MMapDirectory.java
index 91f5386..1b8cb5a 100644
--- a/lucene/src/java/org/apache/lucene/store/MMapDirectory.java
+++ b/lucene/src/java/org/apache/lucene/store/MMapDirectory.java
@@ -209,7 +209,7 @@
/** Creates an IndexInput for the file with the given name. */
@Override
- public IndexInput openInput(String name, int bufferSize) throws IOException {
+ public IndexInput openInput(String name, IOContext context) throws IOException {
ensureOpen();
File f = new File(getDirectory(), name);
RandomAccessFile raf = new RandomAccessFile(f, "r");
@@ -221,15 +221,15 @@
}
@Override
- public CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
- return new MMapCompoundFileDirectory(name, bufferSize);
+ public CompoundFileDirectory openCompoundInput(String name, IOContext context) throws IOException {
+ return new MMapCompoundFileDirectory(name, context);
}
private final class MMapCompoundFileDirectory extends CompoundFileDirectory {
private RandomAccessFile raf = null;
- public MMapCompoundFileDirectory(String fileName, int readBufferSize) throws IOException {
- super(MMapDirectory.this, fileName, readBufferSize);
+ public MMapCompoundFileDirectory(String fileName, IOContext context) throws IOException {
+ super(MMapDirectory.this, fileName, context);
IndexInput stream = null;
try {
File f = new File(MMapDirectory.this.getDirectory(), fileName);
@@ -438,4 +438,5 @@
}
}
}
+
}
diff --git a/lucene/src/java/org/apache/lucene/store/MergeInfo.java b/lucene/src/java/org/apache/lucene/store/MergeInfo.java
new file mode 100644
index 0000000..7dabc4b
--- /dev/null
+++ b/lucene/src/java/org/apache/lucene/store/MergeInfo.java
@@ -0,0 +1,89 @@
+package org.apache.lucene.store;
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * <p>A MergeInfo provides information required for a MERGE context and other optimization operations.
+ * It is used as part of an {@link IOContext} in case of MERGE context.</p>
+ */
+
+public class MergeInfo {
+
+ public final int totalDocCount;
+
+ public final long estimatedMergeBytes;
+
+ public final boolean isExternal;
+
+ public final boolean optimize;
+
+
+ /**
+ * <p>Creates a new {@link MergeInfo} instance from
+ * the values required for a MERGE {@link IOContext} context.
+ *
+ * These values are only estimates and are not the actual values.
+ *
+ */
+
+ public MergeInfo(int totalDocCount, long estimatedMergeBytes, boolean isExternal, boolean optimize) {
+ this.totalDocCount = totalDocCount;
+ this.estimatedMergeBytes = estimatedMergeBytes;
+ this.isExternal = isExternal;
+ this.optimize = optimize;
+ }
+
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result
+ + (int) (estimatedMergeBytes ^ (estimatedMergeBytes >>> 32));
+ result = prime * result + (isExternal ? 1231 : 1237);
+ result = prime * result + (optimize ? 1231 : 1237);
+ result = prime * result + totalDocCount;
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ MergeInfo other = (MergeInfo) obj;
+ if (estimatedMergeBytes != other.estimatedMergeBytes)
+ return false;
+ if (isExternal != other.isExternal)
+ return false;
+ if (optimize != other.optimize)
+ return false;
+ if (totalDocCount != other.totalDocCount)
+ return false;
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return "MergeInfo [totalDocCount=" + totalDocCount
+ + ", estimatedMergeBytes=" + estimatedMergeBytes + ", isExternal="
+ + isExternal + ", optimize=" + optimize + "]";
+ }
+}
\ No newline at end of file
diff --git a/lucene/src/java/org/apache/lucene/store/NIOFSDirectory.java b/lucene/src/java/org/apache/lucene/store/NIOFSDirectory.java
index 40f2a99..6ac4380 100644
--- a/lucene/src/java/org/apache/lucene/store/NIOFSDirectory.java
+++ b/lucene/src/java/org/apache/lucene/store/NIOFSDirectory.java
@@ -76,22 +76,22 @@
/** Creates an IndexInput for the file with the given name. */
@Override
- public IndexInput openInput(String name, int bufferSize) throws IOException {
+ public IndexInput openInput(String name, IOContext context) throws IOException {
ensureOpen();
- return new NIOFSIndexInput(new File(getDirectory(), name), bufferSize, getReadChunkSize());
+ return new NIOFSIndexInput(new File(getDirectory(), name), context, getReadChunkSize());
}
@Override
- public CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
- return new NIOFSCompoundFileDirectory(name, bufferSize);
+ public CompoundFileDirectory openCompoundInput(String name, IOContext context) throws IOException {
+ return new NIOFSCompoundFileDirectory(name, context);
}
private final class NIOFSCompoundFileDirectory extends CompoundFileDirectory {
private SimpleFSIndexInput.Descriptor fd;
private FileChannel fc;
- public NIOFSCompoundFileDirectory(String fileName, int readBufferSize) throws IOException {
- super(NIOFSDirectory.this, fileName, readBufferSize);
+ public NIOFSCompoundFileDirectory(String fileName, IOContext context) throws IOException {
+ super(NIOFSDirectory.this, fileName, context);
IndexInput stream = null;
try {
File f = new File(NIOFSDirectory.this.getDirectory(), fileName);
@@ -131,8 +131,8 @@
final FileChannel channel;
- public NIOFSIndexInput(File path, int bufferSize, int chunkSize) throws IOException {
- super(path, bufferSize, chunkSize);
+ public NIOFSIndexInput(File path, IOContext context, int chunkSize) throws IOException {
+ super(path, context, chunkSize);
channel = file.getChannel();
}
@@ -229,4 +229,5 @@
}
}
}
+
}
diff --git a/lucene/src/java/org/apache/lucene/store/RAMDirectory.java b/lucene/src/java/org/apache/lucene/store/RAMDirectory.java
index e33af1a..0e0cc18 100644
--- a/lucene/src/java/org/apache/lucene/store/RAMDirectory.java
+++ b/lucene/src/java/org/apache/lucene/store/RAMDirectory.java
@@ -27,6 +27,7 @@
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
+
/**
* A memory-resident {@link Directory} implementation. Locking
* implementation is by default the {@link SingleInstanceLockFactory}
@@ -38,7 +39,7 @@
// *****
// Lock acquisition sequence: RAMDirectory, then RAMFile
- // *****
+ // *****
/** Constructs an empty {@link Directory}. */
public RAMDirectory() {
@@ -65,14 +66,14 @@
* @param dir a <code>Directory</code> value
* @exception IOException if an error occurs
*/
- public RAMDirectory(Directory dir) throws IOException {
- this(dir, false);
+ public RAMDirectory(Directory dir, IOContext context) throws IOException {
+ this(dir, false, context);
}
- private RAMDirectory(Directory dir, boolean closeDir) throws IOException {
+ private RAMDirectory(Directory dir, boolean closeDir, IOContext context) throws IOException {
this();
for (String file : dir.listAll()) {
- dir.copy(this, file, file);
+ dir.copy(this, file, file, context);
}
if (closeDir) {
dir.close();
@@ -149,7 +150,7 @@
/** Creates a new, empty file in the directory with the given name. Returns a stream writing this file. */
@Override
- public IndexOutput createOutput(String name) throws IOException {
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
ensureOpen();
RAMFile file = newRAMFile();
RAMFile existing = fileMap.remove(name);
@@ -176,7 +177,7 @@
/** Returns a stream reading an existing file. */
@Override
- public IndexInput openInput(String name) throws IOException {
+ public IndexInput openInput(String name, IOContext context) throws IOException {
ensureOpen();
RAMFile file = fileMap.get(name);
if (file == null) {
diff --git a/lucene/src/java/org/apache/lucene/store/RateLimiter.java b/lucene/src/java/org/apache/lucene/store/RateLimiter.java
new file mode 100644
index 0000000..df94911
--- /dev/null
+++ b/lucene/src/java/org/apache/lucene/store/RateLimiter.java
@@ -0,0 +1,78 @@
+package org.apache.lucene.store;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.util.ThreadInterruptedException;
+
+/** Simple class to rate limit IO. Typically it's shared
+ * across multiple IndexInputs or IndexOutputs (for example
+ * those involved in merging). Those IndexInputs and
+ * IndexOutputs would call {@link #pause} whenever they
+ * want to read bytes or write bytes. */
+
+public class RateLimiter {
+ private volatile double nsPerByte;
+ private volatile long lastNS;
+
+ // TODO: we could also allow eg a sub class to dynamically
+ // determine the allowed rate, eg if an app wants to
+ // change the allowed rate over time or something
+
+ /** mbPerSec is the MB/sec max IO rate */
+ public RateLimiter(double mbPerSec) {
+ setMaxRate(mbPerSec);
+ }
+
+ public void setMaxRate(double mbPerSec) {
+ nsPerByte = 1000000000. / (1024*1024*mbPerSec);
+ }
+
+ /** Pauses, if necessary, to keep the instantaneous IO
+ * rate at or below the target. NOTE: multiple threads
+ * may safely use this, however the implementation is
+ * not perfectly thread safe but likely in practice this
+ * is harmless (just means in some rare cases the rate
+ * might exceed the target). It's best to call this
+ * with a biggish count, not one byte at a time. */
+ public void pause(long bytes) {
+
+ // TODO: this is purely instantaneous rate; maybe we
+ // should also offer decayed recent history one?
+ final long targetNS = lastNS = lastNS + ((long) (bytes * nsPerByte));
+ long curNS = System.nanoTime();
+ if (lastNS < curNS) {
+ lastNS = curNS;
+ }
+
+ // While loop because Thread.sleep doesn't always sleep
+ // enough:
+ while(true) {
+ final long pauseNS = targetNS - curNS;
+ if (pauseNS > 0) {
+ try {
+ Thread.sleep((int) (pauseNS/1000000), (int) (pauseNS % 1000000));
+ } catch (InterruptedException ie) {
+ throw new ThreadInterruptedException(ie);
+ }
+ curNS = System.nanoTime();
+ continue;
+ }
+ break;
+ }
+ }
+}
diff --git a/lucene/src/java/org/apache/lucene/store/SimpleFSDirectory.java b/lucene/src/java/org/apache/lucene/store/SimpleFSDirectory.java
index 07cb321..8174fa4 100644
--- a/lucene/src/java/org/apache/lucene/store/SimpleFSDirectory.java
+++ b/lucene/src/java/org/apache/lucene/store/SimpleFSDirectory.java
@@ -23,6 +23,7 @@
import org.apache.lucene.util.IOUtils;
+
/** A straightforward implementation of {@link FSDirectory}
* using java.io.RandomAccessFile. However, this class has
* poor concurrent performance (multiple threads will
@@ -53,21 +54,21 @@
/** Creates an IndexInput for the file with the given name. */
@Override
- public IndexInput openInput(String name, int bufferSize) throws IOException {
+ public IndexInput openInput(String name, IOContext context) throws IOException {
ensureOpen();
- return new SimpleFSIndexInput(new File(directory, name), bufferSize, getReadChunkSize());
+ return new SimpleFSIndexInput(new File(directory, name), context, getReadChunkSize());
}
@Override
- public CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
- return new SimpleFSCompoundFileDirectory(name, bufferSize);
+ public CompoundFileDirectory openCompoundInput(String name, IOContext context) throws IOException {
+ return new SimpleFSCompoundFileDirectory(name, context);
}
private final class SimpleFSCompoundFileDirectory extends CompoundFileDirectory {
private SimpleFSIndexInput.Descriptor fd;
- public SimpleFSCompoundFileDirectory(String fileName, int readBufferSize) throws IOException {
- super(SimpleFSDirectory.this, fileName, readBufferSize);
+ public SimpleFSCompoundFileDirectory(String fileName, IOContext context) throws IOException {
+ super(SimpleFSDirectory.this, fileName, context);
IndexInput stream = null;
try {
final File f = new File(SimpleFSDirectory.this.getDirectory(), fileName);
@@ -128,8 +129,8 @@
protected final long off;
protected final long end;
- public SimpleFSIndexInput(File path, int bufferSize, int chunkSize) throws IOException {
- super(bufferSize);
+ public SimpleFSIndexInput(File path, IOContext context, int chunkSize) throws IOException {
+ super(context);
this.file = new Descriptor(path, "r");
this.chunkSize = chunkSize;
this.off = 0L;
diff --git a/lucene/src/java/org/apache/lucene/util/BitVector.java b/lucene/src/java/org/apache/lucene/util/BitVector.java
index c664df0..b58c7af 100644
--- a/lucene/src/java/org/apache/lucene/util/BitVector.java
+++ b/lucene/src/java/org/apache/lucene/util/BitVector.java
@@ -20,9 +20,12 @@
import java.io.IOException;
import java.util.Arrays;
+import org.apache.lucene.index.MergePolicy.OneMerge;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.IOContext.Context;
/** Optimized implementation of a vector of bits. This is more-or-less like
* java.util.BitSet, but also includes the following:
@@ -215,8 +218,8 @@
/** Writes this vector to the file <code>name</code> in Directory
<code>d</code>, in a format that can be read by the constructor {@link
#BitVector(Directory, String)}. */
- public final void write(Directory d, String name) throws IOException {
- IndexOutput output = d.createOutput(name);
+ public final void write(Directory d, String name, IOContext context) throws IOException {
+ IndexOutput output = d.createOutput(name, context);
try {
output.writeInt(-2);
CodecUtil.writeHeader(output, CODEC, VERSION_CURRENT);
@@ -328,8 +331,8 @@
/** Constructs a bit vector from the file <code>name</code> in Directory
<code>d</code>, as written by the {@link #write} method.
*/
- public BitVector(Directory d, String name) throws IOException {
- IndexInput input = d.openInput(name);
+ public BitVector(Directory d, String name, IOContext context) throws IOException {
+ IndexInput input = d.openInput(name, context);
try {
final int firstInt = input.readInt();
diff --git a/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockFixedIntBlockCodec.java b/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockFixedIntBlockCodec.java
index c533416..e665e82 100644
--- a/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockFixedIntBlockCodec.java
+++ b/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockFixedIntBlockCodec.java
@@ -84,8 +84,8 @@
}
@Override
- public IntIndexInput openInput(Directory dir, String fileName, int readBufferSize) throws IOException {
- return new FixedIntBlockIndexInput(dir.openInput(fileName, readBufferSize)) {
+ public IntIndexInput openInput(Directory dir, String fileName, IOContext context) throws IOException {
+ return new FixedIntBlockIndexInput(dir.openInput(fileName, context)) {
@Override
protected BlockReader getBlockReader(final IndexInput in, final int[] buffer) throws IOException {
@@ -102,8 +102,8 @@
}
@Override
- public IntIndexOutput createOutput(Directory dir, String fileName) throws IOException {
- IndexOutput out = dir.createOutput(fileName);
+ public IntIndexOutput createOutput(Directory dir, String fileName, IOContext context) throws IOException {
+ IndexOutput out = dir.createOutput(fileName, context);
boolean success = false;
try {
FixedIntBlockIndexOutput ret = new FixedIntBlockIndexOutput(out, blockSize) {
@@ -160,7 +160,7 @@
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
PostingsReaderBase postingsReader = new SepPostingsReaderImpl(state.dir,
state.segmentInfo,
- state.readBufferSize,
+ state.context,
new MockIntFactory(blockSize), state.codecId);
TermsIndexReaderBase indexReader;
@@ -170,7 +170,8 @@
state.fieldInfos,
state.segmentInfo.name,
state.termsIndexDivisor,
- BytesRef.getUTF8SortedAsUnicodeComparator(), state.codecId);
+ BytesRef.getUTF8SortedAsUnicodeComparator(), state.codecId,
+ IOContext.DEFAULT);
success = true;
} finally {
if (!success) {
@@ -185,7 +186,7 @@
state.fieldInfos,
state.segmentInfo.name,
postingsReader,
- state.readBufferSize,
+ state.context,
StandardCodec.TERMS_CACHE_SIZE,
state.codecId);
success = true;
@@ -224,6 +225,6 @@
@Override
public PerDocValues docsProducer(SegmentReadState state) throws IOException {
- return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator());
+ return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator(), state.context);
}
}
diff --git a/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockVariableIntBlockCodec.java b/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockVariableIntBlockCodec.java
index 6bdac7e..6d15b92 100644
--- a/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockVariableIntBlockCodec.java
+++ b/lucene/src/test-framework/org/apache/lucene/index/codecs/mockintblock/MockVariableIntBlockCodec.java
@@ -48,6 +48,7 @@
import org.apache.lucene.index.codecs.TermsIndexReaderBase;
import org.apache.lucene.index.codecs.TermsIndexWriterBase;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
@@ -82,8 +83,8 @@
}
@Override
- public IntIndexInput openInput(Directory dir, String fileName, int readBufferSize) throws IOException {
- final IndexInput in = dir.openInput(fileName, readBufferSize);
+ public IntIndexInput openInput(Directory dir, String fileName, IOContext context) throws IOException {
+ final IndexInput in = dir.openInput(fileName, context);
final int baseBlockSize = in.readInt();
return new VariableIntBlockIndexInput(in) {
@@ -106,8 +107,8 @@
}
@Override
- public IntIndexOutput createOutput(Directory dir, String fileName) throws IOException {
- final IndexOutput out = dir.createOutput(fileName);
+ public IntIndexOutput createOutput(Directory dir, String fileName, IOContext context) throws IOException {
+ final IndexOutput out = dir.createOutput(fileName, context);
boolean success = false;
try {
out.writeInt(baseBlockSize);
@@ -182,7 +183,7 @@
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
PostingsReaderBase postingsReader = new SepPostingsReaderImpl(state.dir,
state.segmentInfo,
- state.readBufferSize,
+ state.context,
new MockIntFactory(baseBlockSize), state.codecId);
TermsIndexReaderBase indexReader;
@@ -193,7 +194,7 @@
state.segmentInfo.name,
state.termsIndexDivisor,
BytesRef.getUTF8SortedAsUnicodeComparator(),
- state.codecId);
+ state.codecId, state.context);
success = true;
} finally {
if (!success) {
@@ -208,7 +209,7 @@
state.fieldInfos,
state.segmentInfo.name,
postingsReader,
- state.readBufferSize,
+ state.context,
StandardCodec.TERMS_CACHE_SIZE,
state.codecId);
success = true;
@@ -247,6 +248,6 @@
@Override
public PerDocValues docsProducer(SegmentReadState state) throws IOException {
- return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator());
+ return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator(), state.context);
}
}
diff --git a/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java b/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java
index ac06feb..c15865c 100644
--- a/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java
+++ b/lucene/src/test-framework/org/apache/lucene/index/codecs/mockrandom/MockRandomCodec.java
@@ -61,6 +61,7 @@
import org.apache.lucene.index.codecs.standard.StandardPostingsReader;
import org.apache.lucene.index.codecs.standard.StandardPostingsWriter;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
@@ -103,23 +104,23 @@
}
@Override
- public IntIndexInput openInput(Directory dir, String fileName, int readBufferSize) throws IOException {
+ public IntIndexInput openInput(Directory dir, String fileName, IOContext context) throws IOException {
// Must only use extension, because IW.addIndexes can
// rename segment!
final IntStreamFactory f = delegates.get((Math.abs(salt ^ getExtension(fileName).hashCode())) % delegates.size());
if (LuceneTestCase.VERBOSE) {
System.out.println("MockRandomCodec: read using int factory " + f + " from fileName=" + fileName);
}
- return f.openInput(dir, fileName, readBufferSize);
+ return f.openInput(dir, fileName, context);
}
@Override
- public IntIndexOutput createOutput(Directory dir, String fileName) throws IOException {
+ public IntIndexOutput createOutput(Directory dir, String fileName, IOContext context) throws IOException {
final IntStreamFactory f = delegates.get((Math.abs(salt ^ getExtension(fileName).hashCode())) % delegates.size());
if (LuceneTestCase.VERBOSE) {
System.out.println("MockRandomCodec: write using int factory " + f + " to fileName=" + fileName);
}
- return f.createOutput(dir, fileName);
+ return f.createOutput(dir, fileName, context);
}
}
@@ -140,7 +141,7 @@
}
final String seedFileName = IndexFileNames.segmentFileName(state.segmentName, state.codecId, SEED_EXT);
- final IndexOutput out = state.directory.createOutput(seedFileName);
+ final IndexOutput out = state.directory.createOutput(seedFileName, state.context);
try {
out.writeLong(seed);
} finally {
@@ -241,7 +242,7 @@
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
final String seedFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.codecId, SEED_EXT);
- final IndexInput in = state.dir.openInput(seedFileName);
+ final IndexInput in = state.dir.openInput(seedFileName, state.context);
final long seed = in.readLong();
if (LuceneTestCase.VERBOSE) {
System.out.println("MockRandomCodec: reading from seg=" + state.segmentInfo.name + " seed=" + seed);
@@ -259,12 +260,12 @@
if (random.nextBoolean()) {
postingsReader = new SepPostingsReaderImpl(state.dir, state.segmentInfo,
- readBufferSize, new MockIntStreamFactory(random), state.codecId);
+ state.context, new MockIntStreamFactory(random), state.codecId);
} else {
if (LuceneTestCase.VERBOSE) {
System.out.println("MockRandomCodec: reading Standard postings");
}
- postingsReader = new StandardPostingsReader(state.dir, state.segmentInfo, readBufferSize, state.codecId);
+ postingsReader = new StandardPostingsReader(state.dir, state.segmentInfo, state.context, state.codecId);
}
if (random.nextBoolean()) {
@@ -293,7 +294,7 @@
state.segmentInfo.name,
state.termsIndexDivisor,
BytesRef.getUTF8SortedAsUnicodeComparator(),
- state.codecId);
+ state.codecId, state.context);
} else {
final int n2 = random.nextInt(3);
if (n2 == 1) {
@@ -311,7 +312,7 @@
state.fieldInfos,
state.segmentInfo.name,
state.termsIndexDivisor,
- state.codecId);
+ state.codecId, state.context);
}
success = true;
} finally {
@@ -329,7 +330,7 @@
state.fieldInfos,
state.segmentInfo.name,
postingsReader,
- readBufferSize,
+ state.context,
termsCacheSize,
state.codecId);
success = true;
@@ -385,6 +386,6 @@
@Override
public PerDocValues docsProducer(SegmentReadState state) throws IOException {
- return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator());
+ return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator(), state.context);
}
}
diff --git a/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSepCodec.java b/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSepCodec.java
index 75e3362..30cd364 100644
--- a/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSepCodec.java
+++ b/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSepCodec.java
@@ -93,7 +93,7 @@
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
PostingsReaderBase postingsReader = new SepPostingsReaderImpl(state.dir, state.segmentInfo,
- state.readBufferSize, new MockSingleIntFactory(), state.codecId);
+ state.context, new MockSingleIntFactory(), state.codecId);
TermsIndexReaderBase indexReader;
boolean success = false;
@@ -103,7 +103,7 @@
state.segmentInfo.name,
state.termsIndexDivisor,
BytesRef.getUTF8SortedAsUnicodeComparator(),
- state.codecId);
+ state.codecId, state.context);
success = true;
} finally {
if (!success) {
@@ -118,7 +118,7 @@
state.fieldInfos,
state.segmentInfo.name,
postingsReader,
- state.readBufferSize,
+ state.context,
StandardCodec.TERMS_CACHE_SIZE,
state.codecId);
success = true;
@@ -161,6 +161,6 @@
@Override
public PerDocValues docsProducer(SegmentReadState state) throws IOException {
- return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator());
+ return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator(), state.context);
}
}
diff --git a/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntFactory.java b/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntFactory.java
index 092db12..6dce24a 100644
--- a/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntFactory.java
+++ b/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntFactory.java
@@ -18,6 +18,7 @@
*/
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.index.codecs.sep.IntStreamFactory;
import org.apache.lucene.index.codecs.sep.IntIndexInput;
import org.apache.lucene.index.codecs.sep.IntIndexOutput;
@@ -27,11 +28,11 @@
/** @lucene.experimental */
public class MockSingleIntFactory extends IntStreamFactory {
@Override
- public IntIndexInput openInput(Directory dir, String fileName, int readBufferSize) throws IOException {
- return new MockSingleIntIndexInput(dir, fileName, readBufferSize);
+ public IntIndexInput openInput(Directory dir, String fileName, IOContext context) throws IOException {
+ return new MockSingleIntIndexInput(dir, fileName, context);
}
@Override
- public IntIndexOutput createOutput(Directory dir, String fileName) throws IOException {
- return new MockSingleIntIndexOutput(dir, fileName);
+ public IntIndexOutput createOutput(Directory dir, String fileName, IOContext context) throws IOException {
+ return new MockSingleIntIndexOutput(dir, fileName, context);
}
}
diff --git a/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexInput.java b/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexInput.java
index d5d45bf..df34e46 100644
--- a/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexInput.java
+++ b/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexInput.java
@@ -22,6 +22,7 @@
import org.apache.lucene.index.codecs.sep.IntIndexInput;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.util.CodecUtil;
@@ -35,9 +36,9 @@
public class MockSingleIntIndexInput extends IntIndexInput {
private final IndexInput in;
- public MockSingleIntIndexInput(Directory dir, String fileName, int readBufferSize)
+ public MockSingleIntIndexInput(Directory dir, String fileName, IOContext context)
throws IOException {
- in = dir.openInput(fileName, readBufferSize);
+ in = dir.openInput(fileName, context);
CodecUtil.checkHeader(in, MockSingleIntIndexOutput.CODEC,
MockSingleIntIndexOutput.VERSION_START,
MockSingleIntIndexOutput.VERSION_START);
diff --git a/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexOutput.java b/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexOutput.java
index 3deb1d4..7830b78 100644
--- a/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexOutput.java
+++ b/lucene/src/test-framework/org/apache/lucene/index/codecs/mocksep/MockSingleIntIndexOutput.java
@@ -17,6 +17,7 @@
* limitations under the License.
*/
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.CodecUtil;
@@ -35,8 +36,8 @@
final static int VERSION_START = 0;
final static int VERSION_CURRENT = VERSION_START;
- public MockSingleIntIndexOutput(Directory dir, String fileName) throws IOException {
- out = dir.createOutput(fileName);
+ public MockSingleIntIndexOutput(Directory dir, String fileName, IOContext context) throws IOException {
+ out = dir.createOutput(fileName, context);
boolean success = false;
try {
CodecUtil.writeHeader(out, CODEC, VERSION_CURRENT);
diff --git a/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexFieldsWriter.java b/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexFieldsWriter.java
index 390d10d..4950cf9 100644
--- a/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexFieldsWriter.java
+++ b/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexFieldsWriter.java
@@ -50,12 +50,12 @@
state.termIndexInterval);
final String freqFile = IndexFileNames.segmentFileName(state.segmentName, "", PreFlexCodec.FREQ_EXTENSION);
- freqOut = state.directory.createOutput(freqFile);
+ freqOut = state.directory.createOutput(freqFile, state.context);
totalNumDocs = state.numDocs;
if (state.fieldInfos.hasProx()) {
final String proxFile = IndexFileNames.segmentFileName(state.segmentName, "", PreFlexCodec.PROX_EXTENSION);
- proxOut = state.directory.createOutput(proxFile);
+ proxOut = state.directory.createOutput(proxFile, state.context);
} else {
proxOut = null;
}
diff --git a/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexRWCodec.java b/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexRWCodec.java
index 07f0555..f911ef2 100644
--- a/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexRWCodec.java
+++ b/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/PreFlexRWCodec.java
@@ -50,7 +50,7 @@
// Whenever IW opens readers, eg for merging, we have to
// keep terms order in UTF16:
- return new PreFlexFields(state.dir, state.fieldInfos, state.segmentInfo, state.readBufferSize, state.termsIndexDivisor) {
+ return new PreFlexFields(state.dir, state.fieldInfos, state.segmentInfo, state.context, state.termsIndexDivisor) {
@Override
protected boolean sortTermsByUnicode() {
// We carefully peek into stack track above us: if
diff --git a/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/TermInfosWriter.java b/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/TermInfosWriter.java
index d3e0025..5179912 100644
--- a/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/TermInfosWriter.java
+++ b/lucene/src/test-framework/org/apache/lucene/index/codecs/preflexrw/TermInfosWriter.java
@@ -26,6 +26,7 @@
import org.apache.lucene.index.codecs.preflex.PreFlexCodec;
import org.apache.lucene.index.codecs.preflex.TermInfo;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
@@ -125,7 +126,7 @@
isIndex = isi;
output = directory.createOutput(IndexFileNames.segmentFileName(segment, "",
(isIndex ? PreFlexCodec.TERMS_INDEX_EXTENSION
- : PreFlexCodec.TERMS_EXTENSION)));
+ : PreFlexCodec.TERMS_EXTENSION)), IOContext.DEFAULT);
boolean success = false;
try {
output.writeInt(FORMAT_CURRENT); // write format
diff --git a/lucene/src/test-framework/org/apache/lucene/store/MockCompoundFileDirectoryWrapper.java b/lucene/src/test-framework/org/apache/lucene/store/MockCompoundFileDirectoryWrapper.java
index f707f9c..cc46535 100644
--- a/lucene/src/test-framework/org/apache/lucene/store/MockCompoundFileDirectoryWrapper.java
+++ b/lucene/src/test-framework/org/apache/lucene/store/MockCompoundFileDirectoryWrapper.java
@@ -26,7 +26,7 @@
private final String name;
public MockCompoundFileDirectoryWrapper(String name, MockDirectoryWrapper parent, CompoundFileDirectory delegate, boolean forWrite) throws IOException {
- super(parent, name, 1024);
+ super(parent, name, IOContext.DEFAULT);
this.name = name;
this.parent = parent;
this.delegate = delegate;
@@ -51,8 +51,8 @@
}
@Override
- public synchronized IndexInput openInput(String id, int readBufferSize) throws IOException {
- return delegate.openInput(id, readBufferSize);
+ public synchronized IndexInput openInput(String id, IOContext context) throws IOException {
+ return delegate.openInput(id, context);
}
@Override
@@ -86,8 +86,8 @@
}
@Override
- public IndexOutput createOutput(String name) throws IOException {
- return delegate.createOutput(name);
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
+ return delegate.createOutput(name, context);
}
@Override
@@ -126,8 +126,8 @@
}
@Override
- public void copy(Directory to, String src, String dest) throws IOException {
- delegate.copy(to, src, dest);
+ public void copy(Directory to, String src, String dest, IOContext context) throws IOException {
+ delegate.copy(to, src, dest, context);
}
@Override
@@ -136,12 +136,14 @@
}
@Override
- public CompoundFileDirectory createCompoundOutput(String name) throws IOException {
- return delegate.createCompoundOutput(name);
+ public CompoundFileDirectory createCompoundOutput(String name, IOContext context) throws IOException {
+ return delegate.createCompoundOutput(name, context);
}
-
- public CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
- return delegate.openCompoundInput(name, bufferSize);
+
+ @Override
+ public CompoundFileDirectory openCompoundInput(String name, IOContext context)
+ throws IOException {
+ return delegate.openCompoundInput(name, context);
}
}
diff --git a/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java b/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java
index a3d8745..3a05b3f 100644
--- a/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java
+++ b/lucene/src/test-framework/org/apache/lucene/store/MockDirectoryWrapper.java
@@ -196,7 +196,7 @@
long length = fileLength(name);
byte[] zeroes = new byte[256];
long upto = 0;
- IndexOutput out = delegate.createOutput(name);
+ IndexOutput out = delegate.createOutput(name, LuceneTestCase.newIOContext(randomState));
while(upto < length) {
final int limit = (int) Math.min(length-upto, zeroes.length);
out.writeBytes(zeroes, 0, limit);
@@ -205,7 +205,7 @@
out.close();
} else if (count % 3 == 2) {
// Truncate the file:
- IndexOutput out = delegate.createOutput(name);
+ IndexOutput out = delegate.createOutput(name, LuceneTestCase.newIOContext(randomState));
out.setLength(fileLength(name)/2);
out.close();
}
@@ -337,7 +337,7 @@
}
@Override
- public synchronized IndexOutput createOutput(String name) throws IOException {
+ public synchronized IndexOutput createOutput(String name, IOContext context) throws IOException {
maybeYield();
if (crashed)
throw new IOException("cannot createOutput after crash");
@@ -372,7 +372,7 @@
}
//System.out.println(Thread.currentThread().getName() + ": MDW: create " + name);
- IndexOutput io = new MockIndexOutputWrapper(this, delegate.createOutput(name), name);
+ IndexOutput io = new MockIndexOutputWrapper(this, delegate.createOutput(name, LuceneTestCase.newIOContext(randomState)), name);
addFileHandle(io, name, false);
openFilesForWrite.add(name);
@@ -401,7 +401,7 @@
}
@Override
- public synchronized IndexInput openInput(String name) throws IOException {
+ public synchronized IndexInput openInput(String name, IOContext context) throws IOException {
maybeYield();
if (!delegate.fileExists(name))
throw new FileNotFoundException(name);
@@ -412,21 +412,21 @@
throw fillOpenTrace(new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open for writing"), name, false);
}
- IndexInput ii = new MockIndexInputWrapper(this, name, delegate.openInput(name));
+ IndexInput ii = new MockIndexInputWrapper(this, name, delegate.openInput(name, LuceneTestCase.newIOContext(randomState)));
addFileHandle(ii, name, true);
return ii;
}
@Override
- public synchronized CompoundFileDirectory openCompoundInput(String name, int bufferSize) throws IOException {
+ public synchronized CompoundFileDirectory openCompoundInput(String name, IOContext context) throws IOException {
maybeYield();
- return new MockCompoundFileDirectoryWrapper(name, this, delegate.openCompoundInput(name, bufferSize), false);
+ return new MockCompoundFileDirectoryWrapper(name, this, delegate.openCompoundInput(name, context), false);
}
@Override
- public CompoundFileDirectory createCompoundOutput(String name) throws IOException {
+ public CompoundFileDirectory createCompoundOutput(String name, IOContext context) throws IOException {
maybeYield();
- return new MockCompoundFileDirectoryWrapper(name, this, delegate.createCompoundOutput(name), true);
+ return new MockCompoundFileDirectoryWrapper(name, this, delegate.createCompoundOutput(name, context), true);
}
/** Provided for testing purposes. Use sizeInBytes() instead. */
@@ -649,9 +649,10 @@
}
@Override
- public synchronized void copy(Directory to, String src, String dest) throws IOException {
+ public synchronized void copy(Directory to, String src, String dest, IOContext context) throws IOException {
maybeYield();
- delegate.copy(to, src, dest);
+ // randomize the IOContext here?
+ delegate.copy(to, src, dest, context);
}
}
diff --git a/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java b/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java
index 5660caa..c8a016f 100644
--- a/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/src/test-framework/org/apache/lucene/util/LuceneTestCase.java
@@ -60,7 +60,10 @@
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.FlushInfo;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.LockFactory;
+import org.apache.lucene.store.MergeInfo;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper.Throttling;
import org.apache.lucene.util.FieldCacheSanityChecker.Insanity;
@@ -1070,7 +1073,7 @@
public static MockDirectoryWrapper newDirectory(Random r, Directory d) throws IOException {
Directory impl = newDirectoryImpl(r, TEST_DIRECTORY);
for (String file : d.listAll()) {
- d.copy(impl, file, file);
+ d.copy(impl, file, file, newIOContext(r));
}
MockDirectoryWrapper dir = new MockDirectoryWrapper(r, impl);
stores.put(dir, Thread.currentThread().getStackTrace());
@@ -1331,6 +1334,32 @@
return sb.toString();
}
+ public static IOContext newIOContext(Random random) {
+ final int randomNumDocs = random.nextInt(4192);
+ final int size = random.nextInt(512) * randomNumDocs;
+ final IOContext context;
+ switch (random.nextInt(5)) {
+ case 0:
+ context = IOContext.DEFAULT;
+ break;
+ case 1:
+ context = IOContext.READ;
+ break;
+ case 2:
+ context = IOContext.READONCE;
+ break;
+ case 3:
+ context = new IOContext(new MergeInfo(randomNumDocs, size, true, false));
+ break;
+ case 4:
+ context = new IOContext(new FlushInfo(randomNumDocs, size));
+ break;
+ default:
+ context = IOContext.DEFAULT;
+ }
+ return context;
+ }
+
// recorded seed: for beforeClass
private static long staticSeed;
// seed for individual test methods, changed in @before
diff --git a/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java b/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
index ea3e771..a2c4ec4 100755
--- a/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
+++ b/lucene/src/test/org/apache/lucene/index/TestAddIndexes.java
@@ -38,6 +38,7 @@
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -395,7 +396,7 @@
setMergePolicy(newLogMergePolicy(4))
);
- writer.addIndexes(aux, new MockDirectoryWrapper(random, new RAMDirectory(aux)));
+ writer.addIndexes(aux, new MockDirectoryWrapper(random, new RAMDirectory(aux, newIOContext(random))));
assertEquals(1060, writer.maxDoc());
assertEquals(1000, writer.getDocCount(0));
writer.close();
@@ -430,7 +431,7 @@
setMergePolicy(newLogMergePolicy(4))
);
- writer.addIndexes(aux, new MockDirectoryWrapper(random, new RAMDirectory(aux)));
+ writer.addIndexes(aux, new MockDirectoryWrapper(random, new RAMDirectory(aux, newIOContext(random))));
assertEquals(1020, writer.maxDoc());
assertEquals(1000, writer.getDocCount(0));
writer.close();
@@ -665,7 +666,7 @@
final Directory[] dirs = new Directory[NUM_COPY];
for(int k=0;k<NUM_COPY;k++)
- dirs[k] = new MockDirectoryWrapper(random, new RAMDirectory(dir));
+ dirs[k] = new MockDirectoryWrapper(random, new RAMDirectory(dir, newIOContext(random)));
int j=0;
diff --git a/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java b/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
index d1dbefe..5a629da 100644
--- a/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
+++ b/lucene/src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
@@ -538,7 +538,7 @@
// figure out which field number corresponds to
// "content", and then set our expected file names below
// accordingly:
- CompoundFileDirectory cfsReader = dir.openCompoundInput("_0.cfs", 1024);
+ CompoundFileDirectory cfsReader = dir.openCompoundInput("_0.cfs", newIOContext(random));
FieldInfos fieldInfos = new FieldInfos(cfsReader, "_0.fnm");
int contentFieldIndex = -1;
for (FieldInfo fi : fieldInfos) {
diff --git a/lucene/src/test/org/apache/lucene/index/TestCodecs.java b/lucene/src/test/org/apache/lucene/index/TestCodecs.java
index e397bc7..fb70229 100644
--- a/lucene/src/test/org/apache/lucene/index/TestCodecs.java
+++ b/lucene/src/test/org/apache/lucene/index/TestCodecs.java
@@ -39,6 +39,8 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IOContext.Context;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.Version;
@@ -242,7 +244,7 @@
this.write(fieldInfos, dir, fields, true);
final SegmentInfo si = new SegmentInfo(SEGMENT, 10000, dir, false, clonedFieldInfos.buildSegmentCodecs(false), clonedFieldInfos);
- final FieldsProducer reader = si.getSegmentCodecs().codec().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, 64, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR));
+ final FieldsProducer reader = si.getSegmentCodecs().codec().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR));
final FieldsEnum fieldsEnum = reader.iterator();
assertNotNull(fieldsEnum.next());
@@ -297,7 +299,7 @@
if (VERBOSE) {
System.out.println("TEST: now read postings");
}
- final FieldsProducer terms = si.getSegmentCodecs().codec().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, 1024, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR));
+ final FieldsProducer terms = si.getSegmentCodecs().codec().fieldsProducer(new SegmentReadState(dir, si, fieldInfos, newIOContext(random), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR));
final Verify[] threads = new Verify[NUM_TEST_THREADS-1];
for(int i=0;i<NUM_TEST_THREADS-1;i++) {
@@ -592,7 +594,7 @@
final int termIndexInterval = _TestUtil.nextInt(random, 13, 27);
final SegmentCodecs codecInfo = fieldInfos.buildSegmentCodecs(false);
- final SegmentWriteState state = new SegmentWriteState(null, dir, SEGMENT, fieldInfos, 10000, termIndexInterval, codecInfo, null);
+ final SegmentWriteState state = new SegmentWriteState(null, dir, SEGMENT, fieldInfos, 10000, termIndexInterval, codecInfo, null, newIOContext(random));
final FieldsConsumer consumer = state.segmentCodecs.codec().fieldsConsumer(state);
Arrays.sort(fields);
diff --git a/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java b/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java
index 3eb5967..f6acb3b 100644
--- a/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java
+++ b/lucene/src/test/org/apache/lucene/index/TestCompoundFile.java
@@ -23,11 +23,10 @@
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.store.CompoundFileDirectory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.MockDirectoryWrapper.Failure;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.store._TestHelper;
import org.apache.lucene.util._TestUtil;
@@ -55,7 +54,7 @@
private void createRandomFile(Directory dir, String name, int size)
throws IOException
{
- IndexOutput os = dir.createOutput(name);
+ IndexOutput os = dir.createOutput(name, newIOContext(random));
for (int i=0; i<size; i++) {
byte b = (byte) (Math.random() * 256);
os.writeByte(b);
@@ -73,7 +72,7 @@
int size)
throws IOException
{
- IndexOutput os = dir.createOutput(name);
+ IndexOutput os = dir.createOutput(name, newIOContext(random));
for (int i=0; i < size; i++) {
os.writeByte(start);
start ++;
@@ -183,13 +182,13 @@
for (int i=0; i<data.length; i++) {
String name = "t" + data[i];
createSequenceFile(dir, name, (byte) 0, data[i]);
- CompoundFileDirectory csw = dir.createCompoundOutput(name + ".cfs");
- dir.copy(csw, name, name);
+ CompoundFileDirectory csw = dir.createCompoundOutput(name + ".cfs", newIOContext(random));
+ dir.copy(csw, name, name, newIOContext(random));
csw.close();
- CompoundFileDirectory csr = dir.openCompoundInput(name + ".cfs", 1024);
- IndexInput expected = dir.openInput(name);
- IndexInput actual = csr.openInput(name);
+ CompoundFileDirectory csr = dir.openCompoundInput(name + ".cfs", newIOContext(random));
+ IndexInput expected = dir.openInput(name, newIOContext(random));
+ IndexInput actual = csr.openInput(name, newIOContext(random));
assertSameStreams(name, expected, actual);
assertSameSeekBehavior(name, expected, actual);
expected.close();
@@ -206,21 +205,21 @@
createSequenceFile(dir, "d1", (byte) 0, 15);
createSequenceFile(dir, "d2", (byte) 0, 114);
- CompoundFileDirectory csw = dir.createCompoundOutput("d.cfs");
- dir.copy(csw, "d1", "d1");
- dir.copy(csw, "d2", "d2");
+ CompoundFileDirectory csw = dir.createCompoundOutput("d.cfs", newIOContext(random));
+ dir.copy(csw, "d1", "d1", newIOContext(random));
+ dir.copy(csw, "d2", "d2", newIOContext(random));
csw.close();
- CompoundFileDirectory csr = dir.openCompoundInput("d.cfs", 1024);
- IndexInput expected = dir.openInput("d1");
- IndexInput actual = csr.openInput("d1");
+ CompoundFileDirectory csr = dir.openCompoundInput("d.cfs", newIOContext(random));
+ IndexInput expected = dir.openInput("d1", newIOContext(random));
+ IndexInput actual = csr.openInput("d1", newIOContext(random));
assertSameStreams("d1", expected, actual);
assertSameSeekBehavior("d1", expected, actual);
expected.close();
actual.close();
- expected = dir.openInput("d2");
- actual = csr.openInput("d2");
+ expected = dir.openInput("d2", newIOContext(random));
+ actual = csr.openInput("d2", newIOContext(random));
assertSameStreams("d2", expected, actual);
assertSameSeekBehavior("d2", expected, actual);
expected.close();
@@ -256,21 +255,21 @@
createRandomFile(dir, segment + ".notIn2", 51);
// Now test
- CompoundFileDirectory csw = dir.createCompoundOutput("test.cfs");
+ CompoundFileDirectory csw = dir.createCompoundOutput("test.cfs", newIOContext(random));
final String data[] = new String[] {
".zero", ".one", ".ten", ".hundred", ".big1", ".big2", ".big3",
".big4", ".big5", ".big6", ".big7"
};
for (int i=0; i<data.length; i++) {
String fileName = segment + data[i];
- dir.copy(csw, fileName, fileName);
+ dir.copy(csw, fileName, fileName, newIOContext(random));
}
csw.close();
- CompoundFileDirectory csr = dir.openCompoundInput("test.cfs", 1024);
+ CompoundFileDirectory csr = dir.openCompoundInput("test.cfs", newIOContext(random));
for (int i=0; i<data.length; i++) {
- IndexInput check = dir.openInput(segment + data[i]);
- IndexInput test = csr.openInput(segment + data[i]);
+ IndexInput check = dir.openInput(segment + data[i], newIOContext(random));
+ IndexInput test = csr.openInput(segment + data[i], newIOContext(random));
assertSameStreams(data[i], check, test);
assertSameSeekBehavior(data[i], check, test);
test.close();
@@ -286,11 +285,11 @@
* the size of each file is 1000 bytes.
*/
private void setUp_2() throws IOException {
- CompoundFileDirectory cw = dir.createCompoundOutput("f.comp");
+ CompoundFileDirectory cw = dir.createCompoundOutput("f.comp", newIOContext(random));
for (int i=0; i<20; i++) {
createSequenceFile(dir, "f" + i, (byte) 0, 2000);
String fileName = "f" + i;
- dir.copy(cw, fileName, fileName);
+ dir.copy(cw, fileName, fileName, newIOContext(random));
}
cw.close();
}
@@ -304,13 +303,13 @@
throws IOException
{
// Setup the test file - we need more than 1024 bytes
- IndexOutput os = fsdir.createOutput(file);
+ IndexOutput os = fsdir.createOutput(file, IOContext.DEFAULT);
for(int i=0; i<2000; i++) {
os.writeByte((byte) i);
}
os.close();
- IndexInput in = fsdir.openInput(file);
+ IndexInput in = fsdir.openInput(file, IOContext.DEFAULT);
// This read primes the buffer in IndexInput
in.readByte();
@@ -337,16 +336,16 @@
public void testClonedStreamsClosing() throws IOException {
setUp_2();
- CompoundFileDirectory cr = dir.openCompoundInput("f.comp", 1024);
+ CompoundFileDirectory cr = dir.openCompoundInput("f.comp", newIOContext(random));
// basic clone
- IndexInput expected = dir.openInput("f11");
+ IndexInput expected = dir.openInput("f11", newIOContext(random));
// this test only works for FSIndexInput
assertTrue(_TestHelper.isSimpleFSIndexInput(expected));
assertTrue(_TestHelper.isSimpleFSIndexInputOpen(expected));
- IndexInput one = cr.openInput("f11");
+ IndexInput one = cr.openInput("f11", newIOContext(random));
IndexInput two = (IndexInput) one.clone();
@@ -389,14 +388,14 @@
*/
public void testRandomAccess() throws IOException {
setUp_2();
- CompoundFileDirectory cr = dir.openCompoundInput("f.comp", 1024);
+ CompoundFileDirectory cr = dir.openCompoundInput("f.comp", newIOContext(random));
// Open two files
- IndexInput e1 = dir.openInput("f11");
- IndexInput e2 = dir.openInput("f3");
+ IndexInput e1 = dir.openInput("f11", newIOContext(random));
+ IndexInput e2 = dir.openInput("f3", newIOContext(random));
- IndexInput a1 = cr.openInput("f11");
- IndexInput a2 = dir.openInput("f3");
+ IndexInput a1 = cr.openInput("f11", newIOContext(random));
+ IndexInput a2 = dir.openInput("f3", newIOContext(random));
// Seek the first pair
e1.seek(100);
@@ -468,11 +467,11 @@
*/
public void testRandomAccessClones() throws IOException {
setUp_2();
- CompoundFileDirectory cr = dir.openCompoundInput("f.comp", 1024);
+ CompoundFileDirectory cr = dir.openCompoundInput("f.comp", newIOContext(random));
// Open two files
- IndexInput e1 = cr.openInput("f11");
- IndexInput e2 = cr.openInput("f3");
+ IndexInput e1 = cr.openInput("f11", newIOContext(random));
+ IndexInput e2 = cr.openInput("f3", newIOContext(random));
IndexInput a1 = (IndexInput) e1.clone();
IndexInput a2 = (IndexInput) e2.clone();
@@ -545,11 +544,11 @@
public void testFileNotFound() throws IOException {
setUp_2();
- CompoundFileDirectory cr = dir.openCompoundInput("f.comp", 1024);
+ CompoundFileDirectory cr = dir.openCompoundInput("f.comp", newIOContext(random));
// Open two files
try {
- cr.openInput("bogus");
+ cr.openInput("bogus", newIOContext(random));
fail("File not found");
} catch (IOException e) {
@@ -563,8 +562,8 @@
public void testReadPastEOF() throws IOException {
setUp_2();
- CompoundFileDirectory cr = dir.openCompoundInput("f.comp", 1024);
- IndexInput is = cr.openInput("f2");
+ CompoundFileDirectory cr = dir.openCompoundInput("f.comp", newIOContext(random));
+ IndexInput is = cr.openInput("f2", newIOContext(random));
is.seek(is.length() - 10);
byte b[] = new byte[100];
is.readBytes(b, 0, 10);
@@ -594,7 +593,7 @@
* will correctly increment the file pointer.
*/
public void testLargeWrites() throws IOException {
- IndexOutput os = dir.createOutput("testBufferStart.txt");
+ IndexOutput os = dir.createOutput("testBufferStart.txt", newIOContext(random));
byte[] largeBuf = new byte[2048];
for (int i=0; i<largeBuf.length; i++) {
@@ -616,13 +615,13 @@
createSequenceFile(dir, "d1", (byte) 0, 15);
Directory newDir = newDirectory();
- CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs");
- dir.copy(csw, "d1", "d1");
+ CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs", newIOContext(random));
+ dir.copy(csw, "d1", "d1", newIOContext(random));
csw.close();
- CompoundFileDirectory csr = newDir.openCompoundInput("d.cfs", 1024);
- IndexInput expected = dir.openInput("d1");
- IndexInput actual = csr.openInput("d1");
+ CompoundFileDirectory csr = newDir.openCompoundInput("d.cfs", newIOContext(random));
+ IndexInput expected = dir.openInput("d1", newIOContext(random));
+ IndexInput actual = csr.openInput("d1", newIOContext(random));
assertSameStreams("d1", expected, actual);
assertSameSeekBehavior("d1", expected, actual);
expected.close();
@@ -635,10 +634,10 @@
public void testAppend() throws IOException {
Directory newDir = newDirectory();
- CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs");
+ CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs", newIOContext(random));
int size = 5 + random.nextInt(128);
for (int j = 0; j < 2; j++) {
- IndexOutput os = csw.createOutput("seg" + j + "_foo.txt");
+ IndexOutput os = csw.createOutput("seg" + j + "_foo.txt", newIOContext(random));
for (int i = 0; i < size; i++) {
os.writeInt(i);
}
@@ -648,14 +647,14 @@
assertEquals("d.cfs", listAll[0]);
}
createSequenceFile(dir, "d1", (byte) 0, 15);
- dir.copy(csw, "d1", "d1");
+ dir.copy(csw, "d1", "d1", newIOContext(random));
String[] listAll = newDir.listAll();
assertEquals(1, listAll.length);
assertEquals("d.cfs", listAll[0]);
csw.close();
- CompoundFileDirectory csr = newDir.openCompoundInput("d.cfs", 1024);
+ CompoundFileDirectory csr = newDir.openCompoundInput("d.cfs", newIOContext(random));
for (int j = 0; j < 2; j++) {
- IndexInput openInput = csr.openInput("seg" + j + "_foo.txt");
+ IndexInput openInput = csr.openInput("seg" + j + "_foo.txt", newIOContext(random));
assertEquals(size * 4, openInput.length());
for (int i = 0; i < size; i++) {
assertEquals(i, openInput.readInt());
@@ -664,8 +663,8 @@
openInput.close();
}
- IndexInput expected = dir.openInput("d1");
- IndexInput actual = csr.openInput("d1");
+ IndexInput expected = dir.openInput("d1", newIOContext(random));
+ IndexInput actual = csr.openInput("d1", newIOContext(random));
assertSameStreams("d1", expected, actual);
assertSameSeekBehavior("d1", expected, actual);
expected.close();
@@ -676,12 +675,12 @@
public void testAppendTwice() throws IOException {
Directory newDir = newDirectory();
- CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs");
+ CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs", newIOContext(random));
createSequenceFile(newDir, "d1", (byte) 0, 15);
- IndexOutput out = csw.createOutput("d.xyz");
+ IndexOutput out = csw.createOutput("d.xyz", newIOContext(random));
out.writeInt(0);
try {
- newDir.copy(csw, "d1", "d1");
+ newDir.copy(csw, "d1", "d1", newIOContext(random));
fail("file does already exist");
} catch (IOException e) {
//
@@ -692,7 +691,7 @@
csw.close();
- CompoundFileDirectory cfr = newDir.openCompoundInput("d.cfs", 1024);
+ CompoundFileDirectory cfr = newDir.openCompoundInput("d.cfs", newIOContext(random));
assertEquals(1, cfr.listAll().length);
assertEquals("d.xyz", cfr.listAll()[0]);
cfr.close();
@@ -701,10 +700,10 @@
public void testEmptyCFS() throws IOException {
Directory newDir = newDirectory();
- CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs");
+ CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs", newIOContext(random));
csw.close();
- CompoundFileDirectory csr = newDir.openCompoundInput("d.cfs", 1024);
+ CompoundFileDirectory csr = newDir.openCompoundInput("d.cfs", newIOContext(random));
assertEquals(0, csr.listAll().length);
csr.close();
@@ -713,32 +712,32 @@
public void testReadNestedCFP() throws IOException {
Directory newDir = newDirectory();
- CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs");
- CompoundFileDirectory nested = newDir.createCompoundOutput("b.cfs");
- IndexOutput out = nested.createOutput("b.xyz");
- IndexOutput out1 = nested.createOutput("b_1.xyz");
+ CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs", newIOContext(random));
+ CompoundFileDirectory nested = newDir.createCompoundOutput("b.cfs", newIOContext(random));
+ IndexOutput out = nested.createOutput("b.xyz", newIOContext(random));
+ IndexOutput out1 = nested.createOutput("b_1.xyz", newIOContext(random));
out.writeInt(0);
out1.writeInt(1);
out.close();
out1.close();
nested.close();
- newDir.copy(csw, "b.cfs", "b.cfs");
- newDir.copy(csw, "b.cfe", "b.cfe");
+ newDir.copy(csw, "b.cfs", "b.cfs", newIOContext(random));
+ newDir.copy(csw, "b.cfe", "b.cfe", newIOContext(random));
newDir.deleteFile("b.cfs");
newDir.deleteFile("b.cfe");
csw.close();
assertEquals(2, newDir.listAll().length);
- csw = newDir.openCompoundInput("d.cfs", 1024);
+ csw = newDir.openCompoundInput("d.cfs", newIOContext(random));
assertEquals(2, csw.listAll().length);
- nested = csw.openCompoundInput("b.cfs", 1024);
+ nested = csw.openCompoundInput("b.cfs", newIOContext(random));
assertEquals(2, nested.listAll().length);
- IndexInput openInput = nested.openInput("b.xyz");
+ IndexInput openInput = nested.openInput("b.xyz", newIOContext(random));
assertEquals(0, openInput.readInt());
openInput.close();
- openInput = nested.openInput("b_1.xyz");
+ openInput = nested.openInput("b_1.xyz", newIOContext(random));
assertEquals(1, openInput.readInt());
openInput.close();
nested.close();
@@ -748,8 +747,8 @@
public void testDoubleClose() throws IOException {
Directory newDir = newDirectory();
- CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs");
- IndexOutput out = csw.createOutput("d.xyz");
+ CompoundFileDirectory csw = newDir.createCompoundOutput("d.cfs", newIOContext(random));
+ IndexOutput out = csw.createOutput("d.xyz", newIOContext(random));
out.writeInt(0);
out.close();
@@ -757,8 +756,8 @@
// close a second time - must have no effect according to Closeable
csw.close();
- csw = newDir.openCompoundInput("d.cfs", 1024);
- IndexInput openInput = csw.openInput("d.xyz");
+ csw = newDir.openCompoundInput("d.cfs", newIOContext(random));
+ IndexInput openInput = csw.openInput("d.xyz", newIOContext(random));
assertEquals(0, openInput.readInt());
openInput.close();
csw.close();
diff --git a/lucene/src/test/org/apache/lucene/index/TestDoc.java b/lucene/src/test/org/apache/lucene/index/TestDoc.java
index 78c069d..b4d23b5 100644
--- a/lucene/src/test/org/apache/lucene/index/TestDoc.java
+++ b/lucene/src/test/org/apache/lucene/index/TestDoc.java
@@ -35,6 +35,7 @@
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import org.apache.lucene.index.codecs.CodecProvider;
@@ -193,10 +194,11 @@
private SegmentInfo merge(SegmentInfo si1, SegmentInfo si2, String merged, boolean useCompoundFile)
throws Exception {
- SegmentReader r1 = SegmentReader.get(true, si1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
- SegmentReader r2 = SegmentReader.get(true, si2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ IOContext context = newIOContext(random);
+ SegmentReader r1 = SegmentReader.get(true, si1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
+ SegmentReader r2 = SegmentReader.get(true, si2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, context);
- SegmentMerger merger = new SegmentMerger(si1.dir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, merged, null, null, new FieldInfos());
+ SegmentMerger merger = new SegmentMerger(si1.dir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, merged, null, null, new FieldInfos(), context);
merger.add(r1);
merger.add(r2);
@@ -208,7 +210,7 @@
false, merger.getSegmentCodecs(), fieldInfos);
if (useCompoundFile) {
- Collection<String> filesToDelete = merger.createCompoundFile(merged + ".cfs", info);
+ Collection<String> filesToDelete = merger.createCompoundFile(merged + ".cfs", info, newIOContext(random));
info.setUseCompoundFile(true);
for (final String fileToDelete : filesToDelete)
si1.dir.deleteFile(fileToDelete);
@@ -220,7 +222,7 @@
private void printSegment(PrintWriter out, SegmentInfo si)
throws Exception {
- SegmentReader reader = SegmentReader.get(true, si, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ SegmentReader reader = SegmentReader.get(true, si, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
for (int i = 0; i < reader.numDocs(); i++)
out.println(reader.document(i));
diff --git a/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java b/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java
index 24413cb..b568279 100644
--- a/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java
+++ b/lucene/src/test/org/apache/lucene/index/TestDocTermOrds.java
@@ -149,7 +149,7 @@
@Override
public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {
- PostingsReaderBase postings = new StandardPostingsReader(state.dir, state.segmentInfo, state.readBufferSize, state.codecId);
+ PostingsReaderBase postings = new StandardPostingsReader(state.dir, state.segmentInfo, state.context, state.codecId);
TermsIndexReaderBase indexReader;
boolean success = false;
@@ -159,7 +159,7 @@
state.segmentInfo.name,
state.termsIndexDivisor,
BytesRef.getUTF8SortedAsUnicodeComparator(),
- state.codecId);
+ state.codecId, state.context);
success = true;
} finally {
if (!success) {
@@ -174,7 +174,7 @@
state.fieldInfos,
state.segmentInfo.name,
postings,
- state.readBufferSize,
+ state.context,
TERMS_CACHE_SIZE,
state.codecId);
success = true;
@@ -224,7 +224,7 @@
@Override
public PerDocValues docsProducer(SegmentReadState state) throws IOException {
- return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator());
+ return new DefaultDocValuesProducer(state.segmentInfo, state.dir, state.fieldInfos, state.codecId, getDocValuesUseCFS(), getDocValuesSortComparator(), state.context);
}
}
diff --git a/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java b/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java
index 9ec66d5..61894fe 100644
--- a/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java
+++ b/lucene/src/test/org/apache/lucene/index/TestDocumentWriter.java
@@ -35,6 +35,8 @@
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IOContext.Context;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
@@ -68,7 +70,7 @@
SegmentInfo info = writer.newestSegment();
writer.close();
//After adding the document, we should be able to read it back in
- SegmentReader reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ SegmentReader reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
assertTrue(reader != null);
Document doc = reader.document(0);
assertTrue(doc != null);
@@ -129,7 +131,7 @@
writer.commit();
SegmentInfo info = writer.newestSegment();
writer.close();
- SegmentReader reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ SegmentReader reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
DocsAndPositionsEnum termPositions = MultiFields.getTermPositionsEnum(reader, MultiFields.getLiveDocs(reader),
"repeated", new BytesRef("repeated"));
@@ -193,7 +195,7 @@
writer.commit();
SegmentInfo info = writer.newestSegment();
writer.close();
- SegmentReader reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ SegmentReader reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
DocsAndPositionsEnum termPositions = reader.fields().terms("f1").docsAndPositions(reader.getLiveDocs(), new BytesRef("a"), null);
assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);
@@ -237,7 +239,7 @@
writer.commit();
SegmentInfo info = writer.newestSegment();
writer.close();
- SegmentReader reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ SegmentReader reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
DocsAndPositionsEnum termPositions = reader.fields().terms("preanalyzed").docsAndPositions(reader.getLiveDocs(), new BytesRef("term1"), null);
assertTrue(termPositions.nextDoc() != termPositions.NO_MORE_DOCS);
diff --git a/lucene/src/test/org/apache/lucene/index/TestFieldInfos.java b/lucene/src/test/org/apache/lucene/index/TestFieldInfos.java
index d68afdd..ad6113a 100644
--- a/lucene/src/test/org/apache/lucene/index/TestFieldInfos.java
+++ b/lucene/src/test/org/apache/lucene/index/TestFieldInfos.java
@@ -47,7 +47,7 @@
assertTrue(fieldInfos.size() == DocHelper.all.size()); //this is all b/c we are using the no-arg constructor
- IndexOutput output = dir.createOutput(filename);
+ IndexOutput output = dir.createOutput(filename, newIOContext(random));
assertTrue(output != null);
//Use a RAMOutputStream
diff --git a/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java b/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java
index 70e8cbc..9fb0071 100644
--- a/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java
+++ b/lucene/src/test/org/apache/lucene/index/TestFieldsReader.java
@@ -35,6 +35,7 @@
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.LuceneTestCase;
@@ -403,8 +404,8 @@
lockFactory = fsDir.getLockFactory();
}
@Override
- public IndexInput openInput(String name) throws IOException {
- return new FaultyIndexInput(fsDir.openInput(name));
+ public IndexInput openInput(String name, IOContext context) throws IOException {
+ return new FaultyIndexInput(fsDir.openInput(name, context));
}
@Override
public String[] listAll() throws IOException {
@@ -427,8 +428,8 @@
return fsDir.fileLength(name);
}
@Override
- public IndexOutput createOutput(String name) throws IOException {
- return fsDir.createOutput(name);
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
+ return fsDir.createOutput(name, context);
}
@Override
public void sync(Collection<String> names) throws IOException {
diff --git a/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java b/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
index fd72f45..a87bcb9 100644
--- a/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
+++ b/lucene/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
@@ -92,7 +92,7 @@
// figure out which field number corresponds to
// "content", and then set our expected file names below
// accordingly:
- CompoundFileDirectory cfsReader = dir.openCompoundInput("_2.cfs", 1024);
+ CompoundFileDirectory cfsReader = dir.openCompoundInput("_2.cfs", newIOContext(random));
FieldInfos fieldInfos = new FieldInfos(cfsReader, "_2.fnm");
int contentFieldIndex = -1;
for (FieldInfo fi : fieldInfos) {
@@ -213,8 +213,8 @@
}
public void copyFile(Directory dir, String src, String dest) throws IOException {
- IndexInput in = dir.openInput(src);
- IndexOutput out = dir.createOutput(dest);
+ IndexInput in = dir.openInput(src, newIOContext(random));
+ IndexOutput out = dir.createOutput(dest, newIOContext(random));
byte[] b = new byte[1024];
long remainder = in.length();
while(remainder > 0) {
diff --git a/lucene/src/test/org/apache/lucene/index/TestIndexInput.java b/lucene/src/test/org/apache/lucene/index/TestIndexInput.java
index c5fc426..4c6b881 100644
--- a/lucene/src/test/org/apache/lucene/index/TestIndexInput.java
+++ b/lucene/src/test/org/apache/lucene/index/TestIndexInput.java
@@ -99,10 +99,10 @@
// this test checks the raw IndexInput methods as it uses RAMIndexInput which extends IndexInput directly
public void testRawIndexInputRead() throws IOException {
final RAMDirectory dir = new RAMDirectory();
- final IndexOutput os = dir.createOutput("foo");
+ final IndexOutput os = dir.createOutput("foo", newIOContext(random));
os.writeBytes(READ_TEST_BYTES, READ_TEST_BYTES.length);
os.close();
- final IndexInput is = dir.openInput("foo");
+ final IndexInput is = dir.openInput("foo", newIOContext(random));
checkReads(is);
is.close();
dir.close();
diff --git a/lucene/src/test/org/apache/lucene/index/TestIndexReaderOnDiskFull.java b/lucene/src/test/org/apache/lucene/index/TestIndexReaderOnDiskFull.java
index 791a14f..058939e 100644
--- a/lucene/src/test/org/apache/lucene/index/TestIndexReaderOnDiskFull.java
+++ b/lucene/src/test/org/apache/lucene/index/TestIndexReaderOnDiskFull.java
@@ -84,7 +84,7 @@
// Iterate w/ ever increasing free disk space:
while(!done) {
- MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
+ MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir, newIOContext(random)));
// If IndexReader hits disk full, it can write to
// the same files again.
diff --git a/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
index 87f307f..fc1448e 100644
--- a/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
+++ b/lucene/src/test/org/apache/lucene/index/TestIndexWriter.java
@@ -60,6 +60,7 @@
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockFactory;
@@ -1029,7 +1030,7 @@
Directory dir = newDirectory();
try {
// Create my own random file:
- IndexOutput out = dir.createOutput("myrandomfile");
+ IndexOutput out = dir.createOutput("myrandomfile", newIOContext(random));
out.writeByte((byte) 42);
out.close();
diff --git a/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java b/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
index b1a38fc..0579d6c 100644
--- a/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
+++ b/lucene/src/test/org/apache/lucene/index/TestIndexWriterDelete.java
@@ -453,7 +453,7 @@
if (VERBOSE) {
System.out.println("TEST: cycle");
}
- MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
+ MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir, newIOContext(random)));
dir.setPreventDoubleWrite(false);
IndexWriter modifier = new IndexWriter(dir,
newIndexWriterConfig(
diff --git a/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java b/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
index a112e21..7e1078c 100644
--- a/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
+++ b/lucene/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
@@ -39,6 +39,7 @@
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.MockDirectoryWrapper;
@@ -935,7 +936,7 @@
if (VERBOSE) {
System.out.println("TEST: iter " + i);
}
- MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
+ MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir, newIOContext(random)));
conf = newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new ConcurrentMergeScheduler());
((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions();
w = new IndexWriter(dir, conf);
@@ -1039,8 +1040,8 @@
assertTrue("segment generation should be > 0 but got " + gen, gen > 0);
final String segmentsFileName = SegmentInfos.getCurrentSegmentFileName(dir);
- IndexInput in = dir.openInput(segmentsFileName);
- IndexOutput out = dir.createOutput(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1+gen));
+ IndexInput in = dir.openInput(segmentsFileName, newIOContext(random));
+ IndexOutput out = dir.createOutput(IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", 1+gen), newIOContext(random));
out.copyBytes(in, in.length()-1);
byte b = in.readByte();
out.writeByte((byte) (1+b));
@@ -1084,8 +1085,8 @@
String fileNameOut = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
"",
1+gen);
- IndexInput in = dir.openInput(fileNameIn);
- IndexOutput out = dir.createOutput(fileNameOut);
+ IndexInput in = dir.openInput(fileNameIn, newIOContext(random));
+ IndexOutput out = dir.createOutput(fileNameOut, newIOContext(random));
long length = in.length();
for(int i=0;i<length-1;i++) {
out.writeByte(in.readByte());
@@ -1185,8 +1186,8 @@
String fileNameOut = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
"",
1+gen);
- IndexInput in = dir.openInput(fileNameIn);
- IndexOutput out = dir.createOutput(fileNameOut);
+ IndexInput in = dir.openInput(fileNameIn, newIOContext(random));
+ IndexOutput out = dir.createOutput(fileNameOut, newIOContext(random));
long length = in.length();
for(int i=0;i<length-1;i++) {
out.writeByte(in.readByte());
diff --git a/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java b/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
index 10492a2..50febbd 100644
--- a/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
+++ b/lucene/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java
@@ -27,6 +27,7 @@
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -230,7 +231,7 @@
}
// Make a new dir that will enforce disk usage:
- MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir));
+ MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory(startDir, newIOContext(random)));
writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND).setMergePolicy(newLogMergePolicy()));
IOException err = null;
writer.setInfoStream(VERBOSE ? System.out : null);
diff --git a/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java b/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java
index 5f2205e..231b0e1 100644
--- a/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java
+++ b/lucene/src/test/org/apache/lucene/index/TestIndexWriterReader.java
@@ -36,6 +36,7 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.RAMDirectory;
@@ -470,7 +471,7 @@
try {
final Directory[] dirs = new Directory[numDirs];
for (int k = 0; k < numDirs; k++)
- dirs[k] = new MockDirectoryWrapper(random, new RAMDirectory(addDir));
+ dirs[k] = new MockDirectoryWrapper(random, new RAMDirectory(addDir, newIOContext(random)));
//int j = 0;
//while (true) {
// System.out.println(Thread.currentThread().getName() + ": iter
@@ -755,7 +756,7 @@
final Directory[] dirs = new Directory[10];
for (int i=0;i<10;i++) {
- dirs[i] = new MockDirectoryWrapper(random, new RAMDirectory(dir1));
+ dirs[i] = new MockDirectoryWrapper(random, new RAMDirectory(dir1, newIOContext(random)));
}
IndexReader r = writer.getReader();
diff --git a/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java b/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
index 70ac03d..fabac14 100755
--- a/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
+++ b/lucene/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
@@ -31,6 +31,7 @@
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
@@ -56,8 +57,8 @@
}
@Override
- public IndexInput openInput(String name) throws IOException {
- IndexInput ii = super.openInput(name);
+ public IndexInput openInput(String name, IOContext context) throws IOException {
+ IndexInput ii = super.openInput(name, context);
if (name.endsWith(".prx") || name.endsWith(".pos") ) {
// we decorate the proxStream with a wrapper class that allows to count the number of calls of seek()
ii = new SeeksCountingStream(ii);
diff --git a/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java b/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
index 971da6e..4e35080 100644
--- a/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
+++ b/lucene/src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
@@ -30,6 +30,7 @@
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
@@ -54,8 +55,8 @@
}
@Override
- public IndexInput openInput(String fileName) throws IOException {
- IndexInput in = super.openInput(fileName);
+ public IndexInput openInput(String fileName, IOContext context) throws IOException {
+ IndexInput in = super.openInput(fileName, context);
if (fileName.endsWith(".frq"))
in = new CountingStream(in);
return in;
diff --git a/lucene/src/test/org/apache/lucene/index/TestMultiReader.java b/lucene/src/test/org/apache/lucene/index/TestMultiReader.java
index c934b4d..235e9db 100644
--- a/lucene/src/test/org/apache/lucene/index/TestMultiReader.java
+++ b/lucene/src/test/org/apache/lucene/index/TestMultiReader.java
@@ -19,6 +19,8 @@
import java.io.IOException;
+import org.apache.lucene.store.IOContext;
+
public class TestMultiReader extends TestDirectoryReader {
@Override
@@ -26,8 +28,8 @@
IndexReader reader;
sis.read(dir);
- SegmentReader reader1 = SegmentReader.get(false, sis.info(0), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
- SegmentReader reader2 = SegmentReader.get(false, sis.info(1), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ SegmentReader reader1 = SegmentReader.get(false, sis.info(0), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+ SegmentReader reader2 = SegmentReader.get(false, sis.info(1), IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
readers[0] = reader1;
readers[1] = reader2;
assertTrue(reader1 != null);
diff --git a/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java b/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java
index 0a6103d..ba73fc3 100644
--- a/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java
+++ b/lucene/src/test/org/apache/lucene/index/TestSegmentMerger.java
@@ -20,6 +20,7 @@
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -54,8 +55,8 @@
SegmentInfo info1 = DocHelper.writeDoc(random, merge1Dir, doc1);
DocHelper.setupDoc(doc2);
SegmentInfo info2 = DocHelper.writeDoc(random, merge2Dir, doc2);
- reader1 = SegmentReader.get(true, info1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
- reader2 = SegmentReader.get(true, info2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ reader1 = SegmentReader.get(true, info1, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
+ reader2 = SegmentReader.get(true, info2, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
}
@Override
@@ -77,7 +78,7 @@
}
public void testMerge() throws IOException {
- SegmentMerger merger = new SegmentMerger(mergedDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, mergedSegment, null, null, new FieldInfos());
+ SegmentMerger merger = new SegmentMerger(mergedDir, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, mergedSegment, null, null, new FieldInfos(), newIOContext(random));
merger.add(reader1);
merger.add(reader2);
int docsMerged = merger.merge();
@@ -86,7 +87,7 @@
//Should be able to open a new SegmentReader against the new directory
SegmentReader mergedReader = SegmentReader.get(false, mergedDir, new SegmentInfo(mergedSegment, docsMerged, mergedDir, false,
merger.getSegmentCodecs(), fieldInfos),
- BufferedIndexInput.BUFFER_SIZE, true, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ true, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
assertTrue(mergedReader != null);
assertTrue(mergedReader.numDocs() == 2);
Document newDoc1 = mergedReader.document(0);
@@ -148,9 +149,9 @@
w.close();
// Assert that SM fails if .del exists
- SegmentMerger sm = new SegmentMerger(dir, 1, "a", null, null, null);
+ SegmentMerger sm = new SegmentMerger(dir, 1, "a", null, null, null, newIOContext(random));
try {
- sm.createCompoundFile("b1", w.segmentInfos.info(0));
+ sm.createCompoundFile("b1", w.segmentInfos.info(0), newIOContext(random));
fail("should not have been able to create a .cfs with .del and .s* files");
} catch (AssertionError e) {
// expected
@@ -168,7 +169,7 @@
// Assert that SM fails if .s* exists
try {
- sm.createCompoundFile("b2", w.segmentInfos.info(0));
+ sm.createCompoundFile("b2", w.segmentInfos.info(0), newIOContext(random));
fail("should not have been able to create a .cfs with .del and .s* files");
} catch (AssertionError e) {
// expected
diff --git a/lucene/src/test/org/apache/lucene/index/TestSegmentReader.java b/lucene/src/test/org/apache/lucene/index/TestSegmentReader.java
index e8d87b1..2c780f6 100644
--- a/lucene/src/test/org/apache/lucene/index/TestSegmentReader.java
+++ b/lucene/src/test/org/apache/lucene/index/TestSegmentReader.java
@@ -28,6 +28,8 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IOContext.Context;
public class TestSegmentReader extends LuceneTestCase {
private Directory dir;
@@ -41,7 +43,7 @@
dir = newDirectory();
DocHelper.setupDoc(testDoc);
SegmentInfo info = DocHelper.writeDoc(random, dir, testDoc);
- reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ reader = SegmentReader.get(true, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, IOContext.READ);
}
@Override
@@ -77,7 +79,7 @@
Document docToDelete = new Document();
DocHelper.setupDoc(docToDelete);
SegmentInfo info = DocHelper.writeDoc(random, dir, docToDelete);
- SegmentReader deleteReader = SegmentReader.get(false, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR);
+ SegmentReader deleteReader = SegmentReader.get(false, info, IndexReader.DEFAULT_TERMS_INDEX_DIVISOR, newIOContext(random));
assertTrue(deleteReader != null);
assertTrue(deleteReader.numDocs() == 1);
deleteReader.deleteDocument(0);
diff --git a/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java b/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
index 17dbae5..9ffa8b4 100644
--- a/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
+++ b/lucene/src/test/org/apache/lucene/index/TestSegmentTermDocs.java
@@ -19,6 +19,7 @@
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -55,7 +56,7 @@
public void testTermDocs(int indexDivisor) throws IOException {
//After adding the document, we should be able to read it back in
- SegmentReader reader = SegmentReader.get(true, info, indexDivisor);
+ SegmentReader reader = SegmentReader.get(true, info, indexDivisor, newIOContext(random));
assertTrue(reader != null);
assertEquals(indexDivisor, reader.getTermInfosIndexDivisor());
@@ -78,7 +79,7 @@
public void testBadSeek(int indexDivisor) throws IOException {
{
//After adding the document, we should be able to read it back in
- SegmentReader reader = SegmentReader.get(true, info, indexDivisor);
+ SegmentReader reader = SegmentReader.get(true, info, indexDivisor, newIOContext(random));
assertTrue(reader != null);
DocsEnum termDocs = reader.termDocsEnum(reader.getLiveDocs(),
"textField2",
@@ -89,7 +90,7 @@
}
{
//After adding the document, we should be able to read it back in
- SegmentReader reader = SegmentReader.get(true, info, indexDivisor);
+ SegmentReader reader = SegmentReader.get(true, info, indexDivisor, newIOContext(random));
assertTrue(reader != null);
DocsEnum termDocs = reader.termDocsEnum(reader.getLiveDocs(),
"junk",
diff --git a/lucene/src/test/org/apache/lucene/index/TestSnapshotDeletionPolicy.java b/lucene/src/test/org/apache/lucene/index/TestSnapshotDeletionPolicy.java
index 6d325a7..fb6402d 100644
--- a/lucene/src/test/org/apache/lucene/index/TestSnapshotDeletionPolicy.java
+++ b/lucene/src/test/org/apache/lucene/index/TestSnapshotDeletionPolicy.java
@@ -25,6 +25,7 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.index.IndexCommit;
@@ -202,7 +203,7 @@
byte[] buffer = new byte[4096];
private void readFile(Directory dir, String name) throws Exception {
- IndexInput input = dir.openInput(name);
+ IndexInput input = dir.openInput(name, newIOContext(random));
try {
long size = dir.fileLength(name);
long bytesLeft = size;
diff --git a/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java b/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java
index c3d1afe..7139b78 100644
--- a/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java
+++ b/lucene/src/test/org/apache/lucene/index/TestTermVectorsReader.java
@@ -32,6 +32,8 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IOContext.Context;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
@@ -174,7 +176,7 @@
}
public void testReader() throws IOException {
- TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+ TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos, newIOContext(random));
for (int j = 0; j < 5; j++) {
TermFreqVector vector = reader.get(j, testFields[0]);
assertTrue(vector != null);
@@ -191,7 +193,7 @@
}
public void testPositionReader() throws IOException {
- TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+ TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos, newIOContext(random));
TermPositionVector vector;
BytesRef[] terms;
vector = (TermPositionVector) reader.get(0, testFields[0]);
@@ -234,7 +236,7 @@
}
public void testOffsetReader() throws IOException {
- TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+ TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos, newIOContext(random));
TermPositionVector vector = (TermPositionVector) reader.get(0, testFields[0]);
assertTrue(vector != null);
BytesRef[] terms = vector.getTerms();
@@ -263,7 +265,7 @@
}
public void testMapper() throws IOException {
- TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos);
+ TermVectorsReader reader = new TermVectorsReader(dir, seg, fieldInfos, newIOContext(random));
SortedTermVectorMapper mapper = new SortedTermVectorMapper(new TermVectorEntryFreqSortedComparator());
reader.get(0, mapper);
SortedSet<TermVectorEntry> set = mapper.getTermVectorEntrySet();
@@ -384,7 +386,7 @@
public void testBadParams() throws IOException {
TermVectorsReader reader = null;
try {
- reader = new TermVectorsReader(dir, seg, fieldInfos);
+ reader = new TermVectorsReader(dir, seg, fieldInfos, newIOContext(random));
//Bad document number, good field number
reader.get(50, testFields[0]);
fail();
@@ -394,7 +396,7 @@
reader.close();
}
try {
- reader = new TermVectorsReader(dir, seg, fieldInfos);
+ reader = new TermVectorsReader(dir, seg, fieldInfos, newIOContext(random));
//Bad document number, no field
reader.get(50);
fail();
@@ -404,7 +406,7 @@
reader.close();
}
try {
- reader = new TermVectorsReader(dir, seg, fieldInfos);
+ reader = new TermVectorsReader(dir, seg, fieldInfos, newIOContext(random));
//good document number, bad field number
TermFreqVector vector = reader.get(0, "f50");
assertTrue(vector == null);
diff --git a/lucene/src/test/org/apache/lucene/index/TestTermVectorsWriter.java b/lucene/src/test/org/apache/lucene/index/TestTermVectorsWriter.java
index 350a05b..7cb139e 100644
--- a/lucene/src/test/org/apache/lucene/index/TestTermVectorsWriter.java
+++ b/lucene/src/test/org/apache/lucene/index/TestTermVectorsWriter.java
@@ -299,7 +299,7 @@
.setMergeScheduler(new SerialMergeScheduler()).setMergePolicy(
new LogDocMergePolicy()));
- Directory[] indexDirs = {new MockDirectoryWrapper(random, new RAMDirectory(dir))};
+ Directory[] indexDirs = {new MockDirectoryWrapper(random, new RAMDirectory(dir, newIOContext(random)))};
writer.addIndexes(indexDirs);
writer.optimize();
writer.close();
diff --git a/lucene/src/test/org/apache/lucene/index/codecs/intblock/TestIntBlockCodec.java b/lucene/src/test/org/apache/lucene/index/codecs/intblock/TestIntBlockCodec.java
index 71f155f..dbd01a0 100644
--- a/lucene/src/test/org/apache/lucene/index/codecs/intblock/TestIntBlockCodec.java
+++ b/lucene/src/test/org/apache/lucene/index/codecs/intblock/TestIntBlockCodec.java
@@ -29,13 +29,13 @@
IntStreamFactory f = new MockFixedIntBlockCodec(128).getIntFactory();
- IntIndexOutput out = f.createOutput(dir, "test");
+ IntIndexOutput out = f.createOutput(dir, "test", newIOContext(random));
for(int i=0;i<11777;i++) {
out.write(i);
}
out.close();
- IntIndexInput in = f.openInput(dir, "test");
+ IntIndexInput in = f.openInput(dir, "test", newIOContext(random));
IntIndexInput.Reader r = in.reader();
for(int i=0;i<11777;i++) {
@@ -50,12 +50,12 @@
Directory dir = newDirectory();
IntStreamFactory f = new MockFixedIntBlockCodec(128).getIntFactory();
- IntIndexOutput out = f.createOutput(dir, "test");
+ IntIndexOutput out = f.createOutput(dir, "test", newIOContext(random));
// write no ints
out.close();
- IntIndexInput in = f.openInput(dir, "test");
+ IntIndexInput in = f.openInput(dir, "test", newIOContext(random));
in.reader();
// read no ints
in.close();
diff --git a/lucene/src/test/org/apache/lucene/index/values/TestDocValues.java b/lucene/src/test/org/apache/lucene/index/values/TestDocValues.java
index aed271e..2fc02a2 100644
--- a/lucene/src/test/org/apache/lucene/index/values/TestDocValues.java
+++ b/lucene/src/test/org/apache/lucene/index/values/TestDocValues.java
@@ -61,7 +61,7 @@
Directory dir = newDirectory();
final AtomicLong trackBytes = new AtomicLong(0);
- Writer w = Bytes.getWriter(dir, "test", mode, comp, fixedSize, trackBytes);
+ Writer w = Bytes.getWriter(dir, "test", mode, comp, fixedSize, trackBytes, newIOContext(random));
int maxDoc = 220;
final String[] values = new String[maxDoc];
final int fixedLength = 1 + atLeast(50);
@@ -81,7 +81,7 @@
w.finish(maxDoc);
assertEquals(0, trackBytes.get());
- IndexDocValues r = Bytes.getValues(dir, "test", mode, fixedSize, maxDoc, comp);
+ IndexDocValues r = Bytes.getValues(dir, "test", mode, fixedSize, maxDoc, comp, newIOContext(random));
for (int iter = 0; iter < 2; iter++) {
ValuesEnum bytesEnum = getEnum(r);
assertNotNull("enum is null", bytesEnum);
@@ -183,12 +183,12 @@
for (int i = 0; i < minMax.length; i++) {
Directory dir = newDirectory();
final AtomicLong trackBytes = new AtomicLong(0);
- Writer w = Ints.getWriter(dir, "test", trackBytes, ValueType.VAR_INTS);
+ Writer w = Ints.getWriter(dir, "test", trackBytes, ValueType.VAR_INTS, newIOContext(random));
w.add(0, minMax[i][0]);
w.add(1, minMax[i][1]);
w.finish(2);
assertEquals(0, trackBytes.get());
- IndexDocValues r = Ints.getValues(dir, "test", 2);
+ IndexDocValues r = Ints.getValues(dir, "test", 2, newIOContext(random));
Source source = getSource(r);
assertEquals(i + " with min: " + minMax[i][0] + " max: " + minMax[i][1],
expectedTypes[i], source.type());
@@ -224,12 +224,12 @@
byte[] sourceArray = new byte[] {1,2,3};
Directory dir = newDirectory();
final AtomicLong trackBytes = new AtomicLong(0);
- Writer w = Ints.getWriter(dir, "test", trackBytes, ValueType.FIXED_INTS_8);
+ Writer w = Ints.getWriter(dir, "test", trackBytes, ValueType.FIXED_INTS_8, newIOContext(random));
for (int i = 0; i < sourceArray.length; i++) {
w.add(i, (long) sourceArray[i]);
}
w.finish(sourceArray.length);
- IndexDocValues r = Ints.getValues(dir, "test", sourceArray.length);
+ IndexDocValues r = Ints.getValues(dir, "test", sourceArray.length, newIOContext(random));
Source source = r.getSource();
assertTrue(source.hasArray());
byte[] loaded = ((byte[])source.getArray());
@@ -245,12 +245,12 @@
short[] sourceArray = new short[] {1,2,3};
Directory dir = newDirectory();
final AtomicLong trackBytes = new AtomicLong(0);
- Writer w = Ints.getWriter(dir, "test", trackBytes, ValueType.FIXED_INTS_16);
+ Writer w = Ints.getWriter(dir, "test", trackBytes, ValueType.FIXED_INTS_16, newIOContext(random));
for (int i = 0; i < sourceArray.length; i++) {
w.add(i, (long) sourceArray[i]);
}
w.finish(sourceArray.length);
- IndexDocValues r = Ints.getValues(dir, "test", sourceArray.length);
+ IndexDocValues r = Ints.getValues(dir, "test", sourceArray.length, newIOContext(random));
Source source = r.getSource();
assertTrue(source.hasArray());
short[] loaded = ((short[])source.getArray());
@@ -266,12 +266,12 @@
long[] sourceArray = new long[] {1,2,3};
Directory dir = newDirectory();
final AtomicLong trackBytes = new AtomicLong(0);
- Writer w = Ints.getWriter(dir, "test", trackBytes, ValueType.FIXED_INTS_64);
+ Writer w = Ints.getWriter(dir, "test", trackBytes, ValueType.FIXED_INTS_64, newIOContext(random));
for (int i = 0; i < sourceArray.length; i++) {
w.add(i, sourceArray[i]);
}
w.finish(sourceArray.length);
- IndexDocValues r = Ints.getValues(dir, "test", sourceArray.length);
+ IndexDocValues r = Ints.getValues(dir, "test", sourceArray.length, newIOContext(random));
Source source = r.getSource();
assertTrue(source.hasArray());
long[] loaded = ((long[])source.getArray());
@@ -287,12 +287,12 @@
int[] sourceArray = new int[] {1,2,3};
Directory dir = newDirectory();
final AtomicLong trackBytes = new AtomicLong(0);
- Writer w = Ints.getWriter(dir, "test", trackBytes, ValueType.FIXED_INTS_32);
+ Writer w = Ints.getWriter(dir, "test", trackBytes, ValueType.FIXED_INTS_32, newIOContext(random));
for (int i = 0; i < sourceArray.length; i++) {
w.add(i, (long) sourceArray[i]);
}
w.finish(sourceArray.length);
- IndexDocValues r = Ints.getValues(dir, "test", sourceArray.length);
+ IndexDocValues r = Ints.getValues(dir, "test", sourceArray.length, newIOContext(random));
Source source = r.getSource();
assertTrue(source.hasArray());
int[] loaded = ((int[])source.getArray());
@@ -308,12 +308,12 @@
float[] sourceArray = new float[] {1,2,3};
Directory dir = newDirectory();
final AtomicLong trackBytes = new AtomicLong(0);
- Writer w = Floats.getWriter(dir, "test", 4, trackBytes);
+ Writer w = Floats.getWriter(dir, "test", 4, trackBytes, newIOContext(random));
for (int i = 0; i < sourceArray.length; i++) {
w.add(i, sourceArray[i]);
}
w.finish(sourceArray.length);
- IndexDocValues r = Floats.getValues(dir, "test", 3);
+ IndexDocValues r = Floats.getValues(dir, "test", 3, newIOContext(random));
Source source = r.getSource();
assertTrue(source.hasArray());
float[] loaded = ((float[])source.getArray());
@@ -329,12 +329,12 @@
double[] sourceArray = new double[] {1,2,3};
Directory dir = newDirectory();
final AtomicLong trackBytes = new AtomicLong(0);
- Writer w = Floats.getWriter(dir, "test", 8, trackBytes);
+ Writer w = Floats.getWriter(dir, "test", 8, trackBytes, newIOContext(random));
for (int i = 0; i < sourceArray.length; i++) {
w.add(i, sourceArray[i]);
}
w.finish(sourceArray.length);
- IndexDocValues r = Floats.getValues(dir, "test", 3);
+ IndexDocValues r = Floats.getValues(dir, "test", 3, newIOContext(random));
Source source = r.getSource();
assertTrue(source.hasArray());
double[] loaded = ((double[])source.getArray());
@@ -353,7 +353,7 @@
for (int rx = 1; rx < maxBit; rx++, maxV *= 2) {
Directory dir = newDirectory();
final AtomicLong trackBytes = new AtomicLong(0);
- Writer w = Ints.getWriter(dir, "test", trackBytes, type);
+ Writer w = Ints.getWriter(dir, "test", trackBytes, type, newIOContext(random));
for (int i = 0; i < NUM_VALUES; i++) {
final long v = random.nextLong() % (1 + maxV);
values[i] = v;
@@ -363,7 +363,7 @@
w.finish(NUM_VALUES + additionalDocs);
assertEquals(0, trackBytes.get());
- IndexDocValues r = Ints.getValues(dir, "test", NUM_VALUES + additionalDocs);
+ IndexDocValues r = Ints.getValues(dir, "test", NUM_VALUES + additionalDocs, newIOContext(random));
for (int iter = 0; iter < 2; iter++) {
Source s = getSource(r);
assertEquals(type, s.type());
@@ -416,7 +416,7 @@
private void runTestFloats(int precision, double delta) throws IOException {
Directory dir = newDirectory();
final AtomicLong trackBytes = new AtomicLong(0);
- Writer w = Floats.getWriter(dir, "test", precision, trackBytes);
+ Writer w = Floats.getWriter(dir, "test", precision, trackBytes, newIOContext(random));
final int NUM_VALUES = 777 + random.nextInt(777);;
final double[] values = new double[NUM_VALUES];
for (int i = 0; i < NUM_VALUES; i++) {
@@ -429,7 +429,7 @@
w.finish(NUM_VALUES + additionalValues);
assertEquals(0, trackBytes.get());
- IndexDocValues r = Floats.getValues(dir, "test", NUM_VALUES + additionalValues);
+ IndexDocValues r = Floats.getValues(dir, "test", NUM_VALUES + additionalValues, newIOContext(random));
for (int iter = 0; iter < 2; iter++) {
Source s = getSource(r);
for (int i = 0; i < NUM_VALUES; i++) {
diff --git a/lucene/src/test/org/apache/lucene/search/TestBoolean2.java b/lucene/src/test/org/apache/lucene/search/TestBoolean2.java
index 98999ae..2a460a5 100644
--- a/lucene/src/test/org/apache/lucene/search/TestBoolean2.java
+++ b/lucene/src/test/org/apache/lucene/search/TestBoolean2.java
@@ -27,6 +27,7 @@
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -62,13 +63,13 @@
searcher = new IndexSearcher(directory, true);
// Make big index
- dir2 = new MockDirectoryWrapper(random, new RAMDirectory(directory));
+ dir2 = new MockDirectoryWrapper(random, new RAMDirectory(directory, IOContext.DEFAULT));
// First multiply small test index:
mulFactor = 1;
int docCount = 0;
do {
- final Directory copy = new MockDirectoryWrapper(random, new RAMDirectory(dir2));
+ final Directory copy = new MockDirectoryWrapper(random, new RAMDirectory(dir2, IOContext.DEFAULT));
RandomIndexWriter w = new RandomIndexWriter(random, dir2);
w.addIndexes(copy);
docCount = w.maxDoc();
diff --git a/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java b/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java
index 33e48c4..9d9957e 100755
--- a/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java
+++ b/lucene/src/test/org/apache/lucene/store/TestBufferedIndexInput.java
@@ -93,11 +93,11 @@
// run test with chunk size of 10 bytes
runReadBytesAndClose(new SimpleFSIndexInput(tmpInputFile,
- inputBufferSize, 10), inputBufferSize, random);
+ newIOContext(random), 10), inputBufferSize, random);
// run test with chunk size of 10 bytes
runReadBytesAndClose(new NIOFSIndexInput(tmpInputFile,
- inputBufferSize, 10), inputBufferSize, random);
+ newIOContext(random), 10), inputBufferSize, random);
}
private void runReadBytesAndClose(IndexInput input, int bufferSize, Random r)
@@ -303,11 +303,6 @@
dir = new SimpleFSDirectory(path, null);
}
- @Override
- public IndexInput openInput(String name) throws IOException {
- return openInput(name, BufferedIndexInput.BUFFER_SIZE);
- }
-
public void tweakBufferSizes() {
//int count = 0;
for (final IndexInput ip : allIndexInputs) {
@@ -320,17 +315,17 @@
}
@Override
- public IndexInput openInput(String name, int bufferSize) throws IOException {
+ public IndexInput openInput(String name, IOContext context) throws IOException {
// Make random changes to buffer size
- bufferSize = 1+Math.abs(rand.nextInt() % 10);
- IndexInput f = dir.openInput(name, bufferSize);
+ //bufferSize = 1+Math.abs(rand.nextInt() % 10);
+ IndexInput f = dir.openInput(name, context);
allIndexInputs.add(f);
return f;
}
@Override
- public IndexOutput createOutput(String name) throws IOException {
- return dir.createOutput(name);
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
+ return dir.createOutput(name, context);
}
@Override
diff --git a/lucene/src/test/org/apache/lucene/store/TestCopyBytes.java b/lucene/src/test/org/apache/lucene/store/TestCopyBytes.java
index 08f41e4..156a58f 100644
--- a/lucene/src/test/org/apache/lucene/store/TestCopyBytes.java
+++ b/lucene/src/test/org/apache/lucene/store/TestCopyBytes.java
@@ -17,35 +17,33 @@
* limitations under the License.
*/
-
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import org.junit.Test;
public class TestCopyBytes extends LuceneTestCase {
-
+
private byte value(int idx) {
- return (byte) ((idx%256) * (1+(idx/256)));
+ return (byte) ((idx % 256) * (1 + (idx / 256)));
}
-
-
+
@Test
public void testCopyBytes() throws Exception {
int num = atLeast(10);
- for(int iter=0;iter<num;iter++) {
+ for (int iter = 0; iter < num; iter++) {
Directory dir = newDirectory();
if (VERBOSE) {
System.out.println("TEST: iter=" + iter + " dir=" + dir);
}
-
+
// make random file
- IndexOutput out = dir.createOutput("test");
+ IndexOutput out = dir.createOutput("test", newIOContext(random));
byte[] bytes = new byte[_TestUtil.nextInt(random, 1, 77777)];
final int size = _TestUtil.nextInt(random, 1, 1777777);
int upto = 0;
int byteUpto = 0;
- while(upto < size) {
+ while (upto < size) {
bytes[byteUpto++] = value(upto);
upto++;
if (byteUpto == bytes.length) {
@@ -53,24 +51,25 @@
byteUpto = 0;
}
}
-
+
out.writeBytes(bytes, 0, byteUpto);
assertEquals(size, out.getFilePointer());
out.close();
assertEquals(size, dir.fileLength("test"));
-
+
// copy from test -> test2
- final IndexInput in = dir.openInput("test");
-
- out = dir.createOutput("test2");
-
+ final IndexInput in = dir.openInput("test", newIOContext(random));
+
+ out = dir.createOutput("test2", newIOContext(random));
+
upto = 0;
- while(upto < size) {
+ while (upto < size) {
if (random.nextBoolean()) {
out.writeByte(in.readByte());
upto++;
} else {
- final int chunk = Math.min(_TestUtil.nextInt(random, 1, bytes.length), size-upto);
+ final int chunk = Math.min(
+ _TestUtil.nextInt(random, 1, bytes.length), size - upto);
out.copyBytes(in, chunk);
upto += chunk;
}
@@ -78,26 +77,27 @@
assertEquals(size, upto);
out.close();
in.close();
-
+
// verify
- IndexInput in2 = dir.openInput("test2");
+ IndexInput in2 = dir.openInput("test2", newIOContext(random));
upto = 0;
- while(upto < size) {
+ while (upto < size) {
if (random.nextBoolean()) {
final byte v = in2.readByte();
assertEquals(value(upto), v);
upto++;
} else {
- final int limit = Math.min(_TestUtil.nextInt(random, 1, bytes.length), size-upto);
+ final int limit = Math.min(
+ _TestUtil.nextInt(random, 1, bytes.length), size - upto);
in2.readBytes(bytes, 0, limit);
- for(int byteIdx=0;byteIdx<limit;byteIdx++) {
+ for (int byteIdx = 0; byteIdx < limit; byteIdx++) {
assertEquals(value(upto), bytes[byteIdx]);
upto++;
}
}
}
in2.close();
-
+
dir.deleteFile("test");
dir.deleteFile("test2");
diff --git a/lucene/src/test/org/apache/lucene/store/TestDirectory.java b/lucene/src/test/org/apache/lucene/store/TestDirectory.java
index 03e6f15..e17cf53 100644
--- a/lucene/src/test/org/apache/lucene/store/TestDirectory.java
+++ b/lucene/src/test/org/apache/lucene/store/TestDirectory.java
@@ -31,7 +31,7 @@
for (Directory dir : dirs) {
dir.close();
try {
- dir.createOutput("test");
+ dir.createOutput("test", newIOContext(random));
fail("did not hit expected exception");
} catch (AlreadyClosedException ace) {
}
@@ -56,7 +56,7 @@
dir.ensureOpen();
String fname = "foo." + i;
String lockname = "foo" + i + ".lck";
- IndexOutput out = dir.createOutput(fname);
+ IndexOutput out = dir.createOutput(fname, newIOContext(random));
out.writeByte((byte)i);
out.close();
@@ -70,7 +70,7 @@
// closed and will cause a failure to delete the file.
if (d2 instanceof MMapDirectory) continue;
- IndexInput input = d2.openInput(fname);
+ IndexInput input = d2.openInput(fname, newIOContext(random));
assertEquals((byte)i, input.readByte());
input.close();
}
@@ -141,7 +141,7 @@
private void checkDirectoryFilter(Directory dir) throws IOException {
String name = "file";
try {
- dir.createOutput(name).close();
+ dir.createOutput(name, newIOContext(random)).close();
assertTrue(dir.fileExists(name));
assertTrue(Arrays.asList(dir.listAll()).contains(name));
} finally {
@@ -156,7 +156,7 @@
path.mkdirs();
new File(path, "subdir").mkdirs();
Directory fsDir = new SimpleFSDirectory(path, null);
- assertEquals(0, new RAMDirectory(fsDir).listAll().length);
+ assertEquals(0, new RAMDirectory(fsDir, newIOContext(random)).listAll().length);
} finally {
_TestUtil.rmDir(path);
}
@@ -167,7 +167,7 @@
File path = _TestUtil.getTempDir("testnotdir");
Directory fsDir = new SimpleFSDirectory(path, null);
try {
- IndexOutput out = fsDir.createOutput("afile");
+ IndexOutput out = fsDir.createOutput("afile", newIOContext(random));
out.close();
assertTrue(fsDir.fileExists("afile"));
try {
diff --git a/lucene/src/test/org/apache/lucene/store/TestMultiMMap.java b/lucene/src/test/org/apache/lucene/store/TestMultiMMap.java
index 7669bb1..ad1739a 100644
--- a/lucene/src/test/org/apache/lucene/store/TestMultiMMap.java
+++ b/lucene/src/test/org/apache/lucene/store/TestMultiMMap.java
@@ -51,9 +51,9 @@
for (int i = 0; i < 31; i++) {
MMapDirectory mmapDir = new MMapDirectory(_TestUtil.getTempDir("testSeekZero"));
mmapDir.setMaxChunkSize(1<<i);
- IndexOutput io = mmapDir.createOutput("zeroBytes");
+ IndexOutput io = mmapDir.createOutput("zeroBytes", newIOContext(random));
io.close();
- IndexInput ii = mmapDir.openInput("zeroBytes");
+ IndexInput ii = mmapDir.openInput("zeroBytes", newIOContext(random));
ii.seek(0L);
ii.close();
mmapDir.close();
@@ -64,12 +64,12 @@
for (int i = 0; i < 17; i++) {
MMapDirectory mmapDir = new MMapDirectory(_TestUtil.getTempDir("testSeekEnd"));
mmapDir.setMaxChunkSize(1<<i);
- IndexOutput io = mmapDir.createOutput("bytes");
+ IndexOutput io = mmapDir.createOutput("bytes", newIOContext(random));
byte bytes[] = new byte[1<<i];
random.nextBytes(bytes);
io.writeBytes(bytes, bytes.length);
io.close();
- IndexInput ii = mmapDir.openInput("bytes");
+ IndexInput ii = mmapDir.openInput("bytes", newIOContext(random));
byte actual[] = new byte[1<<i];
ii.readBytes(actual, 0, actual.length);
assertEquals(new BytesRef(bytes), new BytesRef(actual));
@@ -83,12 +83,12 @@
for (int i = 0; i < 10; i++) {
MMapDirectory mmapDir = new MMapDirectory(_TestUtil.getTempDir("testSeeking"));
mmapDir.setMaxChunkSize(1<<i);
- IndexOutput io = mmapDir.createOutput("bytes");
+ IndexOutput io = mmapDir.createOutput("bytes", newIOContext(random));
byte bytes[] = new byte[1<<(i+1)]; // make sure we switch buffers
random.nextBytes(bytes);
io.writeBytes(bytes, bytes.length);
io.close();
- IndexInput ii = mmapDir.openInput("bytes");
+ IndexInput ii = mmapDir.openInput("bytes", newIOContext(random));
byte actual[] = new byte[1<<(i+1)]; // first read all bytes
ii.readBytes(actual, 0, actual.length);
assertEquals(new BytesRef(bytes), new BytesRef(actual));
diff --git a/lucene/src/test/org/apache/lucene/store/TestRAMDirectory.java b/lucene/src/test/org/apache/lucene/store/TestRAMDirectory.java
index 592481d..dadb66a 100644
--- a/lucene/src/test/org/apache/lucene/store/TestRAMDirectory.java
+++ b/lucene/src/test/org/apache/lucene/store/TestRAMDirectory.java
@@ -70,7 +70,7 @@
public void testRAMDirectory () throws IOException {
Directory dir = newFSDirectory(indexDir);
- MockDirectoryWrapper ramDir = new MockDirectoryWrapper(random, new RAMDirectory(dir));
+ MockDirectoryWrapper ramDir = new MockDirectoryWrapper(random, new RAMDirectory(dir, newIOContext(random)));
// close the underlaying directory
dir.close();
@@ -102,7 +102,7 @@
public void testRAMDirectorySize() throws IOException, InterruptedException {
Directory dir = newFSDirectory(indexDir);
- final MockDirectoryWrapper ramDir = new MockDirectoryWrapper(random, new RAMDirectory(dir));
+ final MockDirectoryWrapper ramDir = new MockDirectoryWrapper(random, new RAMDirectory(dir, newIOContext(random)));
dir.close();
final IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(
@@ -152,11 +152,11 @@
// LUCENE-1196
public void testIllegalEOF() throws Exception {
RAMDirectory dir = new RAMDirectory();
- IndexOutput o = dir.createOutput("out");
+ IndexOutput o = dir.createOutput("out", newIOContext(random));
byte[] b = new byte[1024];
o.writeBytes(b, 0, 1024);
o.close();
- IndexInput i = dir.openInput("out");
+ IndexInput i = dir.openInput("out", newIOContext(random));
i.seek(1024);
i.close();
dir.close();
@@ -174,12 +174,12 @@
public void testSeekToEOFThenBack() throws Exception {
RAMDirectory dir = new RAMDirectory();
- IndexOutput o = dir.createOutput("out");
+ IndexOutput o = dir.createOutput("out", newIOContext(random));
byte[] bytes = new byte[3*RAMInputStream.BUFFER_SIZE];
o.writeBytes(bytes, 0, bytes.length);
o.close();
- IndexInput i = dir.openInput("out");
+ IndexInput i = dir.openInput("out", newIOContext(random));
i.seek(2*RAMInputStream.BUFFER_SIZE-1);
i.seek(3*RAMInputStream.BUFFER_SIZE);
i.seek(RAMInputStream.BUFFER_SIZE);
diff --git a/lucene/src/test/org/apache/lucene/util/TestBitVector.java b/lucene/src/test/org/apache/lucene/util/TestBitVector.java
index 45d473b..1944cc0 100644
--- a/lucene/src/test/org/apache/lucene/util/TestBitVector.java
+++ b/lucene/src/test/org/apache/lucene/util/TestBitVector.java
@@ -19,6 +19,7 @@
import java.io.IOException;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.RAMDirectory;
@@ -146,8 +147,8 @@
bv.set(i);
assertTrue(bv.get(i));
assertEquals(i+1,bv.count());
- bv.write(d, "TESTBV");
- BitVector compare = new BitVector(d, "TESTBV");
+ bv.write(d, "TESTBV", newIOContext(random));
+ BitVector compare = new BitVector(d, "TESTBV", newIOContext(random));
// compare bit vectors with bits set incrementally
assertTrue(doCompare(bv,compare));
}
@@ -175,24 +176,24 @@
bv.clear(i);
assertEquals(i+1,size-bv.count());
}
- bv.write(d, "TESTBV");
+ bv.write(d, "TESTBV", newIOContext(random));
// gradually increase number of set bits
for (int i=count1; i<count2; i++) {
- BitVector bv2 = new BitVector(d, "TESTBV");
+ BitVector bv2 = new BitVector(d, "TESTBV", newIOContext(random));
assertTrue(doCompare(bv,bv2));
bv = bv2;
bv.clear(i);
assertEquals(i+1,size-bv.count());
- bv.write(d, "TESTBV");
+ bv.write(d, "TESTBV", newIOContext(random));
}
// now start decreasing number of set bits
for (int i=count2-1; i>=count1; i--) {
- BitVector bv2 = new BitVector(d, "TESTBV");
+ BitVector bv2 = new BitVector(d, "TESTBV", newIOContext(random));
assertTrue(doCompare(bv,bv2));
bv = bv2;
bv.set(i);
assertEquals(i,size-bv.count());
- bv.write(d, "TESTBV");
+ bv.write(d, "TESTBV", newIOContext(random));
}
}
/**
diff --git a/lucene/src/test/org/apache/lucene/util/TestByteBlockPool.java b/lucene/src/test/org/apache/lucene/util/TestByteBlockPool.java
index ef12523..92ff5b7 100644
--- a/lucene/src/test/org/apache/lucene/util/TestByteBlockPool.java
+++ b/lucene/src/test/org/apache/lucene/util/TestByteBlockPool.java
@@ -41,11 +41,11 @@
pool.copy(ref);
}
RAMDirectory dir = new RAMDirectory();
- IndexOutput stream = dir.createOutput("foo.txt");
+ IndexOutput stream = dir.createOutput("foo.txt", newIOContext(random));
pool.writePool(stream);
stream.flush();
stream.close();
- IndexInput input = dir.openInput("foo.txt");
+ IndexInput input = dir.openInput("foo.txt", newIOContext(random));
assertEquals(pool.byteOffset + pool.byteUpto, stream.length());
BytesRef expected = new BytesRef();
BytesRef actual = new BytesRef();
diff --git a/lucene/src/test/org/apache/lucene/util/fst/TestFSTs.java b/lucene/src/test/org/apache/lucene/util/fst/TestFSTs.java
index bb9778b..e298a5d 100644
--- a/lucene/src/test/org/apache/lucene/util/fst/TestFSTs.java
+++ b/lucene/src/test/org/apache/lucene/util/fst/TestFSTs.java
@@ -29,6 +29,7 @@
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
@@ -467,11 +468,13 @@
}
FST<T> fst = builder.finish();
- if (random.nextBoolean() && fst != null) {
- IndexOutput out = dir.createOutput("fst.bin");
+ if (random.nextBoolean() && fst != null) {
+ TestFSTs t = new TestFSTs();
+ IOContext context = t.newIOContext(random);
+ IndexOutput out = dir.createOutput("fst.bin", context);
fst.save(out);
out.close();
- IndexInput in = dir.openInput("fst.bin");
+ IndexInput in = dir.openInput("fst.bin", context);
try {
fst = new FST<T>(in, outputs);
} finally {
@@ -1192,7 +1195,7 @@
}
Directory dir = FSDirectory.open(new File(dirOut));
- IndexOutput out = dir.createOutput("fst.bin");
+ IndexOutput out = dir.createOutput("fst.bin", IOContext.DEFAULT);
fst.save(out);
out.close();
@@ -1521,11 +1524,11 @@
// Make sure it still works after save/load:
Directory dir = newDirectory();
- IndexOutput out = dir.createOutput("fst");
+ IndexOutput out = dir.createOutput("fst", IOContext.DEFAULT);
fst.save(out);
out.close();
- IndexInput in = dir.openInput("fst");
+ IndexInput in = dir.openInput("fst", IOContext.DEFAULT);
final FST<Long> fst2 = new FST<Long>(in, outputs);
checkStopNodes(fst2, outputs);
in.close();
diff --git a/lucene/src/test/org/apache/lucene/util/packed/TestPackedInts.java b/lucene/src/test/org/apache/lucene/util/packed/TestPackedInts.java
index 6fdc6c8..107bf02 100644
--- a/lucene/src/test/org/apache/lucene/util/packed/TestPackedInts.java
+++ b/lucene/src/test/org/apache/lucene/util/packed/TestPackedInts.java
@@ -54,7 +54,7 @@
final int valueCount = 100+random.nextInt(500);
final Directory d = newDirectory();
- IndexOutput out = d.createOutput("out.bin");
+ IndexOutput out = d.createOutput("out.bin", newIOContext(random));
PackedInts.Writer w = PackedInts.getWriter(
out, valueCount, nbits);
@@ -71,7 +71,7 @@
final long fp = out.getFilePointer();
out.close();
{// test reader
- IndexInput in = d.openInput("out.bin");
+ IndexInput in = d.openInput("out.bin", newIOContext(random));
PackedInts.Reader r = PackedInts.getReader(in);
assertEquals(fp, in.getFilePointer());
for(int i=0;i<valueCount;i++) {
@@ -82,7 +82,7 @@
in.close();
}
{ // test reader iterator next
- IndexInput in = d.openInput("out.bin");
+ IndexInput in = d.openInput("out.bin", newIOContext(random));
PackedInts.ReaderIterator r = PackedInts.getReaderIterator(in);
for(int i=0;i<valueCount;i++) {
assertEquals("index=" + i + " ceil=" + ceil + " valueCount="
@@ -93,7 +93,7 @@
in.close();
}
{ // test reader iterator next vs. advance
- IndexInput in = d.openInput("out.bin");
+ IndexInput in = d.openInput("out.bin", newIOContext(random));
PackedInts.ReaderIterator intsEnum = PackedInts.getReaderIterator(in);
for (int i = 0; i < valueCount; i +=
1 + ((valueCount - i) <= 20 ? random.nextInt(valueCount - i)
@@ -229,14 +229,14 @@
public void testSingleValue() throws Exception {
Directory dir = newDirectory();
- IndexOutput out = dir.createOutput("out");
+ IndexOutput out = dir.createOutput("out", newIOContext(random));
PackedInts.Writer w = PackedInts.getWriter(out, 1, 8);
w.add(17);
w.finish();
final long end = out.getFilePointer();
out.close();
- IndexInput in = dir.openInput("out");
+ IndexInput in = dir.openInput("out", newIOContext(random));
PackedInts.getReader(in);
assertEquals(end, in.getFilePointer());
in.close();
diff --git a/modules/facet/src/test/org/apache/lucene/util/SlowRAMDirectory.java b/modules/facet/src/test/org/apache/lucene/util/SlowRAMDirectory.java
index 11e51b6..33b0010 100644
--- a/modules/facet/src/test/org/apache/lucene/util/SlowRAMDirectory.java
+++ b/modules/facet/src/test/org/apache/lucene/util/SlowRAMDirectory.java
@@ -3,6 +3,7 @@
import java.io.IOException;
import java.util.Random;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
@@ -46,28 +47,20 @@
}
@Override
- public IndexOutput createOutput(String name) throws IOException {
+ public IndexOutput createOutput(String name, IOContext context) throws IOException {
if (sleepMillis != -1) {
- return new SlowIndexOutput(super.createOutput(name));
+ return new SlowIndexOutput(super.createOutput(name, context));
}
- return super.createOutput(name);
+ return super.createOutput(name, context);
}
@Override
- public IndexInput openInput(String name) throws IOException {
+ public IndexInput openInput(String name, IOContext context) throws IOException {
if (sleepMillis != -1) {
- return new SlowIndexInput(super.openInput(name));
+ return new SlowIndexInput(super.openInput(name, context));
}
- return super.openInput(name);
- }
-
- @Override
- public IndexInput openInput(String name, int bufferSize) throws IOException {
- if (sleepMillis != -1) {
- return new SlowIndexInput(super.openInput(name, bufferSize));
- }
- return super.openInput(name, bufferSize);
+ return super.openInput(name, context);
}
void doSleep(int length) {
diff --git a/solr/src/java/org/apache/solr/core/RefCntRamDirectory.java b/solr/src/java/org/apache/solr/core/RefCntRamDirectory.java
index 29e5f65..fe48e7e 100644
--- a/solr/src/java/org/apache/solr/core/RefCntRamDirectory.java
+++ b/solr/src/java/org/apache/solr/core/RefCntRamDirectory.java
@@ -21,6 +21,7 @@
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.RAMDirectory;
public class RefCntRamDirectory extends RAMDirectory {
@@ -35,7 +36,7 @@
public RefCntRamDirectory(Directory dir) throws IOException {
this();
for (String file : dir.listAll()) {
- dir.copy(this, file, file);
+ dir.copy(this, file, file, IOContext.DEFAULT);
}
}