Index: src/test/org/apache/lucene/index/TestDocumentWriter.java
===================================================================
--- src/test/org/apache/lucene/index/TestDocumentWriter.java (revision 792244)
+++ src/test/org/apache/lucene/index/TestDocumentWriter.java (working copy)
@@ -97,8 +97,8 @@
// test that the norms are not present in the segment if
// omitNorms is true
- for (int i = 0; i < reader.fieldInfos.size(); i++) {
- FieldInfo fi = reader.fieldInfos.fieldInfo(i);
+ for (int i = 0; i < reader.core.fieldInfos.size(); i++) {
+ FieldInfo fi = reader.core.fieldInfos.fieldInfo(i);
if (fi.isIndexed) {
assertTrue(fi.omitNorms == !reader.hasNorms(fi.name));
}
Index: src/test/org/apache/lucene/index/TestIndexWriterReader.java
===================================================================
--- src/test/org/apache/lucene/index/TestIndexWriterReader.java (revision 792244)
+++ src/test/org/apache/lucene/index/TestIndexWriterReader.java (working copy)
@@ -42,7 +42,7 @@
public class TestIndexWriterReader extends LuceneTestCase {
static PrintStream infoStream;
- private static class HeavyAtomicInt {
+ public static class HeavyAtomicInt {
private int value;
public HeavyAtomicInt(int start) {
value = start;
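Note: HeavyAtomicInt is widened from private to public so the new TestNRTReaderWithThreads below can reuse it across classes. Only the field and constructor appear in this hunk; a plausible reconstruction of the rest, inferred from the addAndGet(int) and intValue() calls in the new test (the actual bodies may differ), is a synchronized counter:

    public static class HeavyAtomicInt {
      private int value;
      public HeavyAtomicInt(int start) {
        value = start;
      }
      // inferred from usage in TestNRTReaderWithThreads
      public synchronized int addAndGet(int inc) {
        value += inc;
        return value;
      }
      public synchronized int intValue() {
        return value;
      }
    }

The name presumably contrasts this lock-based counter with java.util.concurrent.atomic.AtomicInteger, which is unavailable to code targeting Java 1.4.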
Index: src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
===================================================================
--- src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java (revision 0)
+++ src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java (revision 0)
@@ -0,0 +1,111 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.util.Random;
+
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.TestIndexWriterReader.HeavyAtomicInt;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MockRAMDirectory;
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestNRTReaderWithThreads extends LuceneTestCase {
+ Random random = new Random();
+ HeavyAtomicInt seq = new HeavyAtomicInt(1);
+
+ public void testIndexing() throws Exception {
+ Directory mainDir = new MockRAMDirectory();
+ IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(),
+ IndexWriter.MaxFieldLength.LIMITED);
+ writer.setUseCompoundFile(false);
+ IndexReader reader = writer.getReader(); // start pooling readers
+ reader.close();
+ writer.setMergeFactor(2);
+ writer.setMaxBufferedDocs(10);
+ RunThread[] indexThreads = new RunThread[4];
+ for (int x=0; x < indexThreads.length; x++) {
+ indexThreads[x] = new RunThread(x % 2, writer);
+ indexThreads[x].setName("Thread " + x);
+ indexThreads[x].start();
+ }
+ long startTime = System.currentTimeMillis();
+ long duration = 5*1000;
+ while ((System.currentTimeMillis() - startTime) < duration) {
+ Thread.sleep(100);
+ }
+ int delCount = 0;
+ int addCount = 0;
+ for (int x=0; x < indexThreads.length; x++) {
+ indexThreads[x].run = false;
+ assertTrue(indexThreads[x].ex == null);
+ addCount += indexThreads[x].addCount;
+ delCount += indexThreads[x].delCount;
+ }
+ for (int x=0; x < indexThreads.length; x++) {
+ indexThreads[x].join();
+ }
+ //System.out.println("addCount:"+addCount);
+ //System.out.println("delCount:"+delCount);
+ writer.close();
+ mainDir.close();
+ }
+
+ public class RunThread extends Thread {
+ IndexWriter writer;
+ boolean run = true;
+ Throwable ex;
+ int delCount = 0;
+ int addCount = 0;
+ int type;
+
+ public RunThread(int type, IndexWriter writer) {
+ this.type = type;
+ this.writer = writer;
+ }
+
+ public void run() {
+ try {
+ while (run) {
+ //int n = random.nextInt(2);
+ if (type == 0) {
+ int i = seq.addAndGet(1);
+ Document doc = TestIndexWriterReader.createDocument(i, "index1", 10);
+ writer.addDocument(doc);
+ addCount++;
+ } else if (type == 1) {
+ // we may or may not delete because the term may not exist,
+ // however we're opening and closing the reader rapidly
+ IndexReader reader = writer.getReader();
+ int id = random.nextInt(seq.intValue());
+ Term term = new Term("id", Integer.toString(id));
+ int count = TestIndexWriterReader.count(term, reader);
+ writer.deleteDocuments(term);
+ reader.close();
+ delCount += count;
+ }
+ }
+ } catch (Throwable ex) {
+ ex.printStackTrace(System.out);
+ this.ex = ex;
+ run = false;
+ }
+ }
+ }
+}
Property changes on: src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
___________________________________________________________________
Added: svn:eol-style
+ native
Index: src/test/org/apache/lucene/index/TestLazyProxSkipping.java
===================================================================
--- src/test/org/apache/lucene/index/TestLazyProxSkipping.java (revision 792244)
+++ src/test/org/apache/lucene/index/TestLazyProxSkipping.java (working copy)
@@ -43,12 +43,24 @@
private String term1 = "xx";
private String term2 = "yy";
private String term3 = "zz";
+
+ private class SeekCountingDirectory extends RAMDirectory {
+ public IndexInput openInput(String name) throws IOException {
+ IndexInput ii = super.openInput(name);
+ if (name.endsWith(".prx")) {
+ // we decorate the proxStream with a wrapper class that lets us count the number of seek() calls
+ ii = new SeeksCountingStream(ii);
+ }
+ return ii;
+ }
+ }
private void createIndex(int numHits) throws IOException {
int numDocs = 500;
- Directory directory = new RAMDirectory();
+ Directory directory = new SeekCountingDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ writer.setUseCompoundFile(false);
writer.setMaxBufferedDocs(10);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
@@ -74,9 +86,6 @@
SegmentReader reader = SegmentReader.getOnlySegmentReader(directory);
- // we decorate the proxStream with a wrapper class that allows to count the number of calls of seek()
- reader.proxStream = new SeeksCountingStream(reader.proxStream);
-
this.searcher = new IndexSearcher(reader);
}
@@ -96,6 +105,7 @@
assertEquals(numHits, hits.length);
// check if the number of calls of seek() does not exceed the number of hits
+ assertTrue(this.seeksCounter > 0);
assertTrue(this.seeksCounter <= numHits + 1);
}
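Note: because the prox stream now lives in the shared core and is no longer reachable as reader.proxStream, this test counts seeks by decorating the .prx input at open time instead; writer.setUseCompoundFile(false) keeps the .prx file visible as a separate name for SeekCountingDirectory to match. SeeksCountingStream already exists elsewhere in this test; a stand-alone sketch of the same decorator idea (illustrative names, not the test's actual code):

    import java.io.IOException;
    import org.apache.lucene.store.IndexInput;

    class CountingIndexInput extends IndexInput {
      private final IndexInput delegate;
      int seeks; // bumped on every seek() call

      CountingIndexInput(IndexInput delegate) {
        this.delegate = delegate;
      }
      public byte readByte() throws IOException {
        return delegate.readByte();
      }
      public void readBytes(byte[] b, int offset, int len) throws IOException {
        delegate.readBytes(b, offset, len);
      }
      public void seek(long pos) throws IOException {
        seeks++;
        delegate.seek(pos);
      }
      public long getFilePointer() {
        return delegate.getFilePointer();
      }
      public long length() {
        return delegate.length();
      }
      public void close() throws IOException {
        delegate.close();
      }
    }

The new assertTrue(this.seeksCounter > 0) guards against the decorator silently never being installed, which would let the upper-bound assertion pass vacuously.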
Index: src/java/org/apache/lucene/index/SegmentTermPositions.java
===================================================================
--- src/java/org/apache/lucene/index/SegmentTermPositions.java (revision 792244)
+++ src/java/org/apache/lucene/index/SegmentTermPositions.java (working copy)
@@ -146,7 +146,7 @@
private void lazySkip() throws IOException {
if (proxStream == null) {
// clone lazily
- proxStream = (IndexInput)parent.proxStream.clone();
+ proxStream = (IndexInput) parent.core.proxStream.clone();
}
// we might have to skip the current payload
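Note: only the source of the clone changes here; the per-reader proxStream field is gone, and each SegmentTermPositions lazily clones the stream owned by the shared core. The idiom, as a stand-alone sketch (illustrative names, not Lucene's):

    import org.apache.lucene.store.IndexInput;

    final class LazyProxView {
      private final IndexInput shared; // owned by the shared core, never repositioned here
      private IndexInput own;          // private copy, created on first use

      LazyProxView(IndexInput shared) {
        this.shared = shared;
      }
      IndexInput get() {
        if (own == null) {
          // clone lazily, as in lazySkip() above; consumers that never
          // read positions never pay for the clone
          own = (IndexInput) shared.clone();
        }
        return own;
      }
    }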
Index: src/java/org/apache/lucene/index/SegmentReader.java
===================================================================
--- src/java/org/apache/lucene/index/SegmentReader.java (revision 792244)
+++ src/java/org/apache/lucene/index/SegmentReader.java (working copy)
@@ -40,18 +40,12 @@
/** @version $Id */
class SegmentReader extends IndexReader implements Cloneable {
- protected Directory directory;
protected boolean readOnly;
- private String segment;
private SegmentInfo si;
private int readBufferSize;
- FieldInfos fieldInfos;
- private FieldsReader fieldsReaderOrig = null;
CloseableThreadLocal fieldsReaderLocal = new FieldsReaderLocal();
- TermInfosReader tis;
- TermVectorsReader termVectorsReaderOrig = null;
CloseableThreadLocal termVectorsLocal = new CloseableThreadLocal();
BitVector deletedDocs = null;
@@ -64,31 +58,197 @@
private boolean rollbackDeletedDocsDirty = false;
private boolean rollbackNormsDirty = false;
private int rollbackPendingDeleteCount;
- IndexInput freqStream;
- IndexInput proxStream;
// optionally used for the .nrm file shared by multiple norms
private IndexInput singleNormStream;
private Ref singleNormRef;
- // Counts how many other reader share the core objects
- // (freqStream, proxStream, tis, etc.) of this reader;
- // when coreRef drops to 0, these core objects may be
- // closed. A given insance of SegmentReader may be
- // closed, even those it shares core objects with other
- // SegmentReaders:
- private Ref coreRef = new Ref();
+ CoreReaders core;
- // Compound File Reader when based on a compound file segment
- CompoundFileReader cfsReader = null;
- CompoundFileReader storeCFSReader = null;
+ // Holds core readers that are shared (unchanged) when
+ // SegmentReader is cloned or reopened
+ static final class CoreReaders {
+
+ // Counts how many other readers share the core objects
+ // (freqStream, proxStream, tis, etc.) of this reader;
+ // when the shared ref count drops to 0, these core objects may be
+ // closed. A given instance of SegmentReader may be
+ // closed, even though it shares core objects with other
+ // SegmentReaders:
+ private final Ref ref = new Ref();
+
+ final String segment;
+ final FieldInfos fieldInfos;
+ final IndexInput freqStream;
+ final IndexInput proxStream;
+
+ final Directory dir;
+ final Directory cfsDir;
+ final int readBufferSize;
+
+ TermInfosReader tis;
+ FieldsReader fieldsReaderOrig;
+ TermVectorsReader termVectorsReaderOrig;
+ CompoundFileReader cfsReader;
+ CompoundFileReader storeCFSReader;
+
+ CoreReaders(Directory dir, SegmentInfo si, int readBufferSize) throws IOException {
+ segment = si.name;
+ this.readBufferSize = readBufferSize;
+ this.dir = dir;
+
+ boolean success = false;
+
+ try {
+ Directory dir0 = dir;
+ if (si.getUseCompoundFile()) {
+ cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
+ dir0 = cfsReader;
+ }
+ cfsDir = dir0;
+
+ fieldInfos = new FieldInfos(cfsDir, segment + "." + IndexFileNames.FIELD_INFOS_EXTENSION);
+
+ tis = new TermInfosReader(cfsDir, segment, fieldInfos, readBufferSize);
+
+ boolean anyProx = false;
+ final int numFields = fieldInfos.size();
+ for(int i=0;!anyProx && i<numFields;i++) {
+ if (!fieldInfos.fieldInfo(i).omitTermFreqAndPositions) {
+ anyProx = true;
+ }
+ }
+
+ // make sure that all index files have been read or are kept open
+ // so that if an index update removes them we'll still have them
+ freqStream = cfsDir.openInput(segment + "." + IndexFileNames.FREQ_EXTENSION, readBufferSize);
+
+ if (anyProx) {
+ proxStream = cfsDir.openInput(segment + "." + IndexFileNames.PROX_EXTENSION, readBufferSize);
+ } else {
+ proxStream = null;
+ }
+ success = true;
+ } finally {
+ if (!success) {
+ decRef();
+ }
+ }
+ }
+
+ synchronized TermVectorsReader getTermVectorsReaderOrig() {
+ return termVectorsReaderOrig;
+ }
+
+ synchronized FieldsReader getFieldsReaderOrig() {
+ return fieldsReaderOrig;
+ }
+
+ synchronized void incRef() {
+ ref.incRef();
+ }
+
+ synchronized Directory getCFSReader() {
+ return cfsReader;
+ }
+
+ synchronized void decRef() throws IOException {
+
+ if (ref.decRef() == 0) {
+
+ // close everything, nothing is shared anymore with other readers
+ if (tis != null) {
+ tis.close();
+ // null so if an app hangs on to us we still free most ram
+ tis = null;
+ }
+
+ if (freqStream != null) {
+ freqStream.close();
+ }
+
+ if (proxStream != null) {
+ proxStream.close();
+ }
+
+ if (termVectorsReaderOrig != null) {
+ termVectorsReaderOrig.close();
+ }
+ if (fieldsReaderOrig != null) {
+ fieldsReaderOrig.close();
+ }
+
+ if (cfsReader != null) {
+ cfsReader.close();
+ }
+
+ if (storeCFSReader != null) {
+ storeCFSReader.close();
+ }
+ }
+ }
+
+ synchronized void openDocStores(SegmentInfo si) throws IOException {
+
+ assert si.name.equals(segment);
+
+ if (fieldsReaderOrig == null) {
+ final Directory storeDir;
+ if (si.getDocStoreOffset() != -1) {
+ if (si.getDocStoreIsCompoundFile()) {
+ assert storeCFSReader == null;
+ storeCFSReader = new CompoundFileReader(dir,
+ si.getDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION,
+ readBufferSize);
+ storeDir = storeCFSReader;
+ assert storeDir != null;
+ } else {
+ storeDir = dir;
+ assert storeDir != null;
+ }
+ } else if (si.getUseCompoundFile()) {
+ // In some cases, we were originally opened when CFS
+ // was not used, but then we are asked to open doc
+ // stores after the segment has switched to CFS
+ if (cfsReader == null) {
+ cfsReader = new CompoundFileReader(dir, segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
+ }
+ storeDir = cfsReader;
+ assert storeDir != null;
+ } else {
+ storeDir = dir;
+ assert storeDir != null;
+ }
+
+ final String storesSegment;
+ if (si.getDocStoreOffset() != -1) {
+ storesSegment = si.getDocStoreSegment();
+ } else {
+ storesSegment = segment;
+ }
+
+ fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize,
+ si.getDocStoreOffset(), si.docCount);
+
+ // Verify two sources of "maxDoc" agree:
+ if (si.getDocStoreOffset() == -1 && fieldsReaderOrig.size() != si.docCount) {
+ throw new CorruptIndexException("doc counts differ for segment " + segment + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount);
+ }
+
+ if (fieldInfos.hasVectors()) { // open term vector files only as needed
+ termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
+ }
+ }
+ }
+ }
+
/**
* Sets the initial value
*/
private class FieldsReaderLocal extends CloseableThreadLocal {
protected Object initialValue() {
- return (FieldsReader) fieldsReaderOrig.clone();
+ return core.getFieldsReaderOrig().clone();
}
}
@@ -421,45 +581,19 @@
} catch (Exception e) {
throw new RuntimeException("cannot load SegmentReader class: " + e, e);
}
- instance.directory = dir;
instance.readOnly = readOnly;
- instance.segment = si.name;
instance.si = si;
instance.readBufferSize = readBufferSize;
boolean success = false;
try {
- // Use compound file directory for some files, if it exists
- Directory cfsDir = instance.directory();
- if (si.getUseCompoundFile()) {
- instance.cfsReader = new CompoundFileReader(instance.directory(), instance.segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
- cfsDir = instance.cfsReader;
- }
-
- instance.fieldInfos = new FieldInfos(cfsDir, instance.segment + ".fnm");
-
+ instance.core = new CoreReaders(dir, si, readBufferSize);
if (doOpenStores) {
- instance.openDocStores();
+ instance.core.openDocStores(si);
}
-
- boolean anyProx = false;
- final int numFields = instance.fieldInfos.size();
- for(int i=0;!anyProx && i<numFields;i++)
- if (!instance.fieldInfos.fieldInfo(i).omitTermFreqAndPositions)
- anyProx = true;
-
- instance.tis = new TermInfosReader(cfsDir, instance.segment, instance.fieldInfos, readBufferSize);
-
instance.loadDeletedDocs();
-
- // make sure that all index files have been read or are kept open
- // so that if an index update removes them we'll still have them
- instance.freqStream = cfsDir.openInput(instance.segment + ".frq", readBufferSize);
- if (anyProx)
- instance.proxStream = cfsDir.openInput(instance.segment + ".prx", readBufferSize);
- instance.openNorms(cfsDir, readBufferSize);
-
+ instance.openNorms(instance.core.cfsDir, readBufferSize);
success = true;
} finally {
@@ -475,66 +609,10 @@
return instance;
}
- synchronized void openDocStores(SegmentReader orig) throws IOException {
- if (fieldsReaderOrig == null) {
- orig.openDocStores();
-
- fieldsReaderOrig = orig.fieldsReaderOrig;
- termVectorsReaderOrig = orig.termVectorsReaderOrig;
- storeCFSReader = orig.storeCFSReader;
- cfsReader = orig.cfsReader;
- }
+ void openDocStores() throws IOException {
+ core.openDocStores(si);
}
- synchronized void openDocStores() throws IOException {
- if (fieldsReaderOrig == null) {
- final Directory storeDir;
- if (si.getDocStoreOffset() != -1) {
- if (si.getDocStoreIsCompoundFile()) {
- storeCFSReader = new CompoundFileReader(directory(),
- si.getDocStoreSegment() + "." + IndexFileNames.COMPOUND_FILE_STORE_EXTENSION,
- readBufferSize);
- storeDir = storeCFSReader;
- assert storeDir != null;
- } else {
- storeDir = directory();
- assert storeDir != null;
- }
- } else if (si.getUseCompoundFile()) {
- // In some cases, we were originally opened when CFS
- // was not used, but then we are asked to open doc
- // stores after the segment has switched to CFS
- if (cfsReader == null) {
- cfsReader = new CompoundFileReader(directory(), segment + "." + IndexFileNames.COMPOUND_FILE_EXTENSION, readBufferSize);
- }
- storeDir = cfsReader;
- assert storeDir != null;
- } else {
- storeDir = directory();
- assert storeDir != null;
- }
-
- final String storesSegment;
- if (si.getDocStoreOffset() != -1) {
- storesSegment = si.getDocStoreSegment();
- } else {
- storesSegment = segment;
- }
-
- fieldsReaderOrig = new FieldsReader(storeDir, storesSegment, fieldInfos, readBufferSize,
- si.getDocStoreOffset(), si.docCount);
-
- // Verify two sources of "maxDoc" agree:
- if (si.getDocStoreOffset() == -1 && fieldsReaderOrig.size() != si.docCount) {
- throw new CorruptIndexException("doc counts differ for segment " + si.name + ": fieldsReader shows " + fieldsReaderOrig.size() + " but segmentInfo shows " + si.docCount);
- }
-
- if (fieldInfos.hasVectors()) { // open term vector files only as needed
- termVectorsReaderOrig = new TermVectorsReader(storeDir, storesSegment, fieldInfos, readBufferSize, si.getDocStoreOffset(), si.docCount);
- }
- }
- }
-
private void loadDeletedDocs() throws IOException {
// NOTE: the bitvector is stored using the regular directory, not cfs
if (hasDeletions(si)) {
@@ -590,8 +668,8 @@
&& (!si.hasDeletions() || this.si.getDelFileName().equals(si.getDelFileName()));
boolean normsUpToDate = true;
- boolean[] fieldNormsChanged = new boolean[fieldInfos.size()];
- final int fieldCount = fieldInfos.size();
+ boolean[] fieldNormsChanged = new boolean[core.fieldInfos.size()];
+ final int fieldCount = core.fieldInfos.size();
for (int i = 0; i < fieldCount; i++) {
if (!this.si.getNormFileName(i).equals(si.getNormFileName(i))) {
normsUpToDate = false;
@@ -622,23 +700,14 @@
boolean success = false;
try {
- coreRef.incRef();
- clone.coreRef = coreRef;
+ core.incRef();
+ clone.core = core;
clone.readOnly = openReadOnly;
- clone.directory = directory;
+ //clone.directory = directory;
clone.si = si;
- clone.segment = segment;
+ //clone.segment = segment;
clone.readBufferSize = readBufferSize;
- clone.cfsReader = cfsReader;
- clone.storeCFSReader = storeCFSReader;
- clone.fieldInfos = fieldInfos;
- clone.tis = tis;
- clone.freqStream = freqStream;
- clone.proxStream = proxStream;
- clone.termVectorsReaderOrig = termVectorsReaderOrig;
- clone.fieldsReaderOrig = fieldsReaderOrig;
-
if (!openReadOnly && hasChanges) {
// My pending changes transfer to the new reader
clone.pendingDeleteCount = pendingDeleteCount;
@@ -674,16 +743,16 @@
// Clone unchanged norms to the cloned reader
if (doClone || !fieldNormsChanged[i]) {
- final String curField = fieldInfos.fieldInfo(i).name;
+ final String curField = core.fieldInfos.fieldInfo(i).name;
Norm norm = (Norm) this.norms.get(curField);
if (norm != null)
clone.norms.put(curField, norm.clone());
}
}
-
+
// If we are not cloning, then this will open anew
// any norms that have changed:
- clone.openNorms(si.getUseCompoundFile() ? cfsReader : directory(), readBufferSize);
+ clone.openNorms(si.getUseCompoundFile() ? core.getCFSReader() : directory(), readBufferSize);
success = true;
} finally {
@@ -720,7 +789,7 @@
}
if (normsDirty) { // re-write norms
- si.setNumFields(fieldInfos.size());
+ si.setNumFields(core.fieldInfos.size());
Iterator it = norms.values().iterator();
while (it.hasNext()) {
Norm norm = (Norm) it.next();
@@ -738,7 +807,7 @@
FieldsReader getFieldsReader() {
return (FieldsReader) fieldsReaderLocal.get();
}
-
+
protected void doClose() throws IOException {
termVectorsLocal.close();
fieldsReaderLocal.close();
@@ -753,32 +822,8 @@
while (it.hasNext()) {
((Norm) it.next()).decRef();
}
-
- if (coreRef.decRef() == 0) {
-
- // close everything, nothing is shared anymore with other readers
- if (tis != null) {
- tis.close();
- // null so if an app hangs on to us we still free most ram
- tis = null;
- }
-
- if (freqStream != null)
- freqStream.close();
- if (proxStream != null)
- proxStream.close();
-
- if (termVectorsReaderOrig != null)
- termVectorsReaderOrig.close();
-
- if (fieldsReaderOrig != null)
- fieldsReaderOrig.close();
-
- if (cfsReader != null)
- cfsReader.close();
-
- if (storeCFSReader != null)
- storeCFSReader.close();
+ if (core != null) {
+ core.decRef();
}
}
@@ -841,16 +886,16 @@
public TermEnum terms() {
ensureOpen();
- return tis.terms();
+ return core.tis.terms();
}
public TermEnum terms(Term t) throws IOException {
ensureOpen();
- return tis.terms(t);
+ return core.tis.terms(t);
}
- FieldInfos getFieldInfos() {
- return fieldInfos;
+ FieldInfos fieldInfos() {
+ return core.fieldInfos;
}
public Document document(int n, FieldSelector fieldSelector) throws CorruptIndexException, IOException {
@@ -882,7 +927,7 @@
public int docFreq(Term t) throws IOException {
ensureOpen();
- TermInfo ti = tis.get(t);
+ TermInfo ti = core.tis.get(t);
if (ti != null)
return ti.docFreq;
else
@@ -903,11 +948,11 @@
}
public void setTermInfosIndexDivisor(int indexDivisor) throws IllegalStateException {
- tis.setIndexDivisor(indexDivisor);
+ core.tis.setIndexDivisor(indexDivisor);
}
public int getTermInfosIndexDivisor() {
- return tis.getIndexDivisor();
+ return core.tis.getIndexDivisor();
}
/**
@@ -917,8 +962,8 @@
ensureOpen();
Set fieldSet = new HashSet();
- for (int i = 0; i < fieldInfos.size(); i++) {
- FieldInfo fi = fieldInfos.fieldInfo(i);
+ for (int i = 0; i < core.fieldInfos.size(); i++) {
+ FieldInfo fi = core.fieldInfos.fieldInfo(i);
if (fieldOption == IndexReader.FieldOption.ALL) {
fieldSet.add(fi.name);
}
@@ -1022,8 +1067,8 @@
private void openNorms(Directory cfsDir, int readBufferSize) throws IOException {
long nextNormSeek = SegmentMerger.NORMS_HEADER.length; //skip header (header unused for now)
int maxDoc = maxDoc();
- for (int i = 0; i < fieldInfos.size(); i++) {
- FieldInfo fi = fieldInfos.fieldInfo(i);
+ for (int i = 0; i < core.fieldInfos.size(); i++) {
+ FieldInfo fi = core.fieldInfos.fieldInfo(i);
if (norms.containsKey(fi.name)) {
// in case this SegmentReader is being re-opened, we might be able to
// reuse some norm instances and skip loading them here
@@ -1089,19 +1134,27 @@
* Create a clone from the initial TermVectorsReader and store it in the ThreadLocal.
* @return TermVectorsReader
*/
- private TermVectorsReader getTermVectorsReader() {
- assert termVectorsReaderOrig != null;
- TermVectorsReader tvReader = (TermVectorsReader)termVectorsLocal.get();
+ TermVectorsReader getTermVectorsReader() {
+ TermVectorsReader tvReader = (TermVectorsReader) termVectorsLocal.get();
if (tvReader == null) {
- try {
- tvReader = (TermVectorsReader)termVectorsReaderOrig.clone();
- } catch (CloneNotSupportedException cnse) {
+ TermVectorsReader orig = core.getTermVectorsReaderOrig();
+ if (orig == null) {
return null;
+ } else {
+ try {
+ tvReader = (TermVectorsReader) orig.clone();
+ } catch (CloneNotSupportedException cnse) {
+ return null;
+ }
}
termVectorsLocal.set(tvReader);
}
return tvReader;
}
+
+ TermVectorsReader getTermVectorsReaderOrig() {
+ return core.getTermVectorsReaderOrig();
+ }
/** Return a term frequency vector for the specified document and field. The
* vector returned contains term numbers and frequencies for all terms in
@@ -1112,8 +1165,8 @@
public TermFreqVector getTermFreqVector(int docNumber, String field) throws IOException {
// Check if this field is invalid or has no stored term vector
ensureOpen();
- FieldInfo fi = fieldInfos.fieldInfo(field);
- if (fi == null || !fi.storeTermVector || termVectorsReaderOrig == null)
+ FieldInfo fi = core.fieldInfos.fieldInfo(field);
+ if (fi == null || !fi.storeTermVector)
return null;
TermVectorsReader termVectorsReader = getTermVectorsReader();
@@ -1126,13 +1179,12 @@
public void getTermFreqVector(int docNumber, String field, TermVectorMapper mapper) throws IOException {
ensureOpen();
- FieldInfo fi = fieldInfos.fieldInfo(field);
- if (fi == null || !fi.storeTermVector || termVectorsReaderOrig == null)
+ FieldInfo fi = core.fieldInfos.fieldInfo(field);
+ if (fi == null || !fi.storeTermVector)
return;
TermVectorsReader termVectorsReader = getTermVectorsReader();
- if (termVectorsReader == null)
- {
+ if (termVectorsReader == null) {
return;
}
@@ -1143,8 +1195,6 @@
public void getTermFreqVector(int docNumber, TermVectorMapper mapper) throws IOException {
ensureOpen();
- if (termVectorsReaderOrig == null)
- return;
TermVectorsReader termVectorsReader = getTermVectorsReader();
if (termVectorsReader == null)
@@ -1162,8 +1212,6 @@
*/
public TermFreqVector[] getTermFreqVectors(int docNumber) throws IOException {
ensureOpen();
- if (termVectorsReaderOrig == null)
- return null;
TermVectorsReader termVectorsReader = getTermVectorsReader();
if (termVectorsReader == null)
@@ -1172,16 +1220,11 @@
return termVectorsReader.get(docNumber);
}
- /** Returns the field infos of this segment */
- FieldInfos fieldInfos() {
- return fieldInfos;
- }
-
/**
* Return the name of the segment this reader is reading.
*/
public String getSegmentName() {
- return segment;
+ return core.segment;
}
/**
@@ -1224,18 +1267,18 @@
// Don't ensureOpen here -- in certain cases, when a
// cloned/reopened reader needs to commit, it may call
// this method on the closed original reader
- return directory;
+ return core.dir;
}
// This is necessary so that cloned SegmentReaders (which
// share the underlying postings data) will map to the
// same entry in the FieldCache. See LUCENE-1579.
public final Object getFieldCacheKey() {
- return freqStream;
+ return core.freqStream;
}
public long getUniqueTermCount() {
- return tis.size();
+ return core.tis.size();
}
/**
@@ -1261,4 +1304,5 @@
throw new IllegalArgumentException(reader + " is not a SegmentReader or a single-segment DirectoryReader");
}
+
}
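Note: this is the heart of the patch. Everything identical across clones and reopens of a segment (freqStream, proxStream, tis, fieldInfos, the CFS readers, the original fields/vectors readers) moves into the ref-counted CoreReaders, replacing the loose coreRef bookkeeping and field-by-field copying in clone. A stand-alone sketch of the lifecycle under illustrative names (the pattern, not Lucene's actual classes):

    import java.io.IOException;

    final class Core {
      private int refCount = 1; // the creating reader holds the first ref

      synchronized void incRef() {
        refCount++;
      }
      synchronized void decRef() throws IOException {
        if (--refCount == 0) {
          // last holder gone: close freqStream, proxStream, tis, cfsReader, ...
        }
      }
    }

    final class Reader {
      final Core core;

      Reader(Core core) {
        this.core = core;
      }
      Reader reopenOrClone() { // mirrors clone()/reopen() in the diff above
        core.incRef();         // share, don't copy, the core
        return new Reader(core);
      }
      void close() throws IOException {
        core.decRef();         // mirrors doClose() above
      }
    }

A reader instance can thus be closed while clones keep the core alive, and getFieldCacheKey() returning core.freqStream makes all sharers map to the same FieldCache entry (LUCENE-1579).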
Index: src/java/org/apache/lucene/index/SegmentTermDocs.java
===================================================================
--- src/java/org/apache/lucene/index/SegmentTermDocs.java (revision 792244)
+++ src/java/org/apache/lucene/index/SegmentTermDocs.java (working copy)
@@ -45,16 +45,16 @@
protected SegmentTermDocs(SegmentReader parent) {
this.parent = parent;
- this.freqStream = (IndexInput) parent.freqStream.clone();
+ this.freqStream = (IndexInput) parent.core.freqStream.clone();
synchronized (parent) {
this.deletedDocs = parent.deletedDocs;
}
- this.skipInterval = parent.tis.getSkipInterval();
- this.maxSkipLevels = parent.tis.getMaxSkipLevels();
+ this.skipInterval = parent.core.tis.getSkipInterval();
+ this.maxSkipLevels = parent.core.tis.getMaxSkipLevels();
}
public void seek(Term term) throws IOException {
- TermInfo ti = parent.tis.get(term);
+ TermInfo ti = parent.core.tis.get(term);
seek(ti, term);
}
@@ -63,13 +63,13 @@
Term term;
// use comparison of fieldinfos to verify that termEnum belongs to the same segment as this SegmentTermDocs
- if (termEnum instanceof SegmentTermEnum && ((SegmentTermEnum) termEnum).fieldInfos == parent.fieldInfos) { // optimized case
+ if (termEnum instanceof SegmentTermEnum && ((SegmentTermEnum) termEnum).fieldInfos == parent.core.fieldInfos) { // optimized case
SegmentTermEnum segmentTermEnum = ((SegmentTermEnum) termEnum);
term = segmentTermEnum.term();
ti = segmentTermEnum.termInfo();
} else { // punt case
term = termEnum.term();
- ti = parent.tis.get(term);
+ ti = parent.core.tis.get(term);
}
seek(ti, term);
@@ -77,7 +77,7 @@
void seek(TermInfo ti, Term term) throws IOException {
count = 0;
- FieldInfo fi = parent.fieldInfos.fieldInfo(term.field);
+ FieldInfo fi = parent.core.fieldInfos.fieldInfo(term.field);
currentFieldOmitTermFreqAndPositions = (fi != null) ? fi.omitTermFreqAndPositions : false;
currentFieldStoresPayloads = (fi != null) ? fi.storePayloads : false;
if (ti == null) {
Index: src/java/org/apache/lucene/index/SegmentMerger.java
===================================================================
--- src/java/org/apache/lucene/index/SegmentMerger.java (revision 792244)
+++ src/java/org/apache/lucene/index/SegmentMerger.java (working copy)
@@ -253,7 +253,7 @@
if (reader instanceof SegmentReader) {
SegmentReader segmentReader = (SegmentReader) reader;
boolean same = true;
- FieldInfos segmentFieldInfos = segmentReader.getFieldInfos();
+ FieldInfos segmentFieldInfos = segmentReader.fieldInfos();
int numFieldInfos = segmentFieldInfos.size();
for (int j = 0; same && j < numFieldInfos; j++) {
same = fieldInfos.fieldName(j).equals(segmentFieldInfos.fieldName(j));
@@ -285,7 +285,7 @@
// with the fieldInfos of the last segment in this
// case, to keep that numbering.
final SegmentReader sr = (SegmentReader) readers.get(readers.size()-1);
- fieldInfos = (FieldInfos) sr.fieldInfos.clone();
+ fieldInfos = (FieldInfos) sr.core.fieldInfos.clone();
} else {
fieldInfos = new FieldInfos(); // merge field names
}
@@ -294,7 +294,7 @@
IndexReader reader = (IndexReader) iter.next();
if (reader instanceof SegmentReader) {
SegmentReader segmentReader = (SegmentReader) reader;
- FieldInfos readerFieldInfos = segmentReader.getFieldInfos();
+ FieldInfos readerFieldInfos = segmentReader.fieldInfos();
int numReaderFieldInfos = readerFieldInfos.size();
for (int j = 0; j < numReaderFieldInfos; j++) {
FieldInfo fi = readerFieldInfos.fieldInfo(j);
@@ -468,7 +468,7 @@
final SegmentReader matchingSegmentReader = matchingSegmentReaders[idx++];
TermVectorsReader matchingVectorsReader = null;
if (matchingSegmentReader != null) {
- TermVectorsReader vectorsReader = matchingSegmentReader.termVectorsReaderOrig;
+ TermVectorsReader vectorsReader = matchingSegmentReader.getTermVectorsReaderOrig();
// If the TV* files are an older format then they cannot read raw docs:
if (vectorsReader != null && vectorsReader.canReadRawDocs()) {
Index: src/java/org/apache/lucene/index/IndexWriter.java
===================================================================
--- src/java/org/apache/lucene/index/IndexWriter.java (revision 792244)
+++ src/java/org/apache/lucene/index/IndexWriter.java (working copy)
@@ -4891,7 +4891,7 @@
}
for(int i=0;i<numSegments;i++) {
- merge.readersClone[i].openDocStores(merge.readers[i]);
+ merge.readersClone[i].openDocStores();
}
// Clear DSS
Index: tags/lucene_2_4_back_compat_tests_20090704/src/test/org/apache/lucene/index/TestDocumentWriter.java
===================================================================
--- tags/lucene_2_4_back_compat_tests_20090704/src/test/org/apache/lucene/index/TestDocumentWriter.java (revision 792498)
+++ tags/lucene_2_4_back_compat_tests_20090704/src/test/org/apache/lucene/index/TestDocumentWriter.java (working copy)
@@ -94,8 +94,8 @@
// test that the norms are not present in the segment if
// omitNorms is true
- for (int i = 0; i < reader.fieldInfos.size(); i++) {
- FieldInfo fi = reader.fieldInfos.fieldInfo(i);
+ for (int i = 0; i < reader.core.fieldInfos.size(); i++) {
+ FieldInfo fi = reader.core.fieldInfos.fieldInfo(i);
if (fi.isIndexed) {
assertTrue(fi.omitNorms == !reader.hasNorms(fi.name));
}
Index: tags/lucene_2_4_back_compat_tests_20090704/src/test/org/apache/lucene/index/TestLazyProxSkipping.java
===================================================================
--- tags/lucene_2_4_back_compat_tests_20090704/src/test/org/apache/lucene/index/TestLazyProxSkipping.java (revision 792498)
+++ tags/lucene_2_4_back_compat_tests_20090704/src/test/org/apache/lucene/index/TestLazyProxSkipping.java (working copy)
@@ -44,11 +44,22 @@
private String term2 = "yy";
private String term3 = "zz";
+ private class SeekCountingDirectory extends RAMDirectory {
+ public IndexInput openInput(String name) throws IOException {
+ IndexInput ii = super.openInput(name);
+ if (name.endsWith(".prx")) {
+ // we decorate the proxStream with a wrapper class that lets us count the number of seek() calls
+ ii = new SeeksCountingStream(ii);
+ }
+ return ii;
+ }
+ }
private void createIndex(int numHits) throws IOException {
int numDocs = 500;
- Directory directory = new RAMDirectory();
+ Directory directory = new SeekCountingDirectory();
IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ writer.setUseCompoundFile(false);
writer.setMaxBufferedDocs(10);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
@@ -74,9 +85,6 @@
SegmentReader reader = SegmentReader.getOnlySegmentReader(directory);
- // we decorate the proxStream with a wrapper class that allows to count the number of calls of seek()
- reader.proxStream = new SeeksCountingStream(reader.proxStream);
-
this.searcher = new IndexSearcher(reader);
}
@@ -96,6 +104,7 @@
assertEquals(numHits, hits.length);
// check if the number of calls of seek() does not exceed the number of hits
+ assertTrue(this.seeksCounter > 0);
assertTrue(this.seeksCounter <= numHits + 1);
}
Index: tags/lucene_2_4_back_compat_tests_20090704/src/java/org/apache/lucene/index/SegmentReader.java
===================================================================
--- tags/lucene_2_4_back_compat_tests_20090704/src/java/org/apache/lucene/index/SegmentReader.java (revision 792498)
+++ tags/lucene_2_4_back_compat_tests_20090704/src/java/org/apache/lucene/index/SegmentReader.java (working copy)
@@ -78,7 +78,27 @@
// indicates the SegmentReader with which the resources are being shared,
// in case this is a re-opened reader
private SegmentReader referencedSegmentReader = null;
-
+
+ // stub of trunk's CoreReaders so shared tests referencing reader.core compile
+ static final class CoreReaders {
+ // Counts how many other readers share the core objects
+ // (freqStream, proxStream, tis, etc.) of this reader;
+ // when the shared ref count drops to 0, these core objects may be
+ // closed. A given instance of SegmentReader may be
+ // closed, even though it shares core objects with other
+ // SegmentReaders:
+ FieldsReader fieldsReaderOrig;
+ TermVectorsReader termVectorsReaderOrig;
+ CompoundFileReader cfsReader;
+ CompoundFileReader storeCFSReader;
+ TermInfosReader tis;
+ IndexInput freqStream;
+ IndexInput proxStream;
+ FieldInfos fieldInfos;
+ }
+
+ CoreReaders core;
+
private class Norm {
volatile int refCount;
boolean useSingleNormStream;