blob: b302840731917779cacb107db41698b4c9de1371 [file] [log] [blame]
Index: lucene/backwards/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java (working copy)
@@ -60,18 +60,18 @@
String[] fields = {"b", "t"};
Occur occur[] = {Occur.SHOULD, Occur.SHOULD};
TestQueryParser.QPTestAnalyzer a = new TestQueryParser.QPTestAnalyzer();
- MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, a);
+ MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, a);
Query q = mfqp.parse(qtxt);
assertEquals(expectedRes, q.toString());
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, qtxt, fields, occur, a);
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, qtxt, fields, occur, a);
assertEquals(expectedRes, q.toString());
}
public void testSimple() throws Exception {
String[] fields = {"b", "t"};
- MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
Query q = mfqp.parse("one");
assertEquals("b:one t:one", q.toString());
@@ -134,7 +134,7 @@
boosts.put("b", Float.valueOf(5));
boosts.put("t", Float.valueOf(10));
String[] fields = {"b", "t"};
- MultiFieldQueryParser mfqp = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), boosts);
+ MultiFieldQueryParser mfqp = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new StandardAnalyzer(TEST_VERSION_CURRENT), boosts);
//Check for simple
@@ -160,24 +160,24 @@
public void testStaticMethod1() throws ParseException {
String[] fields = {"b", "t"};
String[] queries = {"one", "two"};
- Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
assertEquals("b:one t:two", q.toString());
String[] queries2 = {"+one", "+two"};
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries2, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries2, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
assertEquals("(+b:one) (+t:two)", q.toString());
String[] queries3 = {"one", "+two"};
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries3, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries3, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
assertEquals("b:one (+t:two)", q.toString());
String[] queries4 = {"one +more", "+two"};
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries4, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries4, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
assertEquals("(b:one +b:more) (+t:two)", q.toString());
String[] queries5 = {"blah"};
try {
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries5, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries5, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
fail();
} catch(IllegalArgumentException e) {
// expected exception, array length differs
@@ -187,11 +187,11 @@
TestQueryParser.QPTestAnalyzer stopA = new TestQueryParser.QPTestAnalyzer();
String[] queries6 = {"((+stop))", "+((stop))"};
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries6, fields, stopA);
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries6, fields, stopA);
assertEquals("", q.toString());
String[] queries7 = {"one ((+stop)) +more", "+((stop)) +two"};
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries7, fields, stopA);
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries7, fields, stopA);
assertEquals("(b:one +b:more) (+t:two)", q.toString());
}
@@ -199,15 +199,15 @@
public void testStaticMethod2() throws ParseException {
String[] fields = {"b", "t"};
BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
- Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));
assertEquals("+b:one -t:one", q.toString());
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one two", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one two", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));
assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
try {
BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "blah", fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT));
fail();
} catch(IllegalArgumentException e) {
// expected exception, array length differs
@@ -218,17 +218,17 @@
String[] fields = {"b", "t"};
//int[] flags = {MultiFieldQueryParser.REQUIRED_FIELD, MultiFieldQueryParser.PROHIBITED_FIELD};
BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
- MultiFieldQueryParser parser = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ MultiFieldQueryParser parser = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new StandardAnalyzer(TEST_VERSION_CURRENT));
- Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));//, fields, flags, new StandardAnalyzer());
+ Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));//, fields, flags, new StandardAnalyzer());
assertEquals("+b:one -t:one", q.toString());
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "one two", fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "one two", fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));
assertEquals("+(b:one b:two) -(t:one t:two)", q.toString());
try {
BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, "blah", fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, "blah", fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT));
fail();
} catch(IllegalArgumentException e) {
// expected exception, array length differs
@@ -240,12 +240,12 @@
String[] fields = {"f1", "f2", "f3"};
BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST,
BooleanClause.Occur.MUST_NOT, BooleanClause.Occur.SHOULD};
- Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));
assertEquals("+f1:one -f2:two f3:three", q.toString());
try {
BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT));
fail();
} catch(IllegalArgumentException e) {
// expected exception, array length differs
@@ -256,12 +256,12 @@
String[] queries = {"one", "two"};
String[] fields = {"b", "t"};
BooleanClause.Occur[] flags = {BooleanClause.Occur.MUST, BooleanClause.Occur.MUST_NOT};
- Query q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ Query q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags, new StandardAnalyzer(TEST_VERSION_CURRENT));
assertEquals("+b:one -t:two", q.toString());
try {
BooleanClause.Occur[] flags2 = {BooleanClause.Occur.MUST};
- q = MultiFieldQueryParser.parse(Version.LUCENE_CURRENT, queries, fields, flags2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ q = MultiFieldQueryParser.parse(TEST_VERSION_CURRENT, queries, fields, flags2, new StandardAnalyzer(TEST_VERSION_CURRENT));
fail();
} catch(IllegalArgumentException e) {
// expected exception, array length differs
@@ -270,7 +270,7 @@
public void testAnalyzerReturningNull() throws ParseException {
String[] fields = new String[] { "f1", "f2", "f3" };
- MultiFieldQueryParser parser = new MultiFieldQueryParser(Version.LUCENE_CURRENT, fields, new AnalyzerReturningNull());
+ MultiFieldQueryParser parser = new MultiFieldQueryParser(TEST_VERSION_CURRENT, fields, new AnalyzerReturningNull());
Query q = parser.parse("bla AND blo");
assertEquals("+(f2:bla f3:bla) +(f2:blo f3:blo)", q.toString());
// the following queries are not affected as their terms are not analyzed anyway:
@@ -283,7 +283,7 @@
}
public void testStopWordSearching() throws Exception {
- Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
Directory ramDir = new RAMDirectory();
IndexWriter iw = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
@@ -292,7 +292,7 @@
iw.close();
MultiFieldQueryParser mfqp =
- new MultiFieldQueryParser(Version.LUCENE_CURRENT, new String[] {"body"}, analyzer);
+ new MultiFieldQueryParser(TEST_VERSION_CURRENT, new String[] {"body"}, analyzer);
mfqp.setDefaultOperator(QueryParser.Operator.AND);
Query q = mfqp.parse("the footest");
IndexSearcher is = new IndexSearcher(ramDir, true);
@@ -305,7 +305,7 @@
* Return empty tokens for field "f1".
*/
private static class AnalyzerReturningNull extends Analyzer {
- StandardAnalyzer stdAnalyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ StandardAnalyzer stdAnalyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
public AnalyzerReturningNull() {
}
Index: lucene/backwards/src/test/org/apache/lucene/analysis/TestStandardAnalyzer.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/analysis/TestStandardAnalyzer.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/analysis/TestStandardAnalyzer.java (working copy)
@@ -27,16 +27,16 @@
public class TestStandardAnalyzer extends BaseTokenStreamTestCase {
- private Analyzer a = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ private Analyzer a = new StandardAnalyzer(TEST_VERSION_CURRENT);
public void testMaxTermLength() throws Exception {
- StandardAnalyzer sa = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ StandardAnalyzer sa = new StandardAnalyzer(TEST_VERSION_CURRENT);
sa.setMaxTokenLength(5);
assertAnalyzesTo(sa, "ab cd toolong xy z", new String[]{"ab", "cd", "xy", "z"});
}
public void testMaxTermLength2() throws Exception {
- StandardAnalyzer sa = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ StandardAnalyzer sa = new StandardAnalyzer(TEST_VERSION_CURRENT);
assertAnalyzesTo(sa, "ab cd toolong xy z", new String[]{"ab", "cd", "toolong", "xy", "z"});
sa.setMaxTokenLength(5);
@@ -100,7 +100,7 @@
public void testLucene1140() throws Exception {
try {
- StandardAnalyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
assertAnalyzesTo(analyzer, "www.nutch.org.", new String[]{ "www.nutch.org" }, new String[] { "<HOST>" });
} catch (NullPointerException e) {
fail("Should not throw an NPE and it did");
@@ -110,7 +110,7 @@
public void testDomainNames() throws Exception {
// Current lucene should not show the bug
- StandardAnalyzer a2 = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ StandardAnalyzer a2 = new StandardAnalyzer(TEST_VERSION_CURRENT);
// domain names
assertAnalyzesTo(a2, "www.nutch.org", new String[]{"www.nutch.org"});
Index: lucene/backwards/src/test/org/apache/lucene/analysis/TestAnalyzers.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/analysis/TestAnalyzers.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/analysis/TestAnalyzers.java (working copy)
@@ -75,7 +75,7 @@
}
public void testStop() throws Exception {
- Analyzer a = new StopAnalyzer(Version.LUCENE_CURRENT);
+ Analyzer a = new StopAnalyzer(TEST_VERSION_CURRENT);
assertAnalyzesTo(a, "foo bar FOO BAR",
new String[] { "foo", "bar", "foo", "bar" });
assertAnalyzesTo(a, "foo a bar such FOO THESE BAR",
@@ -123,7 +123,7 @@
/* StandardAnalyzer was made final in 3.1:
private static class MyStandardAnalyzer extends StandardAnalyzer {
public MyStandardAnalyzer() {
- super(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ super(TEST_VERSION_CURRENT);
}
@Override
Index: lucene/backwards/src/test/org/apache/lucene/analysis/TestTeeSinkTokenFilter.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/analysis/TestTeeSinkTokenFilter.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/analysis/TestTeeSinkTokenFilter.java (working copy)
@@ -171,10 +171,10 @@
buffer.append(English.intToEnglish(i).toUpperCase()).append(' ');
}
//make sure we produce the same tokens
- TeeSinkTokenFilter teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString()))));
+ TeeSinkTokenFilter teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString()))));
TokenStream sink = teeStream.newSinkTokenStream(new ModuloSinkFilter(100));
teeStream.consumeAllTokens();
- TokenStream stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString()))), 100);
+ TokenStream stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString()))), 100);
TermAttribute tfTok = stream.addAttribute(TermAttribute.class);
TermAttribute sinkTok = sink.addAttribute(TermAttribute.class);
for (int i=0; stream.incrementToken(); i++) {
@@ -187,12 +187,12 @@
int tfPos = 0;
long start = System.currentTimeMillis();
for (int i = 0; i < 20; i++) {
- stream = new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString())));
+ stream = new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString())));
PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
while (stream.incrementToken()) {
tfPos += posIncrAtt.getPositionIncrement();
}
- stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString()))), modCounts[j]);
+ stream = new ModuloTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString()))), modCounts[j]);
posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class);
while (stream.incrementToken()) {
tfPos += posIncrAtt.getPositionIncrement();
@@ -204,7 +204,7 @@
//simulate one field with one sink
start = System.currentTimeMillis();
for (int i = 0; i < 20; i++) {
- teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(Version.LUCENE_CURRENT, new StringReader(buffer.toString()))));
+ teeStream = new TeeSinkTokenFilter(new StandardFilter(new StandardTokenizer(TEST_VERSION_CURRENT, new StringReader(buffer.toString()))));
sink = teeStream.newSinkTokenStream(new ModuloSinkFilter(modCounts[j]));
PositionIncrementAttribute posIncrAtt = teeStream.getAttribute(PositionIncrementAttribute.class);
while (teeStream.incrementToken()) {
Index: lucene/backwards/src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java (working copy)
@@ -60,7 +60,7 @@
PerFieldAnalyzerWrapper analyzer = new PerFieldAnalyzerWrapper(new SimpleAnalyzer());
analyzer.addAnalyzer("partnum", new KeywordAnalyzer());
- QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, "description", analyzer);
+ QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, "description", analyzer);
Query query = queryParser.parse("partnum:Q36 AND SPACE");
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
Index: lucene/backwards/src/test/org/apache/lucene/analysis/TestStopAnalyzer.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/analysis/TestStopAnalyzer.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/analysis/TestStopAnalyzer.java (working copy)
@@ -29,7 +29,7 @@
public class TestStopAnalyzer extends BaseTokenStreamTestCase {
- private StopAnalyzer stop = new StopAnalyzer(Version.LUCENE_CURRENT);
+ private StopAnalyzer stop = new StopAnalyzer(TEST_VERSION_CURRENT);
private Set inValidTokens = new HashSet();
public TestStopAnalyzer(String s) {
@@ -82,7 +82,7 @@
stopWordsSet.add("good");
stopWordsSet.add("test");
stopWordsSet.add("analyzer");
- StopAnalyzer newStop = new StopAnalyzer(Version.LUCENE_CURRENT, stopWordsSet);
+ StopAnalyzer newStop = new StopAnalyzer(TEST_VERSION_CURRENT, stopWordsSet);
StringReader reader = new StringReader("This is a good test of the english stop analyzer with positions");
int expectedIncr[] = { 1, 1, 1, 3, 1, 1, 1, 2, 1};
TokenStream stream = newStop.tokenStream("test", reader);
Index: lucene/backwards/src/test/org/apache/lucene/TestDemo.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/TestDemo.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/TestDemo.java (working copy)
@@ -45,7 +45,7 @@
public void testDemo() throws IOException, ParseException {
- Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
+ Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
// Store the index in memory:
Directory directory = new RAMDirectory();
@@ -63,7 +63,7 @@
// Now search the index:
IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
// Parse a simple query that searches for "text":
- QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "fieldname", analyzer);
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "fieldname", analyzer);
Query query = parser.parse("text");
ScoreDoc[] hits = isearcher.search(query, null, 1000).scoreDocs;
assertEquals(1, hits.length);
Index: lucene/backwards/src/test/org/apache/lucene/search/TestNot.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestNot.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestNot.java (working copy)
@@ -49,7 +49,7 @@
writer.close();
Searcher searcher = new IndexSearcher(store, true);
- QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "field", new SimpleAnalyzer());
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "field", new SimpleAnalyzer());
Query query = parser.parse("a NOT b");
//System.out.println(query);
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;
Index: lucene/backwards/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestTimeLimitingCollector.java (working copy)
@@ -89,7 +89,7 @@
for (int i = 1; i < docText.length; i++) {
qtxt += ' ' + docText[i]; // large query so that search will be longer
}
- QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, FIELD_NAME, new WhitespaceAnalyzer());
+ QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new WhitespaceAnalyzer());
query = queryParser.parse(qtxt);
// warm the searcher
Index: lucene/backwards/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestCachingWrapperFilter.java (working copy)
@@ -32,7 +32,7 @@
public class TestCachingWrapperFilter extends LuceneTestCase {
public void testCachingWorks() throws Exception {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
@@ -71,7 +71,7 @@
public void testIsCacheAble() throws Exception {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.close();
IndexReader reader = IndexReader.open(dir, true);
Index: lucene/backwards/src/test/org/apache/lucene/search/TestPhraseQuery.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestPhraseQuery.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestPhraseQuery.java (working copy)
@@ -362,8 +362,8 @@
}
public void testToString() throws Exception {
- StopAnalyzer analyzer = new StopAnalyzer(Version.LUCENE_CURRENT);
- QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field", analyzer);
+ StopAnalyzer analyzer = new StopAnalyzer(TEST_VERSION_CURRENT);
+ QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field", analyzer);
qp.setEnablePositionIncrements(true);
PhraseQuery q = (PhraseQuery)qp.parse("\"this hi this is a test is\"");
assertEquals("field:\"? hi ? ? ? test\"", q.toString());
Index: lucene/backwards/src/test/org/apache/lucene/search/TestPositionIncrement.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestPositionIncrement.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestPositionIncrement.java (working copy)
@@ -191,7 +191,7 @@
assertEquals(0, hits.length);
// should not find "1 2" because there is a gap of 1 in the index
- QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "field",
+ QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "field",
new StopWhitespaceAnalyzer(false));
q = (PhraseQuery) qp.parse("\"1 2\"");
hits = searcher.search(q, null, 1000).scoreDocs;
@@ -215,7 +215,7 @@
assertEquals(0, hits.length);
// when both qp qnd stopFilter propagate increments, we should find the doc.
- qp = new QueryParser(Version.LUCENE_CURRENT, "field",
+ qp = new QueryParser(TEST_VERSION_CURRENT, "field",
new StopWhitespaceAnalyzer(true));
qp.setEnablePositionIncrements(true);
q = (PhraseQuery) qp.parse("\"1 stop 2\"");
Index: lucene/backwards/src/test/org/apache/lucene/search/TestBooleanOr.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestBooleanOr.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestBooleanOr.java (working copy)
@@ -135,7 +135,7 @@
RAMDirectory rd = new RAMDirectory();
//
- IndexWriter writer = new IndexWriter(rd, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(rd, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
//
Document d = new Document();
Index: lucene/backwards/src/test/org/apache/lucene/search/TestDateSort.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestDateSort.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestDateSort.java (working copy)
@@ -76,7 +76,7 @@
Sort sort = new Sort(new SortField(DATE_TIME_FIELD, SortField.STRING, true));
- QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, TEXT_FIELD, new WhitespaceAnalyzer());
+ QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, TEXT_FIELD, new WhitespaceAnalyzer());
Query query = queryParser.parse("Document");
// Execute the search and process the search results.
Index: lucene/backwards/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/spans/TestNearSpansOrdered.java (working copy)
@@ -37,7 +37,7 @@
public static final String FIELD = "field";
public static final QueryParser qp =
- new QueryParser(Version.LUCENE_CURRENT, FIELD, new WhitespaceAnalyzer());
+ new QueryParser(TEST_VERSION_CURRENT, FIELD, new WhitespaceAnalyzer());
@Override
public void tearDown() throws Exception {
Index: lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpans.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpans.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpans.java (working copy)
@@ -452,7 +452,7 @@
// LUCENE-1404
public void testNPESpanQuery() throws Throwable {
final Directory dir = new MockRAMDirectory();
- final IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT, Collections.emptySet()), IndexWriter.MaxFieldLength.LIMITED);
+ final IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT, Collections.emptySet()), IndexWriter.MaxFieldLength.LIMITED);
// Add documents
addDoc(writer, "1", "the big dogs went running to the market");
Index: lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced.java (working copy)
@@ -56,7 +56,7 @@
// create test index
mDirectory = new RAMDirectory();
- final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocument(writer, "1", "I think it should work.");
addDocument(writer, "2", "I think it should work.");
addDocument(writer, "3", "I think it should work.");
Index: lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced2.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced2.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/spans/TestSpansAdvanced2.java (working copy)
@@ -40,7 +40,7 @@
super.setUp();
// create test index
- final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ final IndexWriter writer = new IndexWriter(mDirectory, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
addDocument(writer, "A", "Should we, could we, would we?");
addDocument(writer, "B", "It should. Should it?");
addDocument(writer, "C", "It shouldn't.");
Index: lucene/backwards/src/test/org/apache/lucene/search/TestMultiSearcher.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestMultiSearcher.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestMultiSearcher.java (working copy)
@@ -83,9 +83,9 @@
lDoc3.add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
// creating an index writer for the first index
- IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
// creating an index writer for the second index, but writing nothing
- IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
//--------------------------------------------------------------------
// scenario 1
@@ -102,7 +102,7 @@
writerB.close();
// creating the query
- QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "fulltext", new StandardAnalyzer(TEST_VERSION_CURRENT));
Query query = parser.parse("handle:1");
// building the searchables
@@ -129,7 +129,7 @@
//--------------------------------------------------------------------
// adding one document to the empty index
- writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writerB.addDocument(lDoc);
writerB.optimize();
writerB.close();
@@ -175,7 +175,7 @@
readerB.close();
// optimizing the index with the writer
- writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writerB.optimize();
writerB.close();
Index: lucene/backwards/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestQueryWrapperFilter.java (working copy)
@@ -33,7 +33,7 @@
public void testBasic() throws Exception {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true,
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.add(new Field("field", "value", Store.NO, Index.ANALYZED));
Index: lucene/backwards/src/test/org/apache/lucene/search/TestSimpleExplanations.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestSimpleExplanations.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestSimpleExplanations.java (working copy)
@@ -317,8 +317,8 @@
Document lDoc3 = new Document();
lDoc3.add(new Field("handle", "1 2", Field.Store.YES, Field.Index.ANALYZED));
- IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writerA.addDocument(lDoc);
writerA.addDocument(lDoc2);
@@ -328,7 +328,7 @@
writerB.addDocument(lDoc3);
writerB.close();
- QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "fulltext", new StandardAnalyzer(TEST_VERSION_CURRENT));
Query query = parser.parse("handle:1");
Searcher[] searchers = new Searcher[2];
Index: lucene/backwards/src/test/org/apache/lucene/search/TestWildcard.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestWildcard.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestWildcard.java (working copy)
@@ -240,7 +240,7 @@
public void testParsingAndSearching() throws Exception {
String field = "content";
boolean dbg = false;
- QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, field, new WhitespaceAnalyzer());
+ QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, field, new WhitespaceAnalyzer());
qp.setAllowLeadingWildcard(true);
String docs[] = {
"\\ abcdefg1",
Index: lucene/backwards/src/test/org/apache/lucene/search/TestBoolean2.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestBoolean2.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestBoolean2.java (working copy)
@@ -107,7 +107,7 @@
};
public Query makeQuery(String queryText) throws ParseException {
- Query q = (new QueryParser(Version.LUCENE_CURRENT, field, new WhitespaceAnalyzer())).parse(queryText);
+ Query q = (new QueryParser(TEST_VERSION_CURRENT, field, new WhitespaceAnalyzer())).parse(queryText);
return q;
}
Index: lucene/backwards/src/test/org/apache/lucene/search/function/FunctionTestSetup.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/function/FunctionTestSetup.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/function/FunctionTestSetup.java (working copy)
@@ -88,7 +88,7 @@
// prepare a small index with just a few documents.
super.setUp();
dir = new RAMDirectory();
- anlzr = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ anlzr = new StandardAnalyzer(TEST_VERSION_CURRENT);
IndexWriter iw = new IndexWriter(dir, anlzr,
IndexWriter.MaxFieldLength.LIMITED);
// add docs not exactly in natural ID order, to verify we do check the order of docs by scores
Index: lucene/backwards/src/test/org/apache/lucene/search/function/TestCustomScoreQuery.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/function/TestCustomScoreQuery.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/function/TestCustomScoreQuery.java (working copy)
@@ -160,7 +160,7 @@
float boost = (float) dboost;
IndexSearcher s = new IndexSearcher(dir, true);
FieldScoreQuery qValSrc = new FieldScoreQuery(field,tp); // a query that would score by the field
- QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, TEXT_FIELD,anlzr);
+ QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, TEXT_FIELD,anlzr);
String qtxt = "first aid text"; // from the doc texts in FunctionQuerySetup.
// regular (boolean) query.
Index: lucene/backwards/src/test/org/apache/lucene/search/TestExplanations.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestExplanations.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestExplanations.java (working copy)
@@ -52,7 +52,7 @@
public static final String KEY = "KEY";
public static final String FIELD = "field";
public static final QueryParser qp =
- new QueryParser(Version.LUCENE_CURRENT, FIELD, new WhitespaceAnalyzer());
+ new QueryParser(TEST_VERSION_CURRENT, FIELD, new WhitespaceAnalyzer());
@Override
public void tearDown() throws Exception {
Index: lucene/backwards/src/test/org/apache/lucene/search/TestFuzzyQuery.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestFuzzyQuery.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestFuzzyQuery.java (working copy)
@@ -309,7 +309,7 @@
public void testGiga() throws Exception {
- StandardAnalyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
Directory index = new MockRAMDirectory();
IndexWriter w = new IndexWriter(index, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
@@ -334,7 +334,7 @@
IndexReader r = w.getReader();
w.close();
- Query q = new QueryParser(Version.LUCENE_CURRENT, "field", analyzer).parse( "giga~0.9" );
+ Query q = new QueryParser(TEST_VERSION_CURRENT, "field", analyzer).parse( "giga~0.9" );
// 3. search
IndexSearcher searcher = new IndexSearcher(r);
Index: lucene/backwards/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestMatchAllDocsQuery.java (working copy)
@@ -36,7 +36,7 @@
*
*/
public class TestMatchAllDocsQuery extends LuceneTestCase {
- private Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);
+ private Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
public void testQuery() throws Exception {
@@ -100,7 +100,7 @@
assertEquals(2, hits.length);
// test parsable toString()
- QueryParser qp = new QueryParser(Version.LUCENE_CURRENT, "key", analyzer);
+ QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "key", analyzer);
hits = is.search(qp.parse(new MatchAllDocsQuery().toString()), null, 1000).scoreDocs;
assertEquals(2, hits.length);
Index: lucene/backwards/src/test/org/apache/lucene/search/TestCustomSearcherSort.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestCustomSearcherSort.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestCustomSearcherSort.java (working copy)
@@ -70,7 +70,7 @@
private Directory getIndex()
throws IOException {
RAMDirectory indexStore = new RAMDirectory ();
- IndexWriter writer = new IndexWriter (indexStore, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter (indexStore, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
RandomGen random = new RandomGen(newRandom());
for (int i=0; i<INDEX_SIZE; ++i) { // don't decrease; if to low the problem doesn't show up
Document doc = new Document();
Index: lucene/backwards/src/test/org/apache/lucene/search/TestMultiSearcherRanking.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestMultiSearcherRanking.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestMultiSearcherRanking.java (working copy)
@@ -88,7 +88,7 @@
private void checkQuery(String queryStr) throws IOException, ParseException {
// check result hit ranking
if(verbose) System.out.println("Query: " + queryStr);
- QueryParser queryParser = new QueryParser(Version.LUCENE_CURRENT, FIELD_NAME, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
+ QueryParser queryParser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new StandardAnalyzer(TEST_VERSION_CURRENT));
Query query = queryParser.parse(queryStr);
ScoreDoc[] multiSearcherHits = multiSearcher.search(query, null, 1000).scoreDocs;
ScoreDoc[] singleSearcherHits = singleSearcher.search(query, null, 1000).scoreDocs;
@@ -115,12 +115,12 @@
super.setUp();
// create MultiSearcher from two seperate searchers
Directory d1 = new RAMDirectory();
- IndexWriter iw1 = new IndexWriter(d1, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true,
+ IndexWriter iw1 = new IndexWriter(d1, new StandardAnalyzer(TEST_VERSION_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);
addCollection1(iw1);
iw1.close();
Directory d2 = new RAMDirectory();
- IndexWriter iw2 = new IndexWriter(d2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true,
+ IndexWriter iw2 = new IndexWriter(d2, new StandardAnalyzer(TEST_VERSION_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);
addCollection2(iw2);
iw2.close();
@@ -132,7 +132,7 @@
// create IndexSearcher which contains all documents
Directory d = new RAMDirectory();
- IndexWriter iw = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true,
+ IndexWriter iw = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true,
IndexWriter.MaxFieldLength.LIMITED);
addCollection1(iw);
addCollection2(iw);
Index: lucene/backwards/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/search/TestMultiPhraseQuery.java (working copy)
@@ -170,7 +170,7 @@
public void testPhrasePrefixWithBooleanQuery() throws IOException {
RAMDirectory indexStore = new RAMDirectory();
- IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT, Collections.emptySet()), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(indexStore, new StandardAnalyzer(TEST_VERSION_CURRENT, Collections.emptySet()), true, IndexWriter.MaxFieldLength.LIMITED);
add("This is a test", "object", writer);
add("a note", "note", writer);
writer.close();
Index: lucene/backwards/src/test/org/apache/lucene/TestSearch.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/TestSearch.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/TestSearch.java (working copy)
@@ -108,7 +108,7 @@
};
ScoreDoc[] hits = null;
- QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "contents", analyzer);
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "contents", analyzer);
parser.setPhraseSlop(4);
for (int j = 0; j < queries.length; j++) {
Query query = parser.parse(queries[j]);
Index: lucene/backwards/src/test/org/apache/lucene/index/TestIndexReader.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/index/TestIndexReader.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/index/TestIndexReader.java (working copy)
@@ -77,7 +77,7 @@
commitUserData.put("foo", "fighters");
// set up writer
- IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
for(int i=0;i<27;i++)
addDocumentWithFields(writer);
@@ -99,7 +99,7 @@
assertTrue(c.equals(r.getIndexCommit()));
// Change the index
- writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
for(int i=0;i<7;i++)
addDocumentWithFields(writer);
@@ -110,7 +110,7 @@
assertFalse(r2.getIndexCommit().isOptimized());
r3.close();
- writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();
writer.close();
@@ -124,19 +124,19 @@
public void testIsCurrent() throws Exception
{
RAMDirectory d = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
// set up reader:
IndexReader reader = IndexReader.open(d, false);
assertTrue(reader.isCurrent());
// modify index by adding another document:
- writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
assertFalse(reader.isCurrent());
// re-create index:
- writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
assertFalse(reader.isCurrent());
@@ -152,7 +152,7 @@
{
RAMDirectory d = new MockRAMDirectory();
// set up writer
- IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
// set up reader
@@ -164,7 +164,7 @@
assertTrue(fieldNames.contains("unstored"));
reader.close();
// add more documents
- writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
// want to get some more segments here
for (int i = 0; i < 5*writer.getMergeFactor(); i++)
{
@@ -244,7 +244,7 @@
public void testTermVectors() throws Exception {
RAMDirectory d = new MockRAMDirectory();
// set up writer
- IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
// want to get some more segments here
// new termvector fields
for (int i = 0; i < 5 * writer.getMergeFactor(); i++) {
@@ -1420,7 +1420,7 @@
RAMDirectory d = new MockRAMDirectory();
// set up writer
- IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
for(int i=0;i<27;i++)
addDocumentWithFields(writer);
@@ -1436,7 +1436,7 @@
assertTrue(c.equals(r.getIndexCommit()));
// Change the index
- writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
for(int i=0;i<7;i++)
addDocumentWithFields(writer);
@@ -1447,7 +1447,7 @@
assertFalse(r2.getIndexCommit().isOptimized());
r2.close();
- writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();
writer.close();
@@ -1461,7 +1461,7 @@
public void testReadOnly() throws Throwable {
RAMDirectory d = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.commit();
addDocumentWithFields(writer);
@@ -1475,7 +1475,7 @@
// expected
}
- writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
addDocumentWithFields(writer);
writer.close();
@@ -1492,7 +1492,7 @@
// expected
}
- writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.optimize();
writer.close();
@@ -1510,7 +1510,7 @@
}
// Make sure write lock isn't held
- writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(d, new StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
writer.close();
r3.close();
@@ -1520,7 +1520,7 @@
// LUCENE-1474
public void testIndexReader() throws Exception {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.UNLIMITED);
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("b"));
@@ -1538,7 +1538,7 @@
public void testIndexReaderUnDeleteAll() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
dir.setPreventDoubleWrite(false);
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.UNLIMITED);
writer.addDocument(createDocument("a"));
writer.addDocument(createDocument("b"));
@@ -1580,7 +1580,7 @@
Directory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
Index: lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriter.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriter.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriter.java (working copy)
@@ -544,7 +544,7 @@
*/
public void testWickedLongTerm() throws IOException {
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
char[] chars = new char[DocumentsWriter.CHAR_BLOCK_SIZE-1];
Arrays.fill(chars, 'x');
@@ -588,7 +588,7 @@
// maximum length term, and search on that term:
doc = new Document();
doc.add(new Field("content", bigTerm, Field.Store.NO, Field.Index.ANALYZED));
- StandardAnalyzer sa = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ StandardAnalyzer sa = new StandardAnalyzer(TEST_VERSION_CURRENT);
sa.setMaxTokenLength(100000);
writer = new IndexWriter(dir, sa, IndexWriter.MaxFieldLength.LIMITED);
writer.addDocument(doc);
@@ -1578,7 +1578,7 @@
*/
public void testBadSegment() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter ir = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter ir = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
Document document = new Document();
document.add(new Field("tvtest", "", Field.Store.NO, Field.Index.ANALYZED,
@@ -1591,7 +1591,7 @@
// LUCENE-1008
public void testNoTermVectorAfterTermVector() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
Document document = new Document();
document.add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED,
Field.TermVector.YES));
@@ -1617,7 +1617,7 @@
// LUCENE-1010
public void testNoTermVectorAfterTermVectorMerge() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
Document document = new Document();
document.add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED,
Field.TermVector.YES));
@@ -1649,7 +1649,7 @@
int pri = Thread.currentThread().getPriority();
try {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
Document document = new Document();
document.add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED,
Field.TermVector.YES));
@@ -1689,7 +1689,7 @@
// LUCENE-1013
public void testSetMaxMergeDocs() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
iw.setMergeScheduler(new MyMergeScheduler());
iw.setMaxMergeDocs(20);
iw.setMaxBufferedDocs(2);
@@ -1709,7 +1709,7 @@
@Override
public TokenStream tokenStream(String fieldName, Reader reader) {
- return new TokenFilter(new StandardTokenizer(Version.LUCENE_CURRENT, reader)) {
+ return new TokenFilter(new StandardTokenizer(TEST_VERSION_CURRENT, reader)) {
private int count = 0;
@Override
@@ -2760,7 +2760,7 @@
Directory dir = new MockRAMDirectory();
for(int iter=0;iter<2;iter++) {
IndexWriter writer = new IndexWriter(dir,
- new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(2);
writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
writer.setMergeScheduler(new SerialMergeScheduler());
@@ -2793,7 +2793,7 @@
reader.close();
writer = new IndexWriter(dir,
- new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(2);
writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
writer.setMergeScheduler(new SerialMergeScheduler());
@@ -2812,7 +2812,7 @@
Directory dir = new MockRAMDirectory();
for(int iter=0;iter<2;iter++) {
IndexWriter writer = new IndexWriter(dir,
- new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
writer.setMaxBufferedDocs(2);
writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
writer.setMergeScheduler(new SerialMergeScheduler());
@@ -2849,7 +2849,7 @@
public void testTermVectorCorruption3() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir,
- new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+ new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
@@ -2871,7 +2871,7 @@
writer.close();
writer = new IndexWriter(dir,
- new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+ new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
@@ -2919,7 +2919,7 @@
public void testExpungeDeletes() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir,
- new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+ new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
@@ -2947,7 +2947,7 @@
ir.close();
writer = new IndexWriter(dir,
- new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+ new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
assertEquals(8, writer.numDocs());
assertEquals(10, writer.maxDoc());
@@ -2965,7 +2965,7 @@
public void testExpungeDeletes2() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir,
- new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+ new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
writer.setMergeFactor(50);
@@ -2994,7 +2994,7 @@
ir.close();
writer = new IndexWriter(dir,
- new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+ new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer.setMergeFactor(3);
assertEquals(49, writer.numDocs());
@@ -3012,7 +3012,7 @@
public void testExpungeDeletes3() throws IOException {
Directory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir,
- new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+ new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(2);
writer.setMergeFactor(50);
@@ -3041,7 +3041,7 @@
ir.close();
writer = new IndexWriter(dir,
- new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
+ new StandardAnalyzer(TEST_VERSION_CURRENT),
IndexWriter.MaxFieldLength.LIMITED);
// Force many merges to happen
writer.setMergeFactor(3);
@@ -4060,7 +4060,7 @@
final List thrown = new ArrayList();
- final IndexWriter writer = new IndexWriter(new MockRAMDirectory(), new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED) {
+ final IndexWriter writer = new IndexWriter(new MockRAMDirectory(), new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED) {
@Override
public void message(final String message) {
if (message.startsWith("now flush at close") && 0 == thrown.size()) {
@@ -4214,7 +4214,7 @@
// LUCENE-1448
public void testEndOffsetPositionStopFilter() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new StopAnalyzer(Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new StopAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
Field f = new Field("field", "abcd the", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
doc.add(f);
@@ -4236,7 +4236,7 @@
// LUCENE-1448
public void testEndOffsetPositionStandard() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
Field f = new Field("field", "abcd the ", Field.Store.NO,
Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
@@ -4266,7 +4266,7 @@
// LUCENE-1448
public void testEndOffsetPositionStandardEmptyField() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
Field f = new Field("field", "", Field.Store.NO,
Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
@@ -4293,7 +4293,7 @@
// LUCENE-1448
public void testEndOffsetPositionStandardEmptyField2() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
Field f = new Field("field", "abcd", Field.Store.NO,
Index: lucene/backwards/src/test/org/apache/lucene/index/TestDocumentWriter.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/index/TestDocumentWriter.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/index/TestDocumentWriter.java (working copy)
@@ -265,7 +265,7 @@
doc.add(new Field("f2", "v1", Store.YES, Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS));
doc.add(new Field("f2", "v2", Store.YES, Index.NOT_ANALYZED, TermVector.NO));
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.addDocument(doc);
writer.close();
@@ -298,7 +298,7 @@
doc.add(f);
doc.add(new Field("f2", "v2", Store.YES, Index.NO));
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.addDocument(doc);
writer.optimize(); // be sure to have a single segment
writer.close();
Index: lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriterLockRelease.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriterLockRelease.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriterLockRelease.java (working copy)
@@ -79,10 +79,10 @@
IndexWriter im;
FSDirectory dir = FSDirectory.open(this.__test_dir);
try {
- im = new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ im = new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
} catch (FileNotFoundException e) {
try {
- im = new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ im = new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
} catch (FileNotFoundException e1) {
}
} finally {
Index: lucene/backwards/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/index/TestIndexReaderCloneNorms.java (working copy)
@@ -71,7 +71,7 @@
protected void setUp() throws Exception {
super.setUp();
similarityOne = new SimilarityOne();
- anlzr = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ anlzr = new StandardAnalyzer(TEST_VERSION_CURRENT);
}
/**
Index: lucene/backwards/src/test/org/apache/lucene/index/TestOmitTf.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/index/TestOmitTf.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/index/TestOmitTf.java (working copy)
@@ -66,7 +66,7 @@
// omitTermFreqAndPositions bit in the FieldInfo
public void testOmitTermFreqAndPositions() throws Exception {
Directory ram = new MockRAMDirectory();
- Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
Document d = new Document();
@@ -112,7 +112,7 @@
// omitTermFreqAndPositions for the same field works
public void testMixedMerge() throws Exception {
Directory ram = new MockRAMDirectory();
- Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(3);
writer.setMergeFactor(2);
@@ -165,7 +165,7 @@
// field,
public void testMixedRAM() throws Exception {
Directory ram = new MockRAMDirectory();
- Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(10);
writer.setMergeFactor(2);
@@ -213,7 +213,7 @@
// Verifies no *.prx exists when all fields omit term freq:
public void testNoPrxFile() throws Throwable {
Directory ram = new MockRAMDirectory();
- Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMaxBufferedDocs(3);
writer.setMergeFactor(2);
@@ -244,7 +244,7 @@
// Test scores with one field with Term Freqs and one without, otherwise with equal content
public void testBasic() throws Exception {
Directory dir = new MockRAMDirectory();
- Analyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMergeFactor(2);
writer.setMaxBufferedDocs(2);
Index: lucene/backwards/src/test/org/apache/lucene/index/TestDirectoryReader.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/index/TestDirectoryReader.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/index/TestDirectoryReader.java (working copy)
@@ -194,7 +194,7 @@
}
private void addDoc(RAMDirectory ramDir1, String s, boolean create) throws IOException {
- IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), create, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(ramDir1, new StandardAnalyzer(TEST_VERSION_CURRENT), create, IndexWriter.MaxFieldLength.LIMITED);
Document doc = new Document();
doc.add(new Field("body", s, Field.Store.YES, Field.Index.ANALYZED));
iw.addDocument(doc);
Index: lucene/backwards/src/test/org/apache/lucene/index/TestIndexReaderReopen.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/index/TestIndexReaderReopen.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/index/TestIndexReaderReopen.java (working copy)
@@ -703,7 +703,7 @@
final Directory dir = new MockRAMDirectory();
final int n = 30;
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < n; i++) {
writer.addDocument(createDocument(i, 3));
}
@@ -722,7 +722,7 @@
modifier.deleteDocument(i % modifier.maxDoc());
modifier.close();
} else {
- IndexWriter modifier = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter modifier = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
modifier.addDocument(createDocument(n + i, 6));
modifier.close();
}
Index: lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriterMerging.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriterMerging.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/index/TestIndexWriterMerging.java (working copy)
@@ -56,7 +56,7 @@
Directory merged = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMergeFactor(2);
writer.addIndexesNoOptimize(new Directory[]{indexA, indexB});
@@ -93,7 +93,7 @@
private void fillIndex(Directory dir, int start, int numDocs) throws IOException
{
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.setMergeFactor(2);
writer.setMaxBufferedDocs(2);
Index: lucene/backwards/src/test/org/apache/lucene/index/TestParallelReader.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/index/TestParallelReader.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/index/TestParallelReader.java (working copy)
@@ -106,7 +106,7 @@
// one document only:
Directory dir2 = new MockRAMDirectory();
- IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
Document d3 = new Document();
d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
w2.addDocument(d3);
@@ -151,13 +151,13 @@
Directory dir2 = getDir2();
// add another document to ensure that the indexes are not optimized
- IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
Document d = new Document();
d.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
modifier.addDocument(d);
modifier.close();
- modifier = new IndexWriter(dir2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ modifier = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
d = new Document();
d.add(new Field("f2", "v2", Field.Store.YES, Field.Index.ANALYZED));
modifier.addDocument(d);
@@ -170,7 +170,7 @@
assertFalse(pr.isOptimized());
pr.close();
- modifier = new IndexWriter(dir1, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ modifier = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
modifier.optimize();
modifier.close();
@@ -182,7 +182,7 @@
pr.close();
- modifier = new IndexWriter(dir2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ modifier = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
modifier.optimize();
modifier.close();
@@ -233,7 +233,7 @@
// Fields 1-4 indexed together:
private Searcher single() throws IOException {
Directory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
Document d1 = new Document();
d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -263,7 +263,7 @@
private Directory getDir1() throws IOException {
Directory dir1 = new MockRAMDirectory();
- IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
Document d1 = new Document();
d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -278,7 +278,7 @@
private Directory getDir2() throws IOException {
Directory dir2 = new RAMDirectory();
- IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
Document d3 = new Document();
d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
d3.add(new Field("f4", "v1", Field.Store.YES, Field.Index.ANALYZED));
Index: lucene/backwards/src/test/org/apache/lucene/index/TestNorms.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/index/TestNorms.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/index/TestNorms.java (working copy)
@@ -65,7 +65,7 @@
protected void setUp() throws Exception {
super.setUp();
similarityOne = new SimilarityOne();
- anlzr = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT);
+ anlzr = new StandardAnalyzer(TEST_VERSION_CURRENT);
}
/**
Index: lucene/backwards/src/test/org/apache/lucene/TestSearchForDuplicates.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/TestSearchForDuplicates.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/TestSearchForDuplicates.java (working copy)
@@ -99,7 +99,7 @@
// try a search without OR
Searcher searcher = new IndexSearcher(directory, true);
- QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, PRIORITY_FIELD, analyzer);
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, PRIORITY_FIELD, analyzer);
Query query = parser.parse(HIGH_PRIORITY);
out.println("Query: " + query.toString(PRIORITY_FIELD));
@@ -114,7 +114,7 @@
searcher = new IndexSearcher(directory, true);
hits = null;
- parser = new QueryParser(Version.LUCENE_CURRENT, PRIORITY_FIELD, analyzer);
+ parser = new QueryParser(TEST_VERSION_CURRENT, PRIORITY_FIELD, analyzer);
query = parser.parse(HIGH_PRIORITY + " OR " + MED_PRIORITY);
out.println("Query: " + query.toString(PRIORITY_FIELD));
Index: lucene/backwards/src/test/org/apache/lucene/store/TestWindowsMMap.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/store/TestWindowsMMap.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/store/TestWindowsMMap.java (working copy)
@@ -72,7 +72,7 @@
// plan to add a set of useful stopwords, consider changing some of the
// interior filters.
- StandardAnalyzer analyzer = new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT, Collections.emptySet());
+ StandardAnalyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT, Collections.emptySet());
// TODO: something about lock timeouts and leftover locks.
IndexWriter writer = new IndexWriter(storeDirectory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
writer.commit();
Index: lucene/backwards/src/test/org/apache/lucene/document/TestBinaryDocument.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/document/TestBinaryDocument.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/document/TestBinaryDocument.java (working copy)
@@ -59,7 +59,7 @@
/** add the doc to a ram index */
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.addDocument(doc);
writer.close();
@@ -97,7 +97,7 @@
/** add the doc to a ram index */
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.addDocument(doc);
writer.close();
Index: lucene/backwards/src/test/org/apache/lucene/document/TestDocument.java
===================================================================
--- lucene/backwards/src/test/org/apache/lucene/document/TestDocument.java (revision 965596)
+++ lucene/backwards/src/test/org/apache/lucene/document/TestDocument.java (working copy)
@@ -154,7 +154,7 @@
public void testGetValuesForIndexedDocument() throws Exception
{
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.addDocument(makeDocumentWithFields());
writer.close();
@@ -225,7 +225,7 @@
doc.add(new Field("keyword", "test", Field.Store.YES, Field.Index.NOT_ANALYZED));
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
writer.addDocument(doc);
field.setValue("id2");
writer.addDocument(doc);