merge trunk

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4446@1397893 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/lucene/core/src/java/org/apache/lucene/util/fst/Util.java b/lucene/core/src/java/org/apache/lucene/util/fst/Util.java
index e7df2d8..31e81ab 100644
--- a/lucene/core/src/java/org/apache/lucene/util/fst/Util.java
+++ b/lucene/core/src/java/org/apache/lucene/util/fst/Util.java
@@ -271,9 +271,6 @@
     
     final Comparator<T> comparator;
 
-    // Set once the queue has filled:
-    FSTPath<T> bottom = null;
-
     TreeSet<FSTPath<T>> queue = null;
 
     public TopNSearcher(FST<T> fst, int topN, Comparator<T> comparator) {
@@ -291,9 +288,10 @@
       assert queue != null;
 
       T cost = fst.outputs.add(path.cost, path.arc.output);
-      //System.out.println("  addIfCompetitive bottom=" + bottom + " queue.size()=" + queue.size());
+      //System.out.println("  addIfCompetitive queue.size()=" + queue.size() + " path=" + path + " + label=" + path.arc.label);
 
-      if (bottom != null) {
+      if (queue.size() == topN) {
+        FSTPath<T> bottom = queue.last();
         int comp = comparator.compare(cost, bottom.cost);
         if (comp > 0) {
           // Doesn't compete
@@ -323,24 +321,11 @@
       newInput.length = path.input.length+1;
       final FSTPath<T> newPath = new FSTPath<T>(cost, path.arc, comparator, newInput);
 
-      // this is pointless right?  we do it above already:
-      //newPath.input.grow(path.input.length+1);
-      //System.arraycopy(path.input.ints, 0, newPath.input.ints, 0, path.input.length);
-      //newPath.input.ints[path.input.length] = path.arc.label;
-      //newPath.input.length = path.input.length+1;
-
-      //System.out.println("    add path=" + newPath);
       queue.add(newPath);
-      if (bottom != null) {
-        final FSTPath<T> removed = queue.pollLast();
-        assert removed == bottom;
-        bottom = queue.last();
-        //System.out.println("    now re-set bottom: " + bottom + " queue=" + queue);
-      } else if (queue.size() == topN) {
-        // Queue just filled up:
-        bottom = queue.last();
-        //System.out.println("    now set bottom: " + bottom);
-      }
+
+      if (queue.size() == topN+1) {
+        queue.pollLast();
+      } 
     }
 
     /** Adds all leaving arcs, including 'finished' arc, if
@@ -387,7 +372,7 @@
 
       // For each top N path:
       while (results.size() < topN) {
-        //System.out.println("\nfind next path");
+        //System.out.println("\nfind next path: queue.size=" + queue.size());
 
         FSTPath<T> path;
 
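The Util.TopNSearcher hunks above drop the cached "bottom" field in favor of a single invariant on the TreeSet: add every competitive path, then trim the set back to topN. A minimal stand-alone sketch of that bounded-queue pattern, plain Java and not the Lucene class itself (the FSTPath/arc details are omitted):

import java.util.Comparator;
import java.util.TreeSet;

// Sketch of the bounded top-N pattern the patch switches to; ordered best-first,
// so last() is always the current worst entry.
class BoundedTopN<T> {
  private final int topN;
  private final Comparator<T> comparator;
  private final TreeSet<T> queue;

  BoundedTopN(int topN, Comparator<T> comparator) {
    this.topN = topN;
    this.comparator = comparator;
    this.queue = new TreeSet<T>(comparator);
  }

  void addIfCompetitive(T candidate) {
    if (queue.size() == topN) {
      // Queue is full: compare against the worst entry instead of a cached "bottom".
      T bottom = queue.last();
      if (comparator.compare(candidate, bottom) > 0) {
        return; // doesn't compete
      }
    }
    queue.add(candidate);
    if (queue.size() == topN + 1) {
      queue.pollLast(); // evict the worst; size is back to topN
    }
  }
}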
diff --git a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggester.java b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggester.java
index 48c5195..9f98814 100644
--- a/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggester.java
+++ b/lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggester.java
@@ -533,9 +533,6 @@
 
       if (exactFirst) {
 
-        Util.TopNSearcher<Pair<Long,BytesRef>> searcher;
-        searcher = new Util.TopNSearcher<Pair<Long,BytesRef>>(fst, num, weightComparator);
-
         int count = 0;
         for (FSTUtil.Path<Pair<Long,BytesRef>> path : prefixPaths) {
           if (fst.findTargetArc(END_BYTE, path.fstNode, scratchArc, bytesReader) != null) {
@@ -545,6 +542,9 @@
           }
         }
 
+        // Searcher used only to find the single exact
+        // match, if present:
+        Util.TopNSearcher<Pair<Long,BytesRef>> searcher;
         searcher = new Util.TopNSearcher<Pair<Long,BytesRef>>(fst, count * maxSurfaceFormsPerAnalyzedForm, weightComparator);
 
         // NOTE: we could almost get away with only using
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java
index 9ce50b3..c883698 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java
@@ -789,4 +789,18 @@
     assertEquals("a ", results.get(1).key);
     assertEquals(50, results.get(1).value);
   }
+
+  public void testQueueExhaustion() throws Exception {
+    Analyzer a = new MockAnalyzer(random());
+    AnalyzingSuggester suggester = new AnalyzingSuggester(a, a, AnalyzingSuggester.EXACT_FIRST, 256, -1);
+
+    suggester.build(new TermFreqArrayIterator(new TermFreq[] {
+          new TermFreq("a", 2),
+          new TermFreq("a b c", 3),
+          new TermFreq("a c a", 1),
+          new TermFreq("a c b", 1),
+        }));
+
+    List<LookupResult> results = suggester.lookup("a", false, 4);
+  }
 }
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index e964d57..779d9d7 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -42,6 +42,8 @@
   values of a multiValued field in their original order when highlighting.
   (Joel Bernstein via yonik)
 
+* SOLR-3929: Support configuring IndexWriter max thread count in solrconfig.
+  (phunt via Mark Miller)
 
 Optimizations
 ----------------------
@@ -58,6 +60,9 @@
 
 * SOLR-3734: Improve Schema-Browser Handling for CopyField using
   dynamicField's (steffkes)
+  
+* SOLR-3941: The "commitOnLeader" part of distributed recovery can use
+  openSearcher=false. (Tomas Fernandez Lobbe via Mark Miller)
 
 Bug Fixes
 ----------------------
@@ -69,6 +74,12 @@
 
 * SOLR-3917: Partial State on Schema-Browser UI is not defined for Dynamic
   Fields & Types (steffkes)
+  
+* SOLR-3939: Treat a sync attempt from leader to replica that fails due
+  to a 404 as a success. (Mark Miller, Joel Bernstein)
+  
+* SOLR-3940: Rejoining the leader election incorrectly triggers the code path
+  for a fresh cluster start rather than fail over. (Mark Miller)
 
 Other Changes
 ----------------------
diff --git a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/FieldMappingException.java b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/FieldMappingException.java
index d9b566a..6aed0f3 100644
--- a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/FieldMappingException.java
+++ b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/FieldMappingException.java
@@ -18,9 +18,10 @@
  */
 
 /**
- * Exception thrown when an error happening while mapping UIMA CAS model to Solt fields
+ * Exception thrown when an error occurs while mapping the UIMA CAS model to Solr fields
  */
 public class FieldMappingException extends Exception {
   public FieldMappingException(Exception e) {
+    super(e);
   }
 }
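The one-line FieldMappingException fix above matters because the constructor previously discarded its argument; chaining it through super(e) preserves the original cause. A trivial illustration, independent of the Solr classes:

// Illustration only: an exception wrapper that keeps its cause.
class WrappedException extends Exception {
  WrappedException(Exception cause) {
    super(cause); // getCause() and the logged stack trace now include the original error
  }
}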
diff --git a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfiguration.java b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfiguration.java
index 1f00498..6f5e4c7 100644
--- a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfiguration.java
+++ b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfiguration.java
@@ -26,19 +26,19 @@
  */
 public class SolrUIMAConfiguration {
 
-  private String[] fieldsToAnalyze;
+  private final String[] fieldsToAnalyze;
 
-  private boolean fieldsMerging;
+  private final boolean fieldsMerging;
 
-  private Map<String, Map<String, MapField>> typesFeaturesFieldsMapping;
+  private final Map<String, Map<String, MapField>> typesFeaturesFieldsMapping;
 
-  private String aePath;
+  private final String aePath;
 
-  private Map<String, Object> runtimeParameters;
+  private final Map<String, Object> runtimeParameters;
 
-  private boolean ignoreErrors;
+  private final boolean ignoreErrors;
   
-  private String logField;
+  private final String logField;
 
   SolrUIMAConfiguration(String aePath, String[] fieldsToAnalyze, boolean fieldsMerging,
           Map<String, Map<String, MapField>> typesFeaturesFieldsMapping,
@@ -82,7 +82,8 @@
   
   static final class MapField {
     
-    private String fieldName, fieldNameFeature;
+    private String fieldName;
+    private final String fieldNameFeature;
     private boolean prefix; // valid if dynamicField == true
                             // false: *_s, true: s_*
     
diff --git a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java
index fdf82ff..e8e252a 100644
--- a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java
+++ b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/SolrUIMAConfigurationReader.java
@@ -32,7 +32,7 @@
  */
 public class SolrUIMAConfigurationReader {
 
-  private NamedList<Object> args;
+  private final NamedList<Object> args;
 
   public SolrUIMAConfigurationReader(NamedList<Object> args) {
     this.args = args;
diff --git a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAToSolrMapper.java b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAToSolrMapper.java
index 242e1b8..77dccea 100644
--- a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAToSolrMapper.java
+++ b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAToSolrMapper.java
@@ -38,9 +38,9 @@
 
   private final Logger log = LoggerFactory.getLogger(UIMAToSolrMapper.class);
 
-  private SolrInputDocument document;
+  private final SolrInputDocument document;
 
-  private JCas cas;
+  private final JCas cas;
 
   public UIMAToSolrMapper(SolrInputDocument document, JCas cas) {
     this.document = document;
@@ -64,15 +64,15 @@
           String fieldNameFeatureValue = fieldNameFeature == null ? null :
               fs.getFeatureValueAsString(type.getFeatureByBaseName(fieldNameFeature));
           String fieldName = mapField.getFieldName(fieldNameFeatureValue);
-          log.info(new StringBuffer("mapping ").append(typeName).append("@").append(featureName)
+          log.info(new StringBuilder("mapping ").append(typeName).append("@").append(featureName)
               .append(" to ").append(fieldName).toString());
-          String featureValue = null;
+          String featureValue;
           if (fs instanceof Annotation && "coveredText".equals(featureName)) {
             featureValue = ((Annotation) fs).getCoveredText();
           } else {
             featureValue = fs.getFeatureValueAsString(type.getFeatureByBaseName(featureName));
           }
-          log.info(new StringBuffer("writing ").append(featureValue).append(" in ").append(
+          log.info(new StringBuilder("writing ").append(featureValue).append(" in ").append(
               fieldName).toString());
           document.addField(fieldName, featureValue, 1.0f);
         }
diff --git a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAUpdateRequestProcessor.java b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAUpdateRequestProcessor.java
index 586355b..70245ae 100644
--- a/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAUpdateRequestProcessor.java
+++ b/solr/contrib/uima/src/java/org/apache/solr/uima/processor/UIMAUpdateRequestProcessor.java
@@ -73,16 +73,16 @@
 
       /* get the fields to analyze */
       String[] texts = getTextsToAnalyze(solrInputDocument);
-      for (int i = 0; i < texts.length; i++) {
-        text = texts[i];
-        if (text != null && text.length()>0) {
+      for (String currentText : texts) {
+        text = currentText;
+        if (text != null && text.length() > 0) {
           /* process the text value */
           JCas jcas = processText(text);
 
           UIMAToSolrMapper uimaToSolrMapper = new UIMAToSolrMapper(solrInputDocument, jcas);
           /* get field mapping from config */
           Map<String, Map<String, MapField>> typesAndFeaturesFieldsMap = solrUIMAConfiguration
-                  .getTypesFeaturesFieldsMapping();
+              .getTypesFeaturesFieldsMapping();
           /* map type features on fields */
           for (String typeFQN : typesAndFeaturesFieldsMap.keySet()) {
             uimaToSolrMapper.map(typeFQN, typesAndFeaturesFieldsMap.get(typeFQN));
@@ -133,8 +133,8 @@
     String[] textVals;
     if (merge) {
       StringBuilder unifiedText = new StringBuilder("");
-      for (int i = 0; i < fieldsToAnalyze.length; i++) {
-        unifiedText.append(String.valueOf(solrInputDocument.getFieldValue(fieldsToAnalyze[i])));
+      for (String aFieldsToAnalyze : fieldsToAnalyze) {
+        unifiedText.append(String.valueOf(solrInputDocument.getFieldValue(aFieldsToAnalyze)));
       }
       textVals = new String[1];
       textVals[0] = unifiedText.toString();
@@ -150,7 +150,7 @@
   /* process a field value executing UIMA the CAS containing it as document text */
   private JCas processText(String textFieldValue) throws ResourceInitializationException,
           AnalysisEngineProcessException {
-    log.info(new StringBuffer("Analyzing text").toString());
+    log.info(new StringBuilder("Analyzing text").toString());
     /* get the UIMA analysis engine */
     AnalysisEngine ae = aeProvider.getAE();
 
@@ -160,7 +160,7 @@
 
     /* perform analysis on text field */
     ae.process(jcas);
-    log.info(new StringBuilder("Text processing completed").toString());
+    log.info("Text processing completed");
     return jcas;
   }
 
diff --git a/solr/contrib/uima/src/test/org/apache/solr/uima/analysis/UIMAAnnotationsTokenizerFactoryTest.java b/solr/contrib/uima/src/test/org/apache/solr/uima/analysis/UIMAAnnotationsTokenizerFactoryTest.java
deleted file mode 100644
index c380fc0..0000000
--- a/solr/contrib/uima/src/test/org/apache/solr/uima/analysis/UIMAAnnotationsTokenizerFactoryTest.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package org.apache.solr.uima.analysis;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.request.SolrQueryRequest;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-/**
- */
-public class UIMAAnnotationsTokenizerFactoryTest extends SolrTestCaseJ4 {
-
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    initCore("uima/uima-tokenizers-solrconfig.xml", "uima/uima-tokenizers-schema.xml");
-  }
-
-  @Test
-  public void testInitialization() throws Exception {
-    assertNotNull(h.getCore().getSchema().getField("sentences"));
-    assertNotNull(h.getCore().getSchema().getFieldType("sentences"));
-  }
-
-  @Test
-  public void testIndexAndQuery() throws Exception {
-    assertU("<add><doc><field name=\"id\">123</field><field name=\"text\">One and 1 is two. Instead One or 1 is 0.</field></doc></add>");
-    assertU(commit());
-    SolrQueryRequest req = req("qt", "/terms", "terms.fl", "sentences");
-    assertQ(req, "//lst[@name='sentences']/int[@name='One and 1 is two.']");
-    assertQ(req, "//lst[@name='sentences']/int[@name=' Instead One or 1 is 0.']");
-    req.close();
-  }
-}
diff --git a/solr/contrib/uima/src/test/org/apache/solr/uima/analysis/UIMATypeAwareAnnotationsTokenizerFactoryTest.java b/solr/contrib/uima/src/test/org/apache/solr/uima/analysis/UIMATokenizersSolrIntegrationTest.java
similarity index 71%
rename from solr/contrib/uima/src/test/org/apache/solr/uima/analysis/UIMATypeAwareAnnotationsTokenizerFactoryTest.java
rename to solr/contrib/uima/src/test/org/apache/solr/uima/analysis/UIMATokenizersSolrIntegrationTest.java
index 59a4aa8..fde0b1d 100644
--- a/solr/contrib/uima/src/test/org/apache/solr/uima/analysis/UIMATypeAwareAnnotationsTokenizerFactoryTest.java
+++ b/solr/contrib/uima/src/test/org/apache/solr/uima/analysis/UIMATokenizersSolrIntegrationTest.java
@@ -23,8 +23,9 @@
 import org.junit.Test;
 
 /**
+ * Integration test which uses {@link org.apache.lucene.analysis.uima.UIMAAnnotationsTokenizerFactory} in the Solr schema
  */
-public class UIMATypeAwareAnnotationsTokenizerFactoryTest extends SolrTestCaseJ4 {
+public class UIMATokenizersSolrIntegrationTest extends SolrTestCaseJ4 {
 
   @BeforeClass
   public static void beforeClass() throws Exception {
@@ -33,12 +34,24 @@
 
   @Test
   public void testInitialization() throws Exception {
+    assertNotNull(h.getCore().getSchema().getField("sentences"));
+    assertNotNull(h.getCore().getSchema().getFieldType("sentences"));
     assertNotNull(h.getCore().getSchema().getField("nouns"));
     assertNotNull(h.getCore().getSchema().getFieldType("nouns"));
   }
 
   @Test
-  public void testIndexAndQuery() throws Exception {
+  public void testUIMATokenizerIndexAndQuery() throws Exception {
+    assertU("<add><doc><field name=\"id\">123</field><field name=\"text\">One and 1 is two. Instead One or 1 is 0.</field></doc></add>");
+    assertU(commit());
+    SolrQueryRequest req = req("qt", "/terms", "terms.fl", "sentences");
+    assertQ(req, "//lst[@name='sentences']/int[@name='One and 1 is two.']");
+    assertQ(req, "//lst[@name='sentences']/int[@name=' Instead One or 1 is 0.']");
+    req.close();
+  }
+
+  @Test
+  public void testUIMATypeAwareTokenizerIndexAndQuery() throws Exception {
     assertU("<add><doc><field name=\"id\">123</field><field name=\"text\">The counter counts the beans: 1 and 2 and three.</field></doc></add>");
     assertU(commit());
     SolrQueryRequest req = req("qt", "/terms", "terms.fl", "nouns");
diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
index 7baa465..77417e9 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
@@ -324,7 +324,7 @@
       SolrException.log(log, "Error trying to start recovery", t);
     }
     
-    leaderElector.joinElection(this);
+    leaderElector.joinElection(this, true);
   }
 
   private boolean shouldIBeLeader(ZkNodeProps leaderProps, SolrCore core) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java b/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java
index 4d3a016..07caa55 100644
--- a/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java
+++ b/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java
@@ -18,7 +18,6 @@
  */
 
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -43,7 +42,7 @@
  * Leader Election process. This class contains the logic by which a
  * leader is chosen. First call * {@link #setup(ElectionContext)} to ensure
  * the election process is init'd. Next call
- * {@link #joinElection(ElectionContext)} to start the leader election.
+ * {@link #joinElection(ElectionContext, boolean)} to start the leader election.
  * 
  * The implementation follows the classic ZooKeeper recipe of creating an
  * ephemeral, sequential node for each candidate and then looking at the set
@@ -203,7 +202,7 @@
    * 
    * @return sequential node number
    */
-  public int joinElection(ElectionContext context) throws KeeperException, InterruptedException, IOException {
+  public int joinElection(ElectionContext context, boolean replacement) throws KeeperException, InterruptedException, IOException {
     final String shardsElectZkPath = context.electionPath + LeaderElector.ELECTION_NODE;
     
     long sessionId = zkClient.getSolrZooKeeper().getSessionId();
@@ -259,7 +258,7 @@
       }
     }
     int seq = getSeq(leaderSeqPath);
-    checkIfIamLeader(seq, context, false);
+    checkIfIamLeader(seq, context, replacement);
     
     return seq;
   }
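The LeaderElector javadoc above refers to the classic ZooKeeper recipe: every candidate creates an ephemeral, sequential node and the lowest sequence number leads. A rough stand-alone sketch of that recipe using only the plain ZooKeeper client API (paths, watches, and error handling simplified); the new boolean on joinElection(context, replacement) just tells the real implementation whether the caller is rejoining an existing election, as in the ElectionContext recovery path above, rather than taking part in a fresh cluster start:

import java.util.Collections;
import java.util.List;

import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;

// Sketch of the ephemeral-sequential election recipe; not Solr's LeaderElector.
class ElectionRecipeSketch {
  static boolean amILeader(ZooKeeper zk, String electionPath)
      throws KeeperException, InterruptedException {
    // Register as a candidate; ZooKeeper appends an increasing sequence suffix.
    String myNode = zk.create(electionPath + "/n_", new byte[0],
        ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL_SEQUENTIAL);
    List<String> candidates = zk.getChildren(electionPath, false);
    Collections.sort(candidates); // lowest sequence number wins
    // A non-leader would watch the candidate directly ahead of it and wait.
    return myNode.endsWith(candidates.get(0));
  }
}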
diff --git a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
index fab5c2a..35fb620 100644
--- a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
+++ b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
@@ -37,6 +37,7 @@
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.cloud.ZooKeeperException;
 import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.CoreDescriptor;
 import org.apache.solr.core.RequestHandlers.LazyRequestHandlerWrapper;
@@ -177,6 +178,7 @@
     UpdateRequest ureq = new UpdateRequest();
     ureq.setParams(new ModifiableSolrParams());
     ureq.getParams().set(DistributedUpdateProcessor.COMMIT_END_POINT, true);
+    ureq.getParams().set(UpdateParams.OPEN_SEARCHER, false);
     ureq.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, true).process(
         server);
     server.shutdown();
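The RecoveryStrategy change above is the SOLR-3941 entry from CHANGES.txt: the commit sent to the leader during recovery no longer opens a searcher, since the replica only needs the leader's data flushed to disk. A hedged sketch of that request from the SolrJ side, mirroring the parameters in the hunk (the leader URL is a placeholder):

import java.io.IOException;

import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.UpdateParams;

// Sketch of a hard commit that skips opening a new searcher on the target node.
class CommitOnLeaderSketch {
  static void commitWithoutNewSearcher(String leaderUrl)
      throws SolrServerException, IOException {
    HttpSolrServer server = new HttpSolrServer(leaderUrl); // placeholder URL
    try {
      UpdateRequest ureq = new UpdateRequest();
      ureq.setParams(new ModifiableSolrParams());
      ureq.getParams().set(UpdateParams.OPEN_SEARCHER, false);
      // waitFlush=false, waitSearcher=true, as in RecoveryStrategy above
      ureq.setAction(AbstractUpdateRequest.ACTION.COMMIT, false, true).process(server);
    } finally {
      server.shutdown();
    }
  }
}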
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
index 0ccab0e..db869ff 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java
@@ -191,7 +191,7 @@
 
               ZkController.this.overseer = new Overseer(shardHandler, adminPath, zkStateReader);
               ElectionContext context = new OverseerElectionContext(zkClient, overseer, getNodeName());
-              overseerElector.joinElection(context);
+              overseerElector.joinElection(context, true);
               zkStateReader.createClusterStateWatchersAndUpdate();
 
             //  cc.newCmdDistribExecutor();
@@ -422,7 +422,7 @@
       this.overseer = new Overseer(shardHandler, adminPath, zkStateReader);
       ElectionContext context = new OverseerElectionContext(zkClient, overseer, getNodeName());
       overseerElector.setup(context);
-      overseerElector.joinElection(context);
+      overseerElector.joinElection(context, false);
       zkStateReader.createClusterStateWatchersAndUpdate();
       
     } catch (IOException e) {
@@ -730,7 +730,7 @@
 
     leaderElector.setup(context);
     electionContexts.put(coreZkNodeName, context);
-    leaderElector.joinElection(context);
+    leaderElector.joinElection(context, false);
   }
 
 
diff --git a/solr/core/src/java/org/apache/solr/update/PeerSync.java b/solr/core/src/java/org/apache/solr/update/PeerSync.java
index a98917e..0466864 100644
--- a/solr/core/src/java/org/apache/solr/update/PeerSync.java
+++ b/solr/core/src/java/org/apache/solr/update/PeerSync.java
@@ -312,6 +312,11 @@
         log.warn(msg() + " got a 503 from " + srsp.getShardAddress() + ", counting as success");
         return true;
       }
+      
+      if (cantReachIsSuccess && sreq.purpose == 1 && srsp.getException() instanceof SolrException && ((SolrException) srsp.getException()).code() == 404) {
+        log.warn(msg() + " got a 404 from " + srsp.getShardAddress() + ", counting as success");
+        return true;
+      }
       // TODO: at least log???
       // srsp.getException().printStackTrace(System.out);
      
diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java b/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java
index a5693c1..6035304 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrIndexConfig.java
@@ -44,6 +44,7 @@
   public final boolean useCompoundFile;
   public final int maxBufferedDocs;
   public final int maxMergeDocs;
+  public final int maxIndexingThreads;
   public final int mergeFactor;
 
   public final double ramBufferSizeMB;
@@ -71,6 +72,7 @@
     useCompoundFile = false;
     maxBufferedDocs = -1;
     maxMergeDocs = -1;
+    maxIndexingThreads = IndexWriterConfig.DEFAULT_MAX_THREAD_STATES;
     mergeFactor = -1;
     ramBufferSizeMB = 32;
     writeLockTimeout = -1;
@@ -116,6 +118,7 @@
     useCompoundFile=solrConfig.getBool(prefix+"/useCompoundFile", def.useCompoundFile);
     maxBufferedDocs=solrConfig.getInt(prefix+"/maxBufferedDocs",def.maxBufferedDocs);
     maxMergeDocs=solrConfig.getInt(prefix+"/maxMergeDocs",def.maxMergeDocs);
+    maxIndexingThreads=solrConfig.getInt(prefix+"/maxIndexingThreads",def.maxIndexingThreads);
     mergeFactor=solrConfig.getInt(prefix+"/mergeFactor",def.mergeFactor);
     ramBufferSizeMB = solrConfig.getDouble(prefix+"/ramBufferSizeMB", def.ramBufferSizeMB);
 
@@ -176,6 +179,10 @@
     iwc.setMergePolicy(buildMergePolicy(schema));
     iwc.setMergeScheduler(buildMergeScheduler(schema));
 
+    if (maxIndexingThreads != -1) {
+      iwc.setMaxThreadStates(maxIndexingThreads);
+    }
+
     return iwc;
   }
 
diff --git a/solr/core/src/test-files/solr/collection1/conf/solrconfig-indexconfig.xml b/solr/core/src/test-files/solr/collection1/conf/solrconfig-indexconfig.xml
new file mode 100644
index 0000000..74c8268
--- /dev/null
+++ b/solr/core/src/test-files/solr/collection1/conf/solrconfig-indexconfig.xml
@@ -0,0 +1,27 @@
+<?xml version="1.0" ?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<config>
+  <dataDir>${solr.data.dir:}</dataDir>
+
+  <luceneMatchVersion>${tests.luceneMatchVersion:LUCENE_CURRENT}</luceneMatchVersion>
+
+  <indexConfig>
+    <maxIndexingThreads>123</maxIndexingThreads>
+  </indexConfig>
+</config>
diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
index dcf3963..ab55358 100644
--- a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java
@@ -49,6 +49,7 @@
 import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest.Create;
+import org.apache.solr.client.solrj.request.CoreAdminRequest.Unload;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.response.CoreAdminResponse;
 import org.apache.solr.client.solrj.response.QueryResponse;
@@ -742,10 +743,10 @@
         0,
         ((HttpSolrServer) client).getBaseURL().length()
             - DEFAULT_COLLECTION.length() - 1);
-    createCollection(oneInstanceCollection2, collectionClients, baseUrl, 1, "slice1");
-    createCollection(oneInstanceCollection2, collectionClients, baseUrl, 2, "slice2");
-    createCollection(oneInstanceCollection2, collectionClients, baseUrl, 3, "slice2");
-    createCollection(oneInstanceCollection2, collectionClients, baseUrl, 4, "slice1");
+    createSolrCore(oneInstanceCollection2, collectionClients, baseUrl, 1, "slice1");
+    createSolrCore(oneInstanceCollection2, collectionClients, baseUrl, 2, "slice2");
+    createSolrCore(oneInstanceCollection2, collectionClients, baseUrl, 3, "slice2");
+    createSolrCore(oneInstanceCollection2, collectionClients, baseUrl, 4, "slice1");
     
    while (pending != null && pending.size() > 0) {
       
@@ -764,7 +765,7 @@
     
     assertAllActive(oneInstanceCollection2, solrj.getZkStateReader());
     
-    printLayout();
+    //printLayout();
     
    // TODO: enable when we don't falsely get slice1...
    // solrj.getZkStateReader().getLeaderUrl(oneInstanceCollection2, "slice1", 30000);
@@ -803,6 +804,27 @@
     assertNotNull(slices);
     String roles = slices.get("slice1").getReplicasMap().values().iterator().next().getStr(ZkStateReader.ROLES_PROP);
     assertEquals("none", roles);
+    
+    
+    ZkCoreNodeProps props = new ZkCoreNodeProps(solrj.getZkStateReader().getClusterState().getLeader(oneInstanceCollection2, "slice1"));
+    
+    // now test that unloading a core gets us a new leader
+    HttpSolrServer server = new HttpSolrServer(baseUrl);
+    Unload unloadCmd = new Unload(true);
+    unloadCmd.setCoreName(props.getCoreName());
+    
+    String leader = props.getCoreUrl();
+    
+    server.request(unloadCmd);
+    
+    int tries = 50;
+    while (leader.equals(zkStateReader.getLeaderUrl(oneInstanceCollection2, "slice1", 10000))) {
+      Thread.sleep(100);
+      if (tries-- == 0) {
+        fail("Leader never changed");
+      }
+    }
+
   }
 
   private void testSearchByCollectionName() throws SolrServerException {
@@ -875,10 +897,10 @@
 
   private void createCollection(String collection,
       List<SolrServer> collectionClients, String baseUrl, int num) {
-    createCollection(collection, collectionClients, baseUrl, num, null);
+    createSolrCore(collection, collectionClients, baseUrl, num, null);
   }
   
-  private void createCollection(final String collection,
+  private void createSolrCore(final String collection,
       List<SolrServer> collectionClients, final String baseUrl, final int num,
       final String shardId) {
     Callable call = new Callable() {
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java
index 1b12c65..a4f69fc 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderElectionTest.java
@@ -40,7 +40,6 @@
 import org.apache.zookeeper.KeeperException.NoNodeException;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 
 @Slow
@@ -114,7 +113,7 @@
           elector, "shard1", "collection1", Integer.toString(nodeNumber),
           props, zkStateReader);
       elector.setup(context);
-      seq = elector.joinElection(context);
+      seq = elector.joinElection(context, false);
       electionDone = true;
       seqToThread.put(seq, this);
     }
@@ -175,7 +174,7 @@
     ElectionContext context = new ShardLeaderElectionContextBase(elector,
         "shard2", "collection1", "dummynode1", props, zkStateReader);
     elector.setup(context);
-    elector.joinElection(context);
+    elector.joinElection(context, false);
     assertEquals("http://127.0.0.1/solr/",
         getLeaderUrl("collection1", "shard2"));
   }
@@ -188,7 +187,7 @@
     ElectionContext firstContext = new ShardLeaderElectionContextBase(first,
         "slice1", "collection2", "dummynode1", props, zkStateReader);
     first.setup(firstContext);
-    first.joinElection(firstContext);
+    first.joinElection(firstContext, false);
 
     Thread.sleep(1000);
     assertEquals("original leader was not registered", "http://127.0.0.1/solr/1/", getLeaderUrl("collection2", "slice1"));
@@ -199,7 +198,7 @@
     ElectionContext context = new ShardLeaderElectionContextBase(second,
         "slice1", "collection2", "dummynode1", props, zkStateReader);
     second.setup(context);
-    second.joinElection(context);
+    second.joinElection(context, false);
     Thread.sleep(1000);
     assertEquals("original leader should have stayed leader", "http://127.0.0.1/solr/1/", getLeaderUrl("collection2", "slice1"));
     firstContext.cancelElection();
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
index 6520c6b..59071c7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
@@ -139,7 +139,7 @@
           ShardLeaderElectionContextBase ctx = new ShardLeaderElectionContextBase(
               elector, shardId, collection, nodeName + "_" + coreName, props,
               zkStateReader);
-          elector.joinElection(ctx);
+          elector.joinElection(ctx, false);
           return shardId;
         }
         Thread.sleep(500);
@@ -876,7 +876,7 @@
         new HttpShardHandlerFactory().getShardHandler(), "/admin/cores", reader);
     ElectionContext ec = new OverseerElectionContext(zkClient, overseer, address.replaceAll("/", "_"));
     overseerElector.setup(ec);
-    overseerElector.joinElection(ec);
+    overseerElector.joinElection(ec, false);
     return zkClient;
   }
   
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrIndexConfig.java b/solr/core/src/test/org/apache/solr/core/TestSolrIndexConfig.java
new file mode 100644
index 0000000..7ccdc4d
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrIndexConfig.java
@@ -0,0 +1,36 @@
+package org.apache.solr.core;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.solr.SolrTestCaseJ4;
+import org.junit.BeforeClass;
+
+public class TestSolrIndexConfig extends SolrTestCaseJ4 {
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("solrconfig-indexconfig.xml","schema.xml");
+  }
+  
+  public void testIndexConfig() throws Exception {
+    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore().getSchema());
+
+    assertEquals(123, iwc.getMaxThreadStates());
+  }
+}
diff --git a/solr/example/solr/collection1/conf/solrconfig.xml b/solr/example/solr/collection1/conf/solrconfig.xml
index fd72025..214dae9 100755
--- a/solr/example/solr/collection1/conf/solrconfig.xml
+++ b/solr/example/solr/collection1/conf/solrconfig.xml
@@ -136,6 +136,12 @@
     <!-- Maximum time to wait for a write lock (ms) for an IndexWriter. Default: 1000 -->
     <!-- <writeLockTimeout>1000</writeLockTimeout>  -->
 
+    <!-- The maximum number of simultaneous threads that may be
+         indexing documents at once in IndexWriter; if more than this
+         many threads arrive they will wait for others to finish.
+         Default in Solr/Lucene is 8. -->
+    <!-- <maxIndexingThreads>8</maxIndexingThreads>  -->
+
     <!-- Expert: Enabling compound file will use less files for the index, 
          using fewer file descriptors on the expense of performance decrease. 
          Default in Lucene is "true". Default in Solr is "false" (since 3.6) -->
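The commented-out example above documents the same knob wired up in SolrIndexConfig earlier in this patch: maxIndexingThreads feeds the IndexWriterConfig thread-state count, which caps how many threads can index concurrently before the rest block. A small Lucene-only sketch of the setting, assuming the Lucene 4.0-era API (the value 12 is arbitrary):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.util.Version;

// Sketch: what <maxIndexingThreads> ultimately sets on the IndexWriterConfig.
class MaxIndexingThreadsSketch {
  public static void main(String[] args) {
    IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_40,
        new StandardAnalyzer(Version.LUCENE_40));
    // Default is IndexWriterConfig.DEFAULT_MAX_THREAD_STATES (8 at the time of this patch).
    iwc.setMaxThreadStates(12); // arbitrary illustrative value
    System.out.println("max thread states: " + iwc.getMaxThreadStates());
  }
}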