Tagging the 1.0.3 release.

git-svn-id: https://svn.apache.org/repos/asf/couchdb/tags/1.0.3@1138690 13f79535-47bb-0310-9956-ffa450edef68
diff --git a/CHANGES b/CHANGES
index bc6fd2d..1d87da8 100644
--- a/CHANGES
+++ b/CHANGES
@@ -43,6 +43,11 @@
  * Etap tests no longer require use of port 5984. They now use a randomly
    selected port so they won't clash with a running CouchDB.
 
+Windows:
+
+ * Windows builds now require ICU >= 4.4.0 and Erlang >= R14B03. See
+   COUCHDB-1152, and COUCHDB-963 with OTP-9139, for more information.
+
 Version 1.0.2
 -------------
 
diff --git a/INSTALL.Windows b/INSTALL.Windows
index 5c4a958..d661f1d 100644
--- a/INSTALL.Windows
+++ b/INSTALL.Windows
@@ -8,8 +8,8 @@
 
 You will need the following installed:
 
- * Erlang OTP (>=R12B5)       (http://erlang.org/)
- * ICU                        (http://icu.sourceforge.net/)
+ * Erlang OTP (>=R14B03)      (http://erlang.org/)
+ * ICU        (=4.4.*)        (http://icu.sourceforge.net/)
  * OpenSSL                    (http://www.openssl.org/)
  * Mozilla SpiderMonkey (1.8) (http://www.mozilla.org/js/spidermonkey/)
  * libcurl                    (http://curl.haxx.se/libcurl/)
@@ -21,7 +21,7 @@
 
  * When installing Erlang, you must build it from source.
 
-   The CouchDB build makes use of a number of the Erlang build scripts.
+   The CouchDB build requires a number of the Erlang build scripts.
 
  * When installing ICU, select the binaries built with Visual Studio 2008.
 
@@ -50,12 +50,17 @@
 
     * The `which cl` command points to the Microsoft compiler.
 
-If you do not do this, the ones found in `/usr/bin` may be used instead.
+    * The `which mc` command points to the Microsoft message compiler.
+
+    * The `which mt` command points to the Microsoft manifest tool.
+
+If you do not do this, the build may fail because the Cygwin versions found
+in `/usr/bin` are used instead.
 
 Building Erlang
 ---------------
 
-You must include OpenSSL.
+You must include Win32 OpenSSL.
 
 However, you can skip the GUI tools by running:
 
@@ -89,7 +94,7 @@
 
 To set up your path, run:
 
-    export PATH=$ERL_TOP/release/win32/erts-5.7.2/bin:$PATH
+    export PATH=$ERL_TOP/release/win32/erts-5.8.4/bin:$PATH
 
 If everything was successful, you should be ready to build CouchDB.
 
@@ -101,8 +106,8 @@
 Once you have satisfied the dependencies you should run:
 
     ./configure \
-        --with-js-include=/cygdrive/c/path_to_seamonkey_include \
-        --with-js-lib=/cygdrive/c/path_to_seamonkey_lib \
+        --with-js-include=/cygdrive/c/path_to_spidermonkey_include \
+        --with-js-lib=/cygdrive/c/path_to_spidermonkey_lib \
         --with-win32-icu-binaries=/cygdrive/c/path_to_icu_binaries_root \
         --with-erlang=$ERL_TOP/release/win32/usr/include \
         --with-win32-curl=/cygdrive/c/path/to/curl/root/directory \
@@ -145,4 +150,4 @@
 
     http://127.0.0.1:5984/_utils/index.html
 
-From here you should run the test suite.
+From here you should run the test suite in either Firefox 3.6+ or Safari 4+.
diff --git a/NEWS b/NEWS
index a7b2647..09013ad 100644
--- a/NEWS
+++ b/NEWS
@@ -24,6 +24,7 @@
  * Made compatible with jQuery 1.5.x.
  * Added support for inclusive_end with reduce views.
  * Etap tests no longer require use of port 5984.
+ * Windows builds now require ICU >= 4.4.0 and Erlang >= R14B03.
 
 Version 1.0.2
 -------------
diff --git a/THANKS b/THANKS
index 2074364..9d60843 100644
--- a/THANKS
+++ b/THANKS
@@ -72,5 +72,6 @@
  * Mike Leddy <mike@loop.com.br>
  * Felix Hummel <apache@felixhummel.de>
  * Nathan Vander Wilt <natevw@yahoo.com>
+ * Caolan McMahon <caolan.mcmahon@googlemail.com>
 
 For a list of authors see the `AUTHORS` file.
diff --git a/etc/windows/couchdb.iss.tpl b/etc/windows/couchdb.iss.tpl
index 6efee0a..8a32561 100644
--- a/etc/windows/couchdb.iss.tpl
+++ b/etc/windows/couchdb.iss.tpl
@@ -74,11 +74,14 @@
 ; Commands for a service
 ; First attempt to nuke an existing service of this name, in case they are
 ; reinstalling without uninstalling
-Filename: "{app}\erts-%erts_version%\bin\erlsrv.exe"; Parameters: "remove ""%package_name%"""; Tasks: service
-; add a new one
-Filename: "{app}\erts-%erts_version%\bin\erlsrv.exe"; Parameters: "add ""%package_name%"" -w ""{app}\bin"" -ar ""-sasl errlog_type error -s couch"" -c ""%package_name% %version%"""; Tasks: service
+Filename: "{app}\erts-%erts_version%\bin\erlsrv.exe"; Parameters: "remove ""%package_name%"""; Flags: runhidden; Tasks: service
+; add a new service, including automatic restart by default on failure
+Filename: "{app}\erts-%erts_version%\bin\erlsrv.exe"; Parameters: "add ""%package_name%"" -workdir ""{app}\bin"" -onfail restart_always -args ""-sasl errlog_type error -s couch +A 4 +W w"" -comment ""%package_name% %version%"""; Flags: runhidden; Tasks: service
 ; and start it if requested
-Filename: "{app}\erts-%erts_version%\bin\erlsrv.exe"; Parameters: "start ""%package_name%"""; Tasks: service\start
+Filename: "{app}\erts-%erts_version%\bin\erlsrv.exe"; Parameters: "start ""%package_name%"""; Flags: runhidden; Tasks: service\start
 
 [UninstallRun]
-Filename: "{app}\erts-%erts_version%\bin\erlsrv.exe"; Parameters: "remove ""%package_name%"""; Tasks: service
+; erlsrv stops services prior to removing them
+Filename: "{app}\erts-%erts_version%\bin\erlsrv.exe"; Parameters: "remove ""%package_name%"""; Flags: runhidden; Tasks: service
+; kill epmd.exe if it is running, so the uninstaller can remove all binaries
+Filename: "{app}\erts-%erts_version%\bin\epmd.exe"; Parameters: "-kill"; Flags: runhidden
diff --git a/share/server/util.js b/share/server/util.js
index b55480b..e438670 100644
--- a/share/server/util.js
+++ b/share/server/util.js
@@ -31,16 +31,16 @@
     }
     return resolveModule(names, {
       id : mod.id.slice(0, mod.id.lastIndexOf('/')),
-      parent : mod.parent.parent.parent,
-      current : mod.parent.parent.current
+      parent : mod.parent.parent,
+      current : mod.parent.current
     });
   } else if (n == '.') {
     if (!mod.parent) {
       throw ["error", "invalid_require_path", 'Object has no parent '+JSON.stringify(mod.current)];
     }
     return resolveModule(names, {
-      parent : mod.parent.parent,
-      current : mod.parent.current,
+      parent : mod.parent,
+      current : mod.current,
       id : mod.id
     });
   } else if (root) {
@@ -66,17 +66,28 @@
     try {
       if (sandbox) {
         if (ddoc) {
+          if (!ddoc._module_cache) {
+            ddoc._module_cache = {};
+          }
           var require = function(name, module) {
             module = module || {};
-            var newModule = resolveModule(name.split('/'), module, ddoc);
-            var s = "function (module, exports, require) { " + newModule.current + " }";
-            try {
-              var func = sandbox ? evalcx(s, sandbox) : eval(s);
-              func.apply(sandbox, [newModule, newModule.exports, function(name) {return require(name, newModule)}]);
-            } catch(e) { 
-              throw ["error","compilation_error","Module require('"+name+"') raised error "+e.toSource()]; 
+            var newModule = resolveModule(name.split('/'), module.parent, ddoc);
+            if (!ddoc._module_cache.hasOwnProperty(newModule.id)) {
+              // create an empty exports object before executing the module;
+              // this stops circular requires from filling the stack
+              ddoc._module_cache[newModule.id] = {};
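+              // e.g. two hypothetical modules that require each other: the
+              // second require() call finds this placeholder in the cache
+              // and returns it instead of re-evaluating the first module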
+              var s = "function (module, exports, require) { " + newModule.current + " }";
+              try {
+                var func = sandbox ? evalcx(s, sandbox) : eval(s);
+                func.apply(sandbox, [newModule, newModule.exports, function(name) {
+                  return require(name, newModule);
+                }]);
+              } catch(e) { 
+                throw ["error","compilation_error","Module require('"+name+"') raised error "+e.toSource()]; 
+              }
+              ddoc._module_cache[newModule.id] = newModule.exports;
             }
-            return newModule.exports;
+            return ddoc._module_cache[newModule.id];
           }
           sandbox.require = require;
         }
diff --git a/share/www/script/jquery.couch.js b/share/www/script/jquery.couch.js
index d4b7a2a..c273a62 100644
--- a/share/www/script/jquery.couch.js
+++ b/share/www/script/jquery.couch.js
@@ -615,8 +615,14 @@
   };
 
   function ajax(obj, options, errorMessage, ajaxOptions) {
+
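+    // ask the server for JSON responses by default; callers can still
+    // override both Content-Type and Accept through ajaxOptions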
+    var defaultAjaxOptions = {
+      contentType: "application/json",
+      headers: {"Accept": "application/json"}
+    };
+
     options = $.extend({successStatus: 200}, options);
-    ajaxOptions = $.extend({contentType: "application/json"}, ajaxOptions);
+    ajaxOptions = $.extend(defaultAjaxOptions, ajaxOptions);
     errorMessage = errorMessage || "Unknown error";
     $.ajax($.extend($.extend({
       type: "GET", dataType: "json", cache : !$.browser.msie,
diff --git a/share/www/script/test/attachment_names.js b/share/www/script/test/attachment_names.js
index 988dd2d..38584e9 100644
--- a/share/www/script/test/attachment_names.js
+++ b/share/www/script/test/attachment_names.js
@@ -16,6 +16,24 @@
   db.createDb();
   if (debug) debugger;
 
+  var goodDoc = {
+    _id: "good_doc",
+    _attachments: {
+      "Колян.txt": {
+       content_type:"text/plain",
+       data: "VGhpcyBpcyBhIGJhc2U2NCBlbmNvZGVkIHRleHQ="
+      }
+    }
+  };
+
+  var save_response = db.save(goodDoc);
+  T(save_response.ok);
+
+  var xhr = CouchDB.request("GET", "/test_suite_db/good_doc/Колян.txt");
+  T(xhr.responseText == "This is a base64 encoded text");
+  T(xhr.getResponseHeader("Content-Type") == "text/plain");
+  T(xhr.getResponseHeader("Etag") == '"' + save_response.rev + '"');
+
   var binAttDoc = {
     _id: "bin_doc",
     _attachments:{
@@ -26,14 +44,8 @@
     }
   };
 
-  // inline attachments
-  try {
-    db.save(binAttDoc);
-    TEquals(1, 2, "Attachment name with non UTF-8 encoding saved. Should never show!");
-  } catch (e) {
-    TEquals("bad_request", e.error, "attachment_name: inline attachments");
-    TEquals("Attachment name is not UTF-8 encoded", e.reason, "attachment_name: inline attachments");
-  }
+  var resp = db.save(binAttDoc);
+  TEquals(true, resp.ok, "attachment_name: inline attachment");
 
 
   // standalone docs
@@ -45,10 +57,9 @@
   }));
 
   var resp = JSON.parse(xhr.responseText);
-  TEquals(400, xhr.status, "attachment_name: standalone API");
-  TEquals("bad_request", resp.error, "attachment_name: standalone API");
-  TEquals("Attachment name is not UTF-8 encoded", resp.reason, "attachment_name: standalone API");
-
+  TEquals(201, xhr.status, "attachment_name: standalone API");
+  TEquals("Created",  xhr.statusText, "attachment_name: standalone API");
+  TEquals(true, resp.ok, "attachment_name: standalone API");
 
   // bulk docs
   var docs = { docs: [binAttDoc] };
@@ -58,10 +69,8 @@
   });
 
   var resp = JSON.parse(xhr.responseText);
-  TEquals(400, xhr.status, "attachment_name: bulk docs");
-  TEquals("bad_request", resp.error, "attachment_name: bulk docs");
-  TEquals("Attachment name is not UTF-8 encoded", resp.reason, "attachment_name: bulk docs");
-
+  TEquals(201, xhr.status, "attachment_name: bulk docs");
+  TEquals("Created", xhr.statusText, "attachment_name: bulk docs");
 
   // leading underscores
   var binAttDoc = {
diff --git a/share/www/script/test/attachments.js b/share/www/script/test/attachments.js
index e16c384..aab9f15 100644
--- a/share/www/script/test/attachments.js
+++ b/share/www/script/test/attachments.js
@@ -270,4 +270,17 @@
   } catch (e) {
       T(e.error == "missing_stub");
   }
+
+  // test MD5 header
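+  // ("MntvB0NYESObxH4VRDUycw==" is base64(md5("foo bar")))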
+  var bin_data = "foo bar";
+  var xhr = CouchDB.request("PUT", "/test_suite_db/bin_doc7/attachment.txt", {
+    headers:{"Content-Type":"application/octet-stream",
+             "Content-MD5":"MntvB0NYESObxH4VRDUycw=="},
+    body:bin_data
+  });
+  TEquals(201, xhr.status);
+
+  var xhr = CouchDB.request("GET", "/test_suite_db/bin_doc7/attachment.txt");
+  TEquals('MntvB0NYESObxH4VRDUycw==', xhr.getResponseHeader("Content-MD5"));
+
 };
diff --git a/share/www/script/test/attachments_multipart.js b/share/www/script/test/attachments_multipart.js
index f173d2b..3c165d0 100644
--- a/share/www/script/test/attachments_multipart.js
+++ b/share/www/script/test/attachments_multipart.js
@@ -39,7 +39,7 @@
             },
           "baz.txt": {
             "follows":true,
-            "content_type":"application/test",
+            "content_type":"text/plain",
             "length":19
             }
           }
@@ -78,12 +78,15 @@
   
   // now edit an attachment
   
-  var doc = db.open("multipart");
+  var doc = db.open("multipart", {att_encoding_info: true});
   var firstrev = doc._rev;
   
   T(doc._attachments["foo.txt"].stub == true);
   T(doc._attachments["bar.txt"].stub == true);
   T(doc._attachments["baz.txt"].stub == true);
+  TEquals("undefined", typeof doc._attachments["foo.txt"].encoding);
+  TEquals("undefined", typeof doc._attachments["bar.txt"].encoding);
+  TEquals("gzip", doc._attachments["baz.txt"].encoding);
   
   //lets change attachment bar
   delete doc._attachments["bar.txt"].stub; // remove stub member (or could set to false)
diff --git a/share/www/script/test/changes.js b/share/www/script/test/changes.js
index eca0ae3..92c4a29 100644
--- a/share/www/script/test/changes.js
+++ b/share/www/script/test/changes.js
@@ -440,6 +440,10 @@
   TEquals("0", resp.results[0].id);
   TEquals("1", resp.results[1].id);
 
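+  // the counter is only non-zero while a changes request is being serviced,
+  // so it reads 0 both before and after this plain GET completes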
+  TEquals(0, CouchDB.requestStats('httpd', 'clients_requesting_changes').current);
+  CouchDB.request("GET", "/" + db.name + "/_changes");
+  TEquals(0, CouchDB.requestStats('httpd', 'clients_requesting_changes').current);
+
   // cleanup
   db.deleteDb();
 };
diff --git a/share/www/script/test/conflicts.js b/share/www/script/test/conflicts.js
index 7258bc3..6512258 100644
--- a/share/www/script/test/conflicts.js
+++ b/share/www/script/test/conflicts.js
@@ -61,4 +61,29 @@
 
   T(db.save(doc2).ok);  // we can save a new document over a deletion without
                         // knowing the deletion rev.
+
+  // Verify COUCHDB-1178
+  var r1 = {"_id":"doc","foo":"bar"};
+  var r2 = {"_id":"doc","foo":"baz","_rev":"1-4c6114c65e295552ab1019e2b046b10e"};
+  var r3 = {"_id":"doc","foo":"bam","_rev":"2-cfcd6781f13994bde69a1c3320bfdadb"};
+  var r4 = {"_id":"doc","foo":"bat","_rev":"3-cc2f3210d779aef595cd4738be0ef8ff"};
+
+  T(db.save({"_id":"_design/couchdb-1178","validate_doc_update":"function(){}"}).ok);
+  T(db.save(r1).ok);
+  T(db.save(r2).ok);
+  T(db.save(r3).ok);
+
+  T(db.compact().ok);
+  while (db.info().compact_running) {};
+
+  TEquals({"_id":"doc",
+        "_rev":"3-cc2f3210d779aef595cd4738be0ef8ff",
+        "foo":"bam",
+        "_revisions":{"start":3,
+          "ids":["cc2f3210d779aef595cd4738be0ef8ff",
+                 "cfcd6781f13994bde69a1c3320bfdadb",
+                 "4c6114c65e295552ab1019e2b046b10e"]}},
+    db.open("doc", {"revs": true}));
+  TEquals([], db.bulkSave([r4, r3, r2], {"new_edits":false}), "no failures");
+
 };
diff --git a/share/www/script/test/design_docs.js b/share/www/script/test/design_docs.js
index a24167b..040cc3b 100644
--- a/share/www/script/test/design_docs.js
+++ b/share/www/script/test/design_docs.js
@@ -13,180 +13,393 @@
 couchTests.design_docs = function(debug) {
   var db = new CouchDB("test_suite_db", {"X-Couch-Full-Commit":"false"});
   var db2 = new CouchDB("test_suite_db_a", {"X-Couch-Full-Commit":"false"});
+
+  if (debug) debugger;
+
   db.deleteDb();
   db.createDb();
   db2.deleteDb();
   db2.createDb();
-  if (debug) debugger;
 
-  run_on_modified_server(
-    [{section: "query_server_config",
+  var server_config = [
+    {
+      section: "query_server_config",
       key: "reduce_limit",
-      value: "false"}],
-function() {
-
-  var numDocs = 500;
-
-  function makebigstring(power) {
-    var str = "a";
-    while(power-- > 0) {
-      str = str + str;
+      value: "false"
     }
-    return str;
-  }
+  ];
 
-  var designDoc = {
-    _id:"_design/test", // turn off couch.js id escaping?
-    language: "javascript",
-    whatever : {
-      stringzone : "exports.string = 'plankton';",
-      commonjs : {
-        whynot : "exports.test = require('../stringzone'); exports.foo = require('whatever/stringzone');",
-        upper : "exports.testing = require('./whynot').test.string.toUpperCase()+module.id+require('./whynot').foo.string"
+  var testFun = function() {
+    var numDocs = 500;
+
+    function makebigstring(power) {
+      var str = "a";
+      while(power-- > 0) {
+        str = str + str;
       }
-    },
-    views: {
-      all_docs_twice: {map: "function(doc) { emit(doc.integer, null); emit(doc.integer, null) }"},
-      no_docs: {map: "function(doc) {}"},
-      single_doc: {map: "function(doc) { if (doc._id == \"1\") { emit(1, null) }}"},
-      summate: {map:"function (doc) {emit(doc.integer, doc.integer)};",
-                reduce:"function (keys, values) { return sum(values); };"},
-      summate2: {map:"function (doc) {emit(doc.integer, doc.integer)};",
-                reduce:"function (keys, values) { return sum(values); };"},
-      huge_src_and_results: {map: "function(doc) { if (doc._id == \"1\") { emit(\"" + makebigstring(16) + "\", null) }}",
-                reduce:"function (keys, values) { return \"" + makebigstring(16) + "\"; };"}
-    },
-    shows: {
-      simple: "function() {return 'ok'};",
-      requirey : "function() { var lib = require('whatever/commonjs/upper'); return lib.testing; };",
-      circular : "function() { var lib = require('whatever/commonjs/upper'); return JSON.stringify(this); };"
+      return str;
     }
-  }; 
 
-  var xhr = CouchDB.request("PUT", "/test_suite_db_a/_design/test", {body: JSON.stringify(designDoc)});
-  var resp = JSON.parse(xhr.responseText);
-  
-  TEquals(resp.rev, db.save(designDoc).rev);
+    var designDoc = {
+      _id: "_design/test",
+      language: "javascript",
+      whatever : {
+        stringzone : "exports.string = 'plankton';",
+        commonjs : {
+          whynot : "exports.test = require('../stringzone'); " +
+            "exports.foo = require('whatever/stringzone');",
+          upper : "exports.testing = require('./whynot').test.string.toUpperCase()+" +
+            "module.id+require('./whynot').foo.string",
+          circular_one: "require('./circular_two'); exports.name = 'One';",
+          circular_two: "require('./circular_one'); exports.name = 'Two';"
+        },
+        // paths relative to parent
+        idtest1: {
+          a: {
+            b: {d: "module.exports = require('../c/e').id;"},
+            c: {e: "exports.id = module.id;"}
+          }
+        },
+        // multiple paths relative to parent
+        idtest2: {
+          a: {
+            b: {d: "module.exports = require('../../a/c/e').id;"},
+            c: {e: "exports.id = module.id;"}
+          }
+        },
+        // paths relative to module
+        idtest3: {
+          a: {
+            b: "module.exports = require('./c/d').id;",
+            c: {
+              d: "module.exports = require('./e');",
+              e: "exports.id = module.id;"
+            }
+          }
+        },
+        // paths relative to module and parent
+        idtest4: {
+          a: {
+            b: "module.exports = require('../a/./c/d').id;",
+            c: {
+              d: "module.exports = require('./e');",
+              e: "exports.id = module.id;"
+            }
+          }
+        },
+        // paths relative to root
+        idtest5: {
+          a: "module.exports = require('whatever/idtest5/b').id;",
+          b: "exports.id = module.id;"
+        }
+      },
+      views: {
+        all_docs_twice: {
+          map:
+            (function(doc) {
+              emit(doc.integer, null);
+              emit(doc.integer, null);
+            }).toString()
+        },
+        no_docs: {
+          map:
+            (function(doc) {
+            }).toString()
+        },
+        single_doc: {
+          map:
+            (function(doc) {
+              if (doc._id === "1") {
+                emit(1, null);
+              }
+            }).toString()
+        },
+        summate: {
+          map:
+            (function(doc) {
+              emit(doc.integer, doc.integer);
+            }).toString(),
+          reduce:
+            (function(keys, values) {
+              return sum(values);
+            }).toString()
+        },
+        summate2: {
+          map:
+            (function(doc) {
+              emit(doc.integer, doc.integer);
+            }).toString(),
+          reduce:
+            (function(keys, values) {
+              return sum(values);
+            }).toString()
+        },
+        huge_src_and_results: {
+          map:
+            (function(doc) {
+              if (doc._id === "1") {
+                emit(makebigstring(16), null);
+              }
+            }).toString(),
+          reduce:
+            (function(keys, values) {
+              return makebigstring(16);
+            }).toString()
+        }
+      },
+      shows: {
+        simple:
+          (function() {
+            return 'ok';
+          }).toString(),
+        requirey:
+          (function() {
+            var lib = require('whatever/commonjs/upper');
+            return lib.testing;
+          }).toString(),
+        circular:
+          (function() {
+            var lib = require('whatever/commonjs/upper');
+            return JSON.stringify(this);
+          }).toString(),
+        circular_require:
+          (function() {
+            return require('whatever/commonjs/circular_one').name;
+          }).toString(),
+        idtest1: (function() {
+            return require('whatever/idtest1/a/b/d');
+          }).toString(),
+        idtest2: (function() {
+            return require('whatever/idtest2/a/b/d');
+          }).toString(),
+        idtest3: (function() {
+            return require('whatever/idtest3/a/b');
+          }).toString(),
+        idtest4: (function() {
+            return require('whatever/idtest4/a/b');
+          }).toString(),
+        idtest5: (function() {
+            return require('whatever/idtest5/a');
+          }).toString()
+      }
+    }; // designDoc
 
-  // test that editing a show fun on the ddoc results in a change in output
-  var xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/simple");
-  T(xhr.status == 200);
-  TEquals(xhr.responseText, "ok");
+    var xhr = CouchDB.request(
+      "PUT", "/test_suite_db_a/_design/test", {body: JSON.stringify(designDoc)}
+    );
+    var resp = JSON.parse(xhr.responseText);
 
-  designDoc.shows.simple = "function() {return 'ko'};"
-  T(db.save(designDoc).ok);
+    TEquals(resp.rev, db.save(designDoc).rev);
 
-  var xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/simple");
-  T(xhr.status == 200);
-  TEquals(xhr.responseText, "ko");
+    // test that editing a show fun on the ddoc results in a change in output
+    xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/simple");
+    T(xhr.status == 200);
+    TEquals(xhr.responseText, "ok");
 
-  var xhr = CouchDB.request("GET", "/test_suite_db_a/_design/test/_show/simple?cache=buster");
-  T(xhr.status == 200);
-  TEquals("ok", xhr.responseText, 'query server used wrong ddoc');
+    designDoc.shows.simple = (function() {
+      return 'ko';
+    }).toString();
+    T(db.save(designDoc).ok);
 
-  // test commonjs require
-  var xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/requirey");
-  T(xhr.status == 200);
-  TEquals("PLANKTONwhatever/commonjs/upperplankton", xhr.responseText);
+    xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/simple");
+    T(xhr.status == 200);
+    TEquals(xhr.responseText, "ko");
 
-  var xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/circular");
-  T(xhr.status == 200);
-  TEquals("javascript", JSON.parse(xhr.responseText).language);
+    xhr = CouchDB.request(
+      "GET", "/test_suite_db_a/_design/test/_show/simple?cache=buster"
+    );
+    T(xhr.status == 200);
+    TEquals("ok", xhr.responseText, 'query server used wrong ddoc');
 
-  var prev_view_sig = db.designInfo("_design/test").view_index.signature;
+    // test commonjs require
+    xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/requirey");
+    T(xhr.status == 200);
+    TEquals("PLANKTONwhatever/commonjs/upperplankton", xhr.responseText);
 
-  db.bulkSave(makeDocs(1, numDocs + 1));
+    xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/circular");
+    T(xhr.status == 200);
+    TEquals("javascript", JSON.parse(xhr.responseText).language);
 
-  // test that we get design doc info back
-  var dinfo = db.designInfo("_design/test");
-  TEquals("test", dinfo.name);
-  var vinfo = dinfo.view_index;
-  TEquals(51, vinfo.disk_size);
-  TEquals(false, vinfo.compact_running);
-  // test that GET /db/_design/test/_info
-  // hasn't triggered an update of the views
-  TEquals(prev_view_sig, vinfo.signature, 'ddoc sig');
-  for (var loop = 0; loop < 2; loop++) {
-    T(db.view("test/all_docs_twice", {stale: "ok"}).total_rows === 0);
-    T(db.view("test/single_doc", {stale: "ok"}).total_rows === 0);
-    T(db.view("test/summate", {stale: "ok"}).rows.length === 0);
+    // test circular commonjs dependencies
+    xhr = CouchDB.request(
+      "GET",
+      "/test_suite_db/_design/test/_show/circular_require"
+    );
+    TEquals(200, xhr.status);
+    TEquals("One", xhr.responseText);
+
+    // Test that changes to the design doc properly invalidate cached modules:
+
+    // update the designDoc and replace
+    designDoc.whatever.commonjs.circular_one = "exports.name = 'Updated';";
+    T(db.save(designDoc).ok);
+
+    // request circular_require show function again and check the response has
+    // changed
+    xhr = CouchDB.request(
+      "GET",
+      "/test_suite_db/_design/test/_show/circular_require"
+    );
+    TEquals(200, xhr.status);
+    TEquals("Updated", xhr.responseText);
+
+
+    // test module id values are as expected:
+    xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/idtest1");
+    TEquals(200, xhr.status);
+    TEquals("whatever/idtest1/a/c/e", xhr.responseText);
+
+    xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/idtest2");
+    TEquals(200, xhr.status);
+    TEquals("whatever/idtest2/a/c/e", xhr.responseText);
+
+    xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/idtest3");
+    TEquals(200, xhr.status);
+    TEquals("whatever/idtest3/a/c/e", xhr.responseText);
+
+    xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/idtest4");
+    TEquals(200, xhr.status);
+    TEquals("whatever/idtest4/a/c/e", xhr.responseText);
+
+    xhr = CouchDB.request("GET", "/test_suite_db/_design/test/_show/idtest5");
+    TEquals(200, xhr.status);
+    TEquals("whatever/idtest5/b", xhr.responseText);
+
+
+    var prev_view_sig = db.designInfo("_design/test").view_index.signature;
+    var prev_view_size = db.designInfo("_design/test").view_index.disk_size;
+
+    db.bulkSave(makeDocs(1, numDocs + 1));
     T(db.ensureFullCommit().ok);
-    restartServer();
-  };
 
-  // test that POST /db/_view_cleanup
-  // doesn't trigger an update of the views
-  T(db.viewCleanup().ok);
-  for (var loop = 0; loop < 2; loop++) {
-    T(db.view("test/all_docs_twice", {stale: "ok"}).total_rows == 0);
-    T(db.view("test/single_doc", {stale: "ok"}).total_rows == 0);
-    T(db.view("test/summate", {stale: "ok"}).rows.length == 0);
-    T(db.ensureFullCommit().ok);
-    restartServer();
-  };
-
-  // test that the _all_docs view returns correctly with keys
-  var results = db.allDocs({startkey:"_design", endkey:"_design0"});
-  T(results.rows.length == 1);
-
-  for (var loop = 0; loop < 2; loop++) {
-    var rows = db.view("test/all_docs_twice").rows;
-    for (var i = 0; i < numDocs; i++) {
-      T(rows[2*i].key == i+1);
-      T(rows[(2*i)+1].key == i+1);
+    // test that we get correct design doc info back,
+    // and also that GET /db/_design/test/_info
+    // hasn't triggered an update of the views
+    db.view("test/summate", {stale: "ok"}); // make sure view group's open
+    for (var i = 0; i < 2; i++) {
+      var dinfo = db.designInfo("_design/test");
+      TEquals("test", dinfo.name);
+      var vinfo = dinfo.view_index;
+      TEquals(prev_view_size, vinfo.disk_size, "view group disk size didn't change");
+      TEquals(false, vinfo.compact_running);
+      TEquals(prev_view_sig, vinfo.signature, 'ddoc sig');
+      // wait some time (there were issues where an update
+      // of the views had been triggered in the background)
+      var start = new Date().getTime();
+      while (new Date().getTime() < start + 2000);
+      TEquals(0, db.view("test/all_docs_twice", {stale: "ok"}).total_rows, 'view info');
+      TEquals(0, db.view("test/single_doc", {stale: "ok"}).total_rows, 'view info');
+      TEquals(0, db.view("test/summate", {stale: "ok"}).rows.length, 'view info');
+      T(db.ensureFullCommit().ok);
+      restartServer();
     };
-    T(db.view("test/no_docs").total_rows == 0);
-    T(db.view("test/single_doc").total_rows == 1);
+
+    db.bulkSave(makeDocs(numDocs + 1, numDocs * 2 + 1));
+    T(db.ensureFullCommit().ok);
+
+    // open view group
+    db.view("test/summate", {stale: "ok"});
+    // wait so the views can get initialized
+    var start = new Date().getTime();
+    while (new Date().getTime() < start + 2000);
+
+    // test that POST /db/_view_cleanup
+    // doesn't trigger an update of the views
+    var len1 = db.view("test/all_docs_twice", {stale: "ok"}).total_rows;
+    var len2 = db.view("test/single_doc", {stale: "ok"}).total_rows;
+    var len3 = db.view("test/summate", {stale: "ok"}).rows.length;
+    for (i = 0; i < 2; i++) {
+      T(db.viewCleanup().ok);
+      // wait some time (there were issues where an update
+      // of the views had been triggered in the background)
+      start = new Date().getTime();
+      while (new Date().getTime() < start + 2000);
+      TEquals(len1, db.view("test/all_docs_twice", {stale: "ok"}).total_rows, 'view cleanup');
+      TEquals(len2, db.view("test/single_doc", {stale: "ok"}).total_rows, 'view cleanup');
+      TEquals(len3, db.view("test/summate", {stale: "ok"}).rows.length, 'view cleanup');
+      T(db.ensureFullCommit().ok);
+      restartServer();
+      // we'll test whether the view group stays closed
+      // and the views stay uninitialized (they should!)
+      len1 = len2 = len3 = 0;
+    };
+
+    // test that the _all_docs view returns correctly with keys
+    var results = db.allDocs({startkey:"_design", endkey:"_design0"});
+    T(results.rows.length == 1);
+
+    for (i = 0; i < 2; i++) {
+      var rows = db.view("test/all_docs_twice").rows;
+      for (var j = 0; j < numDocs; j++) {
+        T(rows[2 * j].key == (j + 1));
+        T(rows[(2 * j) + 1].key == (j + 1));
+      };
+      T(db.view("test/no_docs").total_rows == 0);
+      T(db.view("test/single_doc").total_rows == 1);
+      T(db.ensureFullCommit().ok);
+      restartServer();
+    };
+
+    // test when language not specified, Javascript is implied
+    var designDoc2 = {
+      _id: "_design/test2",
+      // language: "javascript",
+      views: {
+        single_doc: {
+          map:
+            (function(doc) {
+              if (doc._id === "1") {
+                emit(1, null);
+              }
+            }).toString()
+        }
+      }
+    };
+
+    T(db.save(designDoc2).ok);
+    T(db.view("test2/single_doc").total_rows == 1);
+
+    var summate = function(N) {
+      return (N + 1) * (N / 2);
+    };
+    var result = db.view("test/summate");
+    T(result.rows[0].value == summate(numDocs * 2));
+
+    result = db.view("test/summate", {startkey: 4, endkey: 4});
+    T(result.rows[0].value == 4);
+
+    result = db.view("test/summate", {startkey: 4, endkey: 5});
+    T(result.rows[0].value == 9);
+
+    result = db.view("test/summate", {startkey: 4, endkey: 6});
+    T(result.rows[0].value == 15);
+
+    // Verify that a shared index (view def is an exact copy of "summate")
+    // does not confuse the reduce stage
+    result = db.view("test/summate2", {startkey: 4, endkey: 6});
+    T(result.rows[0].value == 15);
+
+    for(i = 1; i < (numDocs / 2); i += 30) {
+      result = db.view("test/summate", {startkey: i, endkey: (numDocs - i)});
+      T(result.rows[0].value == summate(numDocs - i) - summate(i - 1));
+    }
+
+    T(db.deleteDoc(designDoc).ok);
+    T(db.open(designDoc._id) == null);
+    T(db.view("test/no_docs") == null);
+
     T(db.ensureFullCommit().ok);
     restartServer();
-  };
+    T(db.open(designDoc._id) == null);
+    T(db.view("test/no_docs") == null);
 
-  // test when language not specified, Javascript is implied
-  var designDoc2 = {
-    _id:"_design/test2",
-    // language: "javascript",
-    views: {
-      single_doc: {map: "function(doc) { if (doc._id == \"1\") { emit(1, null) }}"}
-    }
-  };
+    // trigger ddoc cleanup
+    T(db.viewCleanup().ok);
+  }; // end of testFun
 
-  T(db.save(designDoc2).ok);
-  T(db.view("test2/single_doc").total_rows == 1);
+  run_on_modified_server(server_config, testFun);
 
-  var summate = function(N) {return (N+1)*N/2;};
-  var result = db.view("test/summate");
-  T(result.rows[0].value == summate(numDocs));
-
-  result = db.view("test/summate", {startkey:4,endkey:4});
-  T(result.rows[0].value == 4);
-
-  result = db.view("test/summate", {startkey:4,endkey:5});
-  T(result.rows[0].value == 9);
-
-  result = db.view("test/summate", {startkey:4,endkey:6});
-  T(result.rows[0].value == 15);
-
-  // Verify that a shared index (view def is an exact copy of "summate")
-  // does not confuse the reduce stage
-  result = db.view("test/summate2", {startkey:4,endkey:6});
-  T(result.rows[0].value == 15);
-
-  for(var i=1; i<numDocs/2; i+=30) {
-    result = db.view("test/summate", {startkey:i,endkey:numDocs-i});
-    T(result.rows[0].value == summate(numDocs-i) - summate(i-1));
-  }
-
-  T(db.deleteDoc(designDoc).ok);
-  T(db.open(designDoc._id) == null);
-  T(db.view("test/no_docs") == null);
-
-  T(db.ensureFullCommit().ok);
-  restartServer();
-  T(db.open(designDoc._id) == null);
-  T(db.view("test/no_docs") == null);
-
-  // trigger ddoc cleanup
-  T(db.viewCleanup().ok);
-
-});
+  // cleanup
+  db.deleteDb();
+  db2.deleteDb();
 };
diff --git a/share/www/script/test/replication.js b/share/www/script/test/replication.js
index 7a8a754..a2e5d1f 100644
--- a/share/www/script/test/replication.js
+++ b/share/www/script/test/replication.js
@@ -12,6 +12,20 @@
 
 couchTests.replication = function(debug) {
   if (debug) debugger;
+
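+  // poll the target until its update_seq catches up with the source's,
+  // giving up after 3 seconds so a stalled replication cannot hang the suite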
+  function waitForSeq(sourceDb, targetDb) {
+    var targetSeq,
+        sourceSeq = sourceDb.info().update_seq,
+        t0 = new Date(),
+        t1,
+        ms = 3000;
+
+    do {
+      targetSeq = targetDb.info().update_seq;
+      t1 = new Date();
+    } while (((t1 - t0) <= ms) && targetSeq < sourceSeq);
+  }
+
   var host = CouchDB.host;
   var dbPairs = [
     {source:"test_suite_db_a",
@@ -310,14 +324,14 @@
   T(continuousResult._local_id);
 
   var cancelResult = CouchDB.replicate(dbA.name, "test_suite_db_b", {
-    body: {"cancel": true}
+    body: {"continuous":true, "cancel": true}
   });
   T(cancelResult.ok);
   T(continuousResult._local_id == cancelResult._local_id);
 
   try {
    var cancelResult2 = CouchDB.replicate(dbA.name, "test_suite_db_b", {
-     body: {"cancel": true}
+     body: {"continuous":true, "cancel": true}
    });
   } catch (e) {
     T(e.error == "not_found");
@@ -723,6 +737,7 @@
 
   var tasksAfter = JSON.parse(xhr.responseText);
   TEquals(tasks.length, tasksAfter.length);
+  waitForSeq(dbA, dbB);
   T(dbB.open("30") !== null);
 
   repResult = CouchDB.replicate(
@@ -740,6 +755,70 @@
   TEquals('string', typeof repResult._local_id);
 
 
+  // COUCHDB-885 - push replication of a doc with attachment causes a
+  // conflict in the target.
+  dbA = new CouchDB("test_suite_db_a");
+  dbB = new CouchDB("test_suite_db_b");
+
+  dbA.deleteDb();
+  dbA.createDb();
+  dbB.deleteDb();
+  dbB.createDb();
+
+  var doc = {
+    _id: "doc1"
+  };
+  TEquals(true, dbA.save(doc).ok);
+
+  repResult = CouchDB.replicate(
+    dbA.name,
+    "http://" + host + "/" + dbB.name
+  );
+  TEquals(true, repResult.ok);
+  TEquals(true, repResult.history instanceof Array);
+  TEquals(1, repResult.history.length);
+  TEquals(1, repResult.history[0].docs_written);
+  TEquals(1, repResult.history[0].docs_read);
+  TEquals(0, repResult.history[0].doc_write_failures);
+
+  doc["_attachments"] = {
+    "hello.txt": {
+      "content_type": "text/plain",
+      "data": "aGVsbG8gd29ybGQ="  // base64:encode("hello world")
+    },
+    "foo.dat": {
+      "content_type": "not/compressible",
+      "data": "aSBhbSBub3QgZ3ppcGVk"  // base64:encode("i am not gziped")
+    }
+  };
+
+  TEquals(true, dbA.save(doc).ok);
+  repResult = CouchDB.replicate(
+    dbA.name,
+    "http://" + host + "/" + dbB.name
+  );
+  TEquals(true, repResult.ok);
+  TEquals(true, repResult.history instanceof Array);
+  TEquals(2, repResult.history.length);
+  TEquals(1, repResult.history[0].docs_written);
+  TEquals(1, repResult.history[0].docs_read);
+  TEquals(0, repResult.history[0].doc_write_failures);
+
+  var copy = dbB.open(doc._id, {
+    conflicts: true, deleted_conflicts: true, attachments: true,
+    att_encoding_info: true});
+  T(copy !== null);
+  TEquals("undefined", typeof copy._conflicts);
+  TEquals("undefined", typeof copy._deleted_conflicts);
+  TEquals("text/plain", copy._attachments["hello.txt"]["content_type"]);
+  TEquals("aGVsbG8gd29ybGQ=", copy._attachments["hello.txt"]["data"]);
+  TEquals("gzip", copy._attachments["hello.txt"]["encoding"]);
+  TEquals("not/compressible", copy._attachments["foo.dat"]["content_type"]);
+  TEquals("aSBhbSBub3QgZ3ppcGVk", copy._attachments["foo.dat"]["data"]);
+  TEquals("undefined", typeof copy._attachments["foo.dat"]["encoding"]);
+  // end of test for COUCHDB-885
+
+
   // cleanup
   dbA.deleteDb();
   dbB.deleteDb();
diff --git a/src/couchdb/couch_db.erl b/src/couchdb/couch_db.erl
index 70f9025..47e705b 100644
--- a/src/couchdb/couch_db.erl
+++ b/src/couchdb/couch_db.erl
@@ -733,6 +733,8 @@
 % for the doc.
 make_first_doc_on_disk(_Db, _Id, _Pos, []) ->
     nil;
+make_first_doc_on_disk(Db, Id, Pos, [{_Rev, #doc{}} | RestPath]) ->
+    make_first_doc_on_disk(Db, Id, Pos - 1, RestPath);
 make_first_doc_on_disk(Db, Id, Pos, [{_Rev, ?REV_MISSING}|RestPath]) ->
     make_first_doc_on_disk(Db, Id, Pos - 1, RestPath);
 make_first_doc_on_disk(Db, Id, Pos, [{_Rev, {IsDel, Sp, _Seq}} |_]=DocPath) ->
@@ -921,10 +923,15 @@
 write_streamed_attachment(_Stream, _F, 0) ->
     ok;
 write_streamed_attachment(Stream, F, LenLeft) when LenLeft > 0 ->
-    Bin = F(),
+    Bin = read_next_chunk(F, LenLeft),
     ok = couch_stream:write(Stream, Bin),
     write_streamed_attachment(Stream, F, LenLeft - size(Bin)).
 
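+% attachment data funs come in two shapes: 0-arity funs return the next chunk
+% as-is; 1-arity funs are told how many bytes to read (at most 16#2000, 8 KiB)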
+read_next_chunk(F, _) when is_function(F, 0) ->
+    F();
+read_next_chunk(F, LenLeft) when is_function(F, 1) ->
+    F(lists:min([LenLeft, 16#2000])).
+
 enum_docs_since_reduce_to_count(Reds) ->
     couch_btree:final_reduce(
             fun couch_db_updater:btree_by_seq_reduce/2, Reds).
diff --git a/src/couchdb/couch_doc.erl b/src/couchdb/couch_doc.erl
index d15cd7d..895c16e 100644
--- a/src/couchdb/couch_doc.erl
+++ b/src/couchdb/couch_doc.erl
@@ -443,11 +443,13 @@
 
 
 doc_from_multi_part_stream(ContentType, DataFun) ->
-    Self = self(),
+    Parent = self(),
     Parser = spawn_link(fun() ->
-        couch_httpd:parse_multipart_request(ContentType, DataFun,
-                fun(Next)-> mp_parse_doc(Next, []) end),
-        unlink(Self)
+        {<<"--">>, _, _} = couch_httpd:parse_multipart_request(
+            ContentType, DataFun,
+            fun(Next) -> mp_parse_doc(Next, []) end),
+        unlink(Parent),
+        Parent ! {self(), finished}
         end),
     Parser ! {get_doc_bytes, self()},
     receive 
@@ -465,7 +467,11 @@
             (A) ->
                 A
             end, Doc#doc.atts),
-        {ok, Doc#doc{atts=Atts2}}
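+        % let the caller block until the parser has consumed the whole body;
+        % setting mochiweb_request_recv marks the request body as already
+        % received, so mochiweb will not try to drain it again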
+        WaitFun = fun() ->
+            receive {Parser, finished} -> ok end,
+            erlang:put(mochiweb_request_recv, true)
+        end,
+        {ok, Doc#doc{atts=Atts2}, WaitFun}
     end.
 
 mp_parse_doc({headers, H}, []) ->
diff --git a/src/couchdb/couch_httpd_db.erl b/src/couchdb/couch_httpd_db.erl
index c546d11..2d0156b 100644
--- a/src/couchdb/couch_httpd_db.erl
+++ b/src/couchdb/couch_httpd_db.erl
@@ -106,10 +106,16 @@
             FeedChangesFun(MakeCallback(Resp))
         end
     end,
-    couch_stats_collector:track_process_count(
+    couch_stats_collector:increment(
         {httpd, clients_requesting_changes}
     ),
-    WrapperFun(ChangesFun);
+    try
+        WrapperFun(ChangesFun)
+    after
+        couch_stats_collector:decrement(
+            {httpd, clients_requesting_changes}
+        )
+    end;
 
 handle_changes_req(#httpd{path_parts=[_,<<"_changes">>]}=Req, _Db) ->
     send_method_not_allowed(Req, "GET,HEAD").
@@ -683,10 +689,12 @@
     RespHeaders = [{"Location", Loc}],
     case couch_util:to_list(couch_httpd:header_value(Req, "Content-Type")) of
     ("multipart/related;" ++ _) = ContentType ->
-        {ok, Doc0} = couch_doc:doc_from_multi_part_stream(ContentType,
-                fun() -> receive_request_data(Req) end),
+        {ok, Doc0, WaitFun} = couch_doc:doc_from_multi_part_stream(
+            ContentType, fun() -> receive_request_data(Req) end),
         Doc = couch_doc_from_req(Req, DocId, Doc0),
-        update_doc(Req, Db, DocId, Doc, RespHeaders, UpdateType);
+        Result = update_doc(Req, Db, DocId, Doc, RespHeaders, UpdateType),
+        WaitFun(),
+        Result;
     _Else ->
         case couch_httpd:qs_value(Req, "batch") of
         "ok" ->
@@ -761,7 +769,7 @@
         true ->
             Boundary = couch_uuids:random(),
             JsonBytes = ?JSON_ENCODE(couch_doc:to_json_obj(Doc, 
-                    [attachments, follows|Options])),
+                    [attachments, follows, att_encoding_info | Options])),
             {ContentType, Len} = couch_doc:len_doc_to_multi_part_stream(
                     Boundary,JsonBytes, Atts, true),
             CType = {<<"Content-Type">>, ContentType},
@@ -804,7 +812,14 @@
     couch_httpd:last_chunk(Resp).
 
 receive_request_data(Req) ->
-    {couch_httpd:recv(Req, 0), fun() -> receive_request_data(Req) end}.
+    receive_request_data(Req, couch_httpd:body_length(Req)).
+
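+% stream the request body in chunks of at most 4096 bytes, tracking how much
+% of the declared body length remains; asking for more data than the body
+% holds throws instead of blocking in recv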
+receive_request_data(Req, LenLeft) when LenLeft > 0 ->
+    Len = erlang:min(4096, LenLeft),
+    Data = couch_httpd:recv(Req, Len),
+    {Data, fun() -> receive_request_data(Req, LenLeft - iolist_size(Data)) end};
+receive_request_data(_Req, _) ->
+    throw(<<"expected more data">>).
     
 update_doc_result_to_json({{Id, Rev}, Error}) ->
         {_Code, Err, Msg} = couch_httpd:error_info(Error),
@@ -919,6 +934,11 @@
             [{"Content-Encoding", atom_to_list(Enc)}];
         _ ->
             []
+        end ++ if
+            Enc =:= identity orelse ReqAcceptsAttEnc =:= true ->
+                [{"Content-MD5", base64:encode(Att#att.md5)}];
+            true ->
+                []
         end,
         Len = case {Enc, ReqAcceptsAttEnc} of
         {identity, _} ->
@@ -1011,7 +1031,7 @@
                         end,
                         
                         
-                        fun() -> couch_httpd:recv(Req, 0) end;
+                        fun(Size) -> couch_httpd:recv(Req, Size) end;
                     Length ->
                         exit({length_not_integer, Length})
                     end,
@@ -1214,34 +1234,7 @@
 validate_attachment_name(<<"_",_/binary>>) ->
     throw({bad_request, <<"Attachment name can't start with '_'">>});
 validate_attachment_name(Name) ->
-    case is_valid_utf8(Name) of
+    case couch_util:validate_utf8(Name) of
         true -> Name;
         false -> throw({bad_request, <<"Attachment name is not UTF-8 encoded">>})
     end.
-
-%% borrowed from mochijson2:json_bin_is_safe()
-is_valid_utf8(<<>>) ->
-    true;
-is_valid_utf8(<<C, Rest/binary>>) ->
-    case C of
-        $\" ->
-            false;
-        $\\ ->
-            false;
-        $\b ->
-            false;
-        $\f ->
-            false;
-        $\n ->
-            false;
-        $\r ->
-            false;
-        $\t ->
-            false;
-        C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
-            false;
-        C when C < 16#7f ->
-            is_valid_utf8(Rest);
-        _ ->
-            false
-    end.
diff --git a/src/couchdb/couch_key_tree.erl b/src/couchdb/couch_key_tree.erl
index 6701da5..5241bc6 100644
--- a/src/couchdb/couch_key_tree.erl
+++ b/src/couchdb/couch_key_tree.erl
@@ -10,25 +10,64 @@
 % License for the specific language governing permissions and limitations under
 % the License.
 
+%% @doc Data structure used to represent document edit histories.
+
+%% A key tree is used to represent the edit history of a document. Each node of
+%% the tree represents a particular version. Relations between nodes represent
+%% the order that these edits were applied. For instance, a set of three edits
+%% would produce a tree of versions A->B->C indicating that edit C was based on
+%% version B which was in turn based on A. In a world without replication (and
+%% no ability to disable MVCC checks), all histories would be forced to be
+%% linear lists of edits due to constraints imposed by MVCC (i.e., new edits must
+%% be based on the current version). However, we have replication, so we must
+%% deal with not so easy cases, which lead to trees.
+%%
+%% Consider a document in state A. This doc is replicated to a second node. We
+%% then edit the document on each node leaving it in two different states, B
+%% and C. We now have two key trees, A->B and A->C. When we go to replicate a
+%% second time, the key tree must combine these two trees which gives us
+%% A->(B|C). This is how conflicts are introduced. In terms of the key tree, we
+%% say that we have two leaves (B and C) that are not deleted. The presense of
+%% the multiple leaves indicate conflict. To remove a conflict, one of the
+%% edits (B or C) can be deleted, which results in, A->(B|C->D) where D is an
+%% edit that is specially marked with the a deleted=true flag.
+%%
+%% What makes this a bit more complicated is that there is a limit to the
+%% number of revisions kept, specified in couch_db.hrl (default is 1000). When
+%% this limit is exceeded only the last 1000 are kept. This comes in to play
+%% when branches are merged. The comparison has to begin at the same place in
+%% the branches. A revision id is of the form N-XXXXXXX where N is the current
+%% revision. So each path will have a start number, calculated in
+%% couch_doc:to_path using the formula N - length(RevIds) + 1 So, .eg. if a doc
+%% was edit 1003 times this start number would be 4, indicating that 3
+%% revisions were truncated.
+%%
+%% This comes into play in merge_at/3 (see below), which recursively walks down one
+%% tree or the other until they begin at the same revision.
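+%%
+%% For illustration (made-up revision keys, using the types declared below):
+%% a linear history A->B->C is the single path
+%%
+%%     {1, {"A", ValA, [{"B", ValB, [{"C", ValC, []}]}]}}
+%%
+%% while the conflicted history A->(B|C) has two sibling branches under A:
+%%
+%%     {1, {"A", ValA, [{"B", ValB, []}, {"C", ValC, []}]}}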
+
 -module(couch_key_tree).
 
 -export([merge/3, find_missing/2, get_key_leafs/2, get_full_key_paths/2, get/2]).
 -export([map/2, get_all_leafs/1, count_leafs/1, remove_leafs/2,
     get_all_leafs_full/1,stem/2,map_leafs/2]).
 
+-include("couch_db.hrl").
+
 % Tree::term() is really a tree(), but we don't want to require R13B04 yet
 -type branch() :: {Key::term(), Value::term(), Tree::term()}.
 -type path() :: {Start::pos_integer(), branch()}.
 -type tree() :: [branch()]. % sorted by key
 
-% partial trees arranged by how much they are cut off.
-
+%% @doc Merge a path with a list of paths and stem to the given length.
 -spec merge([path()], path(), pos_integer()) -> {[path()],
     conflicts | no_conflicts}.
 merge(Paths, Path, Depth) ->
     {Merged, Conflicts} = merge(Paths, Path),
     {stem(Merged, Depth), Conflicts}.
 
+%% @doc Merge a path with an existing list of paths, returning a new list of
+%% paths. A return of conflicts indicates a new conflict was discovered in this
+%% merge. Conflicts may already exist in the original list of paths.
 -spec merge([path()], path()) -> {[path()], conflicts | no_conflicts}.
 merge(Paths, Path) ->
     {ok, Merged, HasConflicts} = merge_one(Paths, Path, [], false),
@@ -67,6 +106,7 @@
     {ok, Merged, Conflicts} ->
         {ok, [{Key, Value, Merged} | Sibs], Conflicts};
     no ->
+        % first branch didn't merge, move to next branch
         case merge_at(Sibs, Place, InsertTree) of
         {ok, Merged, Conflicts} ->
             {ok, [{Key, Value, SubTree} | Merged], Conflicts};
@@ -82,9 +122,9 @@
     no ->
         no
     end;
-merge_at([{Key, Value, SubTree}|Sibs], 0, [{Key, _Value, InsertSubTree}]) ->
+merge_at([{Key, V1, SubTree}|Sibs], 0, [{Key, V2, InsertSubTree}]) ->
     {Merged, Conflicts} = merge_simple(SubTree, InsertSubTree),
-    {ok, [{Key, Value, Merged} | Sibs], Conflicts};
+    {ok, [{Key, value_pref(V1, V2), Merged} | Sibs], Conflicts};
 merge_at([{OurKey, _, _} | _], 0, [{Key, _, _}]) when OurKey > Key ->
     % sibling keys are ordered, no point in continuing
     no;
@@ -103,16 +143,18 @@
     {B, false};
 merge_simple(A, []) ->
     {A, false};
-merge_simple([{Key, Value, SubA} | NextA], [{Key, _, SubB} | NextB]) ->
+merge_simple([{Key, V1, SubA} | NextA], [{Key, V2, SubB} | NextB]) ->
     {MergedSubTree, Conflict1} = merge_simple(SubA, SubB),
     {MergedNextTree, Conflict2} = merge_simple(NextA, NextB),
+    Value = value_pref(V1, V2),
     {[{Key, Value, MergedSubTree} | MergedNextTree], Conflict1 or Conflict2};
 merge_simple([{A, _, _} = Tree | Next], [{B, _, _} | _] = Insert) when A < B ->
-    {Merged, _} = merge_simple(Next, Insert),
-    {[Tree | Merged], true};
+    {Merged, Conflict} = merge_simple(Next, Insert),
+    % if Merged has more branches than the input we added a new conflict
+    {[Tree | Merged], Conflict orelse (length(Merged) > length(Next))};
 merge_simple(Ours, [Tree | Next]) ->
-    {Merged, _} = merge_simple(Ours, Next),
-    {[Tree | Merged], true}.
+    {Merged, Conflict} = merge_simple(Ours, Next),
+    {[Tree | Merged], Conflict orelse (length(Merged) > length(Next))}.
 
 find_missing(_Tree, []) ->
     [];
@@ -157,14 +199,18 @@
     % filter out any that are in the keys list.
     {FilteredPaths, RemovedKeys} = filter_leafs(Paths, Keys, [], []),
 
+    SortedPaths = lists:sort(
+        [{Pos + 1 - length(Path), Path} || {Pos, Path} <- FilteredPaths]
+    ),
+
     % convert paths back to trees
     NewTree = lists:foldl(
-        fun({PathPos, Path},TreeAcc) ->
+        fun({StartPos, Path},TreeAcc) ->
             [SingleTree] = lists:foldl(
                 fun({K,V},NewTreeAcc) -> [{K,V,NewTreeAcc}] end, [], Path),
-            {NewTrees, _} = merge(TreeAcc, {PathPos + 1 - length(Path), SingleTree}),
+            {NewTrees, _} = merge(TreeAcc, {StartPos, SingleTree}),
             NewTrees
-        end, [], FilteredPaths),
+        end, [], SortedPaths),
     {NewTree, RemovedKeys}.
 
 
@@ -314,19 +360,35 @@
 
 
 stem(Trees, Limit) ->
-    % flatten each branch in a tree into a tree path
-    Paths = get_all_leafs_full(Trees),
-
-    Paths2 = [{Pos, lists:sublist(Path, Limit)} || {Pos, Path} <- Paths],
+    % flatten each branch in a tree into a tree path, sort by starting rev #
+    Paths = lists:sort(lists:map(fun({Pos, Path}) ->
+        StemmedPath = lists:sublist(Path, Limit),
+        {Pos + 1 - length(StemmedPath), StemmedPath}
+    end, get_all_leafs_full(Trees))),
 
     % convert paths back to trees
     lists:foldl(
-        fun({PathPos, Path},TreeAcc) ->
+        fun({StartPos, Path},TreeAcc) ->
             [SingleTree] = lists:foldl(
                 fun({K,V},NewTreeAcc) -> [{K,V,NewTreeAcc}] end, [], Path),
-            {NewTrees, _} = merge(TreeAcc, {PathPos + 1 - length(Path), SingleTree}),
+            {NewTrees, _} = merge(TreeAcc, {StartPos, SingleTree}),
             NewTrees
-        end, [], Paths2).
+        end, [], Paths).
+
+
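+% when the same revision key appears in both trees being merged, prefer the
+% value that carries information: a 3- or 4-tuple (an on-disk leaf summary)
+% wins over anything else, and anything else wins over ?REV_MISSING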
+value_pref(Tuple, _) when is_tuple(Tuple),
+        (tuple_size(Tuple) == 3 orelse tuple_size(Tuple) == 4) ->
+    Tuple;
+value_pref(_, Tuple) when is_tuple(Tuple),
+        (tuple_size(Tuple) == 3 orelse tuple_size(Tuple) == 4) ->
+    Tuple;
+value_pref(?REV_MISSING, Other) ->
+    Other;
+value_pref(Other, ?REV_MISSING) ->
+    Other;
+value_pref(Last, _) ->
+    Last.
+
 
 % Tests moved to test/etap/06?-*.t
 
diff --git a/src/couchdb/couch_log.erl b/src/couchdb/couch_log.erl
index 2d62cbb..65b5995 100644
--- a/src/couchdb/couch_log.erl
+++ b/src/couchdb/couch_log.erl
@@ -131,11 +131,11 @@
     file:close(Fd).
 
 log(Fd, Pid, Level, Format, Args) ->
-    Msg = io_lib:format(Format, Args),
-    ok = io:format("[~s] [~p] ~s~n", [Level, Pid, Msg]), % dump to console too
-    Msg2 = re:replace(lists:flatten(Msg),"\\r\\n|\\r|\\n", "\r\n",
-        [global, {return, list}]),
-    ok = io:format(Fd, "[~s] [~s] [~p] ~s\r~n\r~n", [httpd_util:rfc1123_date(), Level, Pid, Msg2]).
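+    % format the message once, as a UTF-8 binary, so multi-byte characters
+    % reach both the console and the log file intact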
+    ConsoleMsg = unicode:characters_to_binary(io_lib:format(
+            "[~s] [~p] " ++ Format ++ "~n", [Level, Pid | Args])),
+    FileMsg = ["[", httpd_util:rfc1123_date(), "] ", ConsoleMsg],
+    ok = io:put_chars(ConsoleMsg),
+    ok = io:put_chars(Fd, iolist_to_binary(FileMsg)).
 
 read(Bytes, Offset) ->
     LogFileName = couch_config:get("log", "file"),
diff --git a/src/couchdb/couch_rep.erl b/src/couchdb/couch_rep.erl
index 7a2fd51..47e22e9 100644
--- a/src/couchdb/couch_rep.erl
+++ b/src/couchdb/couch_rep.erl
@@ -78,8 +78,14 @@
         {error, not_found} ->
             {error, not_found};
         ok ->
-            ok = supervisor:delete_child(couch_rep_sup, BaseId ++ Extension),
-            {ok, {cancelled, ?l2b(BaseId)}}
+            case supervisor:delete_child(couch_rep_sup, BaseId ++ Extension) of
+                ok ->
+                    {ok, {cancelled, ?l2b(BaseId)}};
+                {error, not_found} ->
+                    {ok, {cancelled, ?l2b(BaseId)}};
+                {error, _} = Error ->
+                    Error
+            end
         end;
     false ->
         Server = start_replication_server(Replicator),
diff --git a/src/couchdb/couch_rep_writer.erl b/src/couchdb/couch_rep_writer.erl
index cf98ccf..d399789 100644
--- a/src/couchdb/couch_rep_writer.erl
+++ b/src/couchdb/couch_rep_writer.erl
@@ -71,7 +71,7 @@
     [];
 write_bulk_docs(#http_db{headers = Headers} = Db, Docs) ->
     JsonDocs = [
-        couch_doc:to_json_obj(Doc, [revs, att_gzip_length]) || Doc <- Docs
+        couch_doc:to_json_obj(Doc, [revs]) || Doc <- Docs
     ],
     Request = Db#http_db{
         resource = "_bulk_docs",
@@ -91,7 +91,7 @@
     JsonBytes = ?JSON_ENCODE(
         couch_doc:to_json_obj(
             Doc,
-            [follows, att_encoding_info, attachments]
+            [follows, att_encoding_info, attachments, revs]
         )
     ),
     Boundary = couch_uuids:random(),
diff --git a/src/couchdb/couch_util.erl b/src/couchdb/couch_util.erl
index 7c82529..36c1906 100644
--- a/src/couchdb/couch_util.erl
+++ b/src/couchdb/couch_util.erl
@@ -17,7 +17,7 @@
 -export([rand32/0, implode/2, collate/2, collate/3]).
 -export([abs_pathname/1,abs_pathname/2, trim/1, ascii_lower/1]).
 -export([encodeBase64Url/1, decodeBase64Url/1]).
--export([to_hex/1, parse_term/1, dict_find/3]).
+-export([validate_utf8/1, to_hex/1, parse_term/1, dict_find/3]).
 -export([file_read_size/1, get_nested_json_value/2, json_user_ctx/1]).
 -export([proplist_apply_field/2, json_apply_field/2]).
 -export([to_binary/1, to_integer/1, to_list/1, url_encode/1]).
@@ -107,6 +107,37 @@
         erlang:demonitor(MRef, [flush])
     end.
 
+validate_utf8(Data) when is_list(Data) ->
+    validate_utf8(?l2b(Data));
+validate_utf8(Bin) when is_binary(Bin) ->
+    validate_utf8_fast(Bin, 0).
+
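+% scan the binary one code point at a time by offset, accepting only
+% well-formed sequences: ASCII (< 128), two bytes (lead 16#C2-16#DF),
+% three bytes (lead 16#E0-16#EF), or four bytes (lead 16#F0-16#F4), with
+% every continuation byte in 16#80-16#BF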
+validate_utf8_fast(B, O) ->
+    case B of
+        <<_:O/binary>> ->
+            true;
+        <<_:O/binary, C1, _/binary>> when
+                C1 < 128 ->
+            validate_utf8_fast(B, 1 + O);
+        <<_:O/binary, C1, C2, _/binary>> when
+                C1 >= 194, C1 =< 223,
+                C2 >= 128, C2 =< 191 ->
+            validate_utf8_fast(B, 2 + O);
+        <<_:O/binary, C1, C2, C3, _/binary>> when
+                C1 >= 224, C1 =< 239,
+                C2 >= 128, C2 =< 191,
+                C3 >= 128, C3 =< 191 ->
+            validate_utf8_fast(B, 3 + O);
+        <<_:O/binary, C1, C2, C3, C4, _/binary>> when
+                C1 >= 240, C1 =< 244,
+                C2 >= 128, C2 =< 191,
+                C3 >= 128, C3 =< 191,
+                C4 >= 128, C4 =< 191 ->
+            validate_utf8_fast(B, 4 + O);
+        _ ->
+            false
+    end.
+
 to_hex([]) ->
     [];
 to_hex(Bin) when is_binary(Bin) ->
diff --git a/src/couchdb/couch_view_group.erl b/src/couchdb/couch_view_group.erl
index d4065d5..93c63c8 100644
--- a/src/couchdb/couch_view_group.erl
+++ b/src/couchdb/couch_view_group.erl
@@ -84,6 +84,7 @@
             ReturnPid ! {Ref, self(), {error, invalid_view_seq}},
             ignore;
         _ ->
+            couch_db:monitor(Db),
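+            % Monitor the db main process; the resulting 'DOWN' message
+            % (see handle_info/2) lets the group shut down with the db.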
             couch_db:close(Db),
             {ok, RefCounter} = couch_ref_counter:start([Fd]),
             {ok, #group_state{
@@ -336,7 +337,11 @@
 
 handle_info({'EXIT', FromPid, Reason}, State) ->
     ?LOG_DEBUG("Exit from linked pid: ~p", [{FromPid, Reason}]),
-    {stop, Reason, State}.
+    {stop, Reason, State};
+
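+% The db is monitored in init/1, so a 'DOWN' message means the database
+% was closed or deleted; shut the view group down cleanly.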
+handle_info({'DOWN',_,_,_,_}, State) ->
+    ?LOG_INFO("Shutting down view group server, monitored db is closing.", []),
+    {stop, normal, reply_all(State, shutdown)}.
 
 
 terminate(Reason, #group_state{updater_pid=Update, compactor_pid=Compact}=S) ->
diff --git a/src/couchdb/priv/Makefile.am b/src/couchdb/priv/Makefile.am
index b36d828..0a99cbf 100644
--- a/src/couchdb/priv/Makefile.am
+++ b/src/couchdb/priv/Makefile.am
@@ -79,9 +79,9 @@
 	        $(LN_S) couch_icu_driver couch_icu_driver.so; \
 	fi
 if WINDOWS
-	$(INSTALL) $(ICU_LOCAL_BIN)/icuuc42.dll $(bindir)
-	$(INSTALL) $(ICU_LOCAL_BIN)/icudt42.dll $(bindir)
-	$(INSTALL) $(ICU_LOCAL_BIN)/icuin42.dll $(bindir)
+	$(INSTALL) $(ICU_LOCAL_BIN)/icuuc44.dll $(bindir)
+	$(INSTALL) $(ICU_LOCAL_BIN)/icudt44.dll $(bindir)
+	$(INSTALL) $(ICU_LOCAL_BIN)/icuin44.dll $(bindir)
 	$(INSTALL) $(JS_LIB_BINARY) $(bindir)
 	$(INSTALL) .libs/couchspawnkillable.exe \
 		"$(DESTDIR)$(couchprivdir)/couchspawnkillable.exe"
diff --git a/src/erlang-oauth/oauth_uri.erl b/src/erlang-oauth/oauth_uri.erl
index 3bdc907..5023f98 100644
--- a/src/erlang-oauth/oauth_uri.erl
+++ b/src/erlang-oauth/oauth_uri.erl
@@ -6,14 +6,6 @@
 
 -import(lists, [concat/1]).
 
--define(is_uppercase_alpha(C), C >= $A, C =< $Z).
--define(is_lowercase_alpha(C), C >= $a, C =< $z).
--define(is_alpha(C), ?is_uppercase_alpha(C); ?is_lowercase_alpha(C)).
--define(is_digit(C), C >= $0, C =< $9).
--define(is_alphanumeric(C), ?is_alpha(C); ?is_digit(C)).
--define(is_unreserved(C), ?is_alphanumeric(C); C =:= $-; C =:= $_; C =:= $.; C =:= $~).
--define(is_hex(C), ?is_digit(C); C >= $A, C =< $F).
-
 
 normalize(URI) ->
   case http_uri:parse(URI) of
@@ -66,23 +58,41 @@
 intersperse(Sep, [X|Xs]) ->
   [X, Sep|intersperse(Sep, Xs)].
 
-decode(Chars) ->
-  decode(Chars, []).
+-define(is_alphanum(C), C >= $A, C =< $Z; C >= $a, C =< $z; C >= $0, C =< $9).
 
-decode([], Decoded) ->
-  lists:reverse(Decoded);
-decode([$%,A,B|Etc], Decoded) when ?is_hex(A), ?is_hex(B) ->
-  decode(Etc, [erlang:list_to_integer([A,B], 16)|Decoded]);
-decode([C|Etc], Decoded) when ?is_unreserved(C) ->
-  decode(Etc, [C|Decoded]).
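+%% Percent-encode everything outside the RFC 3986 unreserved set
+%% (ALPHA / DIGIT / "-" / "_" / "." / "~"), using uppercase hex digits,
+%% e.g. encode("a b") yields "a%20b" and encode(42) yields "42".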
+encode(Term) when is_integer(Term) ->
+  integer_to_list(Term);
+encode(Term) when is_atom(Term) ->
+  encode(atom_to_list(Term));
+encode(Term) when is_list(Term) ->
+  encode(lists:reverse(Term, []), []).
 
-encode(Chars) ->
-  encode(Chars, []).
+encode([X | T], Acc) when ?is_alphanum(X); X =:= $-; X =:= $_; X =:= $.; X =:= $~ ->
+  encode(T, [X | Acc]);
+encode([X | T], Acc) ->
+  NewAcc = [$%, dec2hex(X bsr 4), dec2hex(X band 16#0f) | Acc],
+  encode(T, NewAcc);
+encode([], Acc) ->
+  Acc.
 
-encode([], Encoded) ->
-  lists:flatten(lists:reverse(Encoded));
-encode([C|Etc], Encoded) when ?is_unreserved(C) ->
-  encode(Etc, [C|Encoded]);
-encode([C|Etc], Encoded) ->
-  Value = io_lib:format("%~2.2.0s", [erlang:integer_to_list(C, 16)]),
-  encode(Etc, [Value|Encoded]).
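+%% Decode %XX escapes and pass all other characters through unchanged,
+%% so input that was never percent-encoded survives intact, e.g.
+%% decode("a%20b") yields "a b".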
+decode(Str) when is_list(Str) ->
+  decode(Str, []).
+
+decode([$%, A, B | T], Acc) ->
+  decode(T, [(hex2dec(A) bsl 4) + hex2dec(B) | Acc]);
+decode([X | T], Acc) ->
+  decode(T, [X | Acc]);
+decode([], Acc) ->
+  lists:reverse(Acc, []).
+
+-compile({inline, [{dec2hex, 1}, {hex2dec, 1}]}).
+
+dec2hex(N) when N >= 10 andalso N =< 15 ->
+  N + $A - 10;
+dec2hex(N) when N >= 0 andalso N =< 9 ->
+  N + $0.
+
+hex2dec(C) when C >= $A andalso C =< $F ->
+  C - $A + 10;
+hex2dec(C) when C >= $0 andalso C =< $9 ->
+  C - $0.
diff --git a/test/etap/060-kt-merging.t b/test/etap/060-kt-merging.t
index 0e481a5..efbdbf6 100755
--- a/test/etap/060-kt-merging.t
+++ b/test/etap/060-kt-merging.t
@@ -15,7 +15,7 @@
 
 main(_) ->
     test_util:init_code_path(),
-    etap:plan(12),
+    etap:plan(16),
     case (catch test()) of
         ok ->
             etap:end_tests();
@@ -26,25 +26,21 @@
     ok.
 
 test() ->
-    One = {0, {"1","foo",[]}},
-    TwoSibs = [{0, {"1","foo",[]}},
-               {0, {"2","foo",[]}}],
-    OneChild = {0, {"1","foo",[{"1a", "bar", []}]}},
-    TwoChild = {0, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
-    TwoChildSibs = {0, {"1","foo", [{"1a", "bar", []},
-                                     {"1b", "bar", []}]}},
-    TwoChildSibs2 = {0, {"1","foo", [{"1a", "bar", []},
-                                     {"1b", "bar", [{"1bb", "boo", []}]}]}},
-    Stemmed1b = {1, {"1a", "bar", []}},
-    Stemmed1a = {1, {"1a", "bar", [{"1aa", "bar", []}]}},
-    Stemmed1aa = {2, {"1aa", "bar", []}},
-    Stemmed1bb = {2, {"1bb", "boo", []}},
+    One = {1, {"1","foo",[]}},
 
     etap:is(
         {[One], no_conflicts},
         couch_key_tree:merge([], One, 10),
         "The empty tree is the identity for merge."
     ),
+    etap:is(
+        {[One], no_conflicts},
+        couch_key_tree:merge([One], One, 10),
+        "Merging is reflexive."
+    ),
+
+    TwoSibs = [{1, {"1","foo",[]}},
+               {1, {"2","foo",[]}}],
 
     etap:is(
         {TwoSibs, no_conflicts},
@@ -52,41 +48,75 @@
         "Merging a prefix of a tree with the tree yields the tree."
     ),
 
+    Three = {1, {"3","foo",[]}},
+    ThreeSibs = [{1, {"1","foo",[]}},
+                 {1, {"2","foo",[]}},
+                 {1, {"3","foo",[]}}],
+
     etap:is(
-        {[One], no_conflicts},
-        couch_key_tree:merge([One], One, 10),
-        "Merging is reflexive."
+        {ThreeSibs, conflicts},
+        couch_key_tree:merge(TwoSibs, Three, 10),
+        "Merging a third unrelated branch leads to a conflict."
     ),
 
+
+    TwoChild = {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]}]}},
+
     etap:is(
         {[TwoChild], no_conflicts},
         couch_key_tree:merge([TwoChild], TwoChild, 10),
         "Merging two children is still reflexive."
     ),
 
+    TwoChildSibs = {1, {"1","foo", [{"1a", "bar", []},
+                                     {"1b", "bar", []}]}},
     etap:is(
         {[TwoChildSibs], no_conflicts},
         couch_key_tree:merge([TwoChildSibs], TwoChildSibs, 10),
         "Merging a tree to itself is itself."),
 
+    TwoChildPlusSibs =
+        {1, {"1","foo", [{"1a", "bar", [{"1aa", "bar", []}]},
+                         {"1b", "bar", []}]}},
+
+    etap:is(
+        {[TwoChildPlusSibs], no_conflicts},
+        couch_key_tree:merge([TwoChild], TwoChildSibs, 10),
+        "Merging tree of uneven length at node 2."),
+
+    Stemmed1b = {2, {"1a", "bar", []}},
     etap:is(
         {[TwoChildSibs], no_conflicts},
         couch_key_tree:merge([TwoChildSibs], Stemmed1b, 10),
         "Merging a tree with a stem."
     ),
 
+    TwoChildSibs2 = {1, {"1","foo", [{"1a", "bar", []},
+                                     {"1b", "bar", [{"1bb", "boo", []}]}]}},
+    Stemmed1bb = {3, {"1bb", "boo", []}},
     etap:is(
         {[TwoChildSibs2], no_conflicts},
         couch_key_tree:merge([TwoChildSibs2], Stemmed1bb, 10),
         "Merging a stem at a deeper level."
     ),
 
+    StemmedTwoChildSibs2 = [{2,{"1a", "bar", []}},
+                            {2,{"1b", "bar", [{"1bb", "boo", []}]}}],
+
+    etap:is(
+        {StemmedTwoChildSibs2, no_conflicts},
+        couch_key_tree:merge(StemmedTwoChildSibs2, Stemmed1bb, 10),
+        "Merging a stem at a deeper level against paths at deeper levels."
+    ),
+
+    Stemmed1aa = {3, {"1aa", "bar", []}},
     etap:is(
         {[TwoChild], no_conflicts},
         couch_key_tree:merge([TwoChild], Stemmed1aa, 10),
         "Merging a single tree with a deeper stem."
     ),
 
+    Stemmed1a = {2, {"1a", "bar", [{"1aa", "bar", []}]}},
     etap:is(
         {[TwoChild], no_conflicts},
         couch_key_tree:merge([TwoChild], Stemmed1a, 10),
@@ -99,6 +129,7 @@
         "More merging."
     ),
 
+    OneChild = {1, {"1","foo",[{"1a", "bar", []}]}},
     Expect1 = [OneChild, Stemmed1aa],
     etap:is(
         {Expect1, conflicts},
@@ -112,4 +143,34 @@
         "Merge should have no conflicts."
     ),
 
+    %% this test is based on couch-902-test-case2.py
+    %% foo has conflicts from replication at depth two
+    %% foo3 is the current value
+    Foo = {1, {"foo",
+               "val1",
+               [{"foo2","val2",[]},
+                {"foo3", "val3", []}
+               ]}},
+    %% foo now has an attachment added, which leads to foo4 and val4
+    %% off foo3
+    Bar = {1, {"foo",
+               [],
+               [{"foo3",
+                 [],
+                 [{"foo4","val4",[]}
+                  ]}]}},
+    %% this is what the merge returns
+    %% note that it ignores the conflicting branch as there's no match
+    FooBar = {1, {"foo",
+               "val1",
+               [{"foo2","val2",[]},
+                {"foo3", "val3", [{"foo4","val4",[]}]}
+               ]}},
+
+    etap:is(
+      {[FooBar], no_conflicts},
+      couch_key_tree:merge([Foo], Bar, 10),
+      "Merging trees with conflicts ought to behave."
+    ),
+
     ok.
diff --git a/test/etap/190-oauth.t b/test/etap/190-oauth.t
new file mode 100755
index 0000000..0992204
--- /dev/null
+++ b/test/etap/190-oauth.t
@@ -0,0 +1,31 @@
+#!/usr/bin/env escript
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+main(_) ->
+    test_util:init_code_path(),
+    etap:plan(1),
+    case (catch test()) of
+        ok ->
+            etap:end_tests();
+        Other ->
+            etap:diag(io_lib:format("Test died abnormally: ~p", [Other])),
+            etap:bail(Other)
+    end,
+    ok.
+
+test() ->
+    etap:is(
+      oauth_uri:params_from_string("realm=http://localhost:5984"),
+      [{"realm","http://localhost:5984"}],
+      "decode should handle non-percent encoded input."),
+    ok.
diff --git a/test/etap/200-view-group-no-db-leaks.t b/test/etap/200-view-group-no-db-leaks.t
index bbfd083..9c77f1a 100755
--- a/test/etap/200-view-group-no-db-leaks.t
+++ b/test/etap/200-view-group-no-db-leaks.t
@@ -65,7 +65,7 @@
 main(_) ->
     test_util:init_code_path(),
 
-    etap:plan(11),
+    etap:plan(18),
     case (catch test()) of
         ok ->
             etap:end_tests();
@@ -87,12 +87,20 @@
 
     create_docs(),
     create_design_doc(),
+
+    ViewGroup = couch_view:get_group_server(
+        test_db_name(), <<"_design/", (ddoc_name())/binary>>),
+    etap:is(is_pid(ViewGroup), true, "got view group pid"),
+    etap:is(is_process_alive(ViewGroup), true, "view group pid is alive"),
+
     query_view(),
     check_db_ref_count(),
+    etap:is(is_process_alive(ViewGroup), true, "view group pid is alive"),
 
     create_new_doc(<<"doc1000">>),
     query_view(),
     check_db_ref_count(),
+    etap:is(is_process_alive(ViewGroup), true, "view group pid is alive"),
 
     Ref1 = get_db_ref_counter(),
     compact_db(),
@@ -100,15 +108,27 @@
     Ref2 = get_db_ref_counter(),
     etap:isnt(Ref1, Ref2,  "DB ref counter changed"),
     etap:is(false, is_process_alive(Ref1), "old DB ref counter is not alive"),
+    etap:is(is_process_alive(ViewGroup), true, "view group pid is alive"),
 
     compact_view_group(),
     check_db_ref_count(),
     Ref3 = get_db_ref_counter(),
     etap:is(Ref3, Ref2,  "DB ref counter didn't change"),
+    etap:is(is_process_alive(ViewGroup), true, "view group pid is alive"),
 
     create_new_doc(<<"doc1001">>),
     query_view(),
     check_db_ref_count(),
+    etap:is(is_process_alive(ViewGroup), true, "view group pid is alive"),
+
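+    % Deleting the database must take the monitored view group down with
+    % it; wait up to five seconds for the 'DOWN' notification.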
+    MonRef = erlang:monitor(process, ViewGroup),
+    ok = couch_server:delete(test_db_name(), []),
+    receive
+    {'DOWN', MonRef, _, _, _} ->
+        etap:diag("view group is dead after DB deletion")
+    after 5000 ->
+        etap:bail("view group did not die after DB deletion")
+    end,
 
     ok = timer:sleep(1000),
     delete_db(),
diff --git a/test/etap/Makefile.am b/test/etap/Makefile.am
index e0109ec..ac234d3 100644
--- a/test/etap/Makefile.am
+++ b/test/etap/Makefile.am
@@ -68,4 +68,5 @@
     140-attachment-comp.t \
     150-invalid-view-seq.t \
     160-vhosts.t \
+    190-oauth.t \
     200-view-group-no-db-leaks.t