Merge branch '3057-add-couch-js-sleep-test-suite-helper'
diff --git a/src/couch_js_functions.hrl b/include/couch_js_functions.hrl
similarity index 100%
rename from src/couch_js_functions.hrl
rename to include/couch_js_functions.hrl
diff --git a/priv/couch_js/utf8.c b/priv/couch_js/utf8.c
index 2d23cc2..fcafff6 100644
--- a/priv/couch_js/utf8.c
+++ b/priv/couch_js/utf8.c
@@ -84,6 +84,9 @@
                 // Invalid second half of surrogate pair
                 v = (uint32) 0xFFFD;
             }
+            // Undo our character advancement
+            src--;
+            srclen++;
         }
         else
         {
diff --git a/priv/stats_descriptions.cfg b/priv/stats_descriptions.cfg
index c695ae4..8b83e0c 100644
--- a/priv/stats_descriptions.cfg
+++ b/priv/stats_descriptions.cfg
@@ -218,3 +218,11 @@
     {type, histogram},
     {desc, <<"duration of validate_doc_update function calls">>}
 ]}.
+{[pread, exceed_eof], [
+    {type, counter},
+    {desc, <<"number of the attempts to read beyond end of db file">>}
+]}.
+{[pread, exceed_limit], [
+    {type, counter},
+    {desc, <<"number of the attempts to read beyond set limit">>}
+]}.
diff --git a/rebar.config.script b/rebar.config.script
index 53b3ca9..7d803b9 100644
--- a/rebar.config.script
+++ b/rebar.config.script
@@ -32,7 +32,12 @@
         "couchjs"
 end,
 CouchJSPath = filename:join(["priv", CouchJSName]),
-Version = string:strip(os:cmd("git describe --always"), right, $\n),
+Version = case os:getenv("COUCHDB_VERSION") of
+    false ->
+        string:strip(os:cmd("git describe --always"), right, $\n);
+    Version0 ->
+        Version0
+end,
 
 CouchConfig = case filelib:is_file(os:getenv("COUCHDB_CONFIG")) of
     true ->
@@ -65,6 +70,8 @@
         case os:type() of
             {win32, _} ->
                 {"/DHAVE_CURL /IC:\\relax\\curl\\include", "/LIBPATH:C:\\relax\\js-1.8.5\\js\\src /LIBPATH:C:\\Relax\\curl\\lib\\release-ssl mozjs185-1.0.lib libcurl_imp.lib"};
+            {unix, freebsd} ->
+                {"-DHAVE_CURL -I/usr/local/include", "-DHAVE_CURL -lmozjs185 -lcurl"};
             _ ->
                 {"-DHAVE_CURL", "-DHAVE_CURL -lmozjs185 -lcurl"}
         end;
diff --git a/src/couch_auth_cache.erl b/src/couch_auth_cache.erl
index c936535..32d706d 100644
--- a/src/couch_auth_cache.erl
+++ b/src/couch_auth_cache.erl
@@ -17,7 +17,7 @@
 
 % public API
 -export([get_user_creds/1, get_user_creds/2, update_user_creds/3]).
--export([get_admin/1, add_roles/2]).
+-export([get_admin/1, add_roles/2, auth_design_doc/1]).
 
 % gen_server API
 -export([start_link/0, init/1, handle_call/3, handle_info/2, handle_cast/2]).
@@ -27,7 +27,7 @@
 -export([handle_db_event/3]).
 
 -include_lib("couch/include/couch_db.hrl").
--include("couch_js_functions.hrl").
+-include_lib("couch/include/couch_js_functions.hrl").
 
 -define(STATE, auth_state_ets).
 -define(BY_USER, auth_by_user_ets).
@@ -49,7 +49,7 @@
 get_user_creds(UserName) ->
     get_user_creds(nil, UserName).
 
--spec get_user_creds(Req::#httpd{}, UserName::string() | binary()) ->
+-spec get_user_creds(Req::#httpd{} | nil, UserName::string() | binary()) ->
     {ok, Credentials::list(), term()} | nil.
 
 get_user_creds(Req, UserName) when is_list(UserName) ->
diff --git a/src/couch_changes.erl b/src/couch_changes.erl
index 7547aef..b37aabf 100644
--- a/src/couch_changes.erl
+++ b/src/couch_changes.erl
@@ -170,7 +170,9 @@
 handle_db_event(_DbName, updated, Parent) ->
     Parent ! updated,
     {ok, Parent};
-
+handle_db_event(_DbName, deleted, Parent) ->
+    Parent ! deleted,
+    {ok, Parent};
 handle_db_event(_DbName, _Event, Parent) ->
     {ok, Parent}.
 
@@ -194,6 +196,8 @@
 
 configure_filter("_doc_ids", Style, Req, _Db) ->
     {doc_ids, Style, get_doc_ids(Req)};
+configure_filter("_selector", Style, Req, _Db) ->
+    {selector, Style,  get_selector(Req)};
 configure_filter("_design", Style, _Req, _Db) ->
     {design_docs, Style};
 configure_filter("_view", Style, Req, Db) ->
@@ -206,14 +210,21 @@
         [DName, VName] ->
             {ok, DDoc} = open_ddoc(Db, <<"_design/", DName/binary>>),
             check_member_exists(DDoc, [<<"views">>, VName]),
-            try
+            FilterType = try
                 true = couch_util:get_nested_json_value(
                         DDoc#doc.body,
                         [<<"options">>, <<"seq_indexed">>]
                 ),
-                {fast_view, Style, DDoc, VName}
+                fast_view
             catch _:_ ->
-                {view, Style, DDoc, VName}
+                view
+            end,
+            case Db#db.id_tree of
+                undefined ->
+                    DIR = fabric_util:doc_id_and_rev(DDoc),
+                    {fetch, FilterType, Style, DIR, VName};
+                _ ->
+                    {FilterType, Style, DDoc, VName}
             end;
         [] ->
             Msg = "`view` must be of the form `designname/viewname`",
@@ -231,7 +242,14 @@
         [DName, FName] ->
             {ok, DDoc} = open_ddoc(Db, <<"_design/", DName/binary>>),
             check_member_exists(DDoc, [<<"filters">>, FName]),
-            {custom, Style, Req, DDoc, FName};
+            case Db#db.id_tree of
+                undefined ->
+                    DIR = fabric_util:doc_id_and_rev(DDoc),
+                    {fetch, custom, Style, Req, DIR, FName};
+                _ ->
+                    {custom, Style, Req, DDoc, FName}
+            end;
+
         [] ->
             {default, Style};
         _Else ->
@@ -251,6 +269,11 @@
         false ->
             []
     end;
+filter(Db, DocInfo, {selector, Style, Selector}) ->
+    Docs = open_revs(Db, DocInfo, Style),
+    Passes = [mango_selector:match(Selector, couch_doc:to_json_obj(Doc, []))
+        || Doc <- Docs],
+    filter_revs(Passes, Docs);
 filter(_Db, DocInfo, {design_docs, Style}) ->
     case DocInfo#doc_info.id of
         <<"_design", _/binary>> ->
@@ -258,7 +281,8 @@
         _ ->
             []
     end;
-filter(Db, DocInfo, {view, Style, DDoc, VName}) ->
+filter(Db, DocInfo, {FilterType, Style, DDoc, VName})
+        when FilterType == view; FilterType == fast_view ->
     Docs = open_revs(Db, DocInfo, Style),
     {ok, Passes} = couch_query_servers:filter_view(DDoc, VName, Docs),
     filter_revs(Passes, Docs);
@@ -320,6 +344,15 @@
     throw({bad_request, no_doc_ids_provided}).
 
 
+get_selector({json_req, {Props}}) ->
+    check_selector(couch_util:get_value(<<"selector">>, Props));
+get_selector(#httpd{method='POST'}=Req) ->
+    couch_httpd:validate_ctype(Req, "application/json"),
+    get_selector({json_req,  couch_httpd:json_body_obj(Req)});
+get_selector(_) ->
+    throw({bad_request, "Selector must be specified in POST payload"}).
+
+
 check_docids(DocIds) when is_list(DocIds) ->
     lists:foreach(fun
         (DocId) when not is_binary(DocId) ->
@@ -333,15 +366,22 @@
     throw({bad_request, Msg}).
 
 
+check_selector(Selector={_}) ->
+    try
+        mango_selector:normalize(Selector)
+    catch
+        {mango_error, Mod, Reason0} ->
+            {_StatusCode, _Error, Reason} = mango_error:info(Mod, Reason0),
+            throw({bad_request, Reason})
+    end;
+check_selector(_Selector) ->
+    throw({bad_request, "Selector error: expected a JSON object"}).
+
+
 open_ddoc(#db{name=DbName, id_tree=undefined}, DDocId) ->
-    {_, Ref} = spawn_monitor(fun() ->
-        exit(fabric:open_doc(mem3:dbname(DbName), DDocId, [ejson_body]))
-    end),
-    receive
-        {'DOWN', Ref, _, _, {ok, _}=Response} ->
-            Response;
-        {'DOWN', Ref, _, _, Response} ->
-            throw(Response)
+    case ddoc_cache:open_doc(mem3:dbname(DbName), DDocId) of
+        {ok, _} = Resp -> Resp;
+        Else -> throw(Else)
     end;
 open_ddoc(Db, DDocId) ->
     case couch_db:open_doc(Db, DDocId, [ejson_body]) of
@@ -765,25 +805,19 @@
 maybe_get_changes_doc(Value, #changes_acc{include_docs=true}=Acc) ->
     #changes_acc{
         db = Db,
-        include_docs = IncDoc,
         doc_options = DocOpts,
         conflicts = Conflicts
     } = Acc,
-    case IncDoc of
-        true ->
-            Opts = case Conflicts of
-                true -> [deleted, conflicts];
-                false -> [deleted]
-            end,
-            Doc = couch_index_util:load_doc(Db, Value, Opts),
-            case Doc of
-                null ->
-                    [{doc, null}];
-                _ ->
-                    [{doc, couch_doc:to_json_obj(Doc, DocOpts)}]
-            end;
-        false ->
-            []
+    Opts = case Conflicts of
+        true -> [deleted, conflicts];
+        false -> [deleted]
+    end,
+    Doc = couch_index_util:load_doc(Db, Value, Opts),
+    case Doc of
+        null ->
+            [{doc, null}];
+        _ ->
+            [{doc, couch_doc:to_json_obj(Doc, DocOpts)}]
     end;
 maybe_get_changes_doc(_Value, _Acc) ->
     [].
diff --git a/src/couch_compress.erl b/src/couch_compress.erl
index f9b5aa6..71588b2 100644
--- a/src/couch_compress.erl
+++ b/src/couch_compress.erl
@@ -65,7 +65,9 @@
     {ok, TermBin} = snappy:decompress(Rest),
     binary_to_term(TermBin);
 decompress(<<?TERM_PREFIX, _/binary>> = Bin) ->
-    binary_to_term(Bin).
+    binary_to_term(Bin);
+decompress(_) ->
+    error(invalid_compression).
 
 
 is_compressed(<<?SNAPPY_PREFIX, _/binary>>, Method) ->
@@ -77,5 +79,7 @@
 is_compressed(<<?TERM_PREFIX, _/binary>>, Method) ->
     Method =:= none;
 is_compressed(Term, _Method) when not is_binary(Term) ->
-    false.
+    false;
+is_compressed(_, _) ->
+    error(invalid_compression).
 
diff --git a/src/couch_db.erl b/src/couch_db.erl
index 4c0cd34..8260a5c 100644
--- a/src/couch_db.erl
+++ b/src/couch_db.erl
@@ -35,6 +35,7 @@
 -export([monitored_by/1]).
 -export([normalize_dbname/1]).
 -export([validate_dbname/1]).
+-export([dbname_suffix/1]).
 
 -include_lib("couch/include/couch_db.hrl").
 
@@ -415,66 +416,84 @@
     {ok, _, Docs} = couch_btree:fold(IdBtree, FoldFun, [], KeyOpts),
     {ok, Docs}.
 
-check_is_admin(#db{} = Db) ->
+
+check_is_admin(#db{user_ctx=UserCtx}=Db) ->
     case is_admin(Db) of
-        true ->
-            ok;
+        true -> ok;
         false ->
-            throw({unauthorized, <<"You are not a db or server admin.">>})
+            Reason = <<"You are not a db or server admin.">>,
+            throw_security_error(UserCtx, Reason)
     end.
 
-is_admin(Db) ->
+check_is_member(#db{user_ctx=UserCtx}=Db) ->
+    case is_member(Db) of
+        true -> ok;
+        false -> throw_security_error(UserCtx)
+    end.
+
+is_admin(#db{user_ctx=UserCtx}=Db) ->
     case couch_db_plugin:check_is_admin(Db) of
-        true ->
-            true;
+        true -> true;
         false ->
-            is_admin_int(Db)
+            {Admins} = get_admins(Db),
+            is_authorized(UserCtx, Admins)
     end.
 
-is_admin_int(#db{user_ctx = #user_ctx{name = Name, roles = Roles}} = Db) ->
-    {Admins} = get_admins(Db),
-    AdminRoles = [<<"_admin">> | couch_util:get_value(<<"roles">>, Admins, [])],
-    AdminNames = couch_util:get_value(<<"names">>, Admins,[]),
-    case AdminRoles -- Roles of
-    AdminRoles -> % same list, not an admin role
-        case AdminNames -- [Name] of
-        AdminNames -> % same names, not an admin
-            false;
-        _ ->
-            true
-        end;
-    _ ->
-        true
-    end.
-
-check_is_member(#db{user_ctx=#user_ctx{name=Name,roles=Roles}=UserCtx}=Db) ->
-    case (catch check_is_admin(Db)) of
-    ok -> ok;
-    _ ->
-        {Members} = get_members(Db),
-        ReaderRoles = couch_util:get_value(<<"roles">>, Members,[]),
-        WithAdminRoles = [<<"_admin">> | ReaderRoles],
-        ReaderNames = couch_util:get_value(<<"names">>, Members,[]),
-        case ReaderRoles ++ ReaderNames of
-        [] -> ok; % no readers == public access
-        _Else ->
-            case WithAdminRoles -- Roles of
-            WithAdminRoles -> % same list, not an reader role
-                case ReaderNames -- [Name] of
-                ReaderNames -> % same names, not a reader
-                    couch_log:debug("Not a reader: UserCtx ~p"
-                                    " vs Names ~p Roles ~p",
-                                    [UserCtx, ReaderNames, WithAdminRoles]),
-                    throw({unauthorized, <<"You are not authorized to access this db.">>});
-                _ ->
-                    ok
-                end;
-            _ ->
-                ok
+is_member(#db{user_ctx=UserCtx}=Db) ->
+    case is_admin(Db) of
+        true -> true;
+        false ->
+            case is_public_db(Db) of
+                true -> true;
+                false ->
+                    {Members} = get_members(Db),
+                    is_authorized(UserCtx, Members)
             end
-        end
     end.
 
+is_public_db(#db{}=Db) ->
+    {Members} = get_members(Db),
+    Names = couch_util:get_value(<<"names">>, Members, []),
+    Roles = couch_util:get_value(<<"roles">>, Members, []),
+    Names =:= [] andalso Roles =:= [].
+
+is_authorized(#user_ctx{name=UserName,roles=UserRoles}, Security) ->
+    Names = couch_util:get_value(<<"names">>, Security, []),
+    Roles = couch_util:get_value(<<"roles">>, Security, []),
+    case check_security(roles, UserRoles, [<<"_admin">> | Roles]) of
+        true -> true;
+        false -> check_security(names, UserName, Names)
+    end.
+
+check_security(roles, [], _) ->
+    false;
+check_security(roles, UserRoles, Roles) ->
+    UserRolesSet = ordsets:from_list(UserRoles),
+    RolesSet = ordsets:from_list(Roles),
+    not ordsets:is_disjoint(UserRolesSet, RolesSet);
+check_security(names, _, []) ->
+    false;
+check_security(names, null, _) ->
+    false;
+check_security(names, UserName, Names) ->
+    lists:member(UserName, Names).
+
+throw_security_error(#user_ctx{name=null}=UserCtx) ->
+    Reason = <<"You are not authorized to access this db.">>,
+    throw_security_error(UserCtx, Reason);
+throw_security_error(#user_ctx{name=_}=UserCtx) ->
+    Reason = <<"You are not allowed to access this db.">>,
+    throw_security_error(UserCtx, Reason).
+throw_security_error(#user_ctx{}=UserCtx, Reason) ->
+    Error = security_error_type(UserCtx),
+    throw({Error, Reason}).
+
+security_error_type(#user_ctx{name=null}) ->
+    unauthorized;
+security_error_type(#user_ctx{name=_}) ->
+    forbidden.
+
+
 get_admins(#db{security=SecProps}) ->
     couch_util:get_value(<<"admins">>, SecProps, {[]}).
 
@@ -855,7 +874,7 @@
             ?l2b(integer_to_list(couch_util:rand32()));
         Atts2 ->
             OldRev = case OldRevs of [] -> 0; [OldRev0|_] -> OldRev0 end,
-            couch_crypto:hash(md5, term_to_binary([Deleted, OldStart, OldRev, Body, Atts2]))
+            couch_crypto:hash(md5, term_to_binary([Deleted, OldStart, OldRev, Body, Atts2], [{minor_version, 1}]))
     end.
 
 new_revs([], OutBuckets, IdRevsAcc) ->
@@ -1495,10 +1514,23 @@
 select_lt(V1, V2) when V1 > V2 -> V2;
 select_lt(V1, _V2) -> V1.
 
-normalize_dbname(<<"shards/", _/binary>> = Path) ->
-    lists:last(binary:split(mem3:dbname(Path), <<"/">>, [global]));
-normalize_dbname(DbName) ->
-    DbName.
+-spec normalize_dbname(list() | binary()) -> binary().
+
+normalize_dbname(DbName) when is_list(DbName) ->
+    normalize_dbname(list_to_binary(DbName));
+normalize_dbname(DbName) when is_binary(DbName) ->
+    case filename:extension(DbName) of
+        <<".couch">> ->
+            mem3:dbname(filename:rootname(DbName));
+        _ ->
+            mem3:dbname(DbName)
+    end.
+
+-spec dbname_suffix(list() | binary()) -> binary().
+
+dbname_suffix(DbName) ->
+    filename:basename(normalize_dbname(DbName)).
+
 
 validate_dbname(DbName) when is_list(DbName) ->
     validate_dbname(?l2b(DbName));
@@ -1524,7 +1556,103 @@
 
 is_systemdb(DbName) when is_list(DbName) ->
     is_systemdb(?l2b(DbName));
-is_systemdb(<<"shards/", _/binary>> = Path) when is_binary(Path) ->
-    is_systemdb(normalize_dbname(Path));
 is_systemdb(DbName) when is_binary(DbName) ->
-    lists:member(DbName, ?SYSTEM_DATABASES).
+    lists:member(dbname_suffix(DbName), ?SYSTEM_DATABASES).
+
+-ifdef(TEST).
+-include_lib("eunit/include/eunit.hrl").
+
+setup() ->
+    ok = meck:new(couch_db_plugin, [passthrough]),
+    ok = meck:expect(couch_db_plugin, validate_dbname, fun(_, _) -> false end),
+    ok.
+
+teardown(_) ->
+    (catch meck:unload(couch_db_plugin)).
+
+validate_dbname_success_test_() ->
+    Cases =
+        generate_cases_with_shards("long/co$mplex-/path+/_something")
+        ++ generate_cases_with_shards("something")
+        ++ lists:append(
+            [generate_cases_with_shards(?b2l(SystemDb))
+                || SystemDb <- ?SYSTEM_DATABASES]),
+    {
+        foreach, fun setup/0, fun teardown/1,
+        [{test_name(A), fun() -> should_pass_validate_dbname(A) end} || {_, A} <- Cases]
+    }.
+
+validate_dbname_fail_test_() ->
+    Cases = generate_cases("_long/co$mplex-/path+/_something")
+       ++ generate_cases("_something")
+       ++ generate_cases_with_shards("long/co$mplex-/path+/_something#")
+       ++ generate_cases_with_shards("long/co$mplex-/path+/some.thing"),
+    {
+        foreach, fun setup/0, fun teardown/1,
+        [{test_name(A), fun() -> should_fail_validate_dbname(A) end} || {_, A} <- Cases]
+    }.
+
+normalize_dbname_test_() ->
+    Cases = generate_cases_with_shards("long/co$mplex-/path+/_something")
+       ++ generate_cases_with_shards("_something"),
+    WithExpected = [{?l2b(filename:rootname(A)), B} || {A, B} <- Cases],
+    [{test_name({Expected, Db}), ?_assertEqual(Expected, normalize_dbname(Db))}
+        || {Expected, Db} <- WithExpected].
+
+dbname_suffix_test_() ->
+    Cases = generate_cases_with_shards("long/co$mplex-/path+/_something")
+       ++ generate_cases_with_shards("_something"),
+    WithExpected = [{?l2b(filename:basename(Arg)), Db} || {Arg, Db} <- Cases],
+    [{test_name({Expected, Db}), ?_assertEqual(Expected, dbname_suffix(Db))}
+        || {Expected, Db} <- WithExpected].
+
+is_systemdb_test_() ->
+    Cases = lists:append([
+        generate_cases_with_shards("long/co$mplex-/path+/" ++ ?b2l(Db))
+            || Db <- ?SYSTEM_DATABASES]
+        ++ [generate_cases_with_shards(?b2l(Db)) || Db <- ?SYSTEM_DATABASES
+    ]),
+    WithExpected = [{?l2b(filename:basename(filename:rootname(Arg))), Db}
+        || {Arg, Db} <- Cases],
+    [{test_name({Expected, Db}) ++ " in ?SYSTEM_DATABASES",
+        ?_assert(is_systemdb(Db))} || {Expected, Db} <- WithExpected].
+
+should_pass_validate_dbname(DbName) ->
+    {test_name(DbName), ?_assertEqual(ok, validate_dbname(DbName))}.
+
+should_fail_validate_dbname(DbName) ->
+    {test_name(DbName), ?_test(begin
+        Result = validate_dbname(DbName),
+        ?assertMatch({error, {illegal_database_name, _}}, Result),
+        {error, {illegal_database_name, FailedDbName}} = Result,
+        ?assertEqual(to_binary(DbName), FailedDbName),
+        ok
+    end)}.
+
+to_binary(DbName) when is_list(DbName) ->
+    ?l2b(DbName);
+to_binary(DbName) when is_binary(DbName) ->
+    DbName.
+
+test_name({Expected, DbName}) ->
+    lists:flatten(io_lib:format("~p -> ~p", [DbName, Expected]));
+test_name(DbName) ->
+    lists:flatten(io_lib:format("~p", [DbName])).
+
+generate_cases_with_shards(DbName) ->
+    DbNameWithShard = add_shard(DbName),
+    DbNameWithShardAndExtension = add_shard(DbName) ++ ".couch",
+    Cases = [
+        DbName, ?l2b(DbName),
+        DbNameWithShard, ?l2b(DbNameWithShard),
+        DbNameWithShardAndExtension, ?l2b(DbNameWithShardAndExtension)
+    ],
+    [{DbName, Case} || Case <- Cases].
+
+add_shard(DbName) ->
+    "shards/00000000-3fffffff/" ++ DbName ++ ".1415960794".
+
+generate_cases(DbName) ->
+    [{DbName, DbName}, {DbName, ?l2b(DbName)}].
+
+-endif.
diff --git a/src/couch_db_header.erl b/src/couch_db_header.erl
index 2f329c1..355364f 100644
--- a/src/couch_db_header.erl
+++ b/src/couch_db_header.erl
@@ -310,8 +310,6 @@
     }.
 
 
--ifdef(run_broken_tests).
-
 upgrade_v3_test() ->
     Vsn3Header = mk_header(3),
     NewHeader = upgrade_tuple(Vsn3Header),
@@ -330,17 +328,9 @@
     ?assertEqual(undefined, uuid(NewHeader)),
     ?assertEqual(undefined, epochs(NewHeader)),
 
-    % Security ptr isn't changed until upgrade_disk_version/1
-    NewNewHeader = upgrade_disk_version(NewHeader),
-    ?assert(is_record(NewNewHeader, db_header)),
-    ?assertEqual(nil, security_ptr(NewNewHeader)),
+    ?assertThrow({database_disk_version_error, _},
+                 upgrade_disk_version(NewHeader)).
 
-    % Assert upgrade works on really old headers
-    NewestHeader = upgrade(Vsn3Header),
-    ?assertMatch(<<_:32/binary>>, uuid(NewestHeader)),
-    ?assertEqual([{node(), 0}], epochs(NewestHeader)).
-
--endif.
 
 upgrade_v5_test() ->
     Vsn5Header = mk_header(5),
diff --git a/src/couch_db_updater.erl b/src/couch_db_updater.erl
index 2404984..7872635 100644
--- a/src/couch_db_updater.erl
+++ b/src/couch_db_updater.erl
@@ -294,6 +294,10 @@
             couch_event:notify(Db2#db.name, updated);
         true -> ok
         end,
+        if NonRepDocs2 /= [] ->
+            couch_event:notify(Db2#db.name, local_updated);
+        true -> ok
+        end,
         [catch(ClientPid ! {done, self()}) || ClientPid <- Clients],
         Db3 = case length(UpdatedDDocIds) > 0 of
             true ->
@@ -566,7 +570,7 @@
         [{compression, Compression}]),
     case couch_db_header:security_ptr(Header) of
     nil ->
-        Security = [],
+        Security = default_security_object(DbName),
         SecurityPtr = nil;
     SecurityPtr ->
         {ok, Security} = couch_file:pread_term(Fd, SecurityPtr)
@@ -1433,3 +1437,20 @@
     end,
     SummaryBin = ?term_to_bin({Body, Atts}),
     couch_file:assemble_file_chunk(SummaryBin, couch_crypto:hash(md5, SummaryBin)).
+
+default_security_object(<<"shards/", _/binary>>) ->
+    case config:get("couchdb", "default_security", "everyone") of
+        "admin_only" ->
+            [{<<"members">>,{[{<<"roles">>,[<<"_admin">>]}]}},
+             {<<"admins">>,{[{<<"roles">>,[<<"_admin">>]}]}}];
+        Everyone when Everyone == "everyone"; Everyone == "admin_local" ->
+            []
+    end;
+default_security_object(_DbName) ->
+    case config:get("couchdb", "default_security", "everyone") of
+        Admin when Admin == "admin_only"; Admin == "admin_local" ->
+            [{<<"members">>,{[{<<"roles">>,[<<"_admin">>]}]}},
+             {<<"admins">>,{[{<<"roles">>,[<<"_admin">>]}]}}];
+        "everyone" ->
+            []
+    end.
diff --git a/src/couch_debug.erl b/src/couch_debug.erl
new file mode 100644
index 0000000..31b4c5c
--- /dev/null
+++ b/src/couch_debug.erl
@@ -0,0 +1,33 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_debug).
+
+-export([opened_files/0]).
+
+-spec opened_files() ->
+    [{port(), CouchFilePid :: pid(), Fd :: pid() | tuple(), FilePath :: string()}].
+
+opened_files() ->
+    Info = [couch_file_port_info(Port)
+        || Port <- erlang:ports(),
+            {name, "efile"} =:= erlang:port_info(Port, name)],
+    [I || I <- Info, is_tuple(I)].
+
+couch_file_port_info(Port) ->
+    {connected, Pid} = erlang:port_info(Port, connected),
+    case couch_file:process_info(Pid) of
+        {Fd, FilePath} ->
+            {Port, Pid, Fd, FilePath};
+        undefined ->
+            undefined
+    end.
diff --git a/src/couch_doc.erl b/src/couch_doc.erl
index f973923..11063d9 100644
--- a/src/couch_doc.erl
+++ b/src/couch_doc.erl
@@ -403,13 +403,13 @@
         restart_open_doc_revs(Parser, Ref, NewRef);
     {{doc_bytes, Ref, DocBytes}, Parser, ParserRef} ->
         Doc = from_json_obj(?JSON_DECODE(DocBytes)),
+        erlang:put(mochiweb_request_recv, true),
         % we'll send the Parser process ID to the remote nodes so they can
         % retrieve their own copies of the attachment data
         WithParser = fun(follows) -> {follows, Parser, Ref}; (D) -> D end,
         Atts = [couch_att:transform(data, WithParser, A) || A <- Doc#doc.atts],
         WaitFun = fun() ->
-            receive {'DOWN', ParserRef, _, _, _} -> ok end,
-            erlang:put(mochiweb_request_recv, true)
+            receive {'DOWN', ParserRef, _, _, _} -> ok end
         end,
         {ok, Doc#doc{atts=Atts}, WaitFun, Parser};
     ok -> ok
diff --git a/src/couch_event_sup.erl b/src/couch_event_sup.erl
index 39e728c..b617498 100644
--- a/src/couch_event_sup.erl
+++ b/src/couch_event_sup.erl
@@ -62,7 +62,7 @@
     ok.
 
 handle_call(_Whatever, _From, State) ->
-    {ok, State}.
+    {reply, ok, State}.
 
 handle_cast(stop, State) ->
     {stop, normal, State}.
diff --git a/src/couch_file.erl b/src/couch_file.erl
index e6cd01a..8346b02 100644
--- a/src/couch_file.erl
+++ b/src/couch_file.erl
@@ -12,7 +12,7 @@
 
 -module(couch_file).
 -behaviour(gen_server).
--vsn(1).
+-vsn(2).
 
 -include_lib("couch/include/couch_db.hrl").
 
@@ -20,13 +20,16 @@
 -define(INITIAL_WAIT, 60000).
 -define(MONITOR_CHECK, 10000).
 -define(SIZE_BLOCK, 16#1000). % 4 KiB
+-define(READ_AHEAD, 2 * ?SIZE_BLOCK).
+-define(IS_OLD_STATE(S), tuple_size(S) /= tuple_size(#file{})).
 
 
 -record(file, {
     fd,
     is_sys,
     eof = 0,
-    db_pid
+    db_pid,
+    pread_limit = 0
 }).
 
 % public API
@@ -42,6 +45,8 @@
 -export([init/1, terminate/2, code_change/3]).
 -export([handle_call/3, handle_cast/2, handle_info/2]).
 
+%% helper functions
+-export([process_info/1]).
 
 %%----------------------------------------------------------------------
 %% Args:   Valid Options are [create] and [create,overwrite].
@@ -217,25 +222,65 @@
 
 
 delete(RootDir, Filepath) ->
-    delete(RootDir, Filepath, true).
+    delete(RootDir, Filepath, []).
 
+delete(RootDir, FullFilePath, Options) ->
+    EnableRecovery = config:get_boolean("couchdb",
+        "enable_database_recovery", false),
+    Async = not lists:member(sync, Options),
+    Context = couch_util:get_value(context, Options, compaction),
+    case Context =:= delete andalso EnableRecovery of
+        true ->
+            rename_file(FullFilePath);
+        false ->
+            DeleteAfterRename = config:get_boolean("couchdb",
+                "delete_after_rename", true),
+            delete_file(RootDir, FullFilePath, Async, DeleteAfterRename)
+    end.
 
-delete(RootDir, Filepath, Async) ->
+delete_file(RootDir, Filepath, Async, DeleteAfterRename) ->
     DelFile = filename:join([RootDir,".delete", ?b2l(couch_uuids:random())]),
     case file:rename(Filepath, DelFile) of
-    ok ->
+    ok when DeleteAfterRename ->
         if (Async) ->
             spawn(file, delete, [DelFile]),
             ok;
         true ->
             file:delete(DelFile)
         end;
-    Error ->
-        Error
+    Else ->
+        Else
     end.
 
+rename_file(Original) ->
+    DeletedFileName = deleted_filename(Original),
+    Now = calendar:local_time(),
+    case file:rename(Original, DeletedFileName) of
+        ok -> file:change_time(DeletedFileName, Now);
+        Else -> Else
+    end.
+
+deleted_filename(Original) ->
+    {{Y, Mon, D}, {H, Min, S}} = calendar:universal_time(),
+    Suffix = lists:flatten(
+        io_lib:format(".~w~2.10.0B~2.10.0B."
+            ++ "~2.10.0B~2.10.0B~2.10.0B.deleted"
+            ++ filename:extension(Original), [Y, Mon, D, H, Min, S])),
+    filename:rootname(Original) ++ Suffix.
 
 nuke_dir(RootDelDir, Dir) ->
+    EnableRecovery = config:get_boolean("couchdb",
+        "enable_database_recovery", false),
+    case EnableRecovery of
+        true ->
+            rename_file(Dir);
+        false ->
+            delete_dir(RootDelDir, Dir)
+    end.
+
+delete_dir(RootDelDir, Dir) ->
+    DeleteAfterRename = config:get_boolean("couchdb",
+        "delete_after_rename", true),
     FoldFun = fun(File) ->
         Path = Dir ++ "/" ++ File,
         case filelib:is_dir(Path) of
@@ -243,7 +288,7 @@
                 ok = nuke_dir(RootDelDir, Path),
                 file:del_dir(Path);
             false ->
-                delete(RootDelDir, Path, false)
+                delete_file(RootDelDir, Path, false, DeleteAfterRename)
         end
     end,
     case file:list_dir(Dir) of
@@ -294,11 +339,15 @@
 init({Filepath, Options, ReturnPid, Ref}) ->
     process_flag(trap_exit, true),
     OpenOptions = file_open_options(Options),
+    Limit = get_pread_limit(),
+    IsSys = lists:member(sys_db, Options),
     case lists:member(create, Options) of
     true ->
         filelib:ensure_dir(Filepath),
         case file:open(Filepath, OpenOptions) of
         {ok, Fd} ->
+            %% Save Fd in process dictionary for debugging purposes
+            put(couch_file_fd, {Fd, Filepath}),
             {ok, Length} = file:position(Fd, eof),
             case Length > 0 of
             true ->
@@ -312,7 +361,7 @@
                     ok = file:sync(Fd),
                     maybe_track_open_os_files(Options),
                     erlang:send_after(?INITIAL_WAIT, self(), maybe_close),
-                    {ok, #file{fd=Fd, is_sys=lists:member(sys_db, Options)}};
+                    {ok, #file{fd=Fd, is_sys=IsSys, pread_limit=Limit}};
                 false ->
                     ok = file:close(Fd),
                     init_status_error(ReturnPid, Ref, {error, eexist})
@@ -320,7 +369,7 @@
             false ->
                 maybe_track_open_os_files(Options),
                 erlang:send_after(?INITIAL_WAIT, self(), maybe_close),
-                {ok, #file{fd=Fd, is_sys=lists:member(sys_db, Options)}}
+                {ok, #file{fd=Fd, is_sys=IsSys, pread_limit=Limit}}
             end;
         Error ->
             init_status_error(ReturnPid, Ref, Error)
@@ -330,11 +379,13 @@
         case file:open(Filepath, [read, raw]) of
         {ok, Fd_Read} ->
             {ok, Fd} = file:open(Filepath, OpenOptions),
+            %% Save Fd in process dictionary for debugging purposes
+            put(couch_file_fd, {Fd, Filepath}),
             ok = file:close(Fd_Read),
             maybe_track_open_os_files(Options),
             {ok, Eof} = file:position(Fd, eof),
             erlang:send_after(?INITIAL_WAIT, self(), maybe_close),
-            {ok, #file{fd=Fd, eof=Eof, is_sys=lists:member(sys_db, Options)}};
+            {ok, #file{fd=Fd, eof=Eof, is_sys=IsSys, pread_limit=Limit}};
         Error ->
             init_status_error(ReturnPid, Ref, Error)
         end
@@ -361,14 +412,21 @@
 terminate(_Reason, #file{fd = Fd}) ->
     ok = file:close(Fd).
 
+handle_call(Msg, From, File) when ?IS_OLD_STATE(File) ->
+    handle_call(Msg, From, upgrade_state(File));
+
 handle_call(close, _From, #file{fd=Fd}=File) ->
     {stop, normal, file:close(Fd), File#file{fd = nil}};
 
 handle_call({pread_iolist, Pos}, _From, File) ->
     {RawData, NextPos} = try
         % up to 8Kbs of read ahead
-        read_raw_iolist_int(File, Pos, 2 * ?SIZE_BLOCK - (Pos rem ?SIZE_BLOCK))
+        read_raw_iolist_int(File, Pos, ?READ_AHEAD - (Pos rem ?SIZE_BLOCK))
     catch
+    throw:read_beyond_eof ->
+        throw(read_beyond_eof);
+    throw:{exceed_pread_limit, Limit} ->
+        throw({exceed_pread_limit, Limit});
     _:_ ->
         read_raw_iolist_int(File, Pos, 4)
     end,
@@ -442,6 +500,9 @@
 code_change(_OldVsn, State, _Extra) ->
     {ok, State}.
 
+handle_info(Msg, File) when ?IS_OLD_STATE(File) ->
+    handle_info(Msg, upgrade_state(File));
+
 handle_info(maybe_close, File) ->
     case is_idle(File) of
         true ->
@@ -504,11 +565,20 @@
     {Data::iolist(), CurPos::non_neg_integer()}.
 read_raw_iolist_int(Fd, {Pos, _Size}, Len) -> % 0110 UPGRADE CODE
     read_raw_iolist_int(Fd, Pos, Len);
-read_raw_iolist_int(#file{fd = Fd}, Pos, Len) ->
+read_raw_iolist_int(#file{fd = Fd, pread_limit = Limit} = F, Pos, Len) ->
     BlockOffset = Pos rem ?SIZE_BLOCK,
     TotalBytes = calculate_total_read_len(BlockOffset, Len),
-    {ok, <<RawBin:TotalBytes/binary>>} = file:pread(Fd, Pos, TotalBytes),
-    {remove_block_prefixes(BlockOffset, RawBin), Pos + TotalBytes}.
+    case Pos + TotalBytes of
+    Size when Size > F#file.eof + ?READ_AHEAD ->
+        couch_stats:increment_counter([pread, exceed_eof]),
+        throw(read_beyond_eof);
+    Size when Size > Limit ->
+        couch_stats:increment_counter([pread, exceed_limit]),
+        throw({exceed_pread_limit, Limit});
+    Size ->
+        {ok, <<RawBin:TotalBytes/binary>>} = file:pread(Fd, Pos, TotalBytes),
+        {remove_block_prefixes(BlockOffset, RawBin), Size}
+    end.
 
 -spec extract_md5(iolist()) -> {binary(), iolist()}.
 extract_md5(FullIoList) ->
@@ -592,3 +662,66 @@
         {monitored_by, [_]} -> exit(tracker_monitoring_failed);
         _ -> false
     end.
+
+-spec process_info(CouchFilePid :: pid()) ->
+    {Fd :: pid() | tuple(), FilePath :: string()} | undefined.
+
+process_info(Pid) ->
+    {dictionary, Dict} = erlang:process_info(Pid, dictionary),
+    case lists:keyfind(couch_file_fd, 1, Dict) of
+        false ->
+            undefined;
+        {couch_file_fd, {Fd, InitialName}} ->
+            {Fd, InitialName}
+    end.
+
+upgrade_state({file, Fd, IsSys, Eof, DbPid}) ->
+    Limit = get_pread_limit(),
+    #file{fd=Fd, is_sys=IsSys, eof=Eof, db_pid=DbPid, pread_limit=Limit};
+upgrade_state(State) ->
+    State.
+
+get_pread_limit() ->
+    case config:get_integer("couchdb", "max_pread_size", 0) of
+        N when N > 0 -> N;
+        _ -> infinity
+    end.
+
+-ifdef(TEST).
+-include_lib("couch/include/couch_eunit.hrl").
+
+deleted_filename_test_() ->
+    DbNames = ["dbname", "db.name", "user/dbname"],
+    Fixtures = make_filename_fixtures(DbNames),
+    lists:map(fun(Fixture) ->
+        should_create_proper_deleted_filename(Fixture)
+    end, Fixtures).
+
+should_create_proper_deleted_filename(Before) ->
+    {Before,
+    ?_test(begin
+        BeforeExtension = filename:extension(Before),
+        BeforeBasename = filename:basename(Before, BeforeExtension),
+        Re = "^" ++ BeforeBasename ++ "\\.[0-9]{8}\\.[0-9]{6}\\.deleted\\..*$",
+        After = deleted_filename(Before),
+        ?assertEqual(match,
+            re:run(filename:basename(After), Re, [{capture, none}])),
+        ?assertEqual(BeforeExtension, filename:extension(After))
+    end)}.
+
+make_filename_fixtures(DbNames) ->
+    Formats = [
+        "~s.couch",
+        ".~s_design/mrview/3133e28517e89a3e11435dd5ac4ad85a.view",
+        "shards/00000000-1fffffff/~s.1458336317.couch",
+        ".shards/00000000-1fffffff/~s.1458336317_design",
+        ".shards/00000000-1fffffff/~s.1458336317_design"
+            "/mrview/3133e28517e89a3e11435dd5ac4ad85a.view"
+    ],
+    lists:flatmap(fun(DbName) ->
+        lists:map(fun(Format) ->
+            filename:join("/srv/data", io_lib:format(Format, [DbName]))
+        end, Formats)
+    end, DbNames).
+
+-endif.
diff --git a/src/couch_httpd.erl b/src/couch_httpd.erl
index daa43d5..2dd6ed2 100644
--- a/src/couch_httpd.erl
+++ b/src/couch_httpd.erl
@@ -21,12 +21,13 @@
 -export([make_fun_spec_strs/1]).
 -export([make_arity_1_fun/1, make_arity_2_fun/1, make_arity_3_fun/1]).
 -export([parse_form/1,json_body/1,json_body_obj/1,body/1]).
--export([doc_etag/1, make_etag/1, etag_match/2, etag_respond/3, etag_maybe/2]).
+-export([doc_etag/1, doc_etag/3, make_etag/1, etag_match/2, etag_respond/3, etag_maybe/2]).
 -export([primary_header_value/2,partition/1,serve_file/3,serve_file/4, server_header/0]).
 -export([start_chunked_response/3,send_chunk/2,log_request/2]).
 -export([start_response_length/4, start_response/3, send/2]).
 -export([start_json_response/2, start_json_response/3, end_json_response/1]).
--export([send_response/4,send_method_not_allowed/2,send_error/2,send_error/4, send_redirect/2,send_chunked_error/2]).
+-export([send_response/4,send_response_no_cors/4,send_method_not_allowed/2,
+    send_error/2,send_error/4, send_redirect/2,send_chunked_error/2]).
 -export([send_json/2,send_json/3,send_json/4,last_chunk/1,parse_multipart_request/3]).
 -export([accepted_encodings/1,handle_request_int/5,validate_referer/1,validate_ctype/2]).
 -export([http_1_0_keep_alive/2]).
@@ -307,8 +308,8 @@
     try
         validate_host(HttpReq),
         check_request_uri_length(RawUri),
-        case couch_httpd_cors:is_preflight_request(HttpReq) of
-        #httpd{} ->
+        case chttpd_cors:maybe_handle_preflight_request(HttpReq) of
+        not_preflight ->
             case authenticate_request(HttpReq) of
             #httpd{} = Req ->
                 HandlerFun(Req);
@@ -475,14 +476,14 @@
 serve_file(Req, RelativePath, DocumentRoot) ->
     serve_file(Req, RelativePath, DocumentRoot, []).
 
-serve_file(#httpd{mochi_req=MochiReq}=Req, RelativePath, DocumentRoot,
-           ExtraHeaders) ->
-    log_request(Req, 200),
-    ResponseHeaders = server_header()
-        ++ couch_httpd_auth:cookie_auth_header(Req, [])
-        ++ ExtraHeaders,
-    ResponseHeaders1 = couch_httpd_cors:cors_headers(Req, ResponseHeaders),
-    {ok, MochiReq:serve_file(RelativePath, DocumentRoot, ResponseHeaders1)}.
+serve_file(Req0, RelativePath0, DocumentRoot0, ExtraHeaders) ->
+    Headers0 = basic_headers(Req0, ExtraHeaders),
+    {ok, {Req1, Code1, Headers1, RelativePath1, DocumentRoot1}} =
+        chttpd_plugin:before_serve_file(
+            Req0, 200, Headers0, RelativePath0, DocumentRoot0),
+    log_request(Req1, Code1),
+    #httpd{mochi_req = MochiReq} = Req1,
+    {ok, MochiReq:serve_file(RelativePath1, DocumentRoot1, Headers1)}.
 
 qs_value(Req, Key) ->
     qs_value(Req, Key, undefined).
@@ -593,8 +594,17 @@
         throw({bad_ctype, [Else, " is not a supported content encoding."]})
     end.
 
-doc_etag(#doc{revs={Start, [DiskRev|_]}}) ->
-    "\"" ++ ?b2l(couch_doc:rev_to_str({Start, DiskRev})) ++ "\"".
+doc_etag(#doc{id=Id, body=Body, revs={Start, [DiskRev|_]}}) ->
+    doc_etag(Id, Body, {Start, DiskRev}).
+
+doc_etag(<<"_local/", _/binary>>, Body, {Start, DiskRev}) ->
+    make_etag({Start, DiskRev, Body});
+doc_etag(_Id, _Body, {Start, DiskRev}) ->
+    rev_etag({Start, DiskRev}).
+
+rev_etag({Start, DiskRev}) ->
+    Rev = couch_doc:rev_to_str({Start, DiskRev}),
+    <<$", Rev/binary, $">>.
 
 make_etag(Term) ->
     <<SigInt:128/integer>> = couch_crypto:hash(md5, term_to_binary(Term)),
@@ -648,26 +658,31 @@
             gen_event:notify(couch_plugin, {log_request, Req, Code})
     end.
 
-start_response_length(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Length) ->
-    log_request(Req, Code),
-    couch_stats:increment_counter([couchdb, httpd_status_codes, Code]),
-    Headers1 = Headers ++ server_header() ++
-               couch_httpd_auth:cookie_auth_header(Req, Headers),
-    Headers2 = couch_httpd_cors:cors_headers(Req, Headers1),
-    Resp = MochiReq:start_response_length({Code, Headers2, Length}),
+log_response(Code, _) when Code < 400 ->
+    ok;
+log_response(Code, Body) ->
+    case {erlang:get(dont_log_response), Body} of
+        {true, _} ->
+            ok;
+        {_, {json, JsonObj}} ->
+            ErrorMsg = couch_util:json_encode(JsonObj),
+            couch_log:error("httpd ~p error response:~n ~s", [Code, ErrorMsg]);
+        _ ->
+            couch_log:error("httpd ~p error response:~n ~s", [Code, Body])
+    end.
+
+start_response_length(#httpd{mochi_req=MochiReq}=Req, Code, Headers0, Length) ->
+    Headers1 = basic_headers(Req, Headers0),
+    Resp = handle_response(Req, Code, Headers1, Length, start_response_length),
     case MochiReq:get(method) of
     'HEAD' -> throw({http_head_abort, Resp});
     _ -> ok
     end,
     {ok, Resp}.
 
-start_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers) ->
-    log_request(Req, Code),
-    couch_stats:increment_counter([couchdb, httpd_status_codes, Code]),
-    CookieHeader = couch_httpd_auth:cookie_auth_header(Req, Headers),
-    Headers1 = Headers ++ server_header() ++ CookieHeader,
-    Headers2 = couch_httpd_cors:cors_headers(Req, Headers1),
-    Resp = MochiReq:start_response({Code, Headers2}),
+start_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers0) ->
+    Headers1 = basic_headers(Req, Headers0),
+    Resp = handle_response(Req, Code, Headers1, undefined, start_response),
     case MochiReq:get(method) of
         'HEAD' -> throw({http_head_abort, Resp});
         _ -> ok
@@ -680,12 +695,16 @@
 
 no_resp_conn_header([]) ->
     true;
-no_resp_conn_header([{Hdr, _}|Rest]) ->
+no_resp_conn_header([{Hdr, V}|Rest]) when is_binary(Hdr) ->
+    no_resp_conn_header([{?b2l(Hdr), V}|Rest]);
+no_resp_conn_header([{Hdr, _}|Rest]) when is_list(Hdr) ->
     case string:to_lower(Hdr) of
         "connection" -> false;
         _ -> no_resp_conn_header(Rest)
     end.
 
+http_1_0_keep_alive(#httpd{mochi_req = MochiReq}, Headers) ->
+    http_1_0_keep_alive(MochiReq, Headers);
 http_1_0_keep_alive(Req, Headers) ->
     KeepOpen = Req:should_close() == false,
     IsHttp10 = Req:get(version) == {1, 0},
@@ -695,14 +714,9 @@
         false -> Headers
     end.
 
-start_chunked_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers) ->
-    log_request(Req, Code),
-    couch_stats:increment_counter([couchdb, httpd_status_codes, Code]),
-    Headers1 = http_1_0_keep_alive(MochiReq, Headers),
-    Headers2 = Headers1 ++ server_header() ++
-               couch_httpd_auth:cookie_auth_header(Req, Headers1),
-    Headers3 = couch_httpd_cors:cors_headers(Req, Headers2),
-    Resp = MochiReq:respond({Code, Headers3, chunked}),
+start_chunked_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers0) ->
+    Headers1 = add_headers(Req, Headers0),
+    Resp = handle_response(Req, Code, Headers1, chunked, respond),
     case MochiReq:get(method) of
     'HEAD' -> throw({http_head_abort, Resp});
     _ -> ok
@@ -720,21 +734,16 @@
     Resp:write_chunk([]),
     {ok, Resp}.
 
-send_response(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Body) ->
-    log_request(Req, Code),
-    couch_stats:increment_counter([couchdb, httpd_status_codes, Code]),
-    Headers1 = http_1_0_keep_alive(MochiReq, Headers),
-    if Code >= 500 ->
-        couch_log:error("httpd ~p error response:~n ~s", [Code, Body]);
-    Code >= 400 ->
-        couch_log:debug("httpd ~p error response:~n ~s", [Code, Body]);
-    true -> ok
-    end,
-    Headers2 = Headers1 ++ server_header() ++
-               couch_httpd_auth:cookie_auth_header(Req, Headers1),
-    Headers3 = couch_httpd_cors:cors_headers(Req, Headers2),
+send_response(Req, Code, Headers0, Body) ->
+    Headers1 = chttpd_cors:headers(Req, Headers0),
+    send_response_no_cors(Req, Code, Headers1, Body).
 
-    {ok, MochiReq:respond({Code, Headers3, Body})}.
+send_response_no_cors(#httpd{mochi_req=MochiReq}=Req, Code, Headers, Body) ->
+    Headers1 = http_1_0_keep_alive(MochiReq, Headers),
+    Headers2 = basic_headers_no_cors(Req, Headers1),
+    Resp = handle_response(Req, Code, Headers2, Body, respond),
+    log_response(Code, Body),
+    {ok, Resp}.
 
 send_method_not_allowed(Req, Methods) ->
     send_error(Req, 405, [{"Allow", Methods}], <<"method_not_allowed">>, ?l2b("Only " ++ Methods ++ " allowed")).
@@ -748,8 +757,7 @@
 send_json(Req, Code, Headers, Value) ->
     initialize_jsonp(Req),
     AllHeaders = maybe_add_default_headers(Req, Headers),
-    Body = [start_jsonp(), ?JSON_ENCODE(Value), end_jsonp(), $\n],
-    send_response(Req, Code, AllHeaders, Body).
+    send_response(Req, Code, AllHeaders, {json, Value}).
 
 start_json_response(Req, Code) ->
     start_json_response(Req, Code, []).
@@ -970,27 +978,13 @@
 send_redirect(Req, Path) ->
      send_response(Req, 301, [{"Location", absolute_uri(Req, Path)}], <<>>).
 
-negotiate_content_type(Req) ->
+negotiate_content_type(_Req) ->
     case get(jsonp) of
-        no_jsonp -> negotiate_content_type1(Req);
-        [] -> negotiate_content_type1(Req);
+        no_jsonp -> "application/json";
+        [] -> "application/json";
         _Callback -> "application/javascript"
     end.
 
-negotiate_content_type1(#httpd{mochi_req=MochiReq}) ->
-    %% Determine the appropriate Content-Type header for a JSON response
-    %% depending on the Accept header in the request. A request that explicitly
-    %% lists the correct JSON MIME type will get that type, otherwise the
-    %% response will have the generic MIME type "text/plain"
-    AcceptedTypes = case MochiReq:get_header_value("Accept") of
-        undefined       -> [];
-        AcceptHeader    -> string:tokens(AcceptHeader, ", ")
-    end,
-    case lists:member("application/json", AcceptedTypes) of
-        true  -> "application/json";
-        false -> "text/plain; charset=utf-8"
-    end.
-
 server_header() ->
     [{"Server", "CouchDB/" ++ couch_server:get_version() ++
                 " (Erlang OTP/" ++ erlang:system_info(otp_release) ++ ")"}].
@@ -1124,6 +1118,37 @@
         _ -> throw({error, invalid_bind_address})
     end.
 
+add_headers(Req, Headers0) ->
+    Headers = basic_headers(Req, Headers0),
+    http_1_0_keep_alive(Req, Headers).
+
+basic_headers(Req, Headers0) ->
+    Headers = basic_headers_no_cors(Req, Headers0),
+    chttpd_cors:headers(Req, Headers).
+
+basic_headers_no_cors(Req, Headers) ->
+    Headers
+        ++ server_header()
+        ++ couch_httpd_auth:cookie_auth_header(Req, Headers).
+
+handle_response(Req0, Code0, Headers0, Args0, Type) ->
+    {ok, {Req1, Code1, Headers1, Args1}} = before_response(Req0, Code0, Headers0, Args0),
+    couch_stats:increment_counter([couchdb, httpd_status_codes, Code1]),
+    log_request(Req0, Code1),
+    respond_(Req1, Code1, Headers1, Args1, Type).
+
+before_response(Req0, Code0, Headers0, {json, JsonObj}) ->
+    {ok, {Req1, Code1, Headers1, Body1}} =
+        chttpd_plugin:before_response(Req0, Code0, Headers0, JsonObj),
+    Body2 = [start_jsonp(), ?JSON_ENCODE(Body1), end_jsonp(), $\n],
+    {ok, {Req1, Code1, Headers1, Body2}};
+before_response(Req0, Code0, Headers0, Args0) ->
+    chttpd_plugin:before_response(Req0, Code0, Headers0, Args0).
+
+respond_(#httpd{mochi_req = MochiReq}, Code, Headers, _Args, start_response) ->
+    MochiReq:start_response({Code, Headers});
+respond_(#httpd{mochi_req = MochiReq}, Code, Headers, Args, Type) ->
+    MochiReq:Type({Code, Headers, Args}).
 
 %%%%%%%% module tests below %%%%%%%%
 
@@ -1164,4 +1189,41 @@
     end, Cases),
     {"Tests adding default headers", Tests}.
 
+log_request_test_() ->
+    {foreachx,
+        fun(_) ->
+            ok = meck:new([couch_log]),
+            ok = meck:expect(couch_log, error, fun(Fmt, Args) ->
+                case catch io_lib_format:fwrite(Fmt, Args) of
+                    {'EXIT', Error} -> Error;
+                    _ -> ok
+                end
+            end)
+        end,
+        fun(_, _) ->
+            meck:unload([couch_log])
+        end,
+        [{Flag, fun should_accept_code_and_message/2} || Flag <- [true, false]]
+    }.
+
+should_accept_code_and_message(DontLogFlag, _) ->
+    erlang:put(dont_log_response, DontLogFlag),
+    {"with dont_log_response = " ++ atom_to_list(DontLogFlag),
+        [
+            {"Should accept code 200 and string message",
+            ?_assertEqual(ok, log_response(200, "OK"))},
+            {"Should accept code 200 and JSON message",
+            ?_assertEqual(ok, log_response(200, {json, {[{ok, true}]}}))},
+            {"Should accept code >= 400 and string error",
+            ?_assertEqual(ok, log_response(405, method_not_allowed))},
+            {"Should accept code >= 400 and JSON error",
+            ?_assertEqual(ok,
+                log_response(405, {json, {[{error, method_not_allowed}]}}))},
+            {"Should accept code >= 500 and string error",
+            ?_assertEqual(ok, log_response(500, undef))},
+            {"Should accept code >= 500 and JSON error",
+            ?_assertEqual(ok, log_response(500, {json, {[{error, undef}]}}))}
+        ]
+    }.
+
 -endif.
diff --git a/src/couch_httpd_auth.erl b/src/couch_httpd_auth.erl
index c305ad9..15d3ac6 100644
--- a/src/couch_httpd_auth.erl
+++ b/src/couch_httpd_auth.erl
@@ -23,7 +23,7 @@
 -export([cookie_auth_header/2]).
 -export([handle_session_req/1, handle_session_req/2]).
 
--export([authenticate/2, verify_totp/2, maybe_upgrade_password_hash/6]).
+-export([authenticate/2, verify_totp/2]).
 -export([ensure_cookie_auth_secret/0, make_cookie_time/0]).
 -export([cookie_auth_cookie/4, cookie_scheme/1]).
 -export([maybe_value/3]).
@@ -97,12 +97,9 @@
                 Password = ?l2b(Pass),
                 case authenticate(Password, UserProps) of
                     true ->
-                        UserProps2 = maybe_upgrade_password_hash(
-                            Req, UserName, Password, UserProps,
-                            AuthModule, AuthCtx),
                         Req#httpd{user_ctx=#user_ctx{
                             name=UserName,
-                            roles=couch_util:get_value(<<"roles">>, UserProps2, [])
+                            roles=couch_util:get_value(<<"roles">>, UserProps, [])
                         }};
                     false ->
                         authentication_warning(Req, UserName),
@@ -308,11 +305,9 @@
     case authenticate(Password, UserProps) of
         true ->
             verify_totp(UserProps, Form),
-            UserProps2 = maybe_upgrade_password_hash(
-                Req, UserName, Password, UserProps, AuthModule, AuthCtx),
             % setup the session cookie
             Secret = ?l2b(ensure_cookie_auth_secret()),
-            UserSalt = couch_util:get_value(<<"salt">>, UserProps2),
+            UserSalt = couch_util:get_value(<<"salt">>, UserProps),
             CurrentTime = make_cookie_time(),
             Cookie = cookie_auth_cookie(Req, ?b2l(UserName), <<Secret/binary, UserSalt/binary>>, CurrentTime),
             % TODO document the "next" feature in Futon
@@ -326,7 +321,7 @@
                 {[
                     {ok, true},
                     {name, UserName},
-                    {roles, couch_util:get_value(<<"roles">>, UserProps2, [])}
+                    {roles, couch_util:get_value(<<"roles">>, UserProps, [])}
                 ]});
         false ->
             authentication_warning(Req, UserName),
@@ -395,23 +390,6 @@
 maybe_value(Key, Else, Fun) ->
     [{Key, Fun(Else)}].
 
-maybe_upgrade_password_hash(Req, UserName, Password, UserProps,
-        AuthModule, AuthCtx) ->
-    Upgrade = config:get_boolean("couch_httpd_auth", "upgrade_password_on_auth", true),
-    IsAdmin = lists:member(<<"_admin">>, couch_util:get_value(<<"roles">>, UserProps, [])),
-    case {IsAdmin, Upgrade,
-         couch_util:get_value(<<"password_scheme">>, UserProps, <<"simple">>)} of
-    {false, true, <<"simple">>} ->
-        UserProps2 = proplists:delete(<<"password_sha">>, UserProps),
-        UserProps3 = [{<<"password">>, Password} | UserProps2],
-        NewUserDoc = couch_doc:from_json_obj({UserProps3}),
-        ok = AuthModule:update_user_creds(Req, NewUserDoc, AuthCtx),
-        {ok, NewUserProps, _} = AuthModule:get_user_creds(Req, UserName),
-        NewUserProps;
-    _ ->
-        UserProps
-    end.
-
 authenticate(Pass, UserProps) ->
     UserSalt = couch_util:get_value(<<"salt">>, UserProps, <<>>),
     {PasswordHash, ExpectedHash} =
diff --git a/src/couch_httpd_cors.erl b/src/couch_httpd_cors.erl
deleted file mode 100644
index abcd9a7..0000000
--- a/src/couch_httpd_cors.erl
+++ /dev/null
@@ -1,356 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
-%% @doc module to handle Cross-Origin Resource Sharing
-%%
-%% This module handles CORS requests and preflight request for
-%% CouchDB. The configuration is done in the ini file.
-%%
-%% This implements http://www.w3.org/TR/cors/
-
-
--module(couch_httpd_cors).
-
--include_lib("couch/include/couch_db.hrl").
-
--export([is_preflight_request/1, cors_headers/2]).
-
--define(SUPPORTED_HEADERS, "Accept, Accept-Language, Content-Type," ++
-        "Expires, Last-Modified, Pragma, Origin, Content-Length," ++
-        "If-Match, Destination, X-Requested-With, " ++
-        "X-Http-Method-Override, Content-Range").
-
--define(SUPPORTED_METHODS, "GET, HEAD, POST, PUT, DELETE," ++
-        "TRACE, CONNECT, COPY, OPTIONS").
-
-% as defined in http://www.w3.org/TR/cors/#terminology
--define(SIMPLE_HEADERS, ["Cache-Control", "Content-Language",
-        "Content-Type", "Expires", "Last-Modified", "Pragma"]).
--define(ALLOWED_HEADERS, lists:sort(["Server", "Etag",
-        "Accept-Ranges" | ?SIMPLE_HEADERS])).
--define(SIMPLE_CONTENT_TYPE_VALUES, ["application/x-www-form-urlencoded",
-        "multipart/form-data", "text/plain"]).
-
-% TODO: - pick a sane default
--define(CORS_DEFAULT_MAX_AGE, 12345).
-
-%% is_preflight_request/1
-
-% http://www.w3.org/TR/cors/#resource-preflight-requests
-
-is_preflight_request(#httpd{method=Method}=Req) when Method /= 'OPTIONS' ->
-    Req;
-is_preflight_request(Req) ->
-    EnableCors = enable_cors(),
-    is_preflight_request(Req, EnableCors).
-
-is_preflight_request(Req, false) ->
-    Req;
-is_preflight_request(#httpd{mochi_req=MochiReq}=Req, true) ->
-    case preflight_request(MochiReq) of
-    {ok, PreflightHeaders} ->
-        send_preflight_response(Req, PreflightHeaders);
-    _ ->
-        Req
-    end.
-
-
-preflight_request(MochiReq) ->
-    Origin = MochiReq:get_header_value("Origin"),
-    preflight_request(MochiReq, Origin).
-
-preflight_request(MochiReq, undefined) ->
-    % If the Origin header is not present terminate this set of
-    % steps. The request is outside the scope of this specification.
-    % http://www.w3.org/TR/cors/#resource-preflight-requests
-    MochiReq;
-preflight_request(MochiReq, Origin) ->
-    Host = couch_httpd_vhost:host(MochiReq),
-    AcceptedOrigins = get_accepted_origins(Host),
-    AcceptAll = lists:member("*", AcceptedOrigins),
-
-    HandlerFun = fun() ->
-        OriginList = couch_util:to_list(Origin),
-        handle_preflight_request(OriginList, Host, MochiReq)
-    end,
-
-    case AcceptAll of
-    true ->
-        % Always matching is acceptable since the list of
-        % origins can be unbounded.
-        % http://www.w3.org/TR/cors/#resource-preflight-requests
-        HandlerFun();
-    false ->
-        case lists:member(Origin, AcceptedOrigins) of
-        % The Origin header can only contain a single origin as
-        % the user agent will not follow redirects.
-        % http://www.w3.org/TR/cors/#resource-preflight-requests
-        % TODO: Square against multi origin thinger in Security Considerations
-        true ->
-            HandlerFun();
-        false ->
-            % If the value of the Origin header is not a
-            % case-sensitive match for any of the values
-            % in list of origins do not set any additional
-            % headers and terminate this set of steps.
-            % http://www.w3.org/TR/cors/#resource-preflight-requests
-            false
-        end
-    end.
-
-
-handle_preflight_request(Origin, Host, MochiReq) ->
-    %% get supported methods
-    SupportedMethods = split_list(cors_config(Host, "methods",
-                                              ?SUPPORTED_METHODS)),
-
-    % get supported headers
-    AllSupportedHeaders = split_list(cors_config(Host, "headers",
-                                                 ?SUPPORTED_HEADERS)),
-
-    SupportedHeaders = [string:to_lower(H) || H <- AllSupportedHeaders],
-
-    % get max age
-    MaxAge = cors_config(Host, "max_age", ?CORS_DEFAULT_MAX_AGE),
-
-    PreflightHeaders0 = maybe_add_credentials(Origin, Host, [
-        {"Access-Control-Allow-Origin", Origin},
-        {"Access-Control-Max-Age", MaxAge},
-        {"Access-Control-Allow-Methods",
-            string:join(SupportedMethods, ", ")}]),
-
-    case MochiReq:get_header_value("Access-Control-Request-Method") of
-    undefined ->
-        % If there is no Access-Control-Request-Method header
-        % or if parsing failed, do not set any additional headers
-        % and terminate this set of steps. The request is outside
-        % the scope of this specification.
-        % http://www.w3.org/TR/cors/#resource-preflight-requests
-        {ok, PreflightHeaders0};
-    Method ->
-        case lists:member(Method, SupportedMethods) of
-        true ->
-            % method ok , check headers
-            AccessHeaders = MochiReq:get_header_value(
-                    "Access-Control-Request-Headers"),
-            {FinalReqHeaders, ReqHeaders} = case AccessHeaders of
-                undefined -> {"", []};
-                Headers ->
-                    % transform header list in something we
-                    % could check. make sure everything is a
-                    % list
-                    RH = [string:to_lower(H)
-                          || H <- split_headers(Headers)],
-                    {Headers, RH}
-            end,
-            % check if headers are supported
-            case ReqHeaders -- SupportedHeaders of
-            [] ->
-                PreflightHeaders = PreflightHeaders0 ++
-                                   [{"Access-Control-Allow-Headers",
-                                     FinalReqHeaders}],
-                {ok, PreflightHeaders};
-            _ ->
-                false
-            end;
-        false ->
-        % If method is not a case-sensitive match for any of
-        % the values in list of methods do not set any additional
-        % headers and terminate this set of steps.
-        % http://www.w3.org/TR/cors/#resource-preflight-requests
-            false
-        end
-    end.
-
-
-send_preflight_response(#httpd{mochi_req=MochiReq}=Req, Headers) ->
-    couch_httpd:log_request(Req, 204),
-    couch_stats:increment_counter([couchdb, httpd_status_codes, 204]),
-    Headers1 = couch_httpd:http_1_0_keep_alive(MochiReq, Headers),
-    Headers2 = Headers1 ++ couch_httpd:server_header() ++
-               couch_httpd_auth:cookie_auth_header(Req, Headers1),
-    {ok, MochiReq:respond({204, Headers2, <<>>})}.
-
-
-% cors_headers/1
-
-cors_headers(MochiReq, RequestHeaders) ->
-    EnableCors = enable_cors(),
-    CorsHeaders = do_cors_headers(MochiReq, EnableCors),
-    maybe_apply_cors_headers(CorsHeaders, RequestHeaders).
-
-do_cors_headers(#httpd{mochi_req=MochiReq}, true) ->
-    Host = couch_httpd_vhost:host(MochiReq),
-    AcceptedOrigins = get_accepted_origins(Host),
-    case MochiReq:get_header_value("Origin") of
-    undefined ->
-        % If the Origin header is not present terminate
-        % this set of steps. The request is outside the scope
-        % of this specification.
-        % http://www.w3.org/TR/cors/#resource-processing-model
-        [];
-    Origin ->
-        handle_cors_headers(couch_util:to_list(Origin),
-                            Host, AcceptedOrigins)
-    end;
-do_cors_headers(_MochiReq, false) ->
-    [].
-
-maybe_apply_cors_headers([], RequestHeaders) ->
-    RequestHeaders;
-maybe_apply_cors_headers(CorsHeaders, RequestHeaders0) ->
-    % for each RequestHeader that isn't in SimpleHeaders,
-    % (or Content-Type with SIMPLE_CONTENT_TYPE_VALUES)
-    % append to Access-Control-Expose-Headers
-    % return: RequestHeaders ++ CorsHeaders ++ ACEH
-
-    RequestHeaders = [K || {K,_V} <- RequestHeaders0],
-    ExposedHeaders0 = reduce_headers(RequestHeaders, ?ALLOWED_HEADERS),
-
-    % here we may have not moved Content-Type into ExposedHeaders,
-    % now we need to check whether the Content-Type valus is
-    % in ?SIMPLE_CONTENT_TYPE_VALUES and if it isn’t add Content-
-    % Type to to ExposedHeaders
-    ContentType =  proplists:get_value("Content-Type", RequestHeaders0),
-    IncludeContentType = case ContentType of
-    undefined ->
-        false;
-    _ ->
-        ContentType_ = string:to_lower(ContentType),
-        lists:member(ContentType_, ?SIMPLE_CONTENT_TYPE_VALUES)
-    end,
-    ExposedHeaders = case IncludeContentType of
-    false ->
-        lists:umerge(ExposedHeaders0, ["Content-Type"]);
-    true ->
-        ExposedHeaders0
-    end,
-    CorsHeaders
-    ++ RequestHeaders0
-    ++ [{"Access-Control-Expose-Headers",
-            string:join(ExposedHeaders, ", ")}].
-
-
-reduce_headers(A, B) ->
-    reduce_headers0(A, B, []).
-
-reduce_headers0([], _B, Result) ->
-    lists:sort(Result);
-reduce_headers0([ElmA|RestA], B, Result) ->
-    R = case member_nocase(ElmA, B) of
-    false -> Result;
-    _Else -> [ElmA | Result]
-    end,
-    reduce_headers0(RestA, B, R).
-
-member_nocase(ElmA, List) ->
-    lists:any(fun(ElmB) ->
-        string:to_lower(ElmA) =:= string:to_lower(ElmB)
-    end, List).
-
-handle_cors_headers(_Origin, _Host, []) ->
-    [];
-handle_cors_headers(Origin, Host, AcceptedOrigins) ->
-    AcceptAll = lists:member("*", AcceptedOrigins),
-    case {AcceptAll, lists:member(Origin, AcceptedOrigins)} of
-    {true, _} ->
-        make_cors_header(Origin, Host);
-    {false, true}  ->
-        make_cors_header(Origin, Host);
-    _ ->
-        % If the value of the Origin header is not a
-        % case-sensitive match for any of the values
-        % in list of origins, do not set any additional
-        % headers and terminate this set of steps.
-        % http://www.w3.org/TR/cors/#resource-requests
-        []
-    end.
-
-
-make_cors_header(Origin, Host) ->
-    Headers = [{"Access-Control-Allow-Origin", Origin}],
-    maybe_add_credentials(Origin, Host, Headers).
-
-
-%% util
-
-maybe_add_credentials(Origin, Host, Headers) ->
-    maybe_add_credentials(Headers, allow_credentials(Origin, Host)).
-
-maybe_add_credentials(Headers, false) ->
-    Headers;
-maybe_add_credentials(Headers, true) ->
-    Headers ++ [{"Access-Control-Allow-Credentials", "true"}].
-
-
-allow_credentials("*", _Host) ->
-    false;
-allow_credentials(_Origin, Host) ->
-    Default = get_bool_config("cors", "credentials", false),
-    get_bool_config(cors_section(Host), "credentials", Default).
-
-
-
-cors_config(Host, Key, Default) ->
-    config:get(cors_section(Host), Key,
-                     config:get("cors", Key, Default)).
-
-cors_section(Host0) ->
-    {Host, _Port} = split_host_port(Host0),
-    "cors:" ++ Host.
-
-enable_cors() ->
-    case get('disable_couch_httpd_cors') of
-        undefined ->
-            get_bool_config("httpd", "enable_cors", false);
-        _ ->
-            false
-    end.
-
-get_bool_config(Section, Key, Default) ->
-    case config:get(Section, Key) of
-    undefined ->
-        Default;
-    "true" ->
-        true;
-    "false" ->
-        false
-    end.
-
-get_accepted_origins(Host) ->
-    split_list(cors_config(Host, "origins", [])).
-
-split_list(S) ->
-    re:split(S, "\\s*,\\s*", [trim, {return, list}]).
-
-split_headers(H) ->
-    re:split(H, ",\\s*", [{return,list}, trim]).
-
-split_host_port(HostAsString) ->
-    % split at semicolon ":"
-    Split = string:rchr(HostAsString, $:),
-    split_host_port(HostAsString, Split).
-
-split_host_port(HostAsString, 0) ->
-    % no semicolon
-    {HostAsString, '*'};
-split_host_port(HostAsString, N) ->
-    HostPart = string:substr(HostAsString, 1, N-1),
-    % parse out port
-    % is there a nicer way?
-    case (catch erlang:list_to_integer(string:substr(HostAsString,
-                    N+1, length(HostAsString)))) of
-    {'EXIT', _} ->
-        {HostAsString, '*'};
-    Port ->
-        {HostPart, Port}
-    end.
diff --git a/src/couch_httpd_db.erl b/src/couch_httpd_db.erl
index 7f2ae1c..965e8fb 100644
--- a/src/couch_httpd_db.erl
+++ b/src/couch_httpd_db.erl
@@ -111,6 +111,8 @@
                 io_lib:format("\n],\n\"last_seq\":~w}\n", [EndSeq])
             ),
             couch_httpd:end_json_response(Resp);
+        (timeout, "eventsource") ->
+            couch_httpd:send_chunk(Resp, "event: heartbeat\ndata: \n\n");
         (timeout, _) ->
             couch_httpd:send_chunk(Resp, "\n")
         end
diff --git a/src/couch_httpd_misc_handlers.erl b/src/couch_httpd_misc_handlers.erl
index 10d6d9e..0cbbdd8 100644
--- a/src/couch_httpd_misc_handlers.erl
+++ b/src/couch_httpd_misc_handlers.erl
@@ -127,10 +127,13 @@
 
 handle_uuids_req(#httpd{method='GET'}=Req) ->
     Max = list_to_integer(config:get("uuids","max_count","1000")),
-    Count = list_to_integer(couch_httpd:qs_value(Req, "count", "1")),
-    case Count > Max of
-        true -> throw({forbidden, <<"count parameter too large">>});
-        false -> ok
+    Count = try list_to_integer(couch_httpd:qs_value(Req, "count", "1")) of
+        N when N > Max ->
+            throw({forbidden, <<"count parameter too large">>});
+        N -> N
+    catch
+        error:badarg ->
+            throw({bad_request, <<"count parameter is not an integer">>})
     end,
     UUIDs = [couch_uuids:new() || _ <- lists:seq(1, Count)],
     Etag = couch_httpd:make_etag(UUIDs),
diff --git a/src/couch_httpd_oauth.erl b/src/couch_httpd_oauth.erl
index d57623a..0310752 100644
--- a/src/couch_httpd_oauth.erl
+++ b/src/couch_httpd_oauth.erl
@@ -13,7 +13,7 @@
 -module(couch_httpd_oauth).
 
 -include_lib("couch/include/couch_db.hrl").
--include("couch_js_functions.hrl").
+-include_lib("couch/include/couch_js_functions.hrl").
 
 -export([oauth_authentication_handler/1, handle_oauth_req/1]).
 
diff --git a/src/couch_key_tree.erl b/src/couch_key_tree.erl
index 8007e17..bc4076a 100644
--- a/src/couch_key_tree.erl
+++ b/src/couch_key_tree.erl
@@ -60,6 +60,7 @@
 map_leafs/2,
 mapfold/3,
 merge/3,
+merge/2,
 remove_leafs/2,
 stem/2
 ]).
@@ -173,8 +174,8 @@
         fail -> fail
     end.
 
--spec merge_extend(tree(), tree()) ->
-                {tree(), new_leaf | new_branch | internal_node}.
+-spec merge_extend(revtree(), revtree()) ->
+                {revtree(), new_leaf | new_branch | internal_node}.
 merge_extend([], B) when B =/= [] ->
     % Most likely the insert branch simply extends this one, so the new
     % branch is exactly B. Its also possible that B is a branch because
@@ -280,25 +281,49 @@
     {Gotten, RemainingKeys} = get_key_leafs_simple(Pos, [Tree], Keys, []),
     get_key_leafs(Rest, RemainingKeys, Gotten ++ Acc).
 
-get_key_leafs_simple(_Pos, _Tree, [], _KeyPathAcc) ->
+get_key_leafs_simple(_Pos, _Tree, [], _PathAcc) ->
     {[], []};
-get_key_leafs_simple(_Pos, [], KeysToGet, _KeyPathAcc) ->
-    {[], KeysToGet};
-get_key_leafs_simple(Pos, [{Key, _Value, SubTree}=Tree | RestTree], KeysToGet, KeyPathAcc) ->
-    case lists:delete({Pos, Key}, KeysToGet) of
-    KeysToGet -> % same list, key not found
-        {LeafsFound, KeysToGet2} = get_key_leafs_simple(Pos + 1, SubTree, KeysToGet, [Key | KeyPathAcc]),
-        {RestLeafsFound, KeysRemaining} = get_key_leafs_simple(Pos, RestTree, KeysToGet2, KeyPathAcc),
-        {LeafsFound ++ RestLeafsFound, KeysRemaining};
-    KeysToGet2 ->
-        LeafsFound = get_all_leafs_simple(Pos, [Tree], KeyPathAcc),
-        LeafKeysFound = [{LeafPos, LeafRev} || {_, {LeafPos, [LeafRev|_]}}
-            <- LeafsFound],
-        KeysToGet3 = KeysToGet2 -- LeafKeysFound,
-        {RestLeafsFound, KeysRemaining} = get_key_leafs_simple(Pos, RestTree, KeysToGet3, KeyPathAcc),
-        {LeafsFound ++ RestLeafsFound, KeysRemaining}
+get_key_leafs_simple(_Pos, [], Keys, _PathAcc) ->
+    {[], Keys};
+get_key_leafs_simple(Pos, [{Key, _, SubTree}=Tree | RestTree], Keys, PathAcc) ->
+    case lists:delete({Pos, Key}, Keys) of
+        Keys ->
+            % Same list, key not found
+            NewPathAcc = [Key | PathAcc],
+            {ChildLeafs, Keys2} = get_key_leafs_simple(Pos + 1, SubTree, Keys, NewPathAcc),
+            {SiblingLeafs, Keys3} = get_key_leafs_simple(Pos, RestTree, Keys2, PathAcc),
+            {ChildLeafs ++ SiblingLeafs, Keys3};
+        Keys2 ->
+            % This is a key we were looking for, get all descendant
+            % leafs while removing any requested key we find. Notice
+            % that this key will be returned by get_key_leafs_simple2
+            % if it's a leaf so there's no need to return it here.
+            {ChildLeafs, Keys3} = get_key_leafs_simple2(Pos, [Tree], Keys2, PathAcc),
+            {SiblingLeafs, Keys4} = get_key_leafs_simple(Pos, RestTree, Keys3, PathAcc),
+            {ChildLeafs ++ SiblingLeafs, Keys4}
     end.
 
+
+get_key_leafs_simple2(_Pos, [], Keys, _PathAcc) ->
+    % No more tree to deal with so no more keys to return.
+    {[], Keys};
+get_key_leafs_simple2(Pos, [{Key, Value, []} | RestTree], Keys, PathAcc) ->
+    % This is a leaf as defined by having an empty list of
+    % child nodes. The assertion is a bit subtle but the function
+    % clause match means its a leaf.
+    Keys2 = lists:delete({Pos, Key}, Keys),
+    {SiblingLeafs, Keys3} = get_key_leafs_simple2(Pos, RestTree, Keys2, PathAcc),
+    {[{Value, {Pos, [Key | PathAcc]}} | SiblingLeafs], Keys3};
+get_key_leafs_simple2(Pos, [{Key, _Value, SubTree} | RestTree], Keys, PathAcc) ->
+    % This isn't a leaf. Recurse into the subtree and then
+    % process any sibling branches.
+    Keys2 = lists:delete({Pos, Key}, Keys),
+    NewPathAcc = [Key | PathAcc],
+    {ChildLeafs, Keys3} = get_key_leafs_simple2(Pos + 1, SubTree, Keys2, NewPathAcc),
+    {SiblingLeafs, Keys4} = get_key_leafs_simple2(Pos, RestTree, Keys3, PathAcc),
+    {ChildLeafs ++ SiblingLeafs, Keys4}.
+
+
 get(Tree, KeysToGet) ->
     {KeyPaths, KeysNotFound} = get_full_key_paths(Tree, KeysToGet),
     FixedResults = [ {Value, {Pos, [Key0 || {Key0, _} <- Path]}} || {Pos, [{_Key, Value}|_]=Path} <- KeyPaths],
diff --git a/src/couch_native_process.erl b/src/couch_native_process.erl
index dcd01b3..ab279cd 100644
--- a/src/couch_native_process.erl
+++ b/src/couch_native_process.erl
@@ -190,6 +190,8 @@
 
 ddoc(State, {_, Fun}, [<<"validate_doc_update">>], Args) ->
     {State, (catch apply(Fun, Args))};
+ddoc(State, {_, Fun}, [<<"rewrites">>], Args) ->
+    {State, (catch apply(Fun, Args))};
 ddoc(State, {_, Fun}, [<<"filters">>|_], [Docs, Req]) ->
     FilterFunWrapper = fun(Doc) ->
         case catch Fun(Doc, Req) of
diff --git a/src/couch_os_process.erl b/src/couch_os_process.erl
index 5b9a17f..4f15555 100644
--- a/src/couch_os_process.erl
+++ b/src/couch_os_process.erl
@@ -51,7 +51,7 @@
     gen_server:cast(Pid, {send, Data}).
 
 prompt(Pid, Data) ->
-    case gen_server:call(Pid, {prompt, Data}, infinity) of
+    case ioq:call(Pid, {prompt, Data}, erlang:get(io_priority)) of
         {ok, Result} ->
             Result;
         Error ->
diff --git a/src/couch_proc_manager.erl b/src/couch_proc_manager.erl
index b3ca69a..33cc1e5 100644
--- a/src/couch_proc_manager.erl
+++ b/src/couch_proc_manager.erl
@@ -52,12 +52,15 @@
     soft_limit
 }).
 
+-type docid() :: iodata().
+-type revision() :: {integer(), binary()}.
+
 -record(client, {
-    timestamp,
-    from,
-    lang,
-    ddoc,
-    ddoc_key
+    timestamp :: erlang:timestamp() | '_',
+    from :: undefined | {pid(), reference()} | '_',
+    lang :: binary() | '_',
+    ddoc :: #doc{} | '_',
+    ddoc_key :: undefined | {DDocId :: docid(), Rev :: revision()} | '_'
 }).
 
 -record(proc_int, {
@@ -368,13 +371,8 @@
     Resp = try
         case new_proc_int(From, Lang) of
         {ok, NewProc} ->
-            case teach_ddoc(DDoc, DDocKey, NewProc) of
-            {ok, Proc} ->
-                {spawn_ok, Proc, From};
-            {error, Reason} ->
-                gen_server:reply(From, {error, Reason}),
-                spawn_error
-            end;
+            {ok, Proc} = teach_ddoc(DDoc, DDocKey, NewProc),
+            {spawn_ok, Proc, From};
         Error ->
             gen_server:reply(From, {error, Error}),
             spawn_error
@@ -443,7 +441,7 @@
     NewState = case is_process_alive(Pid) of true ->
         case ProcInt#proc_int.t0 < State#state.threshold_ts of
             true ->
-                remove_proc(State, Pid);
+                remove_proc(State, ProcInt);
             false ->
                 gen_server:cast(Pid, garbage_collect),
                 true = ets:update_element(?PROCS, Pid, [
@@ -519,7 +517,7 @@
 add_waiting_client(Client) ->
     ets:insert(?WAITERS, Client#client{timestamp=os:timestamp()}).
 
-
+-spec get_waiting_client(Lang :: binary()) -> undefined | #client{}.
 get_waiting_client(Lang) ->
     case ets:match_object(?WAITERS, #client{lang=Lang, _='_'}, 1) of
         '$end_of_table' ->
diff --git a/src/couch_query_servers.erl b/src/couch_query_servers.erl
index 92ba3a3..a3d7a47 100644
--- a/src/couch_query_servers.erl
+++ b/src/couch_query_servers.erl
@@ -17,6 +17,7 @@
 -export([reduce/3, rereduce/3,validate_doc_update/5]).
 -export([filter_docs/5]).
 -export([filter_view/3]).
+-export([rewrite/3]).
 
 -export([with_ddoc_proc/2, proc_prompt/2, ddoc_prompt/3, ddoc_proc_prompt/3, json_doc/1]).
 
@@ -292,6 +293,85 @@
             throw({unknown_error, Message})
     end.
 
+
+rewrite(Req, Db, DDoc) ->
+    Fields = [F || F <- chttpd_external:json_req_obj_fields(),
+              F =/= <<"info">>, F =/= <<"form">>,
+              F =/= <<"uuid">>, F =/= <<"id">>],
+    JsonReq = chttpd_external:json_req_obj(Req, Db, null, Fields),
+    case couch_query_servers:ddoc_prompt(DDoc, [<<"rewrites">>], [JsonReq]) of
+        {[{<<"forbidden">>, Message}]} ->
+            throw({forbidden, Message});
+        {[{<<"unauthorized">>, Message}]} ->
+            throw({unauthorized, Message});
+        [<<"no_dispatch_rule">>] ->
+            undefined;
+        [<<"ok">>, {V}=Rewrite] when is_list(V) ->
+            ok = validate_rewrite_response(Rewrite),
+            Rewrite;
+        [<<"ok">>, _]  ->
+            throw_rewrite_error(<<"bad rewrite">>);
+        V ->
+            couch_log:error("bad rewrite return ~p", [V]),
+            throw({unknown_error, V})
+    end.
+
+validate_rewrite_response({Fields}) when is_list(Fields) ->
+    validate_rewrite_response_fields(Fields).
+
+validate_rewrite_response_fields([{Key, Value} | Rest]) ->
+    validate_rewrite_response_field(Key, Value),
+    validate_rewrite_response_fields(Rest);
+validate_rewrite_response_fields([]) ->
+    ok.
+
+validate_rewrite_response_field(<<"method">>, Method) when is_binary(Method) ->
+    ok;
+validate_rewrite_response_field(<<"method">>, _) ->
+    throw_rewrite_error(<<"bad method">>);
+validate_rewrite_response_field(<<"path">>, Path) when is_binary(Path) ->
+    ok;
+validate_rewrite_response_field(<<"path">>, _) ->
+    throw_rewrite_error(<<"bad path">>);
+validate_rewrite_response_field(<<"body">>, Body) when is_binary(Body) ->
+    ok;
+validate_rewrite_response_field(<<"body">>, _) ->
+    throw_rewrite_error(<<"bad body">>);
+validate_rewrite_response_field(<<"headers">>, {Props}=Headers) when is_list(Props) ->
+    validate_object_fields(Headers);
+validate_rewrite_response_field(<<"headers">>, _) ->
+    throw_rewrite_error(<<"bad headers">>);
+validate_rewrite_response_field(<<"query">>, {Props}=Query) when is_list(Props) ->
+    validate_object_fields(Query);
+validate_rewrite_response_field(<<"query">>, _) ->
+    throw_rewrite_error(<<"bad query">>);
+validate_rewrite_response_field(<<"code">>, Code) when is_integer(Code) andalso Code >= 200 andalso Code < 600 ->
+    ok;
+validate_rewrite_response_field(<<"code">>, _) ->
+    throw_rewrite_error(<<"bad code">>);
+validate_rewrite_response_field(K, V) ->
+    couch_log:debug("unknown rewrite field ~p=~p", [K, V]),
+    ok.
+
+validate_object_fields({Props}) when is_list(Props) ->
+    lists:foreach(fun
+        ({Key, Value}) when is_binary(Key) andalso is_binary(Value) ->
+            ok;
+        ({Key, Value}) ->
+            Reason = io_lib:format(
+                "object key/value must be strings ~p=~p", [Key, Value]),
+            throw_rewrite_error(Reason);
+        (Value) ->
+            throw_rewrite_error(io_lib:format("bad value ~p", [Value]))
+    end, Props).
+
+
+throw_rewrite_error(Reason) when is_list(Reason) ->
+    throw_rewrite_error(iolist_to_binary(Reason));
+throw_rewrite_error(Reason) when is_binary(Reason) ->
+    throw({rewrite_error, Reason}).
+
+
 json_doc_options() ->
     json_doc_options([]).
 
diff --git a/src/couch_server.erl b/src/couch_server.erl
index bb93dfe..2634bbf 100644
--- a/src/couch_server.erl
+++ b/src/couch_server.erl
@@ -25,8 +25,6 @@
 % config_listener api
 -export([handle_config_change/5, handle_config_terminate/3]).
 
--export([delete_file/3]).
-
 -include_lib("couch/include/couch_db.hrl").
 
 -define(MAX_DBS_OPEN, 100).
@@ -83,10 +81,14 @@
         {ok, Db#db{user_ctx=Ctx, fd_monitor=erlang:monitor(process,Fd)}};
     _ ->
         Timeout = couch_util:get_value(timeout, Options, infinity),
+        Create = couch_util:get_value(create_if_missing, Options, false),
         case gen_server:call(couch_server, {open, DbName, Options}, Timeout) of
         {ok, #db{fd=Fd} = Db} ->
             update_lru(DbName, Options),
             {ok, Db#db{user_ctx=Ctx, fd_monitor=erlang:monitor(process,Fd)}};
+        {not_found, no_db_file} when Create ->
+            couch_log:warning("creating missing database: ~s", [DbName]),
+            couch_server:create(DbName, Options);
         Error ->
             Error
         end
@@ -119,8 +121,7 @@
 maybe_add_sys_db_callbacks(DbName, Options) ->
     DbsDbName = config:get("mem3", "shards_db", "_dbs"),
     NodesDbName = config:get("mem3", "nodes_db", "_nodes"),
-    IsReplicatorDb = DbName == config:get("replicator", "db", "_replicator") orelse
-	path_ends_with(DbName, <<"_replicator">>),
+    IsReplicatorDb = path_ends_with(DbName, <<"_replicator">>),
     IsUsersDb = DbName ==config:get("couch_httpd_auth", "authentication_db", "_users") orelse
 	path_ends_with(DbName, <<"_users">>),
     if
@@ -462,8 +463,9 @@
 
         couch_db_plugin:on_delete(DbName, Options),
 
-        case delete_file(Server#server.root_dir, FullFilepath, Options) of
-        {ok, _} ->
+        DelOpt = [{context, delete} | Options],
+        case couch_file:delete(Server#server.root_dir, FullFilepath, DelOpt) of
+        ok ->
             couch_event:notify(DbName, deleted),
             {reply, ok, Server2};
         {error, enoent} ->
@@ -539,31 +541,3 @@
         false -> Server#server{dbs_open=Server#server.dbs_open - 1};
         true -> Server
     end.
-
-delete_file(RootDir, FullFilePath, Options) ->
-    Async = not lists:member(sync, Options),
-    RenameOnDelete = config:get_boolean("couchdb", "rename_on_delete", false),
-    case {Async, RenameOnDelete} of
-        {_, true} ->
-            rename_on_delete(FullFilePath);
-        {Async, false} ->
-            case couch_file:delete(RootDir, FullFilePath, Async) of
-                ok -> {ok, deleted};
-                Else -> Else
-            end
-    end.
-
-rename_on_delete(Original) ->
-    DeletedFileName = deleted_filename(Original),
-    case file:rename(Original, DeletedFileName) of
-        ok -> {ok, {renamed, DeletedFileName}};
-        Else -> Else
-    end.
-
-deleted_filename(Original) ->
-    {{Y,Mon,D}, {H,Min,S}} = calendar:universal_time(),
-    Suffix = lists:flatten(
-        io_lib:format(".~w~2.10.0B~2.10.0B."
-            ++ "~2.10.0B~2.10.0B~2.10.0B.deleted"
-            ++ filename:extension(Original), [Y,Mon,D,H,Min,S])),
-    filename:rootname(Original) ++ Suffix.
diff --git a/src/test_util.erl b/src/test_util.erl
index d13f850..88cfc63 100644
--- a/src/test_util.erl
+++ b/src/test_util.erl
@@ -34,6 +34,9 @@
 
 -record(test_context, {mocked = [], started = [], module}).
 
+-define(DEFAULT_APPS,
+        [inets, ibrowse, ssl, config, couch_epi, couch_event, couch]).
+
 srcdir() ->
     code:priv_dir(couch) ++ "/../../".
 
@@ -68,16 +71,11 @@
 start_couch(IniFiles, ExtraApps) ->
     load_applications_with_stats(),
     ok = application:set_env(config, ini_files, IniFiles),
-
-    Apps = start_applications(
-        [goldrush, lager, inets, ibrowse, ssl, config, couch_epi, couch_event, couch]
-        ++ ExtraApps),
-
+    Apps = start_applications(?DEFAULT_APPS ++ ExtraApps),
     #test_context{started = Apps}.
 
 stop_couch() ->
-    ok = stop_applications([inets, ibrowse, ssl, config, goldrush, lager, couch]),
-    ok.
+    ok = stop_applications(?DEFAULT_APPS).
 
 stop_couch(#test_context{started = Apps}) ->
     stop_applications(Apps);
@@ -166,19 +164,19 @@
 
 with_process_restart(Name) ->
     {Pid, true} = with_process_restart(
-        fun() -> exit(whereis(Name), shutdown) end, Name),
+        Name, fun() -> exit(whereis(Name), shutdown) end),
     Pid.
 
 with_process_restart(Name, Fun) ->
     with_process_restart(Name, Fun, 5000).
 
 with_process_restart(Name, Fun, Timeout) ->
-    ok = stop_sync(Name, Fun),
+    Res = stop_sync(Name, Fun),
     case wait_process(Name, Timeout) of
     timeout ->
         timeout;
     Pid ->
-        Pid
+        {Pid, Res}
     end.
 
 
diff --git a/test/couch_changes_tests.erl b/test/couch_changes_tests.erl
index c28394b..7e38a02 100644
--- a/test/couch_changes_tests.erl
+++ b/test/couch_changes_tests.erl
@@ -18,8 +18,6 @@
 -define(TIMEOUT, 3000).
 -define(TEST_TIMEOUT, 10000).
 
--ifdef(run_broken_tests).
-
 -record(row, {
     id,
     seq,
@@ -57,15 +55,17 @@
 
 changes_test_() ->
     {
-        "Changes feeed",
+        "Changes feed",
         {
             setup,
             fun test_util:start_couch/0, fun test_util:stop_couch/1,
             [
+                filter_by_selector(),
                 filter_by_doc_id(),
                 filter_by_design(),
                 continuous_feed(),
-                filter_by_custom_function()
+                %%filter_by_custom_function()
+                filter_by_view()
             ]
         }
     }.
@@ -86,6 +86,24 @@
         }
     }.
 
+filter_by_selector() ->
+    {
+        "Filter _selector",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                fun should_select_basic/1,
+                fun should_select_with_since/1,
+                fun should_select_when_no_result/1,
+                fun should_select_with_deleted_docs/1,
+                fun should_select_with_continuous/1,
+                fun should_stop_selector_when_db_deleted/1
+            ]
+        }
+    }.
+
+
 filter_by_design() ->
     {
         "Filter _design",
@@ -110,6 +128,19 @@
         }
     }.
 
+filter_by_view() ->
+    {
+        "Filter _view",
+        {
+            foreach,
+            fun setup/0, fun teardown/1,
+            [
+                fun should_filter_by_view/1,
+                fun should_filter_by_fast_view/1
+            ]
+        }
+    }.
+
 continuous_feed() ->
     {
         "Continuous Feed",
@@ -117,7 +148,8 @@
             foreach,
             fun setup/0, fun teardown/1,
             [
-                fun should_filter_continuous_feed_by_specific_doc_ids/1
+                fun should_filter_continuous_feed_by_specific_doc_ids/1,
+                fun should_end_changes_when_db_deleted/1
             ]
         }
     }.
@@ -315,6 +347,159 @@
             ?assertMatch([#row{seq = 18, id = <<"doc3">>}], FinalRows)
         end).
 
+
+should_end_changes_when_db_deleted({DbName, _Revs}) ->
+    ?_test(begin
+        {ok, _Db} = couch_db:open_int(DbName, []),
+        ChangesArgs = #changes_args{
+            filter = "_doc_ids",
+            feed = "continuous"
+        },
+        DocIds = [<<"doc3">>, <<"doc4">>, <<"doc9999">>],
+        Req = {json_req, {[{<<"doc_ids">>, DocIds}]}},
+        Consumer = spawn_consumer(DbName, ChangesArgs, Req),
+        ok = pause(Consumer),
+        ok = couch_server:delete(DbName, [?ADMIN_CTX]),
+        ok = unpause(Consumer),
+        {_Rows, _LastSeq} = wait_finished(Consumer),
+        stop_consumer(Consumer),
+        ok
+    end).
+
+
+should_select_basic({DbName, _}) ->
+    ?_test(
+        begin
+            ChArgs = #changes_args{filter = "_selector"},
+            Selector = {[{<<"_id">>, <<"doc3">>}]},
+            Req = {json_req, {[{<<"selector">>, Selector}]}},
+            Consumer = spawn_consumer(DbName, ChArgs, Req),
+            {Rows, LastSeq} = wait_finished(Consumer),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            UpSeq = couch_db:get_update_seq(Db),
+            couch_db:close(Db),
+            stop_consumer(Consumer),
+            ?assertEqual(1, length(Rows)),
+            [#row{seq = Seq, id = Id}] = Rows,
+            ?assertEqual(<<"doc3">>, Id),
+            ?assertEqual(6, Seq),
+            ?assertEqual(UpSeq, LastSeq)
+        end).
+
+should_select_with_since({DbName, _}) ->
+    ?_test(
+        begin
+            ChArgs = #changes_args{filter = "_selector", since = 9},
+            GteDoc2 = {[{<<"$gte">>, <<"doc1">>}]},
+            Selector = {[{<<"_id">>, GteDoc2}]},
+            Req = {json_req, {[{<<"selector">>, Selector}]}},
+            Consumer = spawn_consumer(DbName, ChArgs, Req),
+            {Rows, LastSeq} = wait_finished(Consumer),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            UpSeq = couch_db:get_update_seq(Db),
+            couch_db:close(Db),
+            stop_consumer(Consumer),
+            ?assertEqual(1, length(Rows)),
+            [#row{seq = Seq, id = Id}] = Rows,
+            ?assertEqual(<<"doc8">>, Id),
+            ?assertEqual(10, Seq),
+            ?assertEqual(UpSeq, LastSeq)
+        end).
+
+should_select_when_no_result({DbName, _}) ->
+    ?_test(
+        begin
+            ChArgs = #changes_args{filter = "_selector"},
+            Selector = {[{<<"_id">>, <<"nopers">>}]},
+            Req = {json_req, {[{<<"selector">>, Selector}]}},
+            Consumer = spawn_consumer(DbName, ChArgs, Req),
+            {Rows, LastSeq} = wait_finished(Consumer),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            UpSeq = couch_db:get_update_seq(Db),
+            couch_db:close(Db),
+            stop_consumer(Consumer),
+            ?assertEqual(0, length(Rows)),
+            ?assertEqual(UpSeq, LastSeq)
+        end).
+
+should_select_with_deleted_docs({DbName, Revs}) ->
+    ?_test(
+        begin
+            Rev3_2 = element(6, Revs),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            {ok, _} = save_doc(
+                Db,
+                {[{<<"_id">>, <<"doc3">>},
+                  {<<"_deleted">>, true},
+                  {<<"_rev">>, Rev3_2}]}),
+            ChArgs = #changes_args{filter = "_selector"},
+            Selector = {[{<<"_id">>, <<"doc3">>}]},
+            Req = {json_req, {[{<<"selector">>, Selector}]}},
+            Consumer = spawn_consumer(DbName, ChArgs, Req),
+            {Rows, LastSeq} = wait_finished(Consumer),
+            couch_db:close(Db),
+            stop_consumer(Consumer),
+            ?assertMatch(
+                [#row{seq = LastSeq, id = <<"doc3">>, deleted = true}],
+                Rows
+            ),
+            ?assertEqual(11, LastSeq)
+        end).
+
+should_select_with_continuous({DbName, Revs}) ->
+    ?_test(
+        begin
+            {ok, Db} = couch_db:open_int(DbName, []),
+            ChArgs = #changes_args{filter = "_selector", feed = "continuous"},
+            GteDoc8 = {[{<<"$gte">>, <<"doc8">>}]},
+            Selector = {[{<<"_id">>, GteDoc8}]},
+            Req = {json_req, {[{<<"selector">>, Selector}]}},
+            Consumer = spawn_consumer(DbName, ChArgs, Req),
+            ok = pause(Consumer),
+            Rows = get_rows(Consumer),
+            ?assertMatch(
+               [#row{seq = 10, id = <<"doc8">>, deleted = false}],
+               Rows
+            ),
+            clear_rows(Consumer),
+            {ok, _} = save_doc(Db, {[{<<"_id">>, <<"doc01">>}]}),
+            ok = unpause(Consumer),
+            timer:sleep(100),
+            ok = pause(Consumer),
+            ?assertEqual([], get_rows(Consumer)),
+            Rev4 = element(4, Revs),
+            Rev8 = element(10, Revs),
+            {ok, _} = save_doc(Db, {[{<<"_id">>, <<"doc8">>},
+                                     {<<"_rev">>, Rev8}]}),
+            {ok, _} = save_doc(Db, {[{<<"_id">>, <<"doc4">>},
+                                     {<<"_rev">>, Rev4}]}),
+            ok = unpause(Consumer),
+            timer:sleep(100),
+            ok = pause(Consumer),
+            NewRows = get_rows(Consumer),
+            ?assertMatch(
+               [#row{seq = _, id = <<"doc8">>, deleted = false}],
+               NewRows
+            )
+        end).
+
+should_stop_selector_when_db_deleted({DbName, _Revs}) ->
+    ?_test(
+       begin
+           {ok, _Db} = couch_db:open_int(DbName, []),
+           ChArgs = #changes_args{filter = "_selector", feed = "continuous"},
+           Selector = {[{<<"_id">>, <<"doc3">>}]},
+           Req = {json_req, {[{<<"selector">>, Selector}]}},
+           Consumer = spawn_consumer(DbName, ChArgs, Req),
+           ok = pause(Consumer),
+           ok = couch_server:delete(DbName, [?ADMIN_CTX]),
+           ok = unpause(Consumer),
+           {_Rows, _LastSeq} = wait_finished(Consumer),
+           stop_consumer(Consumer),
+           ok
+       end).
+
+
 should_emit_only_design_documents({DbName, Revs}) ->
     ?_test(
         begin
@@ -420,13 +605,94 @@
              ?assert(Heartbeats3 > Heartbeats2)
         end)}.
 
+should_filter_by_view({DbName, _}) ->
+    ?_test(
+        begin
+            {ok, Db0} = couch_db:open_int(DbName, [?ADMIN_CTX]),
+            DDocId = <<"_design/app">>,
+            DDoc = couch_doc:from_json_obj({[
+                {<<"_id">>, DDocId},
+                {<<"language">>, <<"javascript">>},
+                {<<"views">>, {[
+                    {<<"valid">>, {[
+                        {<<"map">>, <<"function(doc) {"
+                        " if (doc._id == 'doc3') {"
+                            " emit(doc); "
+                        "} }">>}
+                    ]}}
+                ]}}
+            ]}),
+            {ok, _} = couch_db:update_doc(Db0, DDoc, []),
+            couch_db:close(Db0),
+            %%
+            ChangesArgs = #changes_args{filter = "_view"},
+            Opts = {json_req, {[{
+                <<"query">>, {[
+                    {<<"view">>, <<"app/valid">>}
+                ]}
+            }]}},
+            Consumer = spawn_consumer(DbName, ChangesArgs, Opts),
+            {Rows, LastSeq} = wait_finished(Consumer),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            UpSeq = couch_db:get_update_seq(Db),
+            couch_db:close(Db),
+            stop_consumer(Consumer),
+            ?assertEqual(1, length(Rows)),
+            [#row{seq = Seq, id = Id}] = Rows,
+            ?assertEqual(<<"doc3">>, Id),
+            ?assertEqual(6, Seq),
+            ?assertEqual(UpSeq, LastSeq)
+        end).
+
+should_filter_by_fast_view({DbName, _}) ->
+    ?_test(
+        begin
+            {ok, Db0} = couch_db:open_int(DbName, [?ADMIN_CTX]),
+            DDocId = <<"_design/app">>,
+            DDoc = couch_doc:from_json_obj({[
+                {<<"_id">>, DDocId},
+                {<<"language">>, <<"javascript">>},
+                {<<"options">>, {[{<<"seq_indexed">>, true}]}},
+                {<<"views">>, {[
+                    {<<"valid">>, {[
+                        {<<"map">>, <<"function(doc) {"
+                        " if (doc._id == 'doc3') {"
+                            " emit(doc); "
+                        "} }">>}
+                    ]}}
+                ]}}
+            ]}),
+            {ok, _} = couch_db:update_doc(Db0, DDoc, []),
+            couch_db:close(Db0),
+            %%
+            ChangesArgs = #changes_args{filter = "_view"},
+            Opts = {json_req, {[{
+                <<"query">>, {[
+                    {<<"view">>, <<"app/valid">>}
+                ]}
+            }]}},
+            Consumer = spawn_consumer(DbName, ChangesArgs, Opts),
+            {Rows, LastSeq} = wait_finished(Consumer),
+            {ok, Db} = couch_db:open_int(DbName, []),
+            DbUpSeq = couch_db:get_update_seq(Db),
+            {ok, ViewInfo} = couch_mrview:get_view_info(Db, DDoc, <<"valid">>),
+            {update_seq, ViewUpSeq} = lists:keyfind(update_seq, 1, ViewInfo),
+            couch_db:close(Db),
+            stop_consumer(Consumer),
+            ?assertEqual(1, length(Rows)),
+            [#row{seq = Seq, id = Id}] = Rows,
+            ?assertEqual(<<"doc3">>, Id),
+            ?assertEqual(6, Seq),
+            ?assertEqual(LastSeq, Seq),
+            ?assertEqual(DbUpSeq, ViewUpSeq)
+        end).
 
 save_doc(Db, Json) ->
     Doc = couch_doc:from_json_obj(Json),
     {ok, Rev} = couch_db:update_doc(Db, Doc, []),
     {ok, couch_doc:rev_to_str(Rev)}.
 
-get_rows(Consumer) ->
+get_rows({Consumer, _}) ->
     Ref = make_ref(),
     Consumer ! {get_rows, Ref},
     Resp = receive
@@ -438,7 +704,7 @@
     ?assertNotEqual(timeout, Resp),
     Resp.
 
-get_heartbeats(Consumer) ->
+get_heartbeats({Consumer, _}) ->
     Ref = make_ref(),
     Consumer ! {get_heartbeats, Ref},
     Resp = receive
@@ -450,7 +716,7 @@
     ?assertNotEqual(timeout, Resp),
     Resp.
 
-clear_rows(Consumer) ->
+clear_rows({Consumer, _}) ->
     Ref = make_ref(),
     Consumer ! {reset, Ref},
     Resp = receive
@@ -462,7 +728,7 @@
     ?assertNotEqual(timeout, Resp),
     Resp.
 
-stop_consumer(Consumer) ->
+stop_consumer({Consumer, _}) ->
     Ref = make_ref(),
     Consumer ! {stop, Ref},
     Resp = receive
@@ -474,7 +740,7 @@
     ?assertNotEqual(timeout, Resp),
     Resp.
 
-pause(Consumer) ->
+pause({Consumer, _}) ->
     Ref = make_ref(),
     Consumer ! {pause, Ref},
     Resp = receive
@@ -486,7 +752,7 @@
     ?assertNotEqual(timeout, Resp),
     Resp.
 
-unpause(Consumer) ->
+unpause({Consumer, _}) ->
     Ref = make_ref(),
     Consumer ! {continue, Ref},
     Resp = receive
@@ -498,19 +764,29 @@
     ?assertNotEqual(timeout, Resp),
     Resp.
 
-wait_finished(_Consumer) ->
-    Resp = receive
+wait_finished({_, ConsumerRef}) ->
+    receive
         {consumer_finished, Rows, LastSeq} ->
-            {Rows, LastSeq}
+            {Rows, LastSeq};
+        {'DOWN', ConsumerRef, _, _, Msg} when Msg == normal; Msg == ok ->
+            ok;
+        {'DOWN', ConsumerRef, _, _, Msg} ->
+            erlang:error({consumer_died, [
+                {module, ?MODULE},
+                {line, ?LINE},
+                {value, Msg}
+            ]})
     after ?TIMEOUT ->
-        timeout
-    end,
-    ?assertNotEqual(timeout, Resp),
-    Resp.
+        erlang:error({consumer_died, [
+            {module, ?MODULE},
+            {line, ?LINE},
+            {value, timeout}
+        ]})
+    end.
 
 spawn_consumer(DbName, ChangesArgs0, Req) ->
     Parent = self(),
-    spawn(fun() ->
+    spawn_monitor(fun() ->
         put(heartbeat_count, 0),
         Callback = fun
             ({change, {Change}, _}, _, Acc) ->
@@ -538,10 +814,12 @@
         FeedFun = couch_changes:handle_db_changes(ChangesArgs, Req, Db),
         try
             FeedFun({Callback, []})
-        catch throw:{stop, _} ->
-            ok
-        end,
-        catch couch_db:close(Db)
+        catch
+            throw:{stop, _} -> ok;
+            _:Error -> Error
+        after
+            couch_db:close(Db)
+        end
     end).
 
 maybe_pause(Parent, Acc) ->
@@ -601,6 +879,4 @@
     couch_db:create(DbName, [?ADMIN_CTX, overwrite]).
 
 delete_db(DbName) ->
-    ok = couch_server:delete(DbName, [?ADMIN_CTX]).
-
--endif.
+    couch_server:delete(DbName, [?ADMIN_CTX]).
diff --git a/test/couch_compress_tests.erl b/test/couch_compress_tests.erl
new file mode 100644
index 0000000..6d6e6a7
--- /dev/null
+++ b/test/couch_compress_tests.erl
@@ -0,0 +1,74 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_compress_tests).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+-define(TERM, {[{a, 1}, {b, 2}, {c, 3}, {d, 4}, {e, 5}]}).
+-define(NONE, <<131,104,1,108,0,0,0,5,104,2,100,0,1,97,97,1,
+    104,2,100,0,1,98,97,2,104,2,100,0,1,99,97,3,104,2,100,0,
+    1,100,97,4,104,2,100,0,1,101,97,5,106>>).
+-define(DEFLATE, <<131,80,0,0,0,48,120,218,203,96,204,97,96,
+    96,96,205,96,74,97,96,76,76,100,4,211,73,137,76,96,58,57,
+    145,25,76,167,36,178,128,233,212,68,214,44,0,212,169,9,51>>).
+-define(SNAPPY, <<1,49,64,131,104,1,108,0,0,0,5,104,2,100,0,
+    1,97,97,1,104,1,8,8,98,97,2,5,8,8,99,97,3,5,8,44,100,97,
+    4,104,2,100,0,1,101,97,5,106>>).
+-define(CORRUPT, <<2,12,85,06>>).
+
+
+compress_test_() ->
+    [
+        ?_assertEqual(?NONE, couch_compress:compress(?TERM, none)),
+        ?_assertEqual(?DEFLATE, couch_compress:compress(?TERM, {deflate, 9})),
+        ?_assertEqual(?SNAPPY, couch_compress:compress(?TERM, snappy))
+    ].
+
+decompress_test_() ->
+    [
+        ?_assertEqual(?TERM, couch_compress:decompress(?NONE)),
+        ?_assertEqual(?TERM, couch_compress:decompress(?DEFLATE)),
+        ?_assertEqual(?TERM, couch_compress:decompress(?SNAPPY)),
+        ?_assertError(invalid_compression, couch_compress:decompress(?CORRUPT))
+    ].
+
+recompress_test_() ->
+    [
+        ?_assertEqual(?DEFLATE, couch_compress:compress(?NONE, {deflate, 9})),
+        ?_assertEqual(?SNAPPY, couch_compress:compress(?NONE, snappy)),
+        ?_assertEqual(?NONE, couch_compress:compress(?DEFLATE, none)),
+        ?_assertEqual(?SNAPPY, couch_compress:compress(?DEFLATE, snappy)),
+        ?_assertEqual(?NONE, couch_compress:compress(?SNAPPY, none)),
+        ?_assertEqual(?DEFLATE, couch_compress:compress(?SNAPPY, {deflate, 9}))
+    ].
+
+is_compressed_test_() ->
+    [
+        ?_assert(couch_compress:is_compressed(?NONE, none)),
+        ?_assert(couch_compress:is_compressed(?DEFLATE, {deflate, 9})),
+        ?_assert(couch_compress:is_compressed(?SNAPPY, snappy)),
+        ?_assertNot(couch_compress:is_compressed(?NONE, {deflate, 0})),
+        ?_assertNot(couch_compress:is_compressed(?NONE, {deflate, 9})),
+        ?_assertNot(couch_compress:is_compressed(?NONE, snappy)),
+        ?_assertNot(couch_compress:is_compressed(?DEFLATE, none)),
+        ?_assertNot(couch_compress:is_compressed(?DEFLATE, snappy)),
+        ?_assertNot(couch_compress:is_compressed(?SNAPPY, none)),
+        ?_assertNot(couch_compress:is_compressed(?SNAPPY, {deflate, 9})),
+        ?_assertError(invalid_compression,
+            couch_compress:is_compressed(?CORRUPT, none)),
+        ?_assertError(invalid_compression,
+            couch_compress:is_compressed(?CORRUPT, {deflate, 9})),
+        ?_assertError(invalid_compression,
+            couch_compress:is_compressed(?CORRUPT, snappy))
+    ].
diff --git a/test/couch_db_plugin_tests.erl b/test/couch_db_plugin_tests.erl
index 77d9529..337207e 100644
--- a/test/couch_db_plugin_tests.erl
+++ b/test/couch_db_plugin_tests.erl
@@ -34,8 +34,6 @@
 -include_lib("couch/include/couch_eunit.hrl").
 -include_lib("couch/include/couch_db.hrl").
 
--record(ctx, {pid, handle}).
-
 %% couch_epi_plugin behaviour
 
 app() -> test_app.
@@ -46,28 +44,13 @@
 processes() -> [].
 notify(_, _, _) -> ok.
 
-start_epi() ->
-    application:load(couch_epi),
-    Plugins = application:get_env(couch_epi, plugins, []),
-    ok = application:set_env(couch_epi, plugins, append_if_missing(Plugins, ?MODULE)),
-    ok = application:start(couch_epi).
-
-append_if_missing(List, Value) ->
-    case lists:member(Value, List) of
-        true -> List;
-        false -> [Value | List]
-    end.
-
 setup() ->
-    error_logger:tty(false),
-    start_epi(),
-    #ctx{handle = couch_epi:get_handle(couch_db)}.
+    couch_tests:setup([
+        couch_epi_dispatch:dispatch(chttpd, ?MODULE)
+    ]).
 
-teardown(#ctx{}) ->
-    Plugins = application:get_env(couch_epi, plugins, []),
-    application:set_env(couch_epi, plugins, Plugins -- [?MODULE]),
-    application:stop(couch_epi),
-    ok.
+teardown(Ctx) ->
+    couch_tests:teardown(Ctx).
 
 validate_dbname({true, _Db}, _) -> true;
 validate_dbname({false, _Db}, _) -> false;
@@ -97,7 +80,7 @@
     {
         "callback tests",
         {
-            foreach, fun setup/0, fun teardown/1,
+            setup, fun setup/0, fun teardown/1,
             [
                 fun validate_dbname_match/0,
                 fun validate_dbname_no_match/0,
@@ -128,77 +111,65 @@
 
 
 validate_dbname_match() ->
-    ?_assertMatch(
-        {true, [validate_dbname, db]},
-        couch_db_plugin:validate_dbname({true, [db]}, db)).
+    ?assert(couch_db_plugin:validate_dbname({true, [db]}, db)).
 
 validate_dbname_no_match() ->
-    ?_assertMatch(
-        {false, [db]},
-        couch_db_plugin:validate_dbname({false, [db]}, db)).
+    ?assertNot(couch_db_plugin:validate_dbname({false, [db]}, db)).
 
 validate_dbname_throw() ->
-    ?_assertThrow(
+    ?assertThrow(
         validate_dbname,
         couch_db_plugin:validate_dbname({fail, [db]}, db)).
 
 before_doc_update_match() ->
-    ?_assertMatch(
+    ?assertMatch(
         {true, [before_doc_update, doc]},
         couch_db_plugin:before_doc_update(#db{}, {true, [doc]})).
 
 before_doc_update_no_match() ->
-    ?_assertMatch(
+    ?assertMatch(
         {false, [doc]},
         couch_db_plugin:before_doc_update(#db{}, {false, [doc]})).
 
 before_doc_update_throw() ->
-    ?_assertThrow(
+    ?assertThrow(
         before_doc_update,
         couch_db_plugin:before_doc_update(#db{}, {fail, [doc]})).
 
 
 after_doc_read_match() ->
-    ?_assertMatch(
+    ?assertMatch(
         {true, [after_doc_read, doc]},
         couch_db_plugin:after_doc_read(#db{}, {true, [doc]})).
 
 after_doc_read_no_match() ->
-    ?_assertMatch(
+    ?assertMatch(
         {false, [doc]},
         couch_db_plugin:after_doc_read(#db{}, {false, [doc]})).
 
 after_doc_read_throw() ->
-    ?_assertThrow(
+    ?assertThrow(
         after_doc_read,
         couch_db_plugin:after_doc_read(#db{}, {fail, [doc]})).
 
 
 validate_docid_match() ->
-    ?_assertMatch(
-        {true, [validate_docid, doc]},
-        couch_db_plugin:validate_docid({true, [doc]})).
+    ?assert(couch_db_plugin:validate_docid({true, [doc]})).
 
 validate_docid_no_match() ->
-    ?_assertMatch(
-        {false, [doc]},
-        couch_db_plugin:validate_docid({false, [doc]})).
+    ?assertNot(couch_db_plugin:validate_docid({false, [doc]})).
 
 validate_docid_throw() ->
-    ?_assertThrow(
+    ?assertThrow(
         validate_docid,
         couch_db_plugin:validate_docid({fail, [doc]})).
 
 
 check_is_admin_match() ->
-    ?_assertMatch(
-        true,
-        couch_db_plugin:check_is_admin({true, [db]})).
+    ?assert(couch_db_plugin:check_is_admin({true, [db]})).
 
 check_is_admin_no_match() ->
-    ?assertMatch(
-        false,
-        couch_db_plugin:check_is_admin({false, [db]})).
+    ?assertNot(couch_db_plugin:check_is_admin({false, [db]})).
 
 check_is_admin_throw() ->
     ?assertThrow(
@@ -206,14 +177,14 @@
         couch_db_plugin:check_is_admin({fail, [db]})).
 
 on_delete_match() ->
-    ?_assertMatch(
-        true,
-        couch_db_plugin:on_delete(true, [])).
+    ?assertMatch(
+       [true],
+       couch_db_plugin:on_delete(true, [])).
 
 on_delete_no_match() ->
-    ?_assertMatch(
-        false,
-        couch_db_plugin:on_delete(false, [])).
+    ?assertMatch(
+       [false],
+       couch_db_plugin:on_delete(false, [])).
 
 on_delete_throw() ->
     ?assertThrow(
diff --git a/test/couch_db_tests.erl b/test/couch_db_tests.erl
index f614102..c57a0d4 100644
--- a/test/couch_db_tests.erl
+++ b/test/couch_db_tests.erl
@@ -39,6 +39,18 @@
         }
     }.
 
+open_db_test_()->
+    {
+        "Database open tests",
+        {
+            setup,
+            fun setup/0, fun test_util:stop_couch/1,
+            fun(_) ->
+                [should_create_db_if_missing()]
+            end
+        }
+    }.
+
 
 should_create_db() ->
     DbName = ?tempdb(),
@@ -98,6 +110,13 @@
                            ?_assert(loop(DbName, N))}}
      || N <- [10, 100]].
 
+should_create_db_if_missing() ->
+    DbName = ?tempdb(),
+    {ok, Db} = couch_db:open(DbName, [{create_if_missing, true}]),
+    ok = couch_db:close(Db),
+    {ok, AllDbs} = couch_server:all_databases(),
+    ?_assert(lists:member(DbName, AllDbs)).
+
 loop(_, 0) ->
     true;
 loop(DbName, N) ->
diff --git a/test/couch_etag_tests.erl b/test/couch_etag_tests.erl
new file mode 100644
index 0000000..9d15e48
--- /dev/null
+++ b/test/couch_etag_tests.erl
@@ -0,0 +1,30 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couch_etag_tests).
+
+-include_lib("eunit/include/eunit.hrl").
+
+local_with_empty_body_test() ->
+    Etag = couch_httpd:doc_etag(<<"_local/local-and-empty">>, {[]}, {0, <<"1">>}),
+    ?assertEqual(Etag, <<"\"5ZVXQYO7VLEOU0TL9VXDNP5PV\"">>).
+
+
+local_with_body_test() ->
+    DocBody = {[{<<"hello">>,<<"world">>},{<<"relax">>,true}]},
+    Etag = couch_httpd:doc_etag(<<"_local/local-with-body">>, DocBody, {0, <<"1">>}),
+    ?assertEqual(Etag, <<"\"CEFXP6WH8OKYIWO1GLGBHKCCA\"">>).
+
+normal_doc_uses_rev_test() ->
+    DocBody = {[{<<"hello">>,<<"world">>},{<<"relax">>,true}]},
+    Etag = couch_httpd:doc_etag(<<"normal-doc">>, DocBody, {1, <<"efda11e34e88ebe31a2f83e84a0435b6">>}),
+    ?assertEqual(Etag, <<"\"1-efda11e34e88ebe31a2f83e84a0435b6\"">>).
diff --git a/test/couch_file_tests.erl b/test/couch_file_tests.erl
index 4d0bbac..497999e 100644
--- a/test/couch_file_tests.erl
+++ b/test/couch_file_tests.erl
@@ -24,7 +24,10 @@
     Fd.
 
 teardown(Fd) ->
-    ok = couch_file:close(Fd).
+    case is_process_alive(Fd) of
+        true -> ok = couch_file:close(Fd);
+        false -> ok
+    end.
 
 open_close_test_() ->
     {
@@ -126,8 +129,18 @@
 should_fsync(Fd) ->
     {"How does on test fsync?", ?_assertMatch(ok, couch_file:sync(Fd))}.
 
-should_not_read_beyond_eof(_) ->
-    {"No idea how to test reading beyond EOF", ?_assert(true)}.
+should_not_read_beyond_eof(Fd) ->
+    BigBin = list_to_binary(lists:duplicate(100000, 0)),
+    DoubleBin = round(byte_size(BigBin) * 2),
+    {ok, Pos, _Size} = couch_file:append_binary(Fd, BigBin),
+    {_, Filepath} = couch_file:process_info(Fd),
+    %% corrupt db file
+    {ok, Io} = file:open(Filepath, [read, write, binary]),
+    ok = file:pwrite(Io, Pos, <<0:1/integer, DoubleBin:31/integer>>),
+    file:close(Io),
+    unlink(Fd),
+    ExpectedError = {badmatch, {'EXIT', {bad_return_value, read_beyond_eof}}},
+    ?_assertError(ExpectedError, couch_file:pread_binary(Fd, Pos)).
 
 should_truncate(Fd) ->
     {ok, 0, _} = couch_file:append_term(Fd, foo),
@@ -137,6 +150,38 @@
     ok = couch_file:truncate(Fd, Size),
     ?_assertMatch({ok, foo}, couch_file:pread_term(Fd, 0)).
 
+pread_limit_test_() ->
+    {
+        "Read limit tests",
+        {
+            setup,
+            fun() ->
+                Ctx = test_util:start(?MODULE),
+                config:set("couchdb", "max_pread_size", "50000"),
+                Ctx
+            end,
+            fun(Ctx) ->
+                config:delete("couchdb", "max_pread_size"),
+                test_util:stop(Ctx)
+            end,
+            ?foreach([
+                fun should_increase_file_size_on_write/1,
+                fun should_return_current_file_size_on_write/1,
+                fun should_write_and_read_term/1,
+                fun should_write_and_read_binary/1,
+                fun should_not_read_more_than_pread_limit/1
+            ])
+        }
+    }.
+
+should_not_read_more_than_pread_limit(Fd) ->
+    BigBin = list_to_binary(lists:duplicate(100000, 0)),
+    {ok, Pos, _Size} = couch_file:append_binary(Fd, BigBin),
+    unlink(Fd),
+    ExpectedError = {badmatch, {'EXIT', {bad_return_value,
+        {exceed_pread_limit, 50000}}}},
+    ?_assertError(ExpectedError, couch_file:pread_binary(Fd, Pos)).
+
 
 header_test_() ->
     {
@@ -274,3 +319,180 @@
     Term = lists:nth(random:uniform(4) + 1, Choices),
     {ok, _, _} = couch_file:append_term(Fd, Term),
     write_random_data(Fd, N - 1).
+
+
+delete_test_() ->
+    {
+        "File delete tests",
+        {
+            foreach,
+            fun() ->
+                meck:new(config, [passthrough]),
+                File = ?tempfile() ++ ".couch",
+                RootDir = filename:dirname(File),
+                ok = couch_file:init_delete_dir(RootDir),
+                ok = file:write_file(File, <<>>),
+                {RootDir, File}
+            end,
+            fun({_, File}) ->
+                meck:unload(config),
+                file:delete(File)
+            end,
+            [
+                fun(Cfg) ->
+                    {"enable_database_recovery = false, context = delete",
+                    make_enable_recovery_test_case(Cfg, false, delete)}
+                end,
+                fun(Cfg) ->
+                    {"enable_database_recovery = true, context = delete",
+                    make_enable_recovery_test_case(Cfg, true, delete)}
+                end,
+                fun(Cfg) ->
+                    {"enable_database_recovery = false, context = compaction",
+                    make_enable_recovery_test_case(Cfg, false, compaction)}
+                end,
+                fun(Cfg) ->
+                    {"enable_database_recovery = true, context = compaction",
+                    make_enable_recovery_test_case(Cfg, true, compaction)}
+                end,
+                fun(Cfg) ->
+                    {"delete_after_rename = true",
+                    make_delete_after_rename_test_case(Cfg, true)}
+                end,
+                fun(Cfg) ->
+                    {"delete_after_rename = false",
+                    make_delete_after_rename_test_case(Cfg, false)}
+                end
+            ]
+        }
+    }.
+
+
+make_enable_recovery_test_case({RootDir, File}, EnableRecovery, Context) ->
+    meck:expect(config, get_boolean, fun
+        ("couchdb", "enable_database_recovery", _) -> EnableRecovery;
+        ("couchdb", "delete_after_rename", _) -> false
+    end),
+    FileExistsBefore = filelib:is_regular(File),
+    couch_file:delete(RootDir, File, [{context, Context}]),
+    FileExistsAfter = filelib:is_regular(File),
+    RenamedFiles = filelib:wildcard(filename:rootname(File) ++ "*.deleted.*"),
+    DeletedFiles = filelib:wildcard(RootDir ++ "/.delete/*"),
+    {ExpectRenamedCount, ExpectDeletedCount} = if
+        EnableRecovery andalso Context =:= delete -> {1, 0};
+        true -> {0, 1}
+    end,
+    [
+        ?_assert(FileExistsBefore),
+        ?_assertNot(FileExistsAfter),
+        ?_assertEqual(ExpectRenamedCount, length(RenamedFiles)),
+        ?_assertEqual(ExpectDeletedCount, length(DeletedFiles))
+    ].
+
+make_delete_after_rename_test_case({RootDir, File}, DeleteAfterRename) ->
+    meck:expect(config, get_boolean, fun
+        ("couchdb", "enable_database_recovery", _) -> false;
+        ("couchdb", "delete_after_rename", _) -> DeleteAfterRename
+    end),
+    FileExistsBefore = filelib:is_regular(File),
+    couch_file:delete(RootDir, File),
+    FileExistsAfter = filelib:is_regular(File),
+    RenamedFiles = filelib:wildcard(filename:join([RootDir, ".delete", "*"])),
+    ExpectRenamedCount = if DeleteAfterRename -> 0; true -> 1 end,
+    [
+        ?_assert(FileExistsBefore),
+        ?_assertNot(FileExistsAfter),
+        ?_assertEqual(ExpectRenamedCount, length(RenamedFiles))
+    ].
+
+
+nuke_dir_test_() ->
+    {
+        "Nuke directory tests",
+        {
+            foreach,
+            fun() ->
+                meck:new(config, [passthrough]),
+                File0 = ?tempfile() ++ ".couch",
+                RootDir = filename:dirname(File0),
+                BaseName = filename:basename(File0),
+                Seed = crypto:rand_uniform(1000000000, 9999999999),
+                DDocDir = io_lib:format("db.~b_design", [Seed]),
+                ViewDir = filename:join([RootDir, DDocDir]),
+                file:make_dir(ViewDir),
+                File = filename:join([ViewDir, BaseName]),
+                file:rename(File0, File),
+                ok = couch_file:init_delete_dir(RootDir),
+                ok = file:write_file(File, <<>>),
+                {RootDir, ViewDir}
+            end,
+            fun({RootDir, ViewDir}) ->
+                meck:unload(config),
+                remove_dir(ViewDir),
+                Ext = filename:extension(ViewDir),
+                case filelib:wildcard(RootDir ++ "/*.deleted" ++ Ext) of
+                    [DelDir] -> remove_dir(DelDir);
+                    _ -> ok
+                end
+            end,
+            [
+                fun(Cfg) ->
+                    {"enable_database_recovery = false",
+                    make_rename_dir_test_case(Cfg, false)}
+                end,
+                fun(Cfg) ->
+                    {"enable_database_recovery = true",
+                    make_rename_dir_test_case(Cfg, true)}
+                end,
+                fun(Cfg) ->
+                    {"delete_after_rename = true",
+                    make_delete_dir_test_case(Cfg, true)}
+                end,
+                fun(Cfg) ->
+                    {"delete_after_rename = false",
+                    make_delete_dir_test_case(Cfg, false)}
+                end
+            ]
+        }
+    }.
+
+
+make_rename_dir_test_case({RootDir, ViewDir}, EnableRecovery) ->
+    meck:expect(config, get_boolean, fun
+        ("couchdb", "enable_database_recovery", _) -> EnableRecovery;
+        ("couchdb", "delete_after_rename", _) -> true
+    end),
+    DirExistsBefore = filelib:is_dir(ViewDir),
+    couch_file:nuke_dir(RootDir, ViewDir),
+    DirExistsAfter = filelib:is_dir(ViewDir),
+    Ext = filename:extension(ViewDir),
+    RenamedDirs = filelib:wildcard(RootDir ++ "/*.deleted" ++ Ext),
+    ExpectRenamedCount = if EnableRecovery -> 1; true -> 0 end,
+    [
+        ?_assert(DirExistsBefore),
+        ?_assertNot(DirExistsAfter),
+        ?_assertEqual(ExpectRenamedCount, length(RenamedDirs))
+    ].
+
+make_delete_dir_test_case({RootDir, ViewDir}, DeleteAfterRename) ->
+    meck:expect(config, get_boolean, fun
+        ("couchdb", "enable_database_recovery", _) -> false;
+        ("couchdb", "delete_after_rename", _) -> DeleteAfterRename
+    end),
+    DirExistsBefore = filelib:is_dir(ViewDir),
+    couch_file:nuke_dir(RootDir, ViewDir),
+    DirExistsAfter = filelib:is_dir(ViewDir),
+    Ext = filename:extension(ViewDir),
+    RenamedDirs = filelib:wildcard(RootDir ++ "/*.deleted" ++ Ext),
+    RenamedFiles = filelib:wildcard(RootDir ++ "/.delete/*"),
+    ExpectRenamedCount = if DeleteAfterRename -> 0; true -> 1 end,
+    [
+        ?_assert(DirExistsBefore),
+        ?_assertNot(DirExistsAfter),
+        ?_assertEqual(0, length(RenamedDirs)),
+        ?_assertEqual(ExpectRenamedCount, length(RenamedFiles))
+    ].
+
+remove_dir(Dir) ->
+    [file:delete(File) || File <- filelib:wildcard(filename:join([Dir, "*"]))],
+    file:del_dir(Dir).
diff --git a/test/couch_key_tree_tests.erl b/test/couch_key_tree_tests.erl
index 431391f..8aa886f 100644
--- a/test/couch_key_tree_tests.erl
+++ b/test/couch_key_tree_tests.erl
@@ -88,6 +88,8 @@
                 should_gather_non_existant_leaf(),
                 should_gather_leaf(),
                 shoul_gather_multiple_leaves(),
+                should_gather_single_leaf_for_multiple_revs(),
+                should_gather_multiple_for_multiple_revs(),
                 should_retrieve_full_key_path(),
                 should_retrieve_full_key_path_for_node(),
                 should_retrieve_leaves_with_parent_node(),
@@ -347,6 +349,18 @@
     ?_assertEqual({[{"bar", {1, ["1a","1"]}},{"bar",{1, ["1b","1"]}}],[]},
                   couch_key_tree:get_key_leafs(TwoChildSibs, [{0, "1"}])).
 
+should_gather_single_leaf_for_multiple_revs() ->
+    OneChild = [{0, {"1","foo",[{"1a", "bar", []}]}}],
+    ToFind = [{0, "1"}, {1, "1a"}],
+    ?_assertEqual({[{"bar", {1, ["1a", "1"]}}],[]},
+                  couch_key_tree:get_key_leafs(OneChild, ToFind)).
+
+should_gather_multiple_for_multiple_revs() ->
+    TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
+    ToFind = [{0, "1"}, {1, "1a"}],
+    ?_assertEqual({[{"bar", {1, ["1a","1"]}},{"bar",{1, ["1b","1"]}}],[]},
+                  couch_key_tree:get_key_leafs(TwoChildSibs, ToFind)).
+
 should_retrieve_full_key_path()->
     TwoChildSibs = [{0, {"1","foo", [{"1a", "bar", []}, {"1b", "bar", []}]}}],
     ?_assertEqual({[{0,[{"1", "foo"}]}],[]},
diff --git a/test/couch_server_tests.erl b/test/couch_server_tests.erl
index fcabbe5..c8f8381 100644
--- a/test/couch_server_tests.erl
+++ b/test/couch_server_tests.erl
@@ -26,7 +26,7 @@
     Db.
 
 setup(rename) ->
-    config:set("couchdb", "rename_on_delete", "true", false),
+    config:set("couchdb", "enable_database_recovery", "true", false),
     setup();
 setup(_) ->
     setup().
@@ -36,7 +36,7 @@
     (catch file:delete(Db#db.filepath)).
 
 teardown(rename, Db) ->
-    config:set("couchdb", "rename_on_delete", "false", false),
+    config:set("couchdb", "enable_database_recovery", "false", false),
     teardown(Db);
 teardown(_, Db) ->
     teardown(Db).
diff --git a/test/couch_stats_tests.erl b/test/couch_stats_tests.erl
deleted file mode 100644
index 762953d..0000000
--- a/test/couch_stats_tests.erl
+++ /dev/null
@@ -1,398 +0,0 @@
-% Licensed under the Apache License, Version 2.0 (the "License"); you may not
-% use this file except in compliance with the License. You may obtain a copy of
-% the License at
-%
-%   http://www.apache.org/licenses/LICENSE-2.0
-%
-% Unless required by applicable law or agreed to in writing, software
-% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-% License for the specific language governing permissions and limitations under
-% the License.
-
--module(couch_stats_tests).
-
--include_lib("couch/include/couch_eunit.hrl").
--include_lib("couch/include/couch_db.hrl").
-
--define(STATS_CFG_FIXTURE,
-    filename:join([?FIXTURESDIR, "couch_stats_aggregates.cfg"])).
--define(STATS_INI_FIXTURE,
-    filename:join([?FIXTURESDIR, "couch_stats_aggregates.ini"])).
--define(TIMEOUT, 1000).
--define(TIMEWAIT, 500).
-
-
--ifdef(run_broken_tests).
-
-setup_collector() ->
-    couch_stats_collector:start(),
-    ok.
-
-setup_aggregator(_) ->
-    {ok, Pid} = config:start_link([?STATS_INI_FIXTURE]),
-    {ok, _} = couch_stats_collector:start(),
-    {ok, _} = couch_stats_aggregator:start(?STATS_CFG_FIXTURE),
-    Pid.
-
-teardown_collector(_) ->
-    couch_stats_collector:stop(),
-    ok.
-
-teardown_aggregator(_, Pid) ->
-    couch_stats_aggregator:stop(),
-    couch_stats_collector:stop(),
-    test_util:stop_config(Pid).
-
-
-couch_stats_collector_test_() ->
-    {
-        "CouchDB stats collector tests",
-        {
-            foreach,
-            fun setup_collector/0, fun teardown_collector/1,
-            [
-                should_increment_counter(),
-                should_decrement_counter(),
-                should_increment_and_decrement_counter(),
-                should_record_absolute_values(),
-                should_clear_absolute_values(),
-                should_track_process_count(),
-                should_increment_counter_multiple_times_per_pid(),
-                should_decrement_counter_on_process_exit(),
-                should_decrement_for_each_track_process_count_call_on_exit(),
-                should_return_all_counters_and_absolute_values(),
-                should_return_incremental_counters(),
-                should_return_absolute_values()
-            ]
-        }
-    }.
-
-couch_stats_aggregator_test_() ->
-    Funs = [
-        fun should_init_empty_aggregate/2,
-        fun should_get_empty_aggregate/2,
-        fun should_change_stats_on_values_add/2,
-        fun should_change_stats_for_all_times_on_values_add/2,
-        fun should_change_stats_on_values_change/2,
-        fun should_change_stats_for_all_times_on_values_change/2,
-        fun should_not_remove_data_after_some_time_for_0_sample/2,
-        fun should_remove_data_after_some_time_for_other_samples/2
-    ],
-    {
-        "CouchDB stats aggregator tests",
-        [
-            {
-                "Absolute values",
-                {
-                    foreachx,
-                    fun setup_aggregator/1, fun teardown_aggregator/2,
-                    [{absolute, Fun} || Fun <- Funs]
-                }
-            },
-            {
-                "Counters",
-                {
-                    foreachx,
-                    fun setup_aggregator/1, fun teardown_aggregator/2,
-                    [{counter, Fun} || Fun <- Funs]
-                }
-            }
-        ]
-    }.
-
-
-should_increment_counter() ->
-    ?_assertEqual(100,
-        begin
-            AddCount = fun() -> couch_stats_collector:increment(foo) end,
-            repeat(AddCount, 100),
-            couch_stats_collector:get(foo)
-        end).
-
-should_decrement_counter() ->
-    ?_assertEqual(67,
-        begin
-            AddCount = fun() -> couch_stats_collector:increment(foo) end,
-            RemCount = fun() -> couch_stats_collector:decrement(foo) end,
-            repeat(AddCount, 100),
-            repeat(RemCount, 33),
-            couch_stats_collector:get(foo)
-        end).
-
-should_increment_and_decrement_counter() ->
-    ?_assertEqual(0,
-        begin
-            AddCount = fun() -> couch_stats_collector:increment(foo) end,
-            RemCount = fun() -> couch_stats_collector:decrement(foo) end,
-            repeat(AddCount, 100),
-            repeat(RemCount, 25),
-            repeat(AddCount, 10),
-            repeat(RemCount, 5),
-            repeat(RemCount, 80),
-            couch_stats_collector:get(foo)
-        end).
-
-should_record_absolute_values() ->
-    ?_assertEqual(lists:seq(1, 15),
-        begin
-            lists:map(fun(Val) ->
-                couch_stats_collector:record(bar, Val)
-            end, lists:seq(1, 15)),
-            couch_stats_collector:get(bar)
-        end).
-
-should_clear_absolute_values() ->
-    ?_assertEqual(nil,
-        begin
-            lists:map(fun(Val) ->
-                couch_stats_collector:record(bar, Val)
-            end, lists:seq(1, 15)),
-            couch_stats_collector:clear(bar),
-            couch_stats_collector:get(bar)
-        end).
-
-should_track_process_count() ->
-    ?_assertMatch({_, 1}, spawn_and_count(1)).
-
-should_increment_counter_multiple_times_per_pid() ->
-    ?_assertMatch({_, 3}, spawn_and_count(3)).
-
-should_decrement_counter_on_process_exit() ->
-    ?_assertEqual(2,
-        begin
-            {Pid, 1} = spawn_and_count(1),
-            spawn_and_count(2),
-            test_util:stop_sync_throw(Pid,
-                fun() -> Pid ! sepuku end, timeout, ?TIMEOUT),
-            % sleep for awhile to let collector handle the updates
-            % suddenly, it couldn't notice process death instantly
-            timer:sleep(?TIMEWAIT),
-            couch_stats_collector:get(hoopla)
-        end).
-
-should_decrement_for_each_track_process_count_call_on_exit() ->
-    ?_assertEqual(2,
-        begin
-            {_, 2} = spawn_and_count(2),
-            {Pid, 6} = spawn_and_count(4),
-            test_util:stop_sync_throw(Pid,
-                fun() -> Pid ! sepuku end, timeout, ?TIMEOUT),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_collector:get(hoopla)
-        end).
-
-should_return_all_counters_and_absolute_values() ->
-    ?_assertEqual([{bar,[1.0,0.0]}, {foo,1}],
-        begin
-            couch_stats_collector:record(bar, 0.0),
-            couch_stats_collector:record(bar, 1.0),
-            couch_stats_collector:increment(foo),
-            lists:sort(couch_stats_collector:all())
-        end).
-
-should_return_incremental_counters() ->
-    ?_assertEqual([{foo,1}],
-        begin
-            couch_stats_collector:record(bar, 0.0),
-            couch_stats_collector:record(bar, 1.0),
-            couch_stats_collector:increment(foo),
-            lists:sort(couch_stats_collector:all(incremental))
-        end).
-
-should_return_absolute_values() ->
-    ?_assertEqual([{bar,[1.0,0.0]}, {zing, "Z"}],
-        begin
-            couch_stats_collector:record(bar, 0.0),
-            couch_stats_collector:record(bar, 1.0),
-            couch_stats_collector:record(zing, 90),
-            couch_stats_collector:increment(foo),
-            lists:sort(couch_stats_collector:all(absolute))
-        end).
-
-should_init_empty_aggregate(absolute, _) ->
-    {Aggs} = couch_stats_aggregator:all(),
-    ?_assertEqual({[{'11', make_agg(<<"randomosity">>,
-                                    null, null, null, null, null)}]},
-                  couch_util:get_value(number, Aggs));
-should_init_empty_aggregate(counter, _) ->
-    {Aggs} = couch_stats_aggregator:all(),
-    ?_assertEqual({[{stuff, make_agg(<<"yay description">>,
-                                     null, null, null, null, null)}]},
-                  couch_util:get_value(testing, Aggs)).
-
-should_get_empty_aggregate(absolute, _) ->
-    ?_assertEqual(make_agg(<<"randomosity">>, null, null, null, null, null),
-             couch_stats_aggregator:get_json({number, '11'}));
-should_get_empty_aggregate(counter, _) ->
-    ?_assertEqual(make_agg(<<"yay description">>, null, null, null, null, null),
-             couch_stats_aggregator:get_json({testing, stuff})).
-
-should_change_stats_on_values_add(absolute, _) ->
-    lists:foreach(fun(X) ->
-        couch_stats_collector:record({number, 11}, X)
-    end, lists:seq(0, 10)),
-    couch_stats_aggregator:collect_sample(),
-    ?_assertEqual(make_agg(<<"randomosity">>, 5.0, 5.0, null, 5.0, 5.0),
-                  couch_stats_aggregator:get_json({number, 11}));
-should_change_stats_on_values_add(counter, _) ->
-    lists:foreach(fun(_) ->
-        couch_stats_collector:increment({testing, stuff})
-    end, lists:seq(1, 100)),
-    couch_stats_aggregator:collect_sample(),
-    ?_assertEqual(make_agg(<<"yay description">>, 100.0, 100.0, null, 100, 100),
-                  couch_stats_aggregator:get_json({testing, stuff})).
-
-should_change_stats_for_all_times_on_values_add(absolute, _) ->
-    lists:foreach(fun(X) ->
-        couch_stats_collector:record({number, 11}, X)
-    end, lists:seq(0, 10)),
-    couch_stats_aggregator:collect_sample(),
-    ?_assertEqual(make_agg(<<"randomosity">>, 5.0, 5.0, null, 5.0, 5.0),
-                  couch_stats_aggregator:get_json({number, 11}, 1));
-should_change_stats_for_all_times_on_values_add(counter, _) ->
-    lists:foreach(fun(_) ->
-        couch_stats_collector:increment({testing, stuff})
-    end, lists:seq(1, 100)),
-    couch_stats_aggregator:collect_sample(),
-    ?_assertEqual(make_agg(<<"yay description">>, 100.0, 100.0, null, 100, 100),
-                  couch_stats_aggregator:get_json({testing, stuff}, 1)).
-
-should_change_stats_on_values_change(absolute, _) ->
-    ?_assertEqual(make_agg(<<"randomosity">>, 20.0, 10.0, 7.071, 5.0, 15.0),
-        begin
-            lists:foreach(fun(X) ->
-                couch_stats_collector:record({number, 11}, X)
-            end, lists:seq(0, 10)),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_collector:record({number, 11}, 15),
-            couch_stats_aggregator:collect_sample(),
-            couch_stats_aggregator:get_json({number, 11})
-        end);
-should_change_stats_on_values_change(counter, _) ->
-    ?_assertEqual(make_agg(<<"yay description">>, 100.0, 50.0, 70.711, 0, 100),
-        begin
-            lists:foreach(fun(_) ->
-                couch_stats_collector:increment({testing, stuff})
-            end, lists:seq(1, 100)),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_aggregator:collect_sample(),
-            couch_stats_aggregator:get_json({testing, stuff})
-        end).
-
-should_change_stats_for_all_times_on_values_change(absolute, _) ->
-    ?_assertEqual(make_agg(<<"randomosity">>, 20.0, 10.0, 7.071, 5.0, 15.0),
-        begin
-            lists:foreach(fun(X) ->
-                couch_stats_collector:record({number, 11}, X)
-            end, lists:seq(0, 10)),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_collector:record({number, 11}, 15),
-            couch_stats_aggregator:collect_sample(),
-            couch_stats_aggregator:get_json({number, 11}, 1)
-        end);
-should_change_stats_for_all_times_on_values_change(counter, _) ->
-    ?_assertEqual(make_agg(<<"yay description">>, 100.0, 50.0, 70.711, 0, 100),
-        begin
-            lists:foreach(fun(_) ->
-                couch_stats_collector:increment({testing, stuff})
-            end, lists:seq(1, 100)),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_aggregator:collect_sample(),
-            couch_stats_aggregator:get_json({testing, stuff}, 1)
-        end).
-
-should_not_remove_data_after_some_time_for_0_sample(absolute, _) ->
-    ?_assertEqual(make_agg(<<"randomosity">>, 20.0, 10.0, 7.071, 5.0, 15.0),
-        begin
-            lists:foreach(fun(X) ->
-                couch_stats_collector:record({number, 11}, X)
-            end, lists:seq(0, 10)),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_collector:record({number, 11}, 15),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_aggregator:collect_sample(),
-            couch_stats_aggregator:get_json({number, 11})
-        end);
-should_not_remove_data_after_some_time_for_0_sample(counter, _) ->
-    ?_assertEqual(make_agg(<<"yay description">>, 100.0, 33.333, 57.735, 0, 100),
-        begin
-            lists:foreach(fun(_) ->
-                couch_stats_collector:increment({testing, stuff})
-            end, lists:seq(1, 100)),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_aggregator:collect_sample(),
-            couch_stats_aggregator:get_json({testing, stuff})
-        end).
-
-should_remove_data_after_some_time_for_other_samples(absolute, _) ->
-    ?_assertEqual(make_agg(<<"randomosity">>, 15.0, 15.0, null, 15.0, 15.0),
-        begin
-            lists:foreach(fun(X) ->
-                couch_stats_collector:record({number, 11}, X)
-            end, lists:seq(0, 10)),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_collector:record({number, 11}, 15),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_aggregator:collect_sample(),
-            couch_stats_aggregator:get_json({number, 11}, 1)
-        end);
-should_remove_data_after_some_time_for_other_samples(counter, _) ->
-    ?_assertEqual(make_agg(<<"yay description">>, 0, 0.0, 0.0, 0, 0),
-        begin
-            lists:foreach(fun(_) ->
-                couch_stats_collector:increment({testing, stuff})
-            end, lists:seq(1, 100)),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_aggregator:collect_sample(),
-            timer:sleep(?TIMEWAIT),
-            couch_stats_aggregator:collect_sample(),
-            couch_stats_aggregator:get_json({testing, stuff}, 1)
-        end).
-
-
-spawn_and_count(N) ->
-    Self = self(),
-    Pid = spawn(fun() ->
-        lists:foreach(
-            fun(_) ->
-                couch_stats_collector:track_process_count(hoopla)
-            end, lists:seq(1,N)),
-        Self ! reporting,
-        receive
-            sepuku -> ok
-        end
-    end),
-    receive reporting -> ok end,
-    {Pid, couch_stats_collector:get(hoopla)}.
-
-repeat(_, 0) ->
-    ok;
-repeat(Fun, Count) ->
-    Fun(),
-    repeat(Fun, Count-1).
-
-make_agg(Desc, Sum, Mean, StdDev, Min, Max) ->
-    {[
-        {description, Desc},
-        {current, Sum},
-        {sum, Sum},
-        {mean, Mean},
-        {stddev, StdDev},
-        {min, Min},
-        {max, Max}
-    ]}.
-
--endif.
diff --git a/test/couchdb_auth_tests.erl b/test/couchdb_auth_tests.erl
index 5ee0a5a..9fb4ceb 100644
--- a/test/couchdb_auth_tests.erl
+++ b/test/couchdb_auth_tests.erl
@@ -26,7 +26,7 @@
 
 auth_test_() ->
     Tests = [
-        fun should_not_return_username_on_post_to_session/2,
+        fun should_return_username_on_post_to_session/2,
         fun should_return_authenticated_field/2,
         fun should_return_list_of_handlers/2
     ],
@@ -48,7 +48,7 @@
         {foreachx, fun setup/1, fun teardown/2, [{Mod, Fun} || Fun <- Funs]}
     }.
 
-should_not_return_username_on_post_to_session(_PortType, Url) ->
+should_return_username_on_post_to_session(_PortType, Url) ->
     ?_assertEqual(<<"rocko">>,
         begin
             ok = config:set("admins", "rocko", "artischocko", false),
diff --git a/test/couchdb_compaction_daemon.erl b/test/couchdb_compaction_daemon_tests.erl
similarity index 97%
rename from test/couchdb_compaction_daemon.erl
rename to test/couchdb_compaction_daemon_tests.erl
index 0598a5a..3228031 100644
--- a/test/couchdb_compaction_daemon.erl
+++ b/test/couchdb_compaction_daemon_tests.erl
@@ -10,15 +10,14 @@
 % License for the specific language governing permissions and limitations under
 % the License.
 
--module(couchdb_compaction_daemon).
+-module(couchdb_compaction_daemon_tests).
 
 -include_lib("couch/include/couch_eunit.hrl").
 -include_lib("couch/include/couch_db.hrl").
 
--define(TIMEOUT, 30000).
+-define(TIMEOUT, 60000).
 -define(TIMEOUT_S, ?TIMEOUT div 1000).
 
--ifdef(run_broken_tests).
 
 start() ->
     Ctx = test_util:start_couch(),
@@ -248,7 +247,7 @@
     ok = couch_db:close(Db),
     not lists:any(fun(M) -> M /= self() end, Monitors).
 
-with_config_change(DbName, Fun) ->
+with_config_change(_DbName, Fun) ->
     Current = ets:info(couch_compaction_daemon_config, size),
     Fun(),
     test_util:wait(fun() ->
@@ -257,5 +256,3 @@
             true -> wait
         end
     end).
-
--endif.
diff --git a/test/couchdb_cors_tests.erl b/test/couchdb_cors_tests.erl
index 4b2e2b5..82630bb 100644
--- a/test/couchdb_cors_tests.erl
+++ b/test/couchdb_cors_tests.erl
@@ -15,11 +15,15 @@
 -include_lib("couch/include/couch_eunit.hrl").
 -include_lib("couch/include/couch_db.hrl").
 
+-include_lib("chttpd/include/chttpd_cors.hrl").
 
--define(SUPPORTED_METHODS,
-        "GET, HEAD, POST, PUT, DELETE, TRACE, CONNECT, COPY, OPTIONS").
 -define(TIMEOUT, 1000).
 
+-define(_assertEqualLists(A, B),
+    ?_assertEqual(lists:usort(A), lists:usort(B))).
+
+-define(assertEqualLists(A, B),
+    ?assertEqual(lists:usort(A), lists:usort(B))).
 
 start() ->
     Ctx = test_util:start_couch([ioq]),
@@ -182,18 +186,18 @@
         ?assertEqual(
             "http://example.com",
             proplists:get_value("Access-Control-Allow-Origin", Resp)),
-        ?assertEqual(
-            "Cache-Control, Content-Type, Server",
-            proplists:get_value("Access-Control-Expose-Headers", Resp))
+        ?assertEqualLists(
+            ?COUCH_HEADERS ++ list_simple_headers(Resp),
+            split_list(proplists:get_value("Access-Control-Expose-Headers", Resp)))
     end).
 
 should_make_preflight_request(_, {_, _, Url, DefaultHeaders}) ->
-    ?_assertEqual(?SUPPORTED_METHODS,
+    ?_assertEqualLists(?SUPPORTED_METHODS,
         begin
             Headers = DefaultHeaders
                       ++ [{"Access-Control-Request-Method", "GET"}],
             {ok, _, Resp, _} = test_request:options(Url, Headers),
-            proplists:get_value("Access-Control-Allow-Methods", Resp)
+            split_list(proplists:get_value("Access-Control-Allow-Methods", Resp))
         end).
 
 should_make_prefligh_request_with_port({_, VHost}, {_, _, Url, _}) ->
@@ -251,7 +255,7 @@
         end).
 
 should_make_preflight_request_with_auth(_, {_, _, Url, DefaultHeaders}) ->
-    ?_assertEqual(?SUPPORTED_METHODS,
+    ?_assertEqualLists(?SUPPORTED_METHODS,
         begin
             Hashed = couch_passwords:hash_admin_password(<<"test">>),
             config:set("admins", "test", ?b2l(Hashed), false),
@@ -260,7 +264,7 @@
             {ok, _, Resp, _} = test_request:options(
                 Url, Headers, [{basic_auth, {"test", "test"}}]),
             config:delete("admins", "test", false),
-            proplists:get_value("Access-Control-Allow-Methods", Resp)
+            split_list(proplists:get_value("Access-Control-Allow-Methods", Resp))
         end).
 
 should_not_return_cors_headers_for_invalid_origin({Host, _}) ->
@@ -331,3 +335,10 @@
     [{"Host", "http://example.com"}];
 maybe_append_vhost(false) ->
     [].
+
+split_list(S) ->
+    re:split(S, "\\s*,\\s*", [trim, {return, list}]).
+
+list_simple_headers(Headers) ->
+    LCHeaders = [string:to_lower(K) || {K, _V} <- Headers],
+    lists:filter(fun(H) -> lists:member(H, ?SIMPLE_HEADERS) end, LCHeaders).
diff --git a/test/couchdb_design_doc_tests.erl b/test/couchdb_design_doc_tests.erl
new file mode 100644
index 0000000..eef12e0
--- /dev/null
+++ b/test/couchdb_design_doc_tests.erl
@@ -0,0 +1,88 @@
+% Licensed under the Apache License, Version 2.0 (the "License"); you may not
+% use this file except in compliance with the License. You may obtain a copy of
+% the License at
+%
+%   http://www.apache.org/licenses/LICENSE-2.0
+%
+% Unless required by applicable law or agreed to in writing, software
+% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+% License for the specific language governing permissions and limitations under
+% the License.
+
+-module(couchdb_design_doc_tests).
+
+-include_lib("couch/include/couch_eunit.hrl").
+-include_lib("couch/include/couch_db.hrl").
+
+setup() ->
+    DbName = ?tempdb(),
+    {ok, Db} = couch_db:create(DbName, [?ADMIN_CTX]),
+    ok = couch_db:close(Db),
+    create_design_doc(DbName, <<"_design/foo">>),
+    Addr = config:get("httpd", "bind_address", "127.0.0.1"),
+    Port = integer_to_list(mochiweb_socket_server:get(couch_httpd, port)),
+    BaseUrl = "http://" ++ Addr ++ ":" ++ Port,
+    {?b2l(DbName), BaseUrl}.
+
+
+teardown({DbName, _}) ->
+    couch_server:delete(?l2b(DbName), [?ADMIN_CTX]),
+    ok.
+
+
+design_list_test_() ->
+    {
+        "Check _list functionality",
+        {
+            setup,
+            fun test_util:start_couch/0, fun test_util:stop_couch/1,
+            {
+                foreach,
+                fun setup/0, fun teardown/1,
+                [
+                    fun should_return_empty_when_plain_return/1,
+                    fun should_return_empty_when_no_docs/1
+                ]
+            }
+        }
+    }.
+
+should_return_empty_when_plain_return({DbName, BaseUrl}) ->
+    ?_test(begin
+        ?assertEqual(<<>>,
+            query_text(BaseUrl, DbName, "foo", "_list/plain_return/simple_view"))
+    end).
+
+should_return_empty_when_no_docs({DbName, BaseUrl}) ->
+    ?_test(begin
+        ?assertEqual(<<>>,
+            query_text(BaseUrl, DbName, "foo", "_list/simple_render/simple_view"))
+    end).
+
+create_design_doc(DbName, DDName) ->
+    {ok, Db} = couch_db:open(DbName, [?ADMIN_CTX]),
+    DDoc = couch_doc:from_json_obj({[
+        {<<"_id">>, DDName},
+        {<<"language">>, <<"javascript">>},
+        {<<"views">>, {[
+            {<<"simple_view">>, {[
+                {<<"map">>, <<"function(doc) {emit(doc._id, doc)}">> },
+                {<<"reduce">>, <<"function (key, values, rereduce) {return sum(values);}">> }
+            ]}}
+        ]}},
+        {<<"lists">>, {[
+            {<<"plain_return">>, <<"function(head, req) {return;}">>},
+            {<<"simple_render">>, <<"function(head, req) {var row; while(row=getRow()) {send(JSON.stringify(row)); }}">>}
+        ]}}
+    ]}),
+    {ok, Rev} = couch_db:update_doc(Db, DDoc, []),
+    couch_db:ensure_full_commit(Db),
+    couch_db:close(Db),
+    Rev.
+
+query_text(BaseUrl, DbName, DDoc, Path) ->
+    {ok, Code, _Headers, Body} = test_request:get(
+        BaseUrl ++ "/" ++ DbName ++ "/_design/" ++ DDoc ++ "/" ++ Path),
+    ?assertEqual(200, Code),
+    Body.
diff --git a/test/couchdb_mrview_cors_tests.erl b/test/couchdb_mrview_cors_tests.erl
index 0d61361..f9155ce 100644
--- a/test/couchdb_mrview_cors_tests.erl
+++ b/test/couchdb_mrview_cors_tests.erl
@@ -24,8 +24,14 @@
     ]}}
 ]}).
 
+-define(USER, "mrview_cors_test_admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+
+
 start() ->
     Ctx = test_util:start_couch([chttpd]),
+    ok = config:set("admins", ?USER, ?PASS, _Persist=false),
     ok = config:set("httpd", "enable_cors", "true", false),
     ok = config:set("vhosts", "example.com", "/", false),
     Ctx.
@@ -37,11 +43,14 @@
     config:set("cors", "credentials", "false", false),
     config:set("cors", "origins", "http://example.com", false),
 
-    Addr = config:get("httpd", "bind_address", "127.0.0.1"),
-    Host = "http://" ++ Addr ++ ":" ++ port(PortType),
+    Host = host_url(PortType),
     upload_ddoc(Host, ?b2l(DbName)),
     {Host, ?b2l(DbName)}.
 
+teardown(Ctx) ->
+    ok = config:delete("admins", ?USER, _Persist=false),
+    test_util:stop_couch(Ctx).
+
 teardown(PortType, {_Host, DbName}) ->
     delete_db(PortType, ?l2b(DbName)),
     ok.
@@ -51,7 +60,7 @@
         "CORS for mrview",
         {
             setup,
-            fun start/0, fun test_util:stop_couch/1,
+            fun start/0, fun teardown/1,
             [show_tests()]
         }
     }.
@@ -75,7 +84,7 @@
     ?_test(begin
          ReqUrl = Host ++ "/" ++ DbName ++ "/_design/foo/_show/bar",
          Headers = [{"Origin", "http://example.com"},
-                    {"Access-Control-Request-Method", "GET"}],
+                    {"Access-Control-Request-Method", "GET"}, ?AUTH],
          {ok, _, Resp, Body} = test_request:get(ReqUrl, Headers),
          Origin = proplists:get_value("Access-Control-Allow-Origin", Resp),
          ?assertEqual("http://example.com", Origin),
@@ -86,14 +95,36 @@
     {ok, Db} = couch_db:create(DbName, [?ADMIN_CTX]),
     couch_db:close(Db);
 create_db(clustered, DbName) ->
-    ok = fabric:create_db(DbName, [?ADMIN_CTX]).
+    {ok, Status, _, _} = test_request:put(db_url(DbName), [?AUTH], ""),
+    assert_success(create_db, Status),
+    ok.
 
 delete_db(backdoor, DbName) ->
     couch_server:delete(DbName, [?ADMIN_CTX]);
 delete_db(clustered, DbName) ->
-    ok = fabric:delete_db(DbName, [?ADMIN_CTX]).
+    {ok, Status, _, _} = test_request:delete(db_url(DbName), [?AUTH]),
+    assert_success(delete_db, Status),
+    ok.
 
+assert_success(create_db, Status) ->
+    true = lists:member(Status, [201, 202]);
+assert_success(delete_db, Status) ->
+    true = lists:member(Status, [200, 202]).
+    
 
+host_url(PortType) ->
+    "http://" ++ bind_address(PortType) ++ ":" ++ port(PortType).
+
+bind_address(PortType) ->
+    config:get(section(PortType), "bind_address", "127.0.0.1").
+
+section(backdoor) -> "http";
+section(clustered) -> "chttpd".
+
+db_url(DbName) when is_binary(DbName) ->
+    db_url(binary_to_list(DbName));
+db_url(DbName) when is_list(DbName) ->
+    host_url(clustered) ++ "/" ++ DbName.
 
 port(clustered) ->
     integer_to_list(mochiweb_socket_server:get(chttpd, port));
@@ -104,5 +135,5 @@
 upload_ddoc(Host, DbName) ->
     Url = Host ++ "/" ++ DbName ++ "/_design/foo",
     Body = couch_util:json_encode(?DDOC),
-    {ok, 201, _Resp, _Body} = test_request:put(Url, Body),
+    {ok, 201, _Resp, _Body} = test_request:put(Url, [?AUTH], Body),
     ok.
diff --git a/test/couchdb_vhosts_tests.erl b/test/couchdb_vhosts_tests.erl
index 13b69bb..d1da063 100644
--- a/test/couchdb_vhosts_tests.erl
+++ b/test/couchdb_vhosts_tests.erl
@@ -68,6 +68,7 @@
     config:set("oauth_token_users", "otoksec1", "joe", false),
     config:set("oauth_consumer_secrets", "consec1", "foo", false),
     config:set("oauth_token_secrets", "otoksec1", "foobar", false),
+    config:set("couchdb", "default_security", "everyone", false),
     config:set("couch_httpd_auth", "require_valid_user", "true", false),
 
     ok = config:set(
diff --git a/test/couchdb_views_tests.erl b/test/couchdb_views_tests.erl
index ab0f6f2..289ecb8 100644
--- a/test/couchdb_views_tests.erl
+++ b/test/couchdb_views_tests.erl
@@ -288,7 +288,7 @@
         ?assertNotEqual(IndexerPid, NewIndexerPid),
         UserCnt = case count_users(DbName) of
                       N when N > 2 ->
-                          time:sleep(1000),
+                          timer:sleep(1000),
                           count_users(DbName);
                       N -> N
                   end,
diff --git a/test/global_changes_tests.erl b/test/global_changes_tests.erl
index e04f3bf..930b985 100644
--- a/test/global_changes_tests.erl
+++ b/test/global_changes_tests.erl
@@ -15,26 +15,54 @@
 -include_lib("couch/include/couch_eunit.hrl").
 -include_lib("couch/include/couch_db.hrl").
 
+-define(USER, "admin").
+-define(PASS, "pass").
+-define(AUTH, {basic_auth, {?USER, ?PASS}}).
+
 setup() ->
     Host = get_host(),
-    add_admin("admin", <<"pass">>),
+    ok = add_admin(?USER, ?PASS),
     DbName = "foo/" ++ ?b2l(?tempdb()),
-    [fabric:create_db(Name, [?ADMIN_CTX])
-        || Name <- ["_global_changes", DbName]],
+    ok = http_create_db(DbName),
     {Host, DbName}.
 
 teardown({_, DbName}) ->
-    delete_admin("admin"),
-    [fabric:delete_db(Name, [?ADMIN_CTX])
-        || Name <- ["_global_changes", DbName]],
+    ok = http_delete_db(DbName),
+    delete_admin(?USER),
     ok.
 
+http_create_db(Name) ->
+    Resp = {ok, Status, _, _} = test_request:put(db_url(Name), [?AUTH], ""),
+    true = lists:member(Status, [201, 202]),
+    ok.
+    
+http_delete_db(Name) ->
+    {ok, Status, _, _} = test_request:delete(db_url(Name), [?AUTH]),
+    true = lists:member(Status, [200, 202]),
+    ok.
+
+db_url(Name) ->
+    get_host() ++ "/" ++ escape(Name).
+
+start_couch() ->
+    Ctx = test_util:start_couch([chttpd, global_changes]),
+    ok = ensure_db_exists("_global_changes"),
+    Ctx.
+
+ensure_db_exists(Name) ->
+    case fabric:create_db(Name) of
+        ok ->
+            ok;
+        {error, file_exists} ->
+            ok
+    end.
+
 global_changes_test_() ->
     {
         "Checking global_changes endpoint",
         {
             setup,
-            fun() -> test_util:start_couch([chttpd, global_changes]) end,
+            fun start_couch/0,
             fun test_util:stop/1,
             [
                 check_response()
@@ -57,7 +85,7 @@
 
 should_return_correct_response_on_create({Host, DbName}) ->
     ?_test(begin
-        Headers = [{basic_auth, {"admin", "pass"}}],
+        Headers = [?AUTH],
         create_doc(Host, DbName, "bar/baz"),
         {Status, Events} = request_updates(Host, DbName, Headers),
         ?assertEqual(200, Status),
@@ -66,7 +94,7 @@
 
 should_return_correct_response_on_update({Host, DbName}) ->
     ?_test(begin
-        Headers = [{basic_auth, {"admin", "pass"}}],
+        Headers = [?AUTH],
         create_doc(Host, DbName, "bar/baz"),
         update_doc(Host, DbName, "bar/baz", "new_value"),
         {Status, Events} = request_updates(Host, DbName, Headers),
@@ -75,7 +103,7 @@
     end).
 
 create_doc(Host, DbName, Id) ->
-    Headers = [{basic_auth, {"admin", "pass"}}],
+    Headers = [?AUTH],
     Url = Host ++ "/" ++ escape(DbName) ++ "/" ++ escape(Id),
     Body = jiffy:encode({[
         {key, "value"}
@@ -86,7 +114,7 @@
     ok.
 
 update_doc(Host, DbName, Id, Value) ->
-    Headers = [{basic_auth, {"admin", "pass"}}],
+    Headers = [?AUTH],
     Url = Host ++ "/" ++ escape(DbName) ++ "/" ++ escape(Id),
     {ok, 200, _Headers0, BinBody} = test_request:get(Url, Headers),
     [Rev] = decode_response(BinBody, [<<"_rev">>]),
@@ -116,8 +144,7 @@
     [couch_util:get_value(Key, Body) || Key <- ToDecode].
 
 add_admin(User, Pass) ->
-    Hashed = couch_passwords:hash_admin_password(Pass),
-    config:set("admins", User, ?b2l(Hashed), false).
+    config:set("admins", User, Pass, false).
 
 delete_admin(User) ->
     config:delete("admins", User, false).
@@ -125,8 +152,7 @@
 get_host() ->
     Addr = config:get("httpd", "bind_address", "127.0.0.1"),
     Port = integer_to_list(mochiweb_socket_server:get(chttpd, port)),
-    Host = "http://" ++ Addr ++ ":" ++ Port,
-    Host.
+    "http://" ++ Addr ++ ":" ++ Port.
 
 escape(Path) ->
     re:replace(Path, "/", "%2f", [global, {return, list}]).