Merge pull request #12 from basho/rz-search-rolling-upgrade

Add search to loaded_upgrade
Joseph Blomstedt 2012-08-08 18:30:47 -07:00
commit 372841a8e9
5 changed files with 263 additions and 106 deletions

rtdev-build-releases.sh (Normal file → Executable file, 23 lines changed)

@@ -16,10 +16,10 @@
 # Or, alternatively, just substitute the paths to the kerl install paths as
 # that should work too.
-R13B04=$HOME/erlang-R13B04
-R14B03=$HOME/erlang-R14B03
-R14B04=$HOME/erlang-R14B04
-R15B01=$HOME/erlang-R15B01
+R13B04=${R13B04:-$HOME/erlang-R13B04}
+R14B03=${R14B03:-$HOME/erlang-R14B03}
+R14B04=${R14B04:-$HOME/erlang-R14B04}
+R15B01=${R15B01:-$HOME/erlang-R15B01}

 kerl()
 {
@@ -40,11 +40,16 @@ build()
     ERLROOT=$2

     if [ ! -d $ERLROOT ]; then
+        echo -n "$ERLROOT cannot be found, install kerl? [y|N]: "
+        read ans
+        if [[ $ans == 'y' || $ans == 'Y' ]]; then
             BUILDNAME=`basename $ERLROOT`
             RELEASE=`echo $BUILDNAME | awk -F- '{ print $2 }'`
             kerl $RELEASE $BUILDNAME
+        else
+            exit 1
+        fi
     fi

     echo
     echo "Building $SRCDIR"
@@ -63,6 +68,7 @@ if [ $1 = "-ee" ]; then
     s3cmd get --continue s3://builds.basho.com/riak_ee/riak_ee-0.14/0.14.2/riak_ee-0.14.2.tar.gz
     s3cmd get --continue s3://builds.basho.com/riak_ee/1.0/1.0.3/riak_ee-1.0.3.tar.gz
     s3cmd get --continue s3://builds.basho.com/riak_ee/1.1/1.1.4/riak_ee-1.1.4.tar.gz
+    s3cmd get --continue s3://builds.basho.com/riak_ee/1.2/CURRENT/riak_ee-1.2.0rc4.tar.gz

     tar -xzf riak_ee-0.14.2.tar.gz
     build "riak_ee-0.14.2" $R13B04
@@ -72,11 +78,15 @@ if [ $1 = "-ee" ]; then
     tar -xzf riak_ee-1.1.4.tar.gz
     build "riak_ee-1.1.4" $R14B03

+    tar -xzf riak_ee-1.2.0rc4.tar.gz
+    build "riak_ee-1.2.0rc4" $R15B01
+
 else
     # Download Riak release source
     wget -c http://downloads.basho.com/riak/riak-0.14/riak-0.14.2.tar.gz
     wget -c http://downloads.basho.com/riak/riak-1.0.3/riak-1.0.3.tar.gz
     wget -c http://downloads.basho.com/riak/riak-1.1.4/riak-1.1.4.tar.gz
+    s3cmd get --continue s3://builds.basho.com/riak/1.2/CURRENT/riak-1.2.0rc3.tar.gz

     tar -xzf riak-0.14.2.tar.gz
     build "riak-0.14.2" $R13B04
@@ -86,4 +96,7 @@ else
     tar -xzf riak-1.1.4.tar.gz
     build "riak-1.1.4" $R14B03

+    tar -xvf riak-1.2.0rc3.tar.gz
+    build "riak-1.2.0rc3" $R15B01
+
 fi

rtdev-setup-releases.sh (Normal file → Executable file, 0 lines changed; mode change only)

(third changed file; file name not captured in this view: the hunks below modify the rt helper module)

@@ -15,6 +15,8 @@
          stop/1,
          join/2,
          leave/1,
+         get_os_env/1,
+         get_os_env/2,
          get_ring/1,
          admin/2,
          wait_until_pingable/1,
@@ -48,6 +50,18 @@
 -define(HARNESS, (rt:config(rt_harness))).

+get_os_env(Var) ->
+    case get_os_env(Var, undefined) of
+        undefined -> exit({os_env_var_undefined, Var});
+        Value -> Value
+    end.
+
+get_os_env(Var, Default) ->
+    case os:getenv(Var) of
+        false -> Default;
+        Value -> Value
+    end.
+
 %% @doc Get the raw ring for the given `Node'.
 get_ring(Node) ->
     {ok, Ring} = rpc:call(Node, riak_core_ring_manager, get_raw_ring, []),
@@ -443,6 +457,11 @@ rpc_get_env(Node, [{App,Var}|Others]) ->
             rpc_get_env(Node, Others)
     end.

+-type interface() :: {http, tuple()} | {pb, tuple()}.
+-type interfaces() :: [interface()].
+-type conn_info() :: [{node(), interfaces()}].
+
+-spec connection_info([node()]) -> conn_info().
 connection_info(Nodes) ->
     [begin
          {ok, PB_IP} = rpc_get_env(Node, [{riak_api, pb_ip},
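
Aside, not part of the commit: a minimal sketch of how a test could consume the new rt:get_os_env/1,2 helpers and the conn_info() shape specified above. The function name, the default path, and the log format are illustrative assumptions only.

    %% Illustrative sketch only; not in the diff above. Names and defaults are made up.
    example_env_and_conns(Nodes) ->
        %% exits with {os_env_var_undefined, "BASHO_BENCH"} if the variable is unset
        BenchDir = rt:get_os_env("BASHO_BENCH"),
        %% falls back to a default instead of exiting (default path is made up)
        SpamDir = rt:get_os_env("SPAM_DIR", "/tmp/spam"),
        %% conn_info() is a per-node proplist of interfaces; the http entry is
        %% assumed to be an {IP, Port} pair here
        [{_Node, Interfaces} | _] = rt:connection_info(Nodes),
        {HttpIP, HttpPort} = proplists:get_value(http, Interfaces),
        lager:info("bench=~s spam=~s http=~p:~p", [BenchDir, SpamDir, HttpIP, HttpPort]),
        ok.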

loaded_upgrade.erl

@@ -1,20 +1,21 @@
 -module(loaded_upgrade).
 -compile(export_all).
 -include_lib("eunit/include/eunit.hrl").
+-define(SPAM_BUCKET, <<"scotts_spam">>).

 loaded_upgrade() ->
-    (os:getenv("BASHO_BENCH") /= false) orelse
-        throw("Missing path to BASHO_BENCH enviroment variable"),
-    %% OldVsns = ["1.0.3", "1.1.2"],
-    OldVsns = ["1.1.2"],
+    _ = rt:get_os_env("BASHO_BENCH"),
+    %% OldVsns = ["1.0.3", "1.1.4"],
+    OldVsns = ["1.1.4"],
     [verify_upgrade(OldVsn) || OldVsn <- OldVsns],
     lager:info("Test ~p passed", [?MODULE]),
     ok.

 verify_upgrade(OldVsn) ->
+    Config = [{riak_search, [{enabled, true}]}],
     NumNodes = 4,
-    Vsns = [OldVsn || _ <- lists:seq(2,NumNodes)],
-    Nodes = rt:build_cluster([current | Vsns]),
+    Vsns = [{OldVsn, Config} || _ <- lists:seq(2,NumNodes)],
+    Nodes = rt:build_cluster([{current, Config} | Vsns]),
     [Node1|OldNodes] = Nodes,
     lager:info("Writing 100 keys to ~p", [Node1]),
     rt:systest_write(Node1, 100, 3),
@@ -25,18 +26,23 @@ verify_upgrade(OldVsn) ->
     KV1 = init_kv_tester(NodeConn),
     MR1 = init_mapred_tester(NodeConn),
+    Search1 = init_search_tester(Nodes, Conns),
     [begin
          KV2 = spawn_kv_tester(KV1),
          MR2 = spawn_mapred_tester(MR1),
+         Search2 = spawn_search_tester(Search1),
          lager:info("Upgrading ~p", [Node]),
          rtdev:upgrade(Node, current),
          _KV3 = check_kv_tester(KV2),
          _MR3 = check_mapred_tester(MR2),
+         _Search3 = check_search_tester(Search2, false),
          lager:info("Ensuring keys still exist"),
          rt:wait_for_cluster_service(Nodes, riak_kv),
          ?assertEqual([], rt:systest_read(Node1, 100, 1))
     end || Node <- OldNodes],
+    lager:info("Upgrade complete, ensure search now passes"),
+    check_search_tester(spawn_search_tester(Search1), true),
     ok.

 %% ===================================================================
@@ -168,6 +174,97 @@ mapred_check_verify(Port) ->
             fail
     end.

+%% ===================================================================
+%% Search tester
+%% ===================================================================
+
+-record(search, {buckets, runs}).
+
+init_search_tester(Nodes, Conns) ->
+    SpamDir = rt:get_os_env("SPAM_DIR"),
+    IPs = [proplists:get_value(http, I) || {_, I} <- Conns],
+    Buckets = [?SPAM_BUCKET],
+    rt:enable_search_hook(hd(Nodes), ?SPAM_BUCKET),
+    generate_search_scripts(Buckets, IPs, SpamDir),
+    [search_populate(Bucket) || Bucket <- Buckets],
+    %% Check search queries actually work as expected
+    [check_search(Bucket, Nodes) || Bucket <- Buckets],
+    #search{buckets=Buckets, runs=[]}.
+
+check_search(?SPAM_BUCKET, Nodes) ->
+    SearchResults = [{"postoffice.mr.net", 194},
+                     {"ZiaSun", 1},
+                     {"headaches", 4},
+                     {"YALSP", 3},
+                     {"mister", 0},
+                     {"prohibiting", 5}],
+    Results = [{Term,Count} || {Term, Count} <- SearchResults,
+                               Node <- Nodes,
+                               {Count2,_} <- [rpc:call(Node, search, search, [?SPAM_BUCKET, Term])],
+                               Count2 == Count],
+    Expected = lists:usort(SearchResults),
+    Actual = lists:usort(Results),
+    ?assertEqual(Expected, Actual),
+    ok.
+
+spawn_search_tester(Search=#search{buckets=Buckets}) ->
+    Count = 3,
+    Runs = [{Bucket, search_spawn_verify(Bucket)} || Bucket <- Buckets,
+                                                     _ <- lists:seq(1, Count)],
+    Search#search{runs=Runs}.
+
+check_search_tester(Search=#search{runs=Runs}, Retest) ->
+    Failed = [Bucket || {Bucket, Run} <- Runs,
+                        ok /= search_check_verify(Bucket, Run, [])],
+    [begin
+         lager:info("Failed search test for: ~p", [Bucket]),
+         maybe_retest_search(Retest, Bucket),
+         ok
+     end || Bucket <- Failed],
+    Search#search{runs=[]}.
+
+maybe_retest_search(false, _) ->
+    ok;
+maybe_retest_search(true, Bucket) ->
+    lager:info("Re-running until test passes to check for data loss"),
+    Result =
+        rt:wait_until(node(),
+                      fun(_) ->
+                              Rerun = search_spawn_verify(Bucket),
+                              ok == search_check_verify(Bucket, Rerun, [])
+                      end),
+    ?assertEqual(ok, Result),
+    lager:info("search test finally passed"),
+    ok.
+
+search_populate(Bucket) when is_binary(Bucket) ->
+    search_populate(binary_to_list(Bucket));
+search_populate(Bucket) ->
+    Config = "bb-populate-" ++ Bucket ++ ".config",
+    lager:info("Populating search bucket: ~s", [Bucket]),
+    rt:cmd("$BASHO_BENCH/basho_bench " ++ Config).
+
+search_spawn_verify(Bucket) when is_binary(Bucket) ->
+    search_spawn_verify(binary_to_list(Bucket));
+search_spawn_verify(Bucket) when is_list(Bucket) ->
+    Config = "bb-verify-" ++ Bucket ++ ".config",
+    lager:info("Spawning search test against: ~s", [Bucket]),
+    rt:spawn_cmd("$BASHO_BENCH/basho_bench " ++ Config).
+
+search_check_verify(Bucket, Port, Opts) ->
+    lager:info("Checking search test against: ~p", [Bucket]),
+    {Status,_} = rt:wait_for_cmd(Port),
+    Repair = ordsets:is_element(repair, Opts),
+    case {Repair, Status} of
+        %% {true, 1} ->
+        %%     lager:info("Allowing repair: ~p", [Bucket]),
+        %%     search_verify_repair(Bucket);
+        {_, 0} ->
+            ok;
+        {_, _} ->
+            fail
+    end.
+
 %% ===================================================================
 %% basho_bench K/V scripts
 %% ===================================================================
@@ -182,55 +279,52 @@ generate_kv_scripts(Buckets, Host, Port) ->
     ok.

 kv_populate_script(Bucket, Host, Port) ->
-    Out = io_lib:format("
-{mode, max}.
-{duration, infinity}.
-{concurrent, 16}.
-{driver, basho_bench_driver_http_raw}.
-{key_generator, {partitioned_sequential_int, 0, 8000}}.
-{value_generator, {uniform_bin,100,1000}}.
-{operations, [{update, 1}]}.
-{http_raw_ips, [\"~s\"]}.
-{http_raw_port, ~b}.
-{http_raw_path, \"/riak/~s\"}.", [Host, Port, Bucket]),
+    Cfg = [{mode, max},
+           {duration, infinity},
+           {concurrent, 16},
+           {driver, basho_bench_driver_http_raw},
+           {key_generator, {partitioned_sequential_int, 0, 8000}},
+           {value_generator, {uniform_bin,100,1000}},
+           {operations, [{update, 1}]},
+           {http_raw_ips, [Host]},
+           {http_raw_port, Port},
+           {http_raw_path, "/riak/" ++ Bucket}],
     Config = "bb-populate-" ++ Bucket ++ ".config",
-    file:write_file(Config, Out),
+    write_terms(Config, Cfg),
     ok.

 kv_verify_script(Bucket, Host, Port) ->
-    Out = io_lib:format("
-{mode, {rate, 50}}.
-%{duration, infinity}.
-{duration, 1}.
-{concurrent, 10}.
-{driver, basho_bench_driver_http_raw}.
-{key_generator, {uniform_int, 7999}}.
-{value_generator, {uniform_bin,100,1000}}.
-{operations, [{update, 1},{get_existing, 1}]}.
-{http_raw_ips, [\"~s\"]}.
-{http_raw_port, ~b}.
-{http_raw_path, \"/riak/~s\"}.
-{shutdown_on_error, true}.", [Host, Port, Bucket]),
+    Cfg = [{mode, {rate, 50}},
+           %%{duration, infinity},
+           {duration, 1},
+           {concurrent, 10},
+           {driver, basho_bench_driver_http_raw},
+           {key_generator, {uniform_int, 7999}},
+           {value_generator, {uniform_bin,100,1000}},
+           {operations, [{update, 1},{get_existing, 1}]},
+           {http_raw_ips, [Host]},
+           {http_raw_port, Port},
+           {http_raw_path, "/riak/" ++ Bucket},
+           {shutdown_on_error, true}],
     Config = "bb-verify-" ++ Bucket ++ ".config",
-    file:write_file(Config, Out),
+    write_terms(Config, Cfg),
     ok.

 kv_repair_script(Bucket, Host, Port) ->
-    Out = io_lib:format("
-{mode, {rate, 50}}.
-{duration, infinity}.
-%{duration, 1}.
-{concurrent, 10}.
-{driver, basho_bench_driver_http_raw}.
-%{key_generator, {uniform_int, 8000}}.
-{key_generator, {partitioned_sequential_int, 0, 8000}}.
-{value_generator, {uniform_bin,100,1000}}.
-{operations, [{get, 1}]}.
-{http_raw_ips, [\"~s\"]}.
-{http_raw_port, ~b}.
-{http_raw_path, \"/riak/~s\"}.", [Host, Port, Bucket]),
+    Cfg = [{mode, {rate, 50}},
+           {duration, infinity},
+           %%{duration, 1},
+           {concurrent, 10},
+           {driver, basho_bench_driver_http_raw},
+           %%{key_generator, {uniform_int, 8000}},
+           {key_generator, {partitioned_sequential_int, 0, 8000}},
+           {value_generator, {uniform_bin,100,1000}},
+           {operations, [{get, 1}]},
+           {http_raw_ips, [Host]},
+           {http_raw_port, Port},
+           {http_raw_path, "/riak/" ++ Bucket}],
     Config = "bb-repair-" ++ Bucket ++ ".config",
-    file:write_file(Config, Out),
+    write_terms(Config, Cfg),
     ok.

 %% ===================================================================
@@ -243,47 +337,88 @@ generate_mapred_scripts(Host, Port) ->
     ok.

 mapred_populate_script(Host, Port) ->
-    Out = io_lib:format("
-{driver, basho_bench_driver_riakc_pb}.
-%{code_paths, [\"deps/stats\",
-%              \"deps/riakc\",
-%              \"deps/protobuffs\"]}.
-{riakc_pb_ips, [{~p, ~b}]}.
-{riakc_pb_replies, 1}.
-{riakc_pb_bucket, <<\"bryanitbs\">>}.
-%% load
-{mode, max}.
-{duration, 10000}.
-{concurrent, 1}.
-{operations, [{put, 1}]}.
-{key_generator, {int_to_str, {sequential_int, 10000}}}.
-{value_generator,
- {function, basho_bench_driver_riakc_pb, mapred_ordered_valgen, []}}.",
-                        [Host, Port]),
+    Cfg = [{driver, basho_bench_driver_riakc_pb},
+           {riakc_pb_ips, [{Host, Port}]},
+           {riakc_pb_replies, 1},
+           {riakc_pb_bucket, <<"bryanitbs">>},
+           {mode, max},
+           {duration, 10000},
+           {concurrent, 1},
+           {operations, [{put, 1}]},
+           {key_generator, {int_to_str, {sequential_int, 10000}}},
+           {value_generator,
+            {function, basho_bench_driver_riakc_pb, mapred_ordered_valgen, []}}],
     Config = "bb-populate-mapred.config",
-    file:write_file(Config, Out),
+    write_terms(Config, Cfg),
     ok.

 mapred_verify_script(Host, Port) ->
-    Out = io_lib:format("
-{driver, basho_bench_driver_riakc_pb}.
-%{code_paths, [\"deps/stats\",
-%              \"deps/riakc\",
-%              \"deps/protobuffs\"]}.
-{riakc_pb_ips, [{~p, ~b}]}.
-{riakc_pb_replies, 1}.
-{riakc_pb_bucket, <<\"bryanitbs\">>}.
-%% test
-%% for computing expected bucket sum
-{riakc_pb_preloaded_keys, 10000}.
-{mode, max}.
-{duration, 1}.
-{concurrent, 1}.
-{operations, [{mr_bucket_erlang, 1}]}.
-{key_generator, {int_to_str, {uniform_int, 9999}}}.
-{value_generator, {fixed_bin, 1}}.
-{riakc_pb_keylist_length, 1000}.
-{shutdown_on_error, true}.", [Host, Port]),
+    Cfg = [{driver, basho_bench_driver_riakc_pb},
+           {riakc_pb_ips, [{Host, Port}]},
+           {riakc_pb_replies, 1},
+           {riakc_pb_bucket, <<"bryanitbs">>},
+           {riakc_pb_preloaded_keys, 10000},
+           {mode, max},
+           {duration, 1},
+           {concurrent, 1},
+           {operations, [{mr_bucket_erlang, 1}]},
+           {key_generator, {int_to_str, {uniform_int, 9999}}},
+           {value_generator, {fixed_bin, 1}},
+           {riakc_pb_keylist_length, 1000},
+           {shutdown_on_error, true}],
     Config = "bb-verify-mapred.config",
-    file:write_file(Config, Out),
+    write_terms(Config, Cfg),
     ok.
+%% ===================================================================
+%% basho_bench Search scripts
+%% ===================================================================
+
+generate_search_scripts(Buckets, IPs, SpamDir) ->
+    [begin
+         Bucket = binary_to_list(BucketBin),
+         search_populate_script(Bucket, IPs, SpamDir),
+         search_verify_script(Bucket, IPs)
+     end || BucketBin <- Buckets],
+    ok.
+
+search_populate_script(Bucket, IPs, SpamDir) ->
+    Cfg = [{mode, max},
+           {duration, 1},
+           {concurrent, 10},
+           {driver, basho_bench_driver_http_raw},
+           {file_dir, SpamDir},
+           {operations, [{put_file,1}]},
+           {http_raw_ips, IPs},
+           {http_raw_path, "/riak/" ++ Bucket},
+           {shutdown_on_error, true}],
+    Config = "bb-populate-" ++ Bucket ++ ".config",
+    write_terms(Config, Cfg).
+
+search_verify_script(Bucket, IPs) ->
+    Expect = [{"postoffice.mr.net", 194},
+              {"ZiaSun", 1},
+              {"headaches", 4},
+              {"YALSP", 3},
+              {"mister", 0},
+              {"prohibiting", 5}],
+    Operations = [{{search,E},1} || E <- Expect],
+    Cfg = [{mode, max},
+           {duration, 1},
+           {concurrent, 10},
+           {driver, basho_bench_driver_http_raw},
+           {operations, Operations},
+           {http_raw_ips, IPs},
+           {http_solr_path, "/solr/" ++ Bucket},
+           {http_raw_path, "/riak/" ++ Bucket},
+           {shutdown_on_error, true}],
+    Config = "bb-verify-" ++ Bucket ++ ".config",
+    write_terms(Config, Cfg).
+
+write_terms(File, Terms) ->
+    {ok, IO} = file:open(File, [write]),
+    [io:fwrite(IO, "~p.~n", [T]) || T <- Terms],
+    file:close(IO).
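
Aside, not part of the diff: write_terms/2 prints each term with ~p followed by a period, which is the consult-style format basho_bench reads for its config files. A hypothetical round trip, with a made-up file name and a trimmed-down term list:

    %% Hypothetical usage; the file name and terms are illustrative only.
    write_terms_demo() ->
        Cfg = [{mode, max},
               {http_raw_path, "/riak/demo_bucket"}],
        ok = write_terms("bb-demo.config", Cfg),
        %% bb-demo.config now contains:
        %%   {mode,max}.
        %%   {http_raw_path,"/riak/demo_bucket"}.
        {ok, Cfg} = file:consult("bb-demo.config"),
        ok.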

(fifth changed file; file name not captured in this view)

@@ -4,6 +4,8 @@
 -import(rt, [deploy_nodes/1,
              enable_search_hook/2,
+             get_os_env/1,
+             get_os_env/2,
              get_ring/1,
              join/2,
              update_app_config/2]).
@@ -295,18 +297,6 @@ put_file(C, Bucket, File) ->
     O = riak_object:new(Bucket, K, Val, "text/plain"),
     ?assertEqual(ok, C:put(O)).

-get_os_env(Var) ->
-    case get_os_env(Var, undefined) of
-        undefined -> exit({os_env_var_undefined, Var});
-        Value -> Value
-    end.
-
-get_os_env(Var, Default) ->
-    case os:getenv(Var) of
-        false -> Default;
-        Value -> Value
-    end.
-
 load_module_on_riak(Nodes, Mod) ->
     {Mod, Bin, File} = code:get_object_code(Mod),
     [?assertEqual({module, Mod},
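
Aside, not part of the diff: since get_os_env/1,2 now come in through the -import earlier in this file, any existing unqualified get_os_env(...) calls keep compiling and simply resolve to the shared rt implementation. A hypothetical call site for comparison (the function and variable names are made up):

    %% Illustrative only; not taken from this module.
    example_spam_dir() ->
        SpamDir = get_os_env("SPAM_DIR"),     %% resolves to rt:get_os_env/1 via -import
        SpamDir = rt:get_os_env("SPAM_DIR"),  %% equivalent fully-qualified call
        SpamDir.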