Merge pull request #21 from basho/jnd-teststar_runner

A Test Runner and Oh, so much more!
This commit is contained in:
Joe DeVivo 2012-09-05 07:54:04 -07:00
commit a1e9e04795
22 changed files with 241 additions and 63 deletions
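
The core of the change is a getopt-driven command line for the riak_test escript; the rebar plugin now passes named flags instead of positional arguments. A minimal sketch of the two calling conventions, with an illustrative config name and test name:

%% before this commit: positional arguments, config first, test second
riak_test:main(["rtdev", "verify_build_cluster"]).

%% after this commit: getopt-style flags (see cli_options/0 in src/riak_test.erl)
riak_test:main(["-c", "rtdev", "-t", "verify_build_cluster", "-v"]).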

.gitignore (2 changes)

@ -3,3 +3,5 @@ ebin
log
riak_test
.eunit
.DS_Store
out

rebar.config

@ -9,11 +9,12 @@
{deps, [
{lager, "1.2.0", {git, "git://github.com/basho/lager", {tag, "1.2.0"}}},
{getopt, ".*", {git, "git://github.com/jcomellas/getopt", {tag, "v0.4"}}},
{meck, ".*", {git, "git://github.com/eproxus/meck"}},
{mapred_verify, ".*", {git, "git://github.com/basho/mapred_verify"}}
]}.
{escript_incl_apps, [lager]}.
{escript_incl_apps, [lager, getopt]}.
{plugin_dir, "src"}.
{plugins, [rebar_riak_test_plugin]}.

src/rebar_riak_test_plugin.erl

@ -86,7 +86,7 @@ riak_test_run(Config, _AppFile) ->
RiakTestConfig = rebar_config:get_global(Config, config, "rtdev"),
Test = rebar_config:get_global(Config, test, ""),
code:add_pathsz([rebar_utils:ebin_dir(), option(test_output, Config)]),
riak_test:main([RiakTestConfig, Test]),
riak_test:main(["-c", RiakTestConfig, "-t", Test]),
ok.
compilation_config(Conf) ->

src/riak_test.erl

@ -7,9 +7,46 @@ add_deps(Path) ->
[code:add_path(lists:append([Path, "/", Dep, "/ebin"])) || Dep <- Deps],
ok.
cli_options() ->
%% Option Name, Short Code, Long Code, Argument Spec, Help Message
[
{help, $h, "help", undefined, "Print this usage page"},
{config, $c, "conf", string, "specifies the project configuration"},
{tests, $t, "tests", string, "specifies which tests to run"},
{suites, $s, "suites", string, "which suites to run"},
{dir, $d, "dir", string, "run all tests in the specified directory"},
{verbose, $v, "verbose", undefined, "verbose output"},
{outdir, $o, "outdir", string, "output directory"}
].
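%% Illustration only (not part of this commit): with the option spec above,
%% getopt:parse(cli_options(), ["-c","rtdev","-t","verify_build_cluster","-v"])
%% returns roughly {ok, {[{config,"rtdev"},{tests,"verify_build_cluster"},verbose], []}}:
%% valued options come back as {Name, Value} pairs, bare flags as atoms.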
main(Args) ->
[Config, Test | HarnessArgs]=Args,
{ok, {ParsedArgs, HarnessArgs}} = getopt:parse(cli_options(), Args),
Verbose = proplists:is_defined(verbose, ParsedArgs),
Config = proplists:get_value(config, ParsedArgs),
SpecificTests = proplists:get_all_values(tests, ParsedArgs),
Suites = proplists:get_all_values(suites, ParsedArgs),
case Suites of
[] -> ok;
_ -> io:format("Suites are not currently supported.")
end,
Dirs = proplists:get_all_values(dir, ParsedArgs),
DirTests = lists:append([load_tests_in_dir(Dir) || Dir <- Dirs]),
%%case Dirs of
%% [] -> ok;
%% _ -> io:format("Directories are not currently supported.")
%%end,
Tests = lists:foldr(fun(X, AccIn) ->
case lists:member(X, AccIn) of
true -> AccIn;
_ -> [X | AccIn]
end
end, [], lists:sort(DirTests ++ SpecificTests)),
io:format("Tests to run: ~p~n", [Tests]),
rt:load_config(Config),
[add_deps(Dep) || Dep <- rt:config(rt_deps)],
@ -21,26 +58,68 @@ main(Args) ->
%% Start Lager
application:load(lager),
LagerLevel = rt:config(rt_lager_level, debug),
application:set_env(lager, handlers, [{lager_console_backend, LagerLevel}]),
lager:start(),
%% Fileoutput
Outdir = proplists:get_value(outdir, ParsedArgs),
ConsoleLagerLevel = case Outdir of
undefined -> rt:config(rt_lager_level, debug);
_ ->
filelib:ensure_dir(Outdir),
notice
end,
%% add handler for specific test.
gen_event:add_handler(lager_event, riak_test_lager_backend, [LagerLevel, false]),
application:set_env(lager, handlers, [{lager_console_backend, ConsoleLagerLevel}]),
lager:start(),
%% rt:set_config(rtdev_path, Path),
%% rt:set_config(rt_max_wait_time, 180000),
%% rt:set_config(rt_retry_delay, 500),
%% rt:set_config(rt_harness, rtbe),
TestResults = [ run_test(Test, Outdir, HarnessArgs) || Test <- Tests],
print_summary(TestResults, Verbose),
ok.
run_test(Test, Outdir, HarnessArgs) ->
rt:setup_harness(Test, HarnessArgs),
TestA = list_to_atom(Test),
%% st:TestFn(),
TestA:TestA(),
SingleTestResult = riak_test_runner:confirm(TestA, Outdir),
rt:cleanup_harness(),
SingleTestResult.
%% Custom Logging Voodoo
{ok, Logs} = gen_event:delete_handler(lager_event, riak_test_lager_backend, []),
io:format("Handled Log: ~n"),
[ io:put_chars(user, [Log, "\n"]) || Log <- Logs ],
print_summary(TestResults, Verbose) ->
io:format("~nTest Results:~n"),
Results = [
[ atom_to_list(proplists:get_value(test, SingleTestResult)),
proplists:get_value(status, SingleTestResult),
proplists:get_value(reason, SingleTestResult)]
|| SingleTestResult <- TestResults],
Width = test_name_width(Results),
Print = fun(Test, Status, Reason) ->
case {Status, Verbose} of
{fail, true} -> io:format("~s: ~s ~p~n", [string:left(Test, Width), Status, Reason]);
_ -> io:format("~s: ~s~n", [string:left(Test, Width), Status])
end
end,
[ Print(Test, Status, Reason) || [Test, Status, Reason] <- Results],
PassCount = length(lists:filter(fun(X) -> proplists:get_value(status, X) =:= pass end, TestResults)),
FailCount = length(lists:filter(fun(X) -> proplists:get_value(status, X) =:= fail end, TestResults)),
io:format("---------------------------------------------~n"),
io:format("~w Tests Failed~n", [FailCount]),
io:format("~w Tests Passed~n", [PassCount]),
io:format("That's ~w% for those keeping score~n", [(PassCount / (PassCount + FailCount)) * 100]),
ok.
test_name_width(Results) ->
lists:max([ length(X) || [X | _T] <- Results ]).
load_tests_in_dir(Dir) ->
case filelib:is_dir(Dir) of
true ->
code:add_path(Dir),
lists:sort([ string:substr(Filename, 1, length(Filename) - 5) || Filename <- filelib:wildcard("*.beam", Dir)]);
_ -> io:format("~s is not a dir!~n", [Dir])
end.
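
Taken together, each run_test/3 call yields a proplist that print_summary/2 renders. A rough sketch of the shape, with illustrative values (the exact fields are built in riak_test_runner:confirm/2, shown below):

%% one element of TestResults
[{test, verify_build_cluster}, {status, pass}, {log, Log}]
%% failing tests carry an extra entry: {reason, Reason}

%% print_summary/2 then prints one padded line per test, e.g.
%%   verify_build_cluster: pass
%%   upgrade             : fail   (the failure reason is appended only with -v/--verbose)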

src/riak_test_runner.erl (new file, 52 lines)

@ -0,0 +1,52 @@
-module(riak_test_runner).
%% @doc riak_test_runner runs a riak_test module's run/0 function.
-export([confirm/2]).
-spec(confirm(atom(), string()) -> [tuple()]).
%% @doc Runs a module's run/0 function after setting up a log capturing backend for lager.
%% It then cleans up that backend and returns the logs as part of the return proplist.
confirm(TestModule, Outdir) ->
start_lager_backend(TestModule, Outdir),
%% Check for api compatibility
{Status, Reason} = case proplists:get_value(confirm,
proplists:get_value(exports, TestModule:module_info()),
-1) of
0 ->
lager:notice("Running Test ~s", [TestModule]),
execute(TestModule);
_ ->
lager:info("~s is not a runable test", [TestModule]),
{not_a_runable_test, undefined}
end,
lager:notice("~s Test Run Complete", [TestModule]),
{ok, Log} = stop_lager_backend(),
RetList = [{test, TestModule}, {status, Status}, {log, Log}],
case Status of
fail -> RetList ++ [{reason, Reason}];
_ -> RetList
end.
start_lager_backend(TestModule, Outdir) ->
case Outdir of
undefined -> ok;
_ -> gen_event:add_handler(lager_event, lager_file_backend, {Outdir ++ "/" ++ atom_to_list(TestModule) ++ ".dat_test_output", debug, 10485760, "$D0", 1})
end,
gen_event:add_handler(lager_event, riak_test_lager_backend, [debug, false]).
stop_lager_backend() ->
gen_event:delete_handler(lager_event, lager_file_backend, []),
gen_event:delete_handler(lager_event, riak_test_lager_backend, []).
execute(TestModule) ->
try TestModule:confirm() of
ReturnVal -> {ReturnVal, undefined}
catch
error:Error ->
lager:warning("~s failed: ~p", [TestModule, Error]),
{fail, Error}
end.
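
With the runner checking a module's export list for confirm/0, a riak_test test is now just a module that exports confirm/0 and returns the atom pass. A minimal sketch using a made-up module name and the rt calls exercised by rt_basic_test elsewhere in this diff:

-module(my_smoke_test).
-export([confirm/0]).

confirm() ->
    %% deploy a single node through the configured harness, then stop it
    [Node] = rt:deploy_nodes(1),
    rt:stop(Node),
    pass.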

src/rtdev.erl

@ -29,9 +29,6 @@ run_git(Path, Cmd) ->
os:cmd(gitcmd(Path, Cmd)).
run_riak(N, Path, Cmd) ->
%% io:format("~p~n", [riakcmd(Path, N, Cmd)]),
%%?debugFmt("RR: ~p~n", [[N,Path,Cmd]]),
%%?debugFmt("~p~n", [os:cmd(riakcmd(Path, N, Cmd))]).
lager:info("Running: ~s", [riakcmd(Path, N, Cmd)]),
os:cmd(riakcmd(Path, N, Cmd)).
@ -111,9 +108,16 @@ deploy_nodes(NodeConfig) ->
NodeMap = orddict:from_list(lists:zip(Nodes, NodesN)),
{Versions, Configs} = lists:unzip(NodeConfig),
VersionMap = lists:zip(NodesN, Versions),
%% Check that you have the right versions available
[ check_node(Version) || Version <- VersionMap ],
rt:set_config(rt_nodes, NodeMap),
rt:set_config(rt_versions, VersionMap),
%% Stop all discoverable nodes, not just nodes we'll be using for this test.
RTDevPaths = [ DevPath || {_Name, DevPath} <- proplists:delete(root, rt:config(rtdev_path))],
rt:pmap(fun(X) -> stop_all(X ++ "/dev") end, RTDevPaths),
%% Stop nodes if already running
%% [run_riak(N, relpath(node_version(N)), "stop") || N <- Nodes],
rt:pmap(fun(Node) ->
@ -123,6 +127,7 @@ deploy_nodes(NodeConfig) ->
end, Nodes),
%% ?debugFmt("Shutdown~n", []),
%% Reset nodes to base state
lager:info("Resetting nodes to fresh state"),
%% run_git(Path, "status"),
@ -158,6 +163,25 @@ deploy_nodes(NodeConfig) ->
lager:info("Deployed nodes: ~p", [Nodes]),
Nodes.
stop_all(DevPath) ->
case filelib:is_dir(DevPath) of
true ->
Devs = filelib:wildcard(DevPath ++ "/dev*"),
%% Works, but I'd like it to brag a little more about it.
Stop = fun(C) ->
Cmd = C ++ "/bin/riak stop",
[Output | _Tail] = string:tokens(os:cmd(Cmd), "\n"),
Status = case Output of
"ok" -> "ok";
_ -> "wasn't running"
end,
lager:debug("Stopping Node... ~s ~~ ~s.", [Cmd, Status])
end,
rt:pmap(Stop, Devs);
_ -> lager:debug("~s is not a directory.", [DevPath])
end,
ok.
stop(Node) ->
N = node_id(Node),
run_riak(N, relpath(node_version(N)), "stop"),
@ -174,7 +198,7 @@ admin(Node, Args) ->
Cmd = riak_admin_cmd(Path, N, Args),
lager:debug("Running: ~s", [Cmd]),
Result = os:cmd(Cmd),
io:format("~s", [Result]),
lager:debug("~s", [Result]),
ok.
node_id(Node) ->
@ -219,3 +243,11 @@ get_cmd_result(Port, Acc) ->
after 0 ->
timeout
end.
check_node({_N, Version}) ->
case proplists:is_defined(Version, rt:config(rtdev_path)) of
true -> ok;
_ ->
lager:error("You don't have Riak ~s installed", [Version]),
erlang:error("You don't have Riak " ++ Version ++ " installed" )
end.
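
deploy_nodes/1 and check_node/1 assume rt:config(rtdev_path) is a proplist of {Name, Path} entries keyed by version, plus a root entry that is skipped when stopping stray nodes. A hypothetical config entry (paths invented for illustration):

{rtdev_path, [{root,    "/home/you/rt"},
              {current, "/home/you/rt/current"},
              {"1.1.4", "/home/you/rt/riak-1.1.4"},
              {"1.0.3", "/home/you/rt/riak-1.0.3"}]}.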

tests/gh_riak_core_154.erl

@ -1,10 +1,10 @@
%% Automated test for issue riak_core#154
%% Hinted handoff does not occur after a node has been restarted in Riak 1.1
-module(gh_riak_core_154).
-export([gh_riak_core_154/0]).
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").
gh_riak_core_154() ->
confirm() ->
%% Increase handoff concurrency on nodes
NewConfig = [{riak_core, [{handoff_concurrency, 1024}]}],
Nodes = rt:build_cluster(2, NewConfig),
@ -30,4 +30,4 @@ gh_riak_core_154() ->
?assertEqual([], rt:systest_read(Node2, 1000, 3)),
lager:info("gh_riak_core_154: passed"),
ok.
pass.

tests/gh_riak_core_155.erl

@ -2,7 +2,7 @@
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
gh_riak_core_155() ->
confirm() ->
[Node] = rt:build_cluster(1),
%% Generate a valid preflist for our get requests
@ -37,7 +37,7 @@ gh_riak_core_155() ->
end || _ <- lists:seq(1,10)],
lager:info("Test passed"),
ok.
pass.
load_code(Module, Nodes) ->
{Module, Bin, File} = code:get_object_code(Module),

tests/gh_riak_core_176.erl

@ -1,8 +1,8 @@
-module(gh_riak_core_176).
-export([gh_riak_core_176/0]).
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").
gh_riak_core_176() ->
confirm() ->
Nodes = rt:deploy_nodes(3),
[Node1, Node2, Node3] = Nodes,
Nodes12 = [Node1, Node2],
@ -52,7 +52,7 @@ gh_riak_core_176() ->
[?assertEqual(Nodes123, rt:owners_according_to(Node)) || Node <- Nodes123],
lager:info("Test gh_riak_core_176 passed"),
ok.
pass.
ip_tuple_to_string(T) ->
L = tuple_to_list(T),

tests/loaded_upgrade.erl

@ -3,13 +3,22 @@
-include_lib("eunit/include/eunit.hrl").
-define(SPAM_BUCKET, <<"scotts_spam">>).
loaded_upgrade() ->
%% @doc This test requires additional setup, here's how to do it.
%% 1. Clone and build basho_bench
%% 2. Set an environment variable "BASHO_BENCH" to the path you cloned to.
%% 3. Get this file: https://github.com/basho/basho_expect/tree/master/search-corpus/spam.0-small.tar.gz
%% 4. Unzip it somewhere.
%% 5. Set an environment variable "SPAM_DIR" to the path you unzipped, including the "spam.0" dir
%% @todo basho_bench creates a lot of files. I wish it didn't.
confirm() ->
_ = rt:get_os_env("BASHO_BENCH"),
%% OldVsns = ["1.0.3", "1.1.4"],
OldVsns = ["1.1.4"],
[verify_upgrade(OldVsn) || OldVsn <- OldVsns],
lager:info("Test ~p passed", [?MODULE]),
ok.
pass.
verify_upgrade(OldVsn) ->
Config = [{riak_search, [{enabled, true}]}],

tests/mapred_verify_rt.erl

@ -3,11 +3,11 @@
-module(mapred_verify_rt).
-export([mapred_verify_rt/0]).
-export([confirm/0]).
-define(NODE_COUNT, 3).
mapred_verify_rt() ->
confirm() ->
lager:info("Build ~b node cluster", [?NODE_COUNT]),
Nodes = rt:build_cluster(?NODE_COUNT),
@ -23,4 +23,4 @@ mapred_verify_rt() ->
lager:info("Run mapred_verify"),
0 = mapred_verify:do_verification(MRVProps),
lager:info("~s: PASS", [atom_to_list(?MODULE)]),
ok.
pass.

tests/partition_repair.erl

@ -14,7 +14,7 @@
%% @doc This test verifies that partition repair successfully repairs
%% all data after it has been wiped out by a simulated disk crash.
partition_repair() ->
confirm() ->
SpamDir = get_os_env("SPAM_DIR"),
RingSize = list_to_integer(get_os_env("RING_SIZE", "16")),
NVal = get_os_env("N_VAL", undefined),
@ -88,7 +88,8 @@ partition_repair() ->
lager:info("Emulate data loss for riak_kv, repair, verify correct data"),
[kill_repair_verify(Owner, KVDataDir, riak_kv) || Owner <- Owners],
lager:info("TEST PASSED").
lager:info("TEST PASSED"),
pass.
kill_repair_verify({Partition, Node}, DataSuffix, Service) ->
StashPath = stash_path(Service, Partition),
@ -253,6 +254,8 @@ stash_kv(Key, Value, Stash) ->
stash_search({_I,{_F,_T}}=K, _Postings=V, Stash) ->
dict:append_list(K, V, Stash).
%% @todo broken when run in the style of rtdev_mixed.
stash_path(Service, Partition) ->
Path = rt:config(rtdev_path) ++ "/dev/data_stash",
Path ++ "/" ++ atom_to_list(Service) ++ "/" ++ integer_to_list(Partition) ++ ".stash".

tests/rolling_capabilities.erl

@ -1,8 +1,8 @@
-module(rolling_capabilities).
-export([rolling_capabilities/0]).
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").
rolling_capabilities() ->
confirm() ->
Count = 4,
OldVsn = "1.1.4",
%% Assuming default 1.1.4 app.config settings, the only difference
@ -35,7 +35,7 @@ rolling_capabilities() ->
lager:info("Verifying final/upgraded capabilities"),
check_capabilities(Nodes, ExpectedNew),
lager:info("Test ~p passed", [?MODULE]),
ok.
pass.
check_capabilities(Nodes, Expected) ->
[?assertEqual(ok, rt:wait_until_capability(Node, {App, Cap}, Val))

tests/rt_basic_test.erl

@ -1,9 +1,9 @@
-module(rt_basic_test).
-export([rt_basic_test/0]).
-export([confirm/0]).
rt_basic_test() ->
confirm() ->
lager:info("Deploy some nodes"),
Nodes = rt:deploy_nodes(2),
lager:info("Stop the nodes"),
[rt:stop(Node) || Node <- Nodes],
ok.
pass.

tests/upgrade.erl

@ -1,8 +1,8 @@
-module(upgrade).
-export([upgrade/0]).
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").
upgrade() ->
confirm() ->
Nodes = rt:build_cluster(["1.0.3", "1.0.3", "1.1.4", current]),
[Node1, Node2, Node3, _Node4] = Nodes,
@ -16,7 +16,7 @@ upgrade() ->
rt:systest_read(Node1, 100, 1),
%% ?assertEqual([], rt:systest_read(Node1, 100, 1)),
wait_until_readable(Node1, 100),
ok.
pass.
wait_until_readable(Node, N) ->
rt:wait_until(Node,

tests/verify_basic_upgrade.erl

@ -1,13 +1,13 @@
-module(verify_basic_upgrade).
-export([verify_basic_upgrade/0]).
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").
verify_basic_upgrade() ->
confirm() ->
OldVsns = ["1.0.3", "1.1.4"],
[build_cluster(OldVsn, current) || OldVsn <- OldVsns],
[build_cluster(current, OldVsn) || OldVsn <- OldVsns],
lager:info("Test ~p passed", [?MODULE]),
ok.
pass.
build_cluster(Vsn1, Vsn2) ->
lager:info("Testing versions: ~p <- ~p", [Vsn1, Vsn2]),

tests/verify_build_cluster.erl

@ -8,7 +8,7 @@
wait_until_nodes_ready/1,
wait_until_no_pending_changes/1]).
verify_build_cluster() ->
confirm() ->
%% Deploy a set of new nodes
lager:info("Deploying 3 nodes"),
Nodes = rt:deploy_nodes(3),
@ -30,4 +30,4 @@ verify_build_cluster() ->
lager:info("Ensure each node owns a portion of the ring"),
[?assertEqual(Nodes, owners_according_to(Node)) || Node <- Nodes],
lager:info("verify_build_cluster: PASS"),
ok.
pass.

tests/verify_capabilities.erl

@ -1,8 +1,8 @@
-module(verify_capabilities).
-export([verify_capabilities/0]).
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").
verify_capabilities() ->
confirm() ->
lager:info("Deploying mixed set of nodes"),
Nodes = rt:deploy_nodes([current, "0.14.2", "1.1.4", "1.0.3"]),
[Node1, Node2, Node3, Node4] = Nodes,
@ -97,7 +97,7 @@ verify_capabilities() ->
?assertEqual(legacy, rt:capability(Node1, {riak_core, vnode_routing})),
[rt:stop(Node) || Node <- Nodes],
ok.
pass.
crash_capability_server(Node) ->
Pid = rpc:call(Node, erlang, whereis, [riak_core_capability]),

tests/verify_claimant.erl

@ -12,7 +12,7 @@
status_of_according_to/2,
wait_until_nodes_ready/1]).
verify_claimant() ->
confirm() ->
Nodes = build_cluster(3),
[Node1, Node2, _Node3] = Nodes,
@ -53,4 +53,4 @@ verify_claimant() ->
%% Ensure all nodes still believe node2 is the claimant
lager:info("Ensure all nodes still believe ~p is the claimant", [Node2]),
[?assertEqual(Node2, claimant_according_to(Node)) || Node <- Nodes],
ok.
pass.

tests/verify_down.erl

@ -1,8 +1,8 @@
-module(verify_down).
-export([verify_down/0]).
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").
verify_down() ->
confirm() ->
Nodes = rt:deploy_nodes(3),
[Node1, Node2, Node3] = Nodes,
@ -45,4 +45,4 @@ verify_down() ->
%% Verify that all three nodes are ready
lager:info("Ensure all nodes are ready"),
?assertEqual(ok, rt:wait_until_nodes_ready(Nodes)),
ok.
pass.

tests/verify_leave.erl

@ -1,5 +1,5 @@
-module(verify_leave).
-export([verify_leave/0]).
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").
-import(rt, [build_cluster/1,
@ -9,7 +9,7 @@
status_of_according_to/2,
remove/2]).
verify_leave() ->
confirm() ->
%% Bring up a 3-node cluster for the test
Nodes = build_cluster(3),
[Node1, Node2, Node3] = Nodes,
@ -37,4 +37,4 @@ verify_leave() ->
Remaining2 = Remaining1 -- [Node3],
[?assertEqual(Remaining2, owners_according_to(Node)) || Node <- Remaining2],
[?assertEqual(invalid, status_of_according_to(Node3, Node)) || Node <- Remaining2],
ok.
pass.

tests/verify_staged_clustering.erl

@ -1,9 +1,9 @@
-module(verify_staged_clustering).
-export([verify_staged_clustering/0]).
-export([confirm/0]).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
verify_staged_clustering() ->
confirm() ->
Nodes = rt:deploy_nodes(4),
[Node1, Node2, Node3, Node4] = Nodes,
Nodes123 = [Node1, Node2, Node3],
@ -90,7 +90,7 @@ verify_staged_clustering() ->
?assertEqual(ok, rt:wait_until_all_members([Node1])),
lager:info("Test verify_staged_clustering: Passed"),
ok.
pass.
n(Atom) ->
atom_to_list(Atom).