Mirror of https://github.com/valitydev/riak_test.git (synced 2024-11-06 08:35:22 +00:00)
Merge pull request #268 from basho/jd-cleanup
Some basic refactoring of riak_test config and local functions
Commit: d842574cad
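Editor's note: the refactoring below moves configuration access out of rt.erl into a new rt_config module, moves local shell helpers into a new rt_local module, and moves the escript entry point from riak_test.erl into a new riak_test_escript.erl. A minimal before/after sketch of the renamed calls (function names are taken from the diff; the config keys and values shown are only examples that appear further down):

    %% Before this commit:
    Host  = rt:config(giddyup_host),
    Level = rt:config(lager_level, info),
    rt:set_config(rt_versions, VersionMap),
    rt:load_config(Config),
    Found = rt:which("curl"),

    %% After this commit:
    Host  = rt_config:get(giddyup_host),
    Level = rt_config:get(lager_level, info),
    rt_config:set(rt_versions, VersionMap),
    rt_config:load(Config),
    Found = rt_local:which("curl"),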
@@ -18,7 +18,7 @@
 ]}.
 
 {escript_incl_apps, [lager, getopt, riakhttpc, riakc, ibrowse, mochiweb, kvc]}.
-{escript_emu_args, "%%! +K true +P 10000 -env ERL_MAX_PORTS 10000\n"}.
+{escript_emu_args, "%%! -escript main riak_test_escript +K true +P 10000 -env ERL_MAX_PORTS 10000\n"}.
 {plugin_dir, "src"}.
 {plugins, [rebar_riak_test_plugin]}.
 {riak_test, [
@@ -1,6 +1,6 @@
 %% -------------------------------------------------------------------
 %%
-%% Copyright (c) 2012 Basho Technologies, Inc.
+%% Copyright (c) 2013 Basho Technologies, Inc.
 %%
 %% This file is provided to you under the Apache License,
 %% Version 2.0 (the "License"); you may not use this file
@@ -51,8 +51,8 @@ get_schema(Platform) ->
     get_schema(Platform, 3).
 
 get_schema(Platform, Retries) ->
-    Host = rt:config(giddyup_host),
-    Project = rt:config(rt_project),
+    Host = rt_config:get(giddyup_host),
+    Project = rt_config:get(rt_project),
     Version = rt:get_version(),
     URL = lists:flatten(io_lib:format("http://~s/projects/~s?platform=~s&version=~s", [Host, Project, Platform, Version])),
     lager:info("giddyup url: ~s", [URL]),
@@ -72,7 +72,7 @@ get_schema(Platform, Retries) ->
 
 -spec post_result([{atom(), term()}]) -> atom().
 post_result(TestResult) ->
-    Host = rt:config(giddyup_host),
+    Host = rt_config:get(giddyup_host),
     URL = "http://" ++ Host ++ "/test_results",
     lager:info("giddyup url: ~s", [URL]),
     check_ibrowse(),
@@ -104,7 +104,7 @@ post_result(TestResult) ->
     end.
 
 basic_auth() ->
-    {basic_auth, {rt:config(giddyup_user), rt:config(giddyup_password)}}.
+    {basic_auth, {rt_config:get(giddyup_user), rt_config:get(giddyup_password)}}.
 
 check_ibrowse() ->
     try sys:get_status(ibrowse) of
@@ -115,4 +115,4 @@ check_ibrowse() ->
             lager:error("Restarting ibrowse"),
             application:stop(ibrowse),
             application:start(ibrowse)
     end.
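For orientation (not part of the diff): a sketch of the result proplist that post_result/1 above expects, using the metadata keys assembled in riak_test_escript.erl further down. The test name, version, and other values here are invented.

    report_example() ->
        TestResult = [{test, verify_build_cluster},   %% example module name, invented
                      {status, pass},
                      {reason, undefined},
                      {id, -1},
                      {platform, <<"local">>},
                      {version, <<"1.3.0">>},         %% invented version string
                      {project, <<"riak">>},
                      {backend, bitcask}],
        giddyup:post_result(TestResult).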
@@ -1,6 +1,6 @@
 %% -------------------------------------------------------------------
 %%
-%% Copyright (c) 2012 Basho Technologies, Inc.
+%% Copyright (c) 2013 Basho Technologies, Inc.
 %%
 %% This file is provided to you under the Apache License,
 %% Version 2.0 (the "License"); you may not use this file
@@ -18,336 +18,7 @@
|
||||
%%
|
||||
%% -------------------------------------------------------------------
|
||||
|
||||
%% @private
|
||||
-module(riak_test).
|
||||
-export([main/1]).
|
||||
|
||||
%% Define the riak_test behavior
|
||||
-callback confirm() -> pass | fail.
|
||||
|
||||
add_deps(Path) ->
|
||||
{ok, Deps} = file:list_dir(Path),
|
||||
[code:add_path(lists:append([Path, "/", Dep, "/ebin"])) || Dep <- Deps],
|
||||
ok.
|
||||
|
||||
cli_options() ->
|
||||
%% Option Name, Short Code, Long Code, Argument Spec, Help Message
|
||||
[
|
||||
{help, $h, "help", undefined, "Print this usage page"},
|
||||
{config, $c, "conf", string, "specifies the project configuration"},
|
||||
{tests, $t, "tests", string, "specifies which tests to run"},
|
||||
{suites, $s, "suites", string, "which suites to run"},
|
||||
{dir, $d, "dir", string, "run all tests in the specified directory"},
|
||||
{verbose, $v, "verbose", undefined, "verbose output"},
|
||||
{outdir, $o, "outdir", string, "output directory"},
|
||||
{backend, $b, "backend", atom, "backend to test [memory | bitcask | eleveldb]"},
|
||||
{upgrade_version, $u, "upgrade", atom, "which version to upgrade from [ previous | legacy ]"},
|
||||
{report, $r, "report", string, "you're reporting an official test run, provide platform info (e.g. ubuntu-1204-64)\nUse 'config' if you want to pull from ~/.riak_test.config"}
|
||||
].
|
||||
|
||||
print_help() ->
|
||||
getopt:usage(cli_options(),
|
||||
escript:script_name()),
|
||||
halt(0).
|
||||
|
||||
run_help([]) -> true;
|
||||
run_help(ParsedArgs) ->
|
||||
lists:member(help, ParsedArgs).
|
||||
|
||||
main(Args) ->
|
||||
case filelib:is_dir("./ebin") of
|
||||
true ->
|
||||
code:add_patha("./ebin");
|
||||
_ ->
|
||||
meh
|
||||
end,
|
||||
|
||||
register(riak_test, self()),
|
||||
{ParsedArgs, HarnessArgs} = case getopt:parse(cli_options(), Args) of
|
||||
{ok, {P, H}} -> {P, H};
|
||||
_ -> print_help()
|
||||
end,
|
||||
|
||||
case run_help(ParsedArgs) of
|
||||
true -> print_help();
|
||||
_ -> ok
|
||||
end,
|
||||
|
||||
%% ibrowse
|
||||
application:load(ibrowse),
|
||||
application:start(ibrowse),
|
||||
%% Start Lager
|
||||
application:load(lager),
|
||||
Config = proplists:get_value(config, ParsedArgs),
|
||||
|
||||
%% Loads application defaults
|
||||
application:load(riak_test),
|
||||
|
||||
%% Loads from ~/.riak_test.config
|
||||
rt:load_config(Config),
|
||||
|
||||
%% Ensure existence of scratch_dir
|
||||
case file:make_dir(rt:config(rt_scratch_dir)) of
|
||||
ok -> great;
|
||||
{eexist, _} -> great;
|
||||
{ErrorType, ErrorReason} -> lager:error("Could not create scratch dir, {~p, ~p}", [ErrorType, ErrorReason])
|
||||
end,
|
||||
|
||||
%% File output
|
||||
Outdir = proplists:get_value(outdir, ParsedArgs),
|
||||
ConsoleLagerLevel = case Outdir of
|
||||
undefined -> rt:config(lager_level, info);
|
||||
_ ->
|
||||
filelib:ensure_dir(Outdir),
|
||||
notice
|
||||
end,
|
||||
|
||||
application:set_env(lager, handlers, [{lager_console_backend, ConsoleLagerLevel}]),
|
||||
lager:start(),
|
||||
|
||||
%% Report
|
||||
Report = case proplists:get_value(report, ParsedArgs, undefined) of
|
||||
undefined -> undefined;
|
||||
"config" -> rt:config(platform, undefined);
|
||||
R -> R
|
||||
end,
|
||||
|
||||
Verbose = proplists:is_defined(verbose, ParsedArgs),
|
||||
|
||||
Suites = proplists:get_all_values(suites, ParsedArgs),
|
||||
case Suites of
|
||||
[] -> ok;
|
||||
_ -> io:format("Suites are not currently supported.")
|
||||
end,
|
||||
|
||||
CommandLineTests = parse_command_line_tests(ParsedArgs),
|
||||
Tests = which_tests_to_run(Report, CommandLineTests),
|
||||
|
||||
case Tests of
|
||||
[] ->
|
||||
lager:warning("No tests are scheduled to run"),
|
||||
init:stop(1);
|
||||
_ -> keep_on_keepin_on
|
||||
end,
|
||||
|
||||
io:format("Tests to run: ~p~n", [Tests]),
|
||||
%% Two hard-coded deps...
|
||||
add_deps(rt:get_deps()),
|
||||
add_deps("deps"),
|
||||
|
||||
[add_deps(Dep) || Dep <- rt:config(rt_deps, [])],
|
||||
ENode = rt:config(rt_nodename, 'riak_test@127.0.0.1'),
|
||||
Cookie = rt:config(rt_cookie, riak),
|
||||
[] = os:cmd("epmd -daemon"),
|
||||
net_kernel:start([ENode]),
|
||||
erlang:set_cookie(node(), Cookie),
|
||||
|
||||
TestResults = lists:filter(fun results_filter/1, [ run_test(Test, Outdir, TestMetaData, Report, HarnessArgs, length(Tests)) || {Test, TestMetaData} <- Tests]),
|
||||
print_summary(TestResults, Verbose),
|
||||
|
||||
case {length(TestResults), proplists:get_value(status, hd(TestResults))} of
|
||||
{1, fail} ->
|
||||
so_kill_riak_maybe();
|
||||
_ ->
|
||||
lager:info("Multiple tests run or no failure"),
|
||||
rt:teardown()
|
||||
end,
|
||||
ok.
|
||||
|
||||
parse_command_line_tests(ParsedArgs) ->
|
||||
Backends = case proplists:get_all_values(backend, ParsedArgs) of
|
||||
[] -> [undefined];
|
||||
Other -> Other
|
||||
end,
|
||||
Upgrades = case proplists:get_all_values(upgrade_version, ParsedArgs) of
|
||||
[] -> [undefined];
|
||||
UpgradeList -> UpgradeList
|
||||
end,
|
||||
%% Parse Command Line Tests
|
||||
{CodePaths, SpecificTests} =
|
||||
lists:foldl(fun extract_test_names/2,
|
||||
{[], []},
|
||||
proplists:get_all_values(tests, ParsedArgs)),
|
||||
[code:add_patha(CodePath) || CodePath <- CodePaths,
|
||||
CodePath /= "."],
|
||||
Dirs = proplists:get_all_values(dir, ParsedArgs),
|
||||
DirTests = lists:append([load_tests_in_dir(Dir) || Dir <- Dirs]),
|
||||
lists:foldl(fun(Test, Tests) ->
|
||||
[{
|
||||
list_to_atom(Test),
|
||||
[
|
||||
{id, -1},
|
||||
{platform, <<"local">>},
|
||||
{version, rt:get_version()},
|
||||
{project, list_to_binary(rt:config(rt_project, "undefined"))}
|
||||
] ++
|
||||
[ {backend, Backend} || Backend =/= undefined ] ++
|
||||
[ {upgrade_version, Upgrade} || Upgrade =/= undefined ]}
|
||||
|| Backend <- Backends,
|
||||
Upgrade <- Upgrades ] ++ Tests
|
||||
end, [], lists:usort(DirTests ++ SpecificTests)).
|
||||
|
||||
extract_test_names(Test, {CodePaths, TestNames}) ->
|
||||
{[filename:dirname(Test) | CodePaths],
|
||||
[filename:rootname(filename:basename(Test)) | TestNames]}.
|
||||
|
||||
which_tests_to_run(undefined, CommandLineTests) ->
|
||||
{Tests, NonTests} =
|
||||
lists:partition(fun is_runnable_test/1, CommandLineTests),
|
||||
lager:info("These modules are not runnable tests: ~p",
|
||||
[[NTMod || {NTMod, _} <- NonTests]]),
|
||||
Tests;
|
||||
which_tests_to_run(Platform, []) -> giddyup:get_suite(Platform);
|
||||
which_tests_to_run(Platform, CommandLineTests) ->
|
||||
Suite = filter_zip_suite(Platform, CommandLineTests),
|
||||
{Tests, NonTests} =
|
||||
lists:partition(fun is_runnable_test/1,
|
||||
lists:foldr(fun filter_merge_tests/2, [], Suite)),
|
||||
|
||||
lager:info("These modules are not runnable tests: ~p",
|
||||
[[NTMod || {NTMod, _} <- NonTests]]),
|
||||
Tests.
|
||||
|
||||
filter_zip_suite(Platform, CommandLineTests) ->
|
||||
[ {SModule, SMeta, CMeta} || {SModule, SMeta} <- giddyup:get_suite(Platform),
|
||||
{CModule, CMeta} <- CommandLineTests,
|
||||
SModule =:= CModule].
|
||||
|
||||
filter_merge_tests({Module, SMeta, CMeta}, Tests) ->
|
||||
case filter_merge_meta(SMeta, CMeta, [backend, upgrade_version]) of
|
||||
false ->
|
||||
Tests;
|
||||
Meta ->
|
||||
[{Module, Meta}|Tests]
|
||||
end.
|
||||
|
||||
filter_merge_meta(SMeta, _CMeta, []) ->
|
||||
SMeta;
|
||||
filter_merge_meta(SMeta, CMeta, [Field|Rest]) ->
|
||||
case {kvc:value(Field, SMeta, undefined), kvc:value(Field, CMeta, undefined)} of
|
||||
{X, X} ->
|
||||
filter_merge_meta(SMeta, CMeta, Rest);
|
||||
{_, undefined} ->
|
||||
filter_merge_meta(SMeta, CMeta, Rest);
|
||||
{undefined, X} ->
|
||||
filter_merge_meta(lists:keystore(Field, 1, SMeta, {Field, X}), CMeta, Rest);
|
||||
_ ->
|
||||
false
|
||||
end.
|
||||
|
||||
%% Check for api compatibility
|
||||
is_runnable_test({TestModule, _}) ->
|
||||
code:ensure_loaded(TestModule),
|
||||
erlang:function_exported(TestModule, confirm, 0).
|
||||
|
||||
run_test(Test, Outdir, TestMetaData, Report, _HarnessArgs, NumTests) ->
|
||||
SingleTestResult = riak_test_runner:confirm(Test, Outdir, TestMetaData),
|
||||
case NumTests of
|
||||
1 -> keep_them_up;
|
||||
_ -> rt:teardown()
|
||||
end,
|
||||
case Report of
|
||||
undefined -> ok;
|
||||
_ ->
|
||||
%% Old code for concatenating log files for upload to giddyup
|
||||
%% They're too big now, causing problems which will be solved by
|
||||
%% GiddyUp's new Artifact feature, coming soon from a Cribbs near you.
|
||||
|
||||
%% The point is, this is here in case we need to turn this back on
|
||||
%% before artifacts are ready. And to remind jd that this is the place
|
||||
%% to write the artifact client
|
||||
|
||||
%% {log, TestLog} = lists:keyfind(log, 1, SingleTestResult),
|
||||
%% NodeLogs = cat_node_logs(),
|
||||
%% EncodedNodeLogs = unicode:characters_to_binary(iolist_to_binary(NodeLogs),
|
||||
%% latin1, utf8),
|
||||
%% NewLogs = iolist_to_binary([TestLog, EncodedNodeLogs]),
|
||||
%% ResultWithNodeLogs = lists:keyreplace(log, 1, SingleTestResult,
|
||||
%% {log, NewLogs}),
|
||||
%% giddyup:post_result(ResultWithNodeLogs)
|
||||
giddyup:post_result(SingleTestResult)
|
||||
end,
|
||||
SingleTestResult.
|
||||
|
||||
print_summary(TestResults, Verbose) ->
|
||||
io:format("~nTest Results:~n"),
|
||||
|
||||
Results = [
|
||||
[ atom_to_list(proplists:get_value(test, SingleTestResult)) ++ "-" ++
|
||||
backend_list(proplists:get_value(backend, SingleTestResult)),
|
||||
proplists:get_value(status, SingleTestResult),
|
||||
proplists:get_value(reason, SingleTestResult)]
|
||||
|| SingleTestResult <- TestResults],
|
||||
Width = test_name_width(Results),
|
||||
|
||||
Print = fun(Test, Status, Reason) ->
|
||||
case {Status, Verbose} of
|
||||
{fail, true} -> io:format("~s: ~s ~p~n", [string:left(Test, Width), Status, Reason]);
|
||||
_ -> io:format("~s: ~s~n", [string:left(Test, Width), Status])
|
||||
end
|
||||
end,
|
||||
[ Print(Test, Status, Reason) || [Test, Status, Reason] <- Results],
|
||||
|
||||
PassCount = length(lists:filter(fun(X) -> proplists:get_value(status, X) =:= pass end, TestResults)),
|
||||
FailCount = length(lists:filter(fun(X) -> proplists:get_value(status, X) =:= fail end, TestResults)),
|
||||
io:format("---------------------------------------------~n"),
|
||||
io:format("~w Tests Failed~n", [FailCount]),
|
||||
io:format("~w Tests Passed~n", [PassCount]),
|
||||
Percentage = case PassCount == 0 andalso FailCount == 0 of
|
||||
true -> 0;
|
||||
false -> (PassCount / (PassCount + FailCount)) * 100
|
||||
end,
|
||||
io:format("That's ~w% for those keeping score~n", [Percentage]),
|
||||
ok.
|
||||
|
||||
test_name_width(Results) ->
|
||||
lists:max([ length(X) || [X | _T] <- Results ]).
|
||||
|
||||
backend_list(Backend) when is_atom(Backend) ->
|
||||
atom_to_list(Backend);
|
||||
backend_list(Backends) when is_list(Backends) ->
|
||||
FoldFun = fun(X, []) ->
|
||||
atom_to_list(X);
|
||||
(X, Acc) ->
|
||||
Acc ++ "," ++ atom_to_list(X)
|
||||
end,
|
||||
lists:foldl(FoldFun, [], Backends).
|
||||
|
||||
results_filter(Result) ->
|
||||
case proplists:get_value(status, Result) of
|
||||
not_a_runnable_test ->
|
||||
false;
|
||||
_ ->
|
||||
true
|
||||
end.
|
||||
|
||||
load_tests_in_dir(Dir) ->
|
||||
case filelib:is_dir(Dir) of
|
||||
true ->
|
||||
code:add_path(Dir),
|
||||
lists:sort([ string:substr(Filename, 1, length(Filename) - 5) || Filename <- filelib:wildcard("*.beam", Dir)]);
|
||||
_ -> io:format("~s is not a dir!~n", [Dir])
|
||||
end.
|
||||
|
||||
so_kill_riak_maybe() ->
|
||||
io:format("~n~nSo, we find ourselves in a tricky situation here. ~n"),
|
||||
io:format("You've run a single test, and it has failed.~n"),
|
||||
io:format("Would you like to leave Riak running in order to debug?~n"),
|
||||
Input = io:get_chars("[Y/n] ", 1),
|
||||
case Input of
|
||||
"n" -> rt:teardown();
|
||||
"N" -> rt:teardown();
|
||||
_ ->
|
||||
io:format("Leaving Riak Up... "),
|
||||
rt:whats_up()
|
||||
end.
|
||||
|
||||
%% cat_node_logs() ->
|
||||
%% Files = rt:get_node_logs(),
|
||||
%% Output = io_lib:format("================ Printing node logs and crash dumps ================~n~n", []),
|
||||
%% cat_node_logs(Files, [Output]).
|
||||
%%
|
||||
%% cat_node_logs([], Output) -> Output;
|
||||
%% cat_node_logs([{Filename, Content}|Rest], Output) ->
|
||||
%% Log = io_lib:format("================ Log: ~s =====================~n~s~n~n", [Filename, Content]),
|
||||
%% cat_node_logs(Rest, [Output, Log]).
|
||||
-callback confirm() -> pass | fail.
src/riak_test_escript.erl (new file, 350 lines)
@@ -0,0 +1,350 @@
|
||||
%% -------------------------------------------------------------------
|
||||
%%
|
||||
%% Copyright (c) 2013 Basho Technologies, Inc.
|
||||
%%
|
||||
%% This file is provided to you under the Apache License,
|
||||
%% Version 2.0 (the "License"); you may not use this file
|
||||
%% except in compliance with the License. You may obtain
|
||||
%% a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
%% -------------------------------------------------------------------
|
||||
|
||||
%% @private
|
||||
-module(riak_test_escript).
|
||||
-export([main/1]).
|
||||
|
||||
add_deps(Path) ->
|
||||
{ok, Deps} = file:list_dir(Path),
|
||||
[code:add_path(lists:append([Path, "/", Dep, "/ebin"])) || Dep <- Deps],
|
||||
ok.
|
||||
|
||||
cli_options() ->
|
||||
%% Option Name, Short Code, Long Code, Argument Spec, Help Message
|
||||
[
|
||||
{help, $h, "help", undefined, "Print this usage page"},
|
||||
{config, $c, "conf", string, "specifies the project configuration"},
|
||||
{tests, $t, "tests", string, "specifies which tests to run"},
|
||||
{suites, $s, "suites", string, "which suites to run"},
|
||||
{dir, $d, "dir", string, "run all tests in the specified directory"},
|
||||
{verbose, $v, "verbose", undefined, "verbose output"},
|
||||
{outdir, $o, "outdir", string, "output directory"},
|
||||
{backend, $b, "backend", atom, "backend to test [memory | bitcask | eleveldb]"},
|
||||
{upgrade_version, $u, "upgrade", atom, "which version to upgrade from [ previous | legacy ]"},
|
||||
{report, $r, "report", string, "you're reporting an official test run, provide platform info (e.g. ubuntu-1204-64)\nUse 'config' if you want to pull from ~/.riak_test.config"}
|
||||
].
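As a usage sketch (not part of the commit), this is roughly how the option spec above behaves when fed to getopt; the config name and test name are invented:

    example_parse() ->
        Args = ["-c", "default", "-t", "my_test", "-b", "bitcask", "-v"],
        {ok, {ParsedArgs, HarnessArgs}} = getopt:parse(cli_options(), Args),
        true      = proplists:is_defined(verbose, ParsedArgs),
        bitcask   = proplists:get_value(backend, ParsedArgs),   %% atom-typed option
        "my_test" = proplists:get_value(tests, ParsedArgs),     %% string-typed option
        [] = HarnessArgs.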
|
||||
|
||||
print_help() ->
|
||||
getopt:usage(cli_options(),
|
||||
escript:script_name()),
|
||||
halt(0).
|
||||
|
||||
run_help([]) -> true;
|
||||
run_help(ParsedArgs) ->
|
||||
lists:member(help, ParsedArgs).
|
||||
|
||||
main(Args) ->
|
||||
case filelib:is_dir("./ebin") of
|
||||
true ->
|
||||
code:add_patha("./ebin");
|
||||
_ ->
|
||||
meh
|
||||
end,
|
||||
|
||||
register(riak_test, self()),
|
||||
{ParsedArgs, HarnessArgs} = case getopt:parse(cli_options(), Args) of
|
||||
{ok, {P, H}} -> {P, H};
|
||||
_ -> print_help()
|
||||
end,
|
||||
|
||||
case run_help(ParsedArgs) of
|
||||
true -> print_help();
|
||||
_ -> ok
|
||||
end,
|
||||
|
||||
%% ibrowse
|
||||
application:load(ibrowse),
|
||||
application:start(ibrowse),
|
||||
%% Start Lager
|
||||
application:load(lager),
|
||||
Config = proplists:get_value(config, ParsedArgs),
|
||||
|
||||
%% Loads application defaults
|
||||
application:load(riak_test),
|
||||
|
||||
%% Loads from ~/.riak_test.config
|
||||
rt_config:load(Config),
|
||||
|
||||
%% Ensure existence of scratch_dir
|
||||
case file:make_dir(rt_config:get(rt_scratch_dir)) of
|
||||
ok -> great;
|
||||
{eexist, _} -> great;
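%% Editor's note: file:make_dir/1 returns ok | {error, Reason}, so an existing
%% directory surfaces as {error, eexist} and is matched by the clause below
%% rather than by this pattern.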
|
||||
{ErrorType, ErrorReason} -> lager:error("Could not create scratch dir, {~p, ~p}", [ErrorType, ErrorReason])
|
||||
end,
|
||||
|
||||
%% File output
|
||||
Outdir = proplists:get_value(outdir, ParsedArgs),
|
||||
ConsoleLagerLevel = case Outdir of
|
||||
undefined -> rt_config:get(lager_level, info);
|
||||
_ ->
|
||||
filelib:ensure_dir(Outdir),
|
||||
notice
|
||||
end,
|
||||
|
||||
application:set_env(lager, handlers, [{lager_console_backend, ConsoleLagerLevel}]),
|
||||
lager:start(),
|
||||
|
||||
%% Report
|
||||
Report = case proplists:get_value(report, ParsedArgs, undefined) of
|
||||
undefined -> undefined;
|
||||
"config" -> rt_config:get(platform, undefined);
|
||||
R -> R
|
||||
end,
|
||||
|
||||
Verbose = proplists:is_defined(verbose, ParsedArgs),
|
||||
|
||||
Suites = proplists:get_all_values(suites, ParsedArgs),
|
||||
case Suites of
|
||||
[] -> ok;
|
||||
_ -> io:format("Suites are not currently supported.")
|
||||
end,
|
||||
|
||||
CommandLineTests = parse_command_line_tests(ParsedArgs),
|
||||
Tests = which_tests_to_run(Report, CommandLineTests),
|
||||
|
||||
case Tests of
|
||||
[] ->
|
||||
lager:warning("No tests are scheduled to run"),
|
||||
init:stop(1);
|
||||
_ -> keep_on_keepin_on
|
||||
end,
|
||||
|
||||
io:format("Tests to run: ~p~n", [Tests]),
|
||||
%% Two hard-coded deps...
|
||||
add_deps(rt:get_deps()),
|
||||
add_deps("deps"),
|
||||
|
||||
[add_deps(Dep) || Dep <- rt_config:get(rt_deps, [])],
|
||||
ENode = rt_config:get(rt_nodename, 'riak_test@127.0.0.1'),
|
||||
Cookie = rt_config:get(rt_cookie, riak),
|
||||
[] = os:cmd("epmd -daemon"),
|
||||
net_kernel:start([ENode]),
|
||||
erlang:set_cookie(node(), Cookie),
|
||||
|
||||
TestResults = lists:filter(fun results_filter/1, [ run_test(Test, Outdir, TestMetaData, Report, HarnessArgs, length(Tests)) || {Test, TestMetaData} <- Tests]),
|
||||
print_summary(TestResults, Verbose),
|
||||
|
||||
case {length(TestResults), proplists:get_value(status, hd(TestResults))} of
|
||||
{1, fail} ->
|
||||
so_kill_riak_maybe();
|
||||
_ ->
|
||||
lager:info("Multiple tests run or no failure"),
|
||||
rt:teardown()
|
||||
end,
|
||||
ok.
|
||||
|
||||
parse_command_line_tests(ParsedArgs) ->
|
||||
Backends = case proplists:get_all_values(backend, ParsedArgs) of
|
||||
[] -> [undefined];
|
||||
Other -> Other
|
||||
end,
|
||||
Upgrades = case proplists:get_all_values(upgrade_version, ParsedArgs) of
|
||||
[] -> [undefined];
|
||||
UpgradeList -> UpgradeList
|
||||
end,
|
||||
%% Parse Command Line Tests
|
||||
{CodePaths, SpecificTests} =
|
||||
lists:foldl(fun extract_test_names/2,
|
||||
{[], []},
|
||||
proplists:get_all_values(tests, ParsedArgs)),
|
||||
[code:add_patha(CodePath) || CodePath <- CodePaths,
|
||||
CodePath /= "."],
|
||||
Dirs = proplists:get_all_values(dir, ParsedArgs),
|
||||
DirTests = lists:append([load_tests_in_dir(Dir) || Dir <- Dirs]),
|
||||
lists:foldl(fun(Test, Tests) ->
|
||||
[{
|
||||
list_to_atom(Test),
|
||||
[
|
||||
{id, -1},
|
||||
{platform, <<"local">>},
|
||||
{version, rt:get_version()},
|
||||
{project, list_to_binary(rt_config:get(rt_project, "undefined"))}
|
||||
] ++
|
||||
[ {backend, Backend} || Backend =/= undefined ] ++
|
||||
[ {upgrade_version, Upgrade} || Upgrade =/= undefined ]}
|
||||
|| Backend <- Backends,
|
||||
Upgrade <- Upgrades ] ++ Tests
|
||||
end, [], lists:usort(DirTests ++ SpecificTests)).
|
||||
|
||||
extract_test_names(Test, {CodePaths, TestNames}) ->
|
||||
{[filename:dirname(Test) | CodePaths],
|
||||
[filename:rootname(filename:basename(Test)) | TestNames]}.
|
||||
|
||||
which_tests_to_run(undefined, CommandLineTests) ->
|
||||
{Tests, NonTests} =
|
||||
lists:partition(fun is_runnable_test/1, CommandLineTests),
|
||||
lager:info("These modules are not runnable tests: ~p",
|
||||
[[NTMod || {NTMod, _} <- NonTests]]),
|
||||
Tests;
|
||||
which_tests_to_run(Platform, []) -> giddyup:get_suite(Platform);
|
||||
which_tests_to_run(Platform, CommandLineTests) ->
|
||||
Suite = filter_zip_suite(Platform, CommandLineTests),
|
||||
{Tests, NonTests} =
|
||||
lists:partition(fun is_runnable_test/1,
|
||||
lists:foldr(fun filter_merge_tests/2, [], Suite)),
|
||||
|
||||
lager:info("These modules are not runnable tests: ~p",
|
||||
[[NTMod || {NTMod, _} <- NonTests]]),
|
||||
Tests.
|
||||
|
||||
filter_zip_suite(Platform, CommandLineTests) ->
|
||||
[ {SModule, SMeta, CMeta} || {SModule, SMeta} <- giddyup:get_suite(Platform),
|
||||
{CModule, CMeta} <- CommandLineTests,
|
||||
SModule =:= CModule].
|
||||
|
||||
filter_merge_tests({Module, SMeta, CMeta}, Tests) ->
|
||||
case filter_merge_meta(SMeta, CMeta, [backend, upgrade_version]) of
|
||||
false ->
|
||||
Tests;
|
||||
Meta ->
|
||||
[{Module, Meta}|Tests]
|
||||
end.
|
||||
|
||||
filter_merge_meta(SMeta, _CMeta, []) ->
|
||||
SMeta;
|
||||
filter_merge_meta(SMeta, CMeta, [Field|Rest]) ->
|
||||
case {kvc:value(Field, SMeta, undefined), kvc:value(Field, CMeta, undefined)} of
|
||||
{X, X} ->
|
||||
filter_merge_meta(SMeta, CMeta, Rest);
|
||||
{_, undefined} ->
|
||||
filter_merge_meta(SMeta, CMeta, Rest);
|
||||
{undefined, X} ->
|
||||
filter_merge_meta(lists:keystore(Field, 1, SMeta, {Field, X}), CMeta, Rest);
|
||||
_ ->
|
||||
false
|
||||
end.
|
||||
|
||||
%% Check for api compatibility
|
||||
is_runnable_test({TestModule, _}) ->
|
||||
code:ensure_loaded(TestModule),
|
||||
erlang:function_exported(TestModule, confirm, 0).
|
||||
|
||||
run_test(Test, Outdir, TestMetaData, Report, _HarnessArgs, NumTests) ->
|
||||
SingleTestResult = riak_test_runner:confirm(Test, Outdir, TestMetaData),
|
||||
case NumTests of
|
||||
1 -> keep_them_up;
|
||||
_ -> rt:teardown()
|
||||
end,
|
||||
case Report of
|
||||
undefined -> ok;
|
||||
_ ->
|
||||
%% Old code for concatenating log files for upload to giddyup
|
||||
%% They're too big now, causing problems which will be solved by
|
||||
%% GiddyUp's new Artifact feature, coming soon from a Cribbs near you.
|
||||
|
||||
%% The point is, this is here in case we need to turn this back on
|
||||
%% before artifacts are ready. And to remind jd that this is the place
|
||||
%% to write the artifact client
|
||||
|
||||
%% {log, TestLog} = lists:keyfind(log, 1, SingleTestResult),
|
||||
%% NodeLogs = cat_node_logs(),
|
||||
%% EncodedNodeLogs = unicode:characters_to_binary(iolist_to_binary(NodeLogs),
|
||||
%% latin1, utf8),
|
||||
%% NewLogs = iolist_to_binary([TestLog, EncodedNodeLogs]),
|
||||
%% ResultWithNodeLogs = lists:keyreplace(log, 1, SingleTestResult,
|
||||
%% {log, NewLogs}),
|
||||
%% giddyup:post_result(ResultWithNodeLogs)
|
||||
giddyup:post_result(SingleTestResult)
|
||||
end,
|
||||
SingleTestResult.
|
||||
|
||||
print_summary(TestResults, Verbose) ->
|
||||
io:format("~nTest Results:~n"),
|
||||
|
||||
Results = [
|
||||
[ atom_to_list(proplists:get_value(test, SingleTestResult)) ++ "-" ++
|
||||
backend_list(proplists:get_value(backend, SingleTestResult)),
|
||||
proplists:get_value(status, SingleTestResult),
|
||||
proplists:get_value(reason, SingleTestResult)]
|
||||
|| SingleTestResult <- TestResults],
|
||||
Width = test_name_width(Results),
|
||||
|
||||
Print = fun(Test, Status, Reason) ->
|
||||
case {Status, Verbose} of
|
||||
{fail, true} -> io:format("~s: ~s ~p~n", [string:left(Test, Width), Status, Reason]);
|
||||
_ -> io:format("~s: ~s~n", [string:left(Test, Width), Status])
|
||||
end
|
||||
end,
|
||||
[ Print(Test, Status, Reason) || [Test, Status, Reason] <- Results],
|
||||
|
||||
PassCount = length(lists:filter(fun(X) -> proplists:get_value(status, X) =:= pass end, TestResults)),
|
||||
FailCount = length(lists:filter(fun(X) -> proplists:get_value(status, X) =:= fail end, TestResults)),
|
||||
io:format("---------------------------------------------~n"),
|
||||
io:format("~w Tests Failed~n", [FailCount]),
|
||||
io:format("~w Tests Passed~n", [PassCount]),
|
||||
Percentage = case PassCount == 0 andalso FailCount == 0 of
|
||||
true -> 0;
|
||||
false -> (PassCount / (PassCount + FailCount)) * 100
|
||||
end,
|
||||
io:format("That's ~w% for those keeping score~n", [Percentage]),
|
||||
ok.
|
||||
|
||||
test_name_width(Results) ->
|
||||
lists:max([ length(X) || [X | _T] <- Results ]).
|
||||
|
||||
backend_list(Backend) when is_atom(Backend) ->
|
||||
atom_to_list(Backend);
|
||||
backend_list(Backends) when is_list(Backends) ->
|
||||
FoldFun = fun(X, []) ->
|
||||
atom_to_list(X);
|
||||
(X, Acc) ->
|
||||
Acc ++ "," ++ atom_to_list(X)
|
||||
end,
|
||||
lists:foldl(FoldFun, [], Backends).
|
||||
|
||||
results_filter(Result) ->
|
||||
case proplists:get_value(status, Result) of
|
||||
not_a_runnable_test ->
|
||||
false;
|
||||
_ ->
|
||||
true
|
||||
end.
|
||||
|
||||
load_tests_in_dir(Dir) ->
|
||||
case filelib:is_dir(Dir) of
|
||||
true ->
|
||||
code:add_path(Dir),
|
||||
lists:sort([ string:substr(Filename, 1, length(Filename) - 5) || Filename <- filelib:wildcard("*.beam", Dir)]);
|
||||
_ -> io:format("~s is not a dir!~n", [Dir])
|
||||
end.
|
||||
|
||||
so_kill_riak_maybe() ->
|
||||
io:format("~n~nSo, we find ourselves in a tricky situation here. ~n"),
|
||||
io:format("You've run a single test, and it has failed.~n"),
|
||||
io:format("Would you like to leave Riak running in order to debug?~n"),
|
||||
Input = io:get_chars("[Y/n] ", 1),
|
||||
case Input of
|
||||
"n" -> rt:teardown();
|
||||
"N" -> rt:teardown();
|
||||
_ ->
|
||||
io:format("Leaving Riak Up... "),
|
||||
rt:whats_up()
|
||||
end.
|
||||
|
||||
%% cat_node_logs() ->
|
||||
%% Files = rt:get_node_logs(),
|
||||
%% Output = io_lib:format("================ Printing node logs and crash dumps ================~n~n", []),
|
||||
%% cat_node_logs(Files, [Output]).
|
||||
%%
|
||||
%% cat_node_logs([], Output) -> Output;
|
||||
%% cat_node_logs([{Filename, Content}|Rest], Output) ->
|
||||
%% Log = io_lib:format("================ Log: ~s =====================~n~s~n~n", [Filename, Content]),
|
||||
%% cat_node_logs(Rest, [Output, Log]).
@@ -1,6 +1,6 @@
 %% -------------------------------------------------------------------
 %%
-%% Copyright (c) 2012 Basho Technologies, Inc.
+%% Copyright (c) 2013 Basho Technologies, Inc.
 %%
 %% This file is provided to you under the Apache License,
 %% Version 2.0 (the "License"); you may not use this file
@@ -71,11 +71,11 @@ start_lager_backend(TestModule, Outdir) ->
         _ ->
             gen_event:add_handler(lager_event, lager_file_backend,
                 {Outdir ++ "/" ++ atom_to_list(TestModule) ++ ".dat_test_output",
-                rt:config(lager_level, info), 10485760, "$D0", 1}),
-            lager:set_loglevel(lager_file_backend, rt:config(lager_level, info))
+                rt_config:get(lager_level, info), 10485760, "$D0", 1}),
+            lager:set_loglevel(lager_file_backend, rt_config:get(lager_level, info))
     end,
-    gen_event:add_handler(lager_event, riak_test_lager_backend, [rt:config(lager_level, info), false]),
-    lager:set_loglevel(riak_test_lager_backend, rt:config(lager_level, info)).
+    gen_event:add_handler(lager_event, riak_test_lager_backend, [rt_config:get(lager_level, info), false]),
+    lager:set_loglevel(riak_test_lager_backend, rt_config:get(lager_level, info)).
 
 stop_lager_backend() ->
     gen_event:delete_handler(lager_event, lager_file_backend, []),
@@ -123,11 +123,11 @@ check_prereqs(Module) ->
     try Module:module_info(attributes) of
         Attrs ->
             Prereqs = proplists:get_all_values(prereq, Attrs),
-            P2 = [ {Prereq, rt:which(Prereq)} || Prereq <- Prereqs],
+            P2 = [ {Prereq, rt_local:which(Prereq)} || Prereq <- Prereqs],
            lager:info("~s prereqs: ~p", [Module, P2]),
            [ lager:warning("~s prereq '~s' not installed.", [Module, P]) || {P, false} <- P2],
            lists:all(fun({_, Present}) -> Present end, P2)
    catch
        _DontCare:_Really ->
            not_present
    end.
src/rt.erl (265 changed lines)
@@ -1,6 +1,6 @@
 %% -------------------------------------------------------------------
 %%
-%% Copyright (c) 2012 Basho Technologies, Inc.
+%% Copyright (c) 2013 Basho Technologies, Inc.
 %%
 %% This file is provided to you under the Apache License,
 %% Version 2.0 (the "License"); you may not use this file
@ -29,9 +29,9 @@
|
||||
-export([
|
||||
admin/2,
|
||||
assert_nodes_agree_about_ownership/1,
|
||||
assert_which/1,
|
||||
async_start/1,
|
||||
attach/2,
|
||||
brutal_kill/1,
|
||||
build_cluster/1,
|
||||
build_cluster/2,
|
||||
build_cluster/3,
|
||||
@ -43,35 +43,25 @@
|
||||
clean_data_dir/2,
|
||||
cmd/1,
|
||||
cmd/2,
|
||||
config/1,
|
||||
config/2,
|
||||
config_or_os_env/1,
|
||||
config_or_os_env/2,
|
||||
connection_info/1,
|
||||
console/2,
|
||||
deploy_nodes/1,
|
||||
deploy_nodes/2,
|
||||
down/2,
|
||||
download/1,
|
||||
enable_search_hook/2,
|
||||
get_deps/0,
|
||||
get_node_logs/0,
|
||||
get_os_env/1,
|
||||
get_os_env/2,
|
||||
get_ring/1,
|
||||
get_version/0,
|
||||
heal/1,
|
||||
home_dir/0,
|
||||
http_url/1,
|
||||
httpc/1,
|
||||
httpc_read/3,
|
||||
httpc_write/4,
|
||||
install_on_absence/2,
|
||||
is_mixed_cluster/1,
|
||||
is_pingable/1,
|
||||
join/2,
|
||||
leave/1,
|
||||
load_config/1,
|
||||
load_modules_on_nodes/2,
|
||||
log_to_nodes/2,
|
||||
log_to_nodes/3,
|
||||
@ -85,11 +75,11 @@
|
||||
pbc_put_dir/3,
|
||||
pbc_put_file/4,
|
||||
pmap/2,
|
||||
priv_dir/0,
|
||||
remove/2,
|
||||
riak/2,
|
||||
rpc_get_env/2,
|
||||
set_backend/1,
|
||||
set_config/2,
|
||||
setup_harness/2,
|
||||
slow_upgrade/3,
|
||||
spawn_cmd/1,
|
||||
@ -101,8 +91,6 @@
|
||||
stop/1,
|
||||
stop_and_wait/1,
|
||||
str/2,
|
||||
stream_cmd/1,
|
||||
stream_cmd/2,
|
||||
systest_read/2,
|
||||
systest_read/3,
|
||||
systest_read/5,
|
||||
@ -112,7 +100,6 @@
|
||||
teardown/0,
|
||||
update_app_config/2,
|
||||
upgrade/2,
|
||||
url_to_filename/1,
|
||||
versions/0,
|
||||
wait_for_cluster_service/2,
|
||||
wait_for_cmd/1,
|
||||
@ -134,17 +121,31 @@
|
||||
wait_until_status_ready/1,
|
||||
wait_until_transfers_complete/1,
|
||||
wait_until_unpingable/1,
|
||||
whats_up/0,
|
||||
which/1,
|
||||
brutal_kill/1
|
||||
whats_up/0
|
||||
]).
|
||||
|
||||
-define(HARNESS, (rt:config(rt_harness))).
|
||||
-define(HARNESS, (rt_config:get(rt_harness))).
|
||||
|
||||
%% @doc Return the home directory of the riak_test script.
|
||||
-spec home_dir() -> file:filename().
|
||||
home_dir() ->
|
||||
filename:dirname(filename:absname(escript:script_name())).
|
||||
priv_dir() ->
|
||||
LocalPrivDir = "./priv",
|
||||
%% XXX for some reason, code:priv_dir returns riak_test/riak_test/priv,
|
||||
%% which is wrong, so fix it.
|
||||
DepPrivDir = re:replace(code:priv_dir(riak_test), "riak_test(/riak_test)*",
|
||||
"riak_test", [{return, list}]),
|
||||
PrivDir = case {filelib:is_dir(LocalPrivDir), filelib:is_dir(DepPrivDir)} of
|
||||
{true, _} ->
|
||||
lager:debug("Local ./priv detected, using that..."),
|
||||
LocalPrivDir;
|
||||
{false, true} ->
|
||||
lager:debug("riak_test dependency priv_dir detected, using that..."),
|
||||
DepPrivDir;
|
||||
_ ->
|
||||
?assertEqual({true, bad_priv_dir}, {false, bad_priv_dir})
|
||||
end,
|
||||
|
||||
lager:info("priv dir: ~p -> ~p", [code:priv_dir(riak_test), PrivDir]),
|
||||
?assert(filelib:is_dir(PrivDir)),
|
||||
PrivDir.
|
||||
|
||||
%% @doc gets riak deps from the appropriate harness
|
||||
-spec get_deps() -> list().
|
||||
@ -158,66 +159,6 @@ str(String, Substr) ->
|
||||
_ -> true
|
||||
end.
|
||||
|
||||
%% @doc Get the value of an OS Environment variable. The arity 1 version of
|
||||
%% this function will fail the test if it is undefined.
|
||||
get_os_env(Var) ->
|
||||
case get_os_env(Var, undefined) of
|
||||
undefined ->
|
||||
lager:error("ENV['~s'] is not defined", [Var]),
|
||||
?assert(false);
|
||||
Value -> Value
|
||||
end.
|
||||
|
||||
%% @doc Get the value of an OS Environment variable. The arity 2 version of
|
||||
%% this function will return the Default if the OS var is undefined.
|
||||
get_os_env(Var, Default) ->
|
||||
case os:getenv(Var) of
|
||||
false -> Default;
|
||||
Value -> Value
|
||||
end.
|
||||
|
||||
%% @doc Wrap 'which' to give a good output if something is not installed
|
||||
which(Command) ->
|
||||
lager:info("Checking for presence of ~s", [Command]),
|
||||
Cmd = lists:flatten(io_lib:format("which ~s; echo $?", [Command])),
|
||||
case rt:str(os:cmd(Cmd), "0") of
|
||||
false ->
|
||||
lager:warning("`~s` is not installed", [Command]),
|
||||
false;
|
||||
true ->
|
||||
true
|
||||
end.
|
||||
|
||||
download(Url) ->
|
||||
lager:info("Downloading ~s", [Url]),
|
||||
Filename = url_to_filename(Url),
|
||||
case filelib:is_file(filename:join(rt:config(rt_scratch_dir), Filename)) of
|
||||
true ->
|
||||
lager:info("Got it ~p", [Filename]),
|
||||
ok;
|
||||
_ ->
|
||||
lager:info("Getting it ~p", [Filename]),
|
||||
rt:stream_cmd("curl -O -L " ++ Url, [{cd, rt:config(rt_scratch_dir)}])
|
||||
end.
|
||||
|
||||
url_to_filename(Url) ->
|
||||
lists:last(string:tokens(Url, "/")).
|
||||
%% @doc like rt:which, but asserts on failure
|
||||
assert_which(Command) ->
|
||||
?assert(rt:which(Command)).
|
||||
|
||||
%% @doc checks if Command is installed and runs InstallCommand if not
|
||||
%% ex: rt:install_on_absence("bundler", "gem install bundler --no-rdoc --no-ri"),
|
||||
install_on_absence(Command, InstallCommand) ->
|
||||
case rt:which(Command) of
|
||||
false ->
|
||||
lager:info("Attempting to install `~s` with command `~s`", [Command, InstallCommand]),
|
||||
?assertCmd(InstallCommand);
|
||||
_True ->
|
||||
ok
|
||||
end.
|
||||
|
||||
|
||||
%% @doc Rewrite the given node's app.config file, overriding the variables
|
||||
%% in the existing app.config with those in `Config'.
|
||||
update_app_config(all, Config) ->
|
||||
@ -423,46 +364,6 @@ cmd(Cmd) ->
|
||||
cmd(Cmd, Opts) ->
|
||||
?HARNESS:cmd(Cmd, Opts).
|
||||
|
||||
%% @doc pretty much the same as os:cmd/1 but it will stream the output to lager.
|
||||
%% If you're running a long running command, it will dump the output
|
||||
%% once per second, so as not to create the impression that nothing is happening.
|
||||
-spec stream_cmd(string()) -> {integer(), string()}.
|
||||
stream_cmd(Cmd) ->
|
||||
Port = open_port({spawn, binary_to_list(iolist_to_binary(Cmd))}, [stream, stderr_to_stdout, exit_status]),
|
||||
stream_cmd_loop(Port, "", "", now()).
|
||||
|
||||
%% @doc same as rt:stream_cmd/1, but with options, like open_port/2
|
||||
-spec stream_cmd(string(), string()) -> {integer(), string()}.
|
||||
stream_cmd(Cmd, Opts) ->
|
||||
Port = open_port({spawn, binary_to_list(iolist_to_binary(Cmd))}, [stream, stderr_to_stdout, exit_status] ++ Opts),
|
||||
stream_cmd_loop(Port, "", "", now()).
|
||||
|
||||
stream_cmd_loop(Port, Buffer, NewLineBuffer, Time={_MegaSecs, Secs, _MicroSecs}) ->
|
||||
receive
|
||||
{Port, {data, Data}} ->
|
||||
{_, Now, _} = now(),
|
||||
NewNewLineBuffer = case Now > Secs of
|
||||
true ->
|
||||
lager:info(NewLineBuffer),
|
||||
"";
|
||||
_ ->
|
||||
NewLineBuffer
|
||||
end,
|
||||
case rt:str(Data, "\n") of
|
||||
true ->
|
||||
lager:info(NewNewLineBuffer),
|
||||
Tokens = string:tokens(Data, "\n"),
|
||||
[ lager:info(Token) || Token <- Tokens ],
|
||||
stream_cmd_loop(Port, Buffer ++ NewNewLineBuffer ++ Data, "", Time);
|
||||
_ ->
|
||||
stream_cmd_loop(Port, Buffer, NewNewLineBuffer ++ Data, now())
|
||||
end;
|
||||
{Port, {exit_status, Status}} ->
|
||||
catch port_close(Port),
|
||||
{Status, Buffer}
|
||||
after rt:config(rt_max_wait_time) ->
|
||||
{-1, Buffer}
|
||||
end.
|
||||
%%%===================================================================
|
||||
%%% Remote code management
|
||||
%%%===================================================================
|
||||
@ -490,7 +391,7 @@ is_pingable(Node) ->
|
||||
|
||||
is_mixed_cluster(Nodes) when is_list(Nodes) ->
|
||||
%% If the nodes are bad, we don't care what version they are
|
||||
{Versions, _BadNodes} = rpc:multicall(Nodes, init, script_id, [], rt:config(rt_max_wait_time)),
|
||||
{Versions, _BadNodes} = rpc:multicall(Nodes, init, script_id, [], rt_config:get(rt_max_wait_time)),
|
||||
length(lists:usort(Versions)) > 1;
|
||||
is_mixed_cluster(Node) ->
|
||||
Nodes = rpc:call(Node, erlang, nodes, []),
|
||||
@ -523,8 +424,8 @@ wait_until(Node, Fun) ->
|
||||
wait_until(Node, Fun, fun(_N) -> fail end).
|
||||
|
||||
wait_until(Node, Fun, TimeoutFun) ->
|
||||
MaxTime = rt:config(rt_max_wait_time),
|
||||
Delay = rt:config(rt_retry_delay),
|
||||
MaxTime = rt_config:get(rt_max_wait_time),
|
||||
Delay = rt_config:get(rt_retry_delay),
|
||||
Retry = MaxTime div Delay,
|
||||
wait_until(Node, Fun, Retry, Delay, TimeoutFun).
|
||||
|
||||
@ -692,7 +593,7 @@ wait_until_unpingable(Node) ->
|
||||
%% Hard coding a 6 minute timeout on this wait only. This function is called to see that
|
||||
%% riak has stopped. Riak stop should only take about 5 minutes before its timeouts kill
|
||||
%% the process. This wait should at least wait that long.
|
||||
Delay = rt:config(rt_retry_delay),
|
||||
Delay = rt_config:get(rt_retry_delay),
|
||||
Retry = 360000 div Delay,
|
||||
?assertEqual(ok, wait_until(Node, F, Retry, Delay, TimeoutFun)),
|
||||
ok.
|
||||
@ -710,7 +611,7 @@ wait_until_registered(Node, Name) ->
|
||||
lager:info("The server with the namee ~p on ~p is not coming up.", [Name, Node]),
|
||||
fail
|
||||
end,
|
||||
Delay = rt:config(rt_retry_delay),
|
||||
Delay = rt_config:get(rt_retry_delay),
|
||||
Retry = 360000 div Delay,
|
||||
?assertEqual(ok, wait_until(Node, F, Retry, Delay, TimeoutFun)),
|
||||
ok.
|
||||
@ -1114,111 +1015,7 @@ pmap(F, L) ->
|
||||
setup_harness(Test, Args) ->
|
||||
?HARNESS:setup_harness(Test, Args).
|
||||
|
||||
%% @private
|
||||
load_config(undefined) ->
|
||||
load_dot_config("default");
|
||||
load_config(ConfigName) ->
|
||||
case load_config_file(ConfigName) of
|
||||
ok -> ok;
|
||||
{error, enoent} -> load_dot_config(ConfigName)
|
||||
end.
|
||||
|
||||
%% @private
|
||||
load_dot_config(ConfigName) ->
|
||||
case file:consult(filename:join([os:getenv("HOME"), ".riak_test.config"])) of
|
||||
{ok, Terms} ->
|
||||
%% First, set up the defaults
|
||||
case proplists:get_value(default, Terms) of
|
||||
undefined -> meh; %% No defaults set, move on.
|
||||
Default -> [set_config(Key, Value) || {Key, Value} <- Default]
|
||||
end,
|
||||
%% Now, overlay the specific project
|
||||
Config = proplists:get_value(list_to_atom(ConfigName), Terms),
|
||||
[set_config(Key, Value) || {Key, Value} <- Config],
|
||||
ok;
|
||||
{error, Reason} ->
|
||||
erlang:error("Failed to parse config file", ["~/.riak_test.config", Reason])
|
||||
end.
|
||||
|
||||
%% @private
|
||||
load_config_file(File) ->
|
||||
case file:read_file_info(File) of
|
||||
{ok, _} ->
|
||||
io:format("*********************************************************************************~n"),
|
||||
io:format("WARNING! Use of config files is now deprecated, use ~~/.riak_test.config instead.~n"),
|
||||
io:format("*********************************************************************************~n"),
|
||||
io:format("Please acknowledge that you're aware that this functionality will be gone soon.~n"),
|
||||
Input = io:get_chars("[y/N] ", 1),
|
||||
case Input of
|
||||
"y" -> ok;
|
||||
"Y" -> ok;
|
||||
_ -> exit(1)
|
||||
end;
|
||||
_ -> meh
|
||||
end,
|
||||
case file:consult(File) of
|
||||
{ok, Terms} ->
|
||||
[set_config(Key, Value) || {Key, Value} <- Terms],
|
||||
ok;
|
||||
{error, enoent} ->
|
||||
{error, enoent};
|
||||
{error, Reason} ->
|
||||
erlang:error("Failed to parse config file", [File, Reason])
|
||||
end.
|
||||
|
||||
%% @private
|
||||
set_config(Key, Value) ->
|
||||
ok = application:set_env(riak_test, Key, Value).
|
||||
|
||||
%% @private
|
||||
config(Key) ->
|
||||
case kvc:path(Key, application:get_all_env(riak_test)) of
|
||||
[] -> erlang:error("Missing configuration key", [Key]);
|
||||
Value -> Value
|
||||
end.
|
||||
|
||||
%% @private
|
||||
config(Key, Default) ->
|
||||
case kvc:path(Key, application:get_all_env(riak_test)) of
|
||||
[] -> Default;
|
||||
Value -> Value
|
||||
end.
|
||||
|
||||
-spec config_or_os_env(atom()) -> term().
|
||||
config_or_os_env(Config) ->
|
||||
OSEnvVar = to_upper(atom_to_list(Config)),
|
||||
case {get_os_env(OSEnvVar, undefined), config(Config, undefined)} of
|
||||
{undefined, undefined} ->
|
||||
MSG = io_lib:format("Neither riak_test.~p nor ENV['~p'] are defined", [Config, OSEnvVar]),
|
||||
erlang:error(binary_to_list(iolist_to_binary(MSG)));
|
||||
{undefined, V} ->
|
||||
lager:info("Found riak_test.~s: ~s", [Config, V]),
|
||||
V;
|
||||
{V, _} ->
|
||||
lager:info("Found ENV[~s]: ~s", [OSEnvVar, V]),
|
||||
rt:set_config(Config, V),
|
||||
V
|
||||
end.
|
||||
|
||||
-spec config_or_os_env(atom(), term()) -> term().
|
||||
config_or_os_env(Config, Default) ->
|
||||
OSEnvVar = to_upper(atom_to_list(Config)),
|
||||
case {get_os_env(OSEnvVar, undefined), config(Config, undefined)} of
|
||||
{undefined, undefined} -> Default;
|
||||
{undefined, V} ->
|
||||
lager:info("Found riak_test.~s: ~s", [Config, V]),
|
||||
V;
|
||||
{V, _} ->
|
||||
lager:info("Found ENV[~s]: ~s", [OSEnvVar, V]),
|
||||
rt:set_config(Config, V),
|
||||
V
|
||||
end.
|
||||
|
||||
to_upper(S) -> lists:map(fun char_to_upper/1, S).
|
||||
char_to_upper(C) when C >= $a, C =< $z -> C bxor $\s;
|
||||
char_to_upper(C) -> C.
|
||||
|
||||
%% @doc Downloads any extant log files from the harness's running
|
||||
%% nodes.
|
||||
get_node_logs() ->
|
||||
?HARNESS:get_node_logs().
src/rt_config.erl (new file, 123 lines)
@@ -0,0 +1,123 @@
|
||||
%% -------------------------------------------------------------------
|
||||
%%
|
||||
%% Copyright (c) 2013 Basho Technologies, Inc.
|
||||
%%
|
||||
%% This file is provided to you under the Apache License,
|
||||
%% Version 2.0 (the "License"); you may not use this file
|
||||
%% except in compliance with the License. You may obtain
|
||||
%% a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
%% -------------------------------------------------------------------
|
||||
-module(rt_config).
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
|
||||
-export([
|
||||
get/1,
|
||||
get/2,
|
||||
config_or_os_env/1,
|
||||
config_or_os_env/2,
|
||||
get_os_env/1,
|
||||
get_os_env/2,
|
||||
load/1,
|
||||
set/2
|
||||
]).
|
||||
|
||||
%% @doc Get the value of an OS Environment variable. The arity 1 version of
|
||||
%% this function will fail the test if it is undefined.
|
||||
get_os_env(Var) ->
|
||||
case get_os_env(Var, undefined) of
|
||||
undefined ->
|
||||
lager:error("ENV['~s'] is not defined", [Var]),
|
||||
?assert(false);
|
||||
Value -> Value
|
||||
end.
|
||||
|
||||
%% @doc Get the value of an OS Environment variable. The arity 2 version of
|
||||
%% this function will return the Default if the OS var is undefined.
|
||||
get_os_env(Var, Default) ->
|
||||
case os:getenv(Var) of
|
||||
false -> Default;
|
||||
Value -> Value
|
||||
end.
|
||||
|
||||
%% @private
|
||||
load(undefined) ->
|
||||
load_dot_config("default");
|
||||
load(ConfigName) ->
|
||||
load_dot_config(ConfigName).
|
||||
|
||||
%% @private
|
||||
load_dot_config(ConfigName) ->
|
||||
case file:consult(filename:join([os:getenv("HOME"), ".riak_test.config"])) of
|
||||
{ok, Terms} ->
|
||||
%% First, set up the defaults
|
||||
case proplists:get_value(default, Terms) of
|
||||
undefined -> meh; %% No defaults set, move on.
|
||||
Default -> [set(Key, Value) || {Key, Value} <- Default]
|
||||
end,
|
||||
%% Now, overlay the specific project
|
||||
Config = proplists:get_value(list_to_atom(ConfigName), Terms),
|
||||
[set(Key, Value) || {Key, Value} <- Config],
|
||||
ok;
|
||||
{error, Reason} ->
|
||||
erlang:error("Failed to parse config file", ["~/.riak_test.config", Reason])
|
||||
end.
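For orientation (not part of the diff), a sketch of the ~/.riak_test.config shape that load_dot_config/1 consults: a default section overlaid by a per-project section. The keys are ones used elsewhere in this commit; the project name and every value here are invented.

    {default, [
        {rt_max_wait_time, 180000},
        {rt_retry_delay,   500},
        {rt_harness,       rtdev},
        {rt_scratch_dir,   "/tmp/riak_test_scratch"}
    ]}.
    {my_project, [
        {rt_project,       "my_project"},
        {giddyup_host,     "giddyup.example.com"},
        {giddyup_user,     "user"},
        {giddyup_password, "password"}
    ]}.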
|
||||
|
||||
%% @private
|
||||
set(Key, Value) ->
|
||||
ok = application:set_env(riak_test, Key, Value).
|
||||
|
||||
get(Key) ->
|
||||
case kvc:path(Key, application:get_all_env(riak_test)) of
|
||||
[] -> erlang:error("Missing configuration key", [Key]);
|
||||
Value -> Value
|
||||
end.
|
||||
|
||||
get(Key, Default) ->
|
||||
case kvc:path(Key, application:get_all_env(riak_test)) of
|
||||
[] -> Default;
|
||||
Value -> Value
|
||||
end.
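A small usage sketch (not from the commit) of the two accessors, mirroring the wait_until/3 call sites in rt.erl above; the wrapper function name is invented:

    wait_params() ->
        MaxTime = rt_config:get(rt_max_wait_time),      %% errors out if the key is missing
        Delay   = rt_config:get(rt_retry_delay, 500),   %% falls back to the supplied default
        {MaxTime div Delay, Delay}.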
|
||||
|
||||
-spec config_or_os_env(atom()) -> term().
|
||||
config_or_os_env(Config) ->
|
||||
OSEnvVar = to_upper(atom_to_list(Config)),
|
||||
case {get_os_env(OSEnvVar, undefined), get(Config, undefined)} of
|
||||
{undefined, undefined} ->
|
||||
MSG = io_lib:format("Neither riak_test.~p nor ENV['~p'] are defined", [Config, OSEnvVar]),
|
||||
erlang:error(binary_to_list(iolist_to_binary(MSG)));
|
||||
{undefined, V} ->
|
||||
lager:info("Found riak_test.~s: ~s", [Config, V]),
|
||||
V;
|
||||
{V, _} ->
|
||||
lager:info("Found ENV[~s]: ~s", [OSEnvVar, V]),
|
||||
set(Config, V),
|
||||
V
|
||||
end.
|
||||
|
||||
-spec config_or_os_env(atom(), term()) -> term().
|
||||
config_or_os_env(Config, Default) ->
|
||||
OSEnvVar = to_upper(atom_to_list(Config)),
|
||||
case {get_os_env(OSEnvVar, undefined), get(Config, undefined)} of
|
||||
{undefined, undefined} -> Default;
|
||||
{undefined, V} ->
|
||||
lager:info("Found riak_test.~s: ~s", [Config, V]),
|
||||
V;
|
||||
{V, _} ->
|
||||
lager:info("Found ENV[~s]: ~s", [OSEnvVar, V]),
|
||||
set(Config, V),
|
||||
V
|
||||
end.
|
||||
|
||||
to_upper(S) -> lists:map(fun char_to_upper/1, S).
|
||||
char_to_upper(C) when C >= $a, C =< $z -> C bxor $\s;
|
||||
char_to_upper(C) -> C.
|
@ -1,3 +1,22 @@
|
||||
%% -------------------------------------------------------------------
|
||||
%%
|
||||
%% Copyright (c) 2013 Basho Technologies, Inc.
|
||||
%%
|
||||
%% This file is provided to you under the Apache License,
|
||||
%% Version 2.0 (the "License"); you may not use this file
|
||||
%% except in compliance with the License. You may obtain
|
||||
%% a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
%% -------------------------------------------------------------------
|
||||
-module(rt_intercept).
|
||||
-compile(export_all).
|
||||
-define(DEFAULT_INTERCEPT(Target),
|
||||
@ -7,16 +26,16 @@ files_to_mods(Files) ->
|
||||
[list_to_atom(filename:basename(F, ".erl")) || F <- Files].
|
||||
|
||||
intercept_files() ->
|
||||
filelib:wildcard(filename:join([rt:home_dir(), "intercepts", "*.erl"])).
|
||||
filelib:wildcard(filename:join([rt_local:home_dir(), "intercepts", "*.erl"])).
|
||||
|
||||
%% @doc Load the intercepts on the nodes under test.
|
||||
-spec load_intercepts([node()]) -> ok.
|
||||
load_intercepts(Nodes) ->
|
||||
case rt:config(load_intercepts, true) of
|
||||
case rt_config:get(load_intercepts, true) of
|
||||
false ->
|
||||
ok;
|
||||
true ->
|
||||
Intercepts = rt:config(intercepts, []),
|
||||
Intercepts = rt_config:get(intercepts, []),
|
||||
rt:pmap(fun(N) -> load_code(N) end, Nodes),
|
||||
rt:pmap(fun(N) -> add(N, Intercepts) end, Nodes),
|
||||
ok
|
||||
@ -51,7 +70,7 @@ wait_until_loaded(Node, 5) ->
|
||||
{failed_to_load_intercepts, Node};
|
||||
|
||||
wait_until_loaded(Node, Tries) ->
|
||||
case rt:config(load_intercepts, true) of
|
||||
case rt_config:get(load_intercepts, true) of
|
||||
false ->
|
||||
ok;
|
||||
true ->
|
||||
@ -70,4 +89,4 @@ are_intercepts_loaded(Node) ->
|
||||
lists:all(fun is_loaded/1, Results).
|
||||
|
||||
is_loaded({file,_}) -> true;
|
||||
is_loaded(_) -> false.
src/rt_local.erl (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
%% -------------------------------------------------------------------
|
||||
%%
|
||||
%% Copyright (c) 2013 Basho Technologies, Inc.
|
||||
%%
|
||||
%% This file is provided to you under the Apache License,
|
||||
%% Version 2.0 (the "License"); you may not use this file
|
||||
%% except in compliance with the License. You may obtain
|
||||
%% a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
%% -------------------------------------------------------------------
|
||||
-module(rt_local).
|
||||
-include_lib("eunit/include/eunit.hrl").
|
||||
|
||||
-export([
|
||||
assert_which/1,
|
||||
download/1,
|
||||
home_dir/0,
|
||||
install_on_absence/2,
|
||||
stream_cmd/1,
|
||||
stream_cmd/2,
|
||||
url_to_filename/1,
|
||||
which/1
|
||||
]).
|
||||
|
||||
%% @doc Return the home directory of the riak_test script.
|
||||
-spec home_dir() -> file:filename().
|
||||
home_dir() ->
|
||||
filename:dirname(filename:absname(escript:script_name())).
|
||||
|
||||
%% @doc Wrap 'which' to give a good output if something is not installed
|
||||
which(Command) ->
|
||||
lager:info("Checking for presence of ~s", [Command]),
|
||||
Cmd = lists:flatten(io_lib:format("which ~s; echo $?", [Command])),
|
||||
case rt:str(os:cmd(Cmd), "0") of
|
||||
false ->
|
||||
lager:warning("`~s` is not installed", [Command]),
|
||||
false;
|
||||
true ->
|
||||
true
|
||||
end.
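A usage sketch (not from the commit) matching the check_prereqs/1 change in riak_test_runner above; the wrapper name is invented, the prereq strings are the ones declared in the test module at the end of this diff:

    prereqs_present() ->
        P2 = [ {Prereq, rt_local:which(Prereq)} || Prereq <- ["java", "curl"] ],
        lists:all(fun({_, Present}) -> Present end, P2).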
|
||||
|
||||
%% @doc like rt:which, but asserts on failure
|
||||
assert_which(Command) ->
|
||||
?assert(which(Command)).
|
||||
|
||||
download(Url) ->
|
||||
lager:info("Downloading ~s", [Url]),
|
||||
Filename = url_to_filename(Url),
|
||||
case filelib:is_file(filename:join(rt_config:get(rt_scratch_dir), Filename)) of
|
||||
true ->
|
||||
lager:info("Got it ~p", [Filename]),
|
||||
ok;
|
||||
_ ->
|
||||
lager:info("Getting it ~p", [Filename]),
|
||||
rt_local:stream_cmd("curl -O -L " ++ Url, [{cd, rt_config:get(rt_scratch_dir)}])
|
||||
end.
|
||||
|
||||
url_to_filename(Url) ->
|
||||
lists:last(string:tokens(Url, "/")).
|
||||
|
||||
%% @doc checks if Command is installed and runs InstallCommand if not
|
||||
%% ex: rt:install_on_absence("bundler", "gem install bundler --no-rdoc --no-ri"),
|
||||
install_on_absence(Command, InstallCommand) ->
|
||||
case which(Command) of
|
||||
false ->
|
||||
lager:info("Attempting to install `~s` with command `~s`", [Command, InstallCommand]),
|
||||
?assertCmd(InstallCommand);
|
||||
_True ->
|
||||
ok
|
||||
end.
|
||||
|
||||
%% @doc pretty much the same as os:cmd/1 but it will stream the output to lager.
|
||||
%% If you're running a long running command, it will dump the output
|
||||
%% once per second, so as not to create the impression that nothing is happening.
|
||||
-spec stream_cmd(string()) -> {integer(), string()}.
|
||||
stream_cmd(Cmd) ->
|
||||
Port = open_port({spawn, binary_to_list(iolist_to_binary(Cmd))}, [stream, stderr_to_stdout, exit_status]),
|
||||
stream_cmd_loop(Port, "", "", now()).
|
||||
|
||||
%% @doc same as rt:stream_cmd/1, but with options, like open_port/2
|
||||
-spec stream_cmd(string(), string()) -> {integer(), string()}.
|
||||
stream_cmd(Cmd, Opts) ->
|
||||
Port = open_port({spawn, binary_to_list(iolist_to_binary(Cmd))}, [stream, stderr_to_stdout, exit_status] ++ Opts),
|
||||
stream_cmd_loop(Port, "", "", now()).
|
||||
|
||||
stream_cmd_loop(Port, Buffer, NewLineBuffer, Time={_MegaSecs, Secs, _MicroSecs}) ->
|
||||
receive
|
||||
{Port, {data, Data}} ->
|
||||
{_, Now, _} = now(),
|
||||
NewNewLineBuffer = case Now > Secs of
|
||||
true ->
|
||||
lager:info(NewLineBuffer),
|
||||
"";
|
||||
_ ->
|
||||
NewLineBuffer
|
||||
end,
|
||||
case rt:str(Data, "\n") of
|
||||
true ->
|
||||
lager:info(NewNewLineBuffer),
|
||||
Tokens = string:tokens(Data, "\n"),
|
||||
[ lager:info(Token) || Token <- Tokens ],
|
||||
stream_cmd_loop(Port, Buffer ++ NewNewLineBuffer ++ Data, "", Time);
|
||||
_ ->
|
||||
stream_cmd_loop(Port, Buffer, NewNewLineBuffer ++ Data, now())
|
||||
end;
|
||||
{Port, {exit_status, Status}} ->
|
||||
catch port_close(Port),
|
||||
{Status, Buffer}
|
||||
after rt_config:get(rt_max_wait_time) ->
|
||||
{-1, Buffer}
|
||||
end.
|
@@ -1,6 +1,6 @@
 %% -------------------------------------------------------------------
 %%
-%% Copyright (c) 2012 Basho Technologies, Inc.
+%% Copyright (c) 2013 Basho Technologies, Inc.
 %%
 %% This file is provided to you under the Apache License,
 %% Version 2.0 (the "License"); you may not use this file
@@ -25,7 +25,7 @@
 
 -define(DEVS(N), lists:concat(["dev", N, "@127.0.0.1"])).
 -define(DEV(N), list_to_atom(?DEVS(N))).
--define(PATH, (rt:config(rtdev_path))).
+-define(PATH, (rt_config:get(rtdev_path))).
 
 get_deps() ->
     lists:flatten(io_lib:format("~s/dev/dev1/lib", [relpath(current)])).
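For orientation (not part of the diff), a sketch of the rtdev_path entry that ?PATH, devpaths/0, versions/0, and check_node/1 below assume: a proplist keyed by root plus one entry per installed version. All paths here are invented.

    {rtdev_path, [{root,     "/home/you/rt/riak"},
                  {current,  "/home/you/rt/riak/current"},
                  {previous, "/home/you/rt/riak/riak-1.2"},
                  {legacy,   "/home/you/rt/riak/riak-1.0"}]}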
@ -127,8 +127,8 @@ upgrade(Node, NewVersion) ->
|
||||
lager:info("Running: ~s", [Cmd]),
|
||||
os:cmd(Cmd)
|
||||
end || Cmd <- Commands],
|
||||
VersionMap = orddict:store(N, NewVersion, rt:config(rt_versions)),
|
||||
rt:set_config(rt_versions, VersionMap),
|
||||
VersionMap = orddict:store(N, NewVersion, rt_config:get(rt_versions)),
|
||||
rt_config:set(rt_versions, VersionMap),
|
||||
start(Node),
|
||||
rt:wait_until_pingable(Node),
|
||||
ok.
|
||||
@ -208,7 +208,7 @@ rm_dir(Dir) ->
|
||||
?assertEqual(false, filelib:is_dir(Dir)).
|
||||
|
||||
add_default_node_config(Nodes) ->
|
||||
case rt:config(rt_default_config, undefined) of
|
||||
case rt_config:get(rt_default_config, undefined) of
|
||||
undefined -> ok;
|
||||
Defaults when is_list(Defaults) ->
|
||||
rt:pmap(fun(Node) ->
|
||||
@ -232,8 +232,8 @@ deploy_nodes(NodeConfig) ->
|
||||
|
||||
%% Check that you have the right versions available
|
||||
[ check_node(Version) || Version <- VersionMap ],
|
||||
rt:set_config(rt_nodes, NodeMap),
|
||||
rt:set_config(rt_versions, VersionMap),
|
||||
rt_config:set(rt_nodes, NodeMap),
|
||||
rt_config:set(rt_versions, VersionMap),
|
||||
|
||||
create_dirs(Nodes),
|
||||
|
||||
@ -376,7 +376,7 @@ interactive_loop(Port, Expected) ->
|
||||
%% We've met every expectation. Yay! If not, it means we've exited before
|
||||
%% something expected happened.
|
||||
?assertEqual([], Expected)
|
||||
after rt:config(rt_max_wait_time) ->
|
||||
after rt_config:get(rt_max_wait_time) ->
|
||||
%% interactive_loop is going to wait until it matches expected behavior
|
||||
%% If it doesn't, the test should fail; however, without a timeout it
|
||||
%% will just hang forever in search of expected behavior. See also: Parenting
|
||||
@ -400,11 +400,11 @@ riak(Node, Args) ->
|
||||
{ok, Result}.
|
||||
|
||||
node_id(Node) ->
|
||||
NodeMap = rt:config(rt_nodes),
|
||||
NodeMap = rt_config:get(rt_nodes),
|
||||
orddict:fetch(Node, NodeMap).
|
||||
|
||||
node_version(N) ->
|
||||
VersionMap = rt:config(rt_versions),
|
||||
VersionMap = rt_config:get(rt_versions),
|
||||
orddict:fetch(N, VersionMap).
|
||||
|
||||
spawn_cmd(Cmd) ->
|
||||
@ -448,7 +448,7 @@ get_cmd_result(Port, Acc) ->
|
||||
end.
|
||||
|
||||
check_node({_N, Version}) ->
|
||||
case proplists:is_defined(Version, rt:config(rtdev_path)) of
|
||||
case proplists:is_defined(Version, rt_config:get(rtdev_path)) of
|
||||
true -> ok;
|
||||
_ ->
|
||||
lager:error("You don't have Riak ~s installed or configured", [Version]),
|
||||
@ -477,14 +477,14 @@ whats_up() ->
|
||||
[io:format(" ~s~n",[string:substr(Dir, 1, length(Dir)-1)]) || Dir <- Up].
|
||||
|
||||
devpaths() ->
|
||||
lists:usort([ DevPath || {_Name, DevPath} <- proplists:delete(root, rt:config(rtdev_path))]).
|
||||
lists:usort([ DevPath || {_Name, DevPath} <- proplists:delete(root, rt_config:get(rtdev_path))]).
|
||||
|
||||
versions() ->
|
||||
proplists:get_keys(rt:config(rtdev_path)) -- [root].
|
||||
proplists:get_keys(rt_config:get(rtdev_path)) -- [root].
|
||||
|
||||
get_node_logs() ->
|
||||
Root = proplists:get_value(root, ?PATH),
|
||||
[ begin
|
||||
{ok, Data} = file:read_file(Filename),
|
||||
{Filename, Data}
|
||||
end || Filename <- filelib:wildcard(Root ++ "/*/dev/dev*/log/*") ].
|
||||
end || Filename <- filelib:wildcard(Root ++ "/*/dev/dev*/log/*") ].
|
@ -4,13 +4,15 @@
-include_lib("eunit/include/eunit.hrl").

%% Change when a new release comes out.
-define(JAVA_FAT_BE_URL, rt:config(java.fat_be_url)).
-define(JAVA_TESTS_URL, rt:config(java.tests_url)).
-define(JAVA_FAT_BE_URL, rt_config:get(java.fat_be_url)).
-define(JAVA_TESTS_URL, rt_config:get(java.tests_url)).

-prereq("java").
-prereq("curl").

confirm() ->

    lager:info("+P ~p", [erlang:system_info(process_limit)]),
    prereqs(),
    Nodes = rt:deploy_nodes(1),
    [Node1] = Nodes,
@ -28,8 +30,8 @@ confirm() ->

prereqs() ->
    %% Do you have the java client available?
    rt:download(?JAVA_FAT_BE_URL),
    rt:download(?JAVA_TESTS_URL),
    rt_local:download(?JAVA_FAT_BE_URL),
    rt_local:download(?JAVA_TESTS_URL),
    ok.

java_unit_tests(HTTP_Host, HTTP_Port, _PB_Host, PB_Port) ->
@ -39,11 +41,11 @@ java_unit_tests(HTTP_Host, HTTP_Port, _PB_Host, PB_Port) ->
    Cmd = io_lib:format(
        "java -Dcom.basho.riak.host=~s -Dcom.basho.riak.http.port=~p -Dcom.basho.riak.pbc.port=~p -cp ~s:~s org.junit.runner.JUnitCore com.basho.riak.client.AllTests",
        [HTTP_Host, HTTP_Port, PB_Port,
         rt:config(rt_scratch_dir) ++ "/" ++ rt:url_to_filename(?JAVA_FAT_BE_URL),
         rt:config(rt_scratch_dir) ++ "/" ++ rt:url_to_filename(?JAVA_TESTS_URL)]),
         rt_config:get(rt_scratch_dir) ++ "/" ++ rt_local:url_to_filename(?JAVA_FAT_BE_URL),
         rt_config:get(rt_scratch_dir) ++ "/" ++ rt_local:url_to_filename(?JAVA_TESTS_URL)]),
    lager:info("Cmd: ~s", [Cmd]),

    {ExitCode, JavaLog} = rt:stream_cmd(Cmd, [{cd, rt:config(rt_scratch_dir)}]),
    {ExitCode, JavaLog} = rt_local:stream_cmd(Cmd, [{cd, rt_config:get(rt_scratch_dir)}]),
    ?assertEqual(0, ExitCode),
    lager:info(JavaLog),
    ?assertNot(rt:str(JavaLog, "FAILURES!!!")),

@ -5,7 +5,7 @@

%% @todo set this to 1.5.2 or greater, once that's released
-define(PYTHON_CLIENT_TAG, "1.5-stable").
-define(PYTHON_CHECKOUT, filename:join([rt:config(rt_scratch_dir), "riak-python-client"])).
-define(PYTHON_CHECKOUT, filename:join([rt_config:get(rt_scratch_dir), "riak-python-client"])).
-define(PYTHON_GIT_URL, "git://github.com/basho/riak-python-client.git").

%% Need python, yo
@ -28,7 +28,7 @@ confirm() ->
    lager:info("Enabling search hook on 'searchbucket'"),
    rt:enable_search_hook(Node, <<"searchbucket">>),

    {ExitCode, PythonLog} = rt:stream_cmd("bin/python setup.py develop test",
    {ExitCode, PythonLog} = rt_local:stream_cmd("bin/python setup.py develop test",
        [{cd, ?PYTHON_CHECKOUT},
         {env,[{"RIAK_TEST_PB_HOST", PB_Host},
               {"RIAK_TEST_PB_PORT", integer_to_list(PB_Port)},
@ -61,15 +61,15 @@ prereqs() ->
    %% Checkout the project and a specific tag.
    lager:info("[PREREQ] Cloning riak-python-client from ~s", [?PYTHON_GIT_URL]),
    Cmd = io_lib:format("git clone ~s ~s", [?PYTHON_GIT_URL, ?PYTHON_CHECKOUT]),
    rt:stream_cmd(Cmd),
    rt_local:stream_cmd(Cmd),

    lager:info("[PREREQ] Resetting python client to tag '~s'", [?PYTHON_CLIENT_TAG]),
    %% @todo below is how to reset to a tag, use that when 1.5.2 is available
    %%TagCmd = io_lib:format("git reset --hard ~s", [?PYTHON_CLIENT_TAG]),
    rt:stream_cmd("git reset --hard", [{cd, ?PYTHON_CHECKOUT}]),
    rt_local:stream_cmd("git reset --hard", [{cd, ?PYTHON_CHECKOUT}]),
    TagCmd = io_lib:format("git checkout -b ~s", [?PYTHON_CLIENT_TAG]),
    rt:stream_cmd(TagCmd, [{cd, ?PYTHON_CHECKOUT}]),
    rt_local:stream_cmd(TagCmd, [{cd, ?PYTHON_CHECKOUT}]),

    lager:info("[PREREQ] Installing an isolated environment with virtualenv in ~s", [?PYTHON_CHECKOUT]),
    rt:stream_cmd("virtualenv --clear --no-site-packages .", [{cd, ?PYTHON_CHECKOUT}]),
    rt_local:stream_cmd("virtualenv --clear --no-site-packages .", [{cd, ?PYTHON_CHECKOUT}]),
    ok.

@ -36,7 +36,7 @@ confirm() ->

    lager:info("Cmd: ~s", [Cmd]),

    {Code, RubyLog} = rt:stream_cmd(Cmd, [{cd, GemDir}, {env, [
    {Code, RubyLog} = rt_local:stream_cmd(Cmd, [{cd, GemDir}, {env, [
        {"RIAK_NODE_NAME", atom_to_list(Node1)},
        {"RIAK_ROOT_DIR", RiakRootDir},
        {"HTTP_PORT", integer_to_list(HTTP_Port)},
@ -59,17 +59,17 @@ prereqs() ->
% Download the ruby-client gem, unpack it and build it locally
dat_gem() ->
    lager:info("Fetching riak-client gem"),
    GemInstalled = os:cmd("cd " ++ rt:config(rt_scratch_dir) ++ " ; gem fetch riak-client"),
    GemInstalled = os:cmd("cd " ++ rt_config:get(rt_scratch_dir) ++ " ; gem fetch riak-client"),
    GemFile = string:substr(GemInstalled, 12, length(GemInstalled) - 12),
    %GemFile = "riak-client",
    lager:info("Downloaded gem: ~s", [GemFile]),

    rt:stream_cmd(io_lib:format("gem unpack ~s.gem", [GemFile]), [{cd, rt:config(rt_scratch_dir)}]),
    rt_local:stream_cmd(io_lib:format("gem unpack ~s.gem", [GemFile]), [{cd, rt_config:get(rt_scratch_dir)}]),

    Cmd = "bundle install --without=guard --binstubs --no-color --path=vendor/bundle",
    lager:info(Cmd),

    GemDir = filename:join([rt:config(rt_scratch_dir), GemFile]),
    GemDir = filename:join([rt_config:get(rt_scratch_dir), GemFile]),

    {_Exit, _Log} = rt:stream_cmd(Cmd, [{cd, GemDir}, {env, [{"BUNDLE_PATH", "vendor/bundle"}]}]),
    {_Exit, _Log} = rt_local:stream_cmd(Cmd, [{cd, GemDir}, {env, [{"BUNDLE_PATH", "vendor/bundle"}]}]),
    GemDir.

@ -50,7 +50,7 @@ confirm() ->
    %% Let's spawn workers against it.
    timer:sleep(10000),

    Concurrent = rt:config(load_workers, 10),
    Concurrent = rt_config:get(load_workers, 10),

    Sups = [
        {rt_worker_sup:start_link([
@ -153,7 +153,7 @@ bucket(search) -> <<"scotts_spam">>.

seed_search(Node) ->
    Pid = rt:pbc(Node),
    SpamDir = rt:config(spam_dir),
    SpamDir = rt_config:get(spam_dir),
    Files = case SpamDir of
        undefined -> undefined;
        _ -> filelib:wildcard(SpamDir ++ "/*")

@ -273,7 +273,7 @@ stash_search({_I,{_F,_T}}=K, _Postings=V, Stash) ->
    dict:append_list(K, V, Stash).

base_stash_path() ->
    rt:config(rt_scratch_dir) ++ "/dev/data_stash/".
    rt_config:get(rt_scratch_dir) ++ "/dev/data_stash/".

stash_path(Service, Partition) ->
    base_stash_path() ++ atom_to_list(Service) ++ "/" ++ integer_to_list(Partition) ++ ".stash".
@ -300,7 +300,7 @@ wait_for_repair(Service, {Partition, Node}, Tries) ->

data_path(Node, Suffix, Partition) ->
    [Name, _] = string:tokens(atom_to_list(Node), "@"),
    Base = rt:config(rtdev_path.current) ++ "/dev/" ++ Name ++ "/data",
    Base = rt_config:get(rtdev_path.current) ++ "/dev/" ++ Name ++ "/data",
    Base ++ "/" ++ Suffix ++ "/" ++ integer_to_list(Partition).

backend_mod_dir(undefined) ->
@ -322,7 +322,7 @@ set_search_schema_nval(Bucket, NVal) ->
    %% than allowing the internal format to be modified and set you
    %% must send the update in the external format.
    BucketStr = binary_to_list(Bucket),
    SearchCmd = ?FMT("~s/dev/dev1/bin/search-cmd", [rt:config(rtdev_path.current)]),
    SearchCmd = ?FMT("~s/dev/dev1/bin/search-cmd", [rt_config:get(rtdev_path.current)]),
    GetSchema = ?FMT("~s show-schema ~s > current-schema",
                     [SearchCmd, BucketStr]),
    ModifyNVal = ?FMT("sed -E 's/n_val, [0-9]+/n_val, ~s/' "

@ -13,8 +13,8 @@
-export([make_bucket/3]).

confirm() ->
    NumNodes = rt:config(num_nodes, 6),
    ClusterASize = rt:config(cluster_a_size, 3),
    NumNodes = rt_config:get(num_nodes, 6),
    ClusterASize = rt_config:get(cluster_a_size, 3),

    lager:info("Deploy ~p nodes", [NumNodes]),
    Conf = [

@ -11,8 +11,8 @@
    wait_until_no_pending_changes/1]).

confirm() ->
    NumNodes = rt:config(num_nodes, 6),
    ClusterASize = rt:config(cluster_a_size, 3),
    NumNodes = rt_config:get(num_nodes, 6),
    ClusterASize = rt_config:get(cluster_a_size, 3),

    lager:info("Deploy ~p nodes", [NumNodes]),
    Conf = [

@ -11,8 +11,8 @@ confirm() ->
    TestHash = erlang:md5(term_to_binary(os:timestamp())),
    TestBucket = <<TestHash/binary, "-systest_a">>,

    NumNodes = rt:config(num_nodes, 6),
    ClusterASize = rt:config(cluster_a_size, 4),
    NumNodes = rt_config:get(num_nodes, 6),
    ClusterASize = rt_config:get(cluster_a_size, 4),
    lager:info("Deploy ~p nodes", [NumNodes]),
    Conf = [
        {riak_repl,

@ -5,8 +5,8 @@
-include_lib("eunit/include/eunit.hrl").

confirm() ->
    NumNodes = rt:config(num_nodes, 6),
    ClusterASize = rt:config(cluster_a_size, 3),
    NumNodes = rt_config:get(num_nodes, 6),
    ClusterASize = rt_config:get(cluster_a_size, 3),

    lager:info("Deploy ~p nodes", [NumNodes]),
    BaseConf = [
@ -21,16 +21,8 @@ confirm() ->
        ]}
    ],

    %% XXX for some reason, codew:priv_dir returns riak_test/riak_test/priv,
    %% which is wrong, so fix it.
    PrivDir = re:replace(code:priv_dir(riak_test), "riak_test(/riak_test)*",
                         "riak_test", [{return, list}]),

    ?assert(filelib:is_dir(PrivDir)),


    lager:info("priv dir: ~p -> ~p", [code:priv_dir(riak_test), PrivDir]),

    PrivDir = rt:priv_dir(),

    SSLConfig1 = [
        {riak_repl,
         [

@ -11,9 +11,9 @@ confirm() ->
    lager:info("Doing rolling replication upgrade test from ~p to ~p",
               [FromVersion, "current"]),

    NumNodes = rt:config(num_nodes, 6),
    NumNodes = rt_config:get(num_nodes, 6),

    UpgradeOrder = rt:config(repl_upgrade_order, "forwards"),
    UpgradeOrder = rt_config:get(repl_upgrade_order, "forwards"),

    lager:info("Deploy ~p nodes", [NumNodes]),
    Conf = [
@ -51,7 +51,7 @@ confirm() ->
        erlang:exit()
    end,

    ClusterASize = rt:config(cluster_a_size, 3),
    ClusterASize = rt_config:get(cluster_a_size, 3),
    {ANodes, BNodes} = lists:split(ClusterASize, Nodes),
    lager:info("ANodes: ~p", [ANodes]),
    lager:info("BNodes: ~p", [BNodes]),

@ -5,8 +5,8 @@
-include_lib("eunit/include/eunit.hrl").

confirm() ->
    NumNodes = rt:config(num_nodes, 6),
    ClusterASize = rt:config(cluster_a_size, 3),
    NumNodes = rt_config:get(num_nodes, 6),
    ClusterASize = rt_config:get(cluster_a_size, 3),

    lager:info("Deploy ~p nodes", [NumNodes]),
    BaseConf = [
@ -17,13 +17,7 @@ confirm() ->
        ]}
    ],

    %% XXX for some reason, codew:priv_dir returns riak_test/riak_test/priv,
    %% which is wrong, so fix it.
    PrivDir = re:replace(code:priv_dir(riak_test), "riak_test(/riak_test)*",
                         "riak_test", [{return, list}]),

    ?assert(filelib:is_dir(PrivDir)),

    PrivDir = rt:priv_dir(),

    lager:info("priv dir: ~p -> ~p", [code:priv_dir(riak_test), PrivDir]),

@ -10,9 +10,9 @@ confirm() ->
    lager:info("Doing rolling replication upgrade test from ~p to ~p",
               [FromVersion, "current"]),

    NumNodes = rt:config(num_nodes, 6),
    NumNodes = rt_config:get(num_nodes, 6),

    UpgradeOrder = rt:config(repl_upgrade_order, "forwards"),
    UpgradeOrder = rt_config:get(repl_upgrade_order, "forwards"),

    lager:info("Deploy ~p nodes", [NumNodes]),
    Conf = [
@ -44,7 +44,7 @@ confirm() ->
        erlang:exit()
    end,

    ClusterASize = rt:config(cluster_a_size, 3),
    ClusterASize = rt_config:get(cluster_a_size, 3),
    {ANodes, BNodes} = lists:split(ClusterASize, Nodes),
    lager:info("ANodes: ~p", [ANodes]),
    lager:info("BNodes: ~p", [BNodes]),

@ -72,7 +72,7 @@ confirm() ->
    verify_searches(PbcPid, Searches, 1),
    [?assertEqual([], read_some(Node, [{last, ?NUM_KEYS}])) || Node <- Nodes],

    BackupFile = filename:join([rt:config(rt_scratch_dir), "TestBackup.bak"]),
    BackupFile = filename:join([rt_config:get(rt_scratch_dir), "TestBackup.bak"]),
    case filelib:is_regular(BackupFile) of
        true ->
            lager:info("Deleting current backup file at ~p", [BackupFile]),

@ -34,7 +34,7 @@ confirm() ->
    [Node0 | _RestNodes] = Nodes = rt:build_cluster(3, Config),
    rt:wait_until_ring_converged(Nodes),

    Path = rt:config(rt_scratch_dir),
    Path = rt_config:get(rt_scratch_dir),
    lager:info("Creating scratch dir if necessary at ~s", [Path]),
    ?assertMatch({0, _}, rt:cmd("mkdir -p " ++ Path)),
    SearchRepoDir = filename:join(Path, "riak_search"),