mirror of
https://github.com/valitydev/riak_test.git
synced 2024-11-06 16:45:29 +00:00
Merge pull request #51 from basho/jd-riak_stats_test
Port of the basho_expect riak_stats test
This commit is contained in:
commit 0d4b15611e

@@ -11,14 +11,16 @@
         {lager, "1.2.0", {git, "git://github.com/basho/lager", {tag, "1.2.0"}}},
         {getopt, ".*", {git, "git://github.com/jcomellas/getopt", {tag, "v0.4"}}},
         {meck, ".*", {git, "git://github.com/eproxus/meck"}},
-        {mapred_verify, ".*", {git, "git://github.com/basho/mapred_verify"}}
+        {mapred_verify, ".*", {git, "git://github.com/basho/mapred_verify"}},
+        {riakc, "1.2.1", {git, "git://github.com/basho/riak-erlang-client", {tag, "1.2.1"}}},
+        {riakhttpc, ".*", {git, "git://github.com/basho/riak-erlang-http-client"}}
        ]}.
 
-{escript_incl_apps, [lager, getopt]}.
+{escript_incl_apps, [lager, getopt, riakhttpc, riakc, ibrowse]}.
 
 {plugin_dir, "src"}.
 {plugins, [rebar_riak_test_plugin]}.
 {riak_test, [
     {test_paths, ["tests"]},
     {test_output, "ebin"}
 ]}.
@@ -69,6 +69,9 @@ main(Args) ->
     net_kernel:start([ENode]),
     erlang:set_cookie(node(), Cookie),
 
+    %% ibrowse
+    application:load(ibrowse),
+    application:start(ibrowse),
     %% Start Lager
     application:load(lager),
     %% Fileoutput
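For context: the riak-erlang-http-client (rhc) that backs the new HTTP helpers issues its requests through ibrowse, which is why the escript now loads and starts it before running tests. A minimal hedged sketch of the dependency (the host and port here are illustrative defaults, not taken from this commit):

    %% Illustrative only -- ibrowse must be running before rhc can reach Riak.
    application:start(ibrowse),
    C = rhc:create("127.0.0.1", 8098, "riak", []),
    ok = rhc:ping(C).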
@@ -48,8 +48,9 @@ execute(TestModule) ->
    try TestModule:confirm() of
        ReturnVal -> {ReturnVal, undefined}
    catch
        error:Error ->
            lager:warning("~s failed: ~p", [TestModule, Error]),
            {fail, Error};
        _:Reason ->
            lager:warning("~s failed: ~p", [TestModule, Reason]),
            lager:warning("~p", [erlang:get_stacktrace()]),
            {fail, Reason}
    end.
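To see how the catch clauses in execute/1 surface a crashed test, consider a hypothetical test module (not part of this commit) whose confirm/0 raises an error; execute/1 would log the failure and return {fail, deliberate}:

    %% Hypothetical module, for illustration only.
    -module(always_fails).
    -export([confirm/0]).

    confirm() ->
        error(deliberate).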
32  src/rt.erl
@@ -561,6 +561,36 @@ systest_read(Node, Start, End, Bucket, R) ->
         end,
     lists:foldl(F, [], lists:seq(Start, End)).
 
+pbc(Node) ->
+    {ok, IP} = rpc:call(Node, application, get_env, [riak_api, pb_ip]),
+    {ok, PBPort} = rpc:call(Node, application, get_env, [riak_api, pb_port]),
+    {ok, Pid} = riakc_pb_socket:start_link(IP, PBPort),
+    Pid.
+
+pbc_read(Pid, Bucket, Key) ->
+    {ok, Value} = riakc_pb_socket:get(Pid, Bucket, Key),
+    Value.
+
+pbc_write(Pid, Bucket, Key, Value) ->
+    Object = riakc_obj:new(Bucket, Key, Value),
+    riakc_pb_socket:put(Pid, Object).
+
+pbc_set_bucket_prop(Pid, Bucket, PropList) ->
+    riakc_pb_socket:set_bucket(Pid, Bucket, PropList).
+
+
+httpc(Node) ->
+    {ok, [{IP, Port}|_]} = rpc:call(Node, application, get_env, [riak_core, http]),
+    rhc:create(IP, Port, "riak", []).
+
+httpc_read(C, Bucket, Key) ->
+    {ok, Value} = rhc:get(C, Bucket, Key),
+    Value.
+
+httpc_write(C, Bucket, Key, Value) ->
+    Object = riakc_obj:new(Bucket, Key, Value),
+    rhc:put(C, Object).
+
 %% utility function
 pmap(F, L) ->
     Parent = self(),
@@ -579,4 +609,4 @@ str(String, Substr) ->
     case string:str(String, Substr) of
         0 -> false;
         _ -> true
     end.
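A hedged usage sketch of the new rt helpers above (the bucket and key names are made up, and this snippet is not part of the commit): write and read back a value over both the Protocol Buffers and HTTP interfaces of a deployed node.

    %% Illustrative only: round-trip through the helpers added to src/rt.erl.
    demo_clients(Node) ->
        Pid = rt:pbc(Node),
        rt:pbc_write(Pid, <<"demo">>, <<"k">>, <<"v">>),
        PbObj = rt:pbc_read(Pid, <<"demo">>, <<"k">>),
        <<"v">> = riakc_obj:get_value(PbObj),

        C = rt:httpc(Node),
        rt:httpc_write(C, <<"demo">>, <<"k2">>, <<"v2">>),
        HttpObj = rt:httpc_read(C, <<"demo">>, <<"k2">>),
        <<"v2">> = riakc_obj:get_value(HttpObj),
        ok.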
109  tests/verify_riak_stats.erl  (new file)
@@ -0,0 +1,109 @@
-module(verify_riak_stats).
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").

%% You should have curl installed locally to do this.
confirm() ->
    Nodes = rt:deploy_nodes(1),
    [Node1] = Nodes,
    ?assertEqual(ok, rt:wait_until_nodes_ready([Node1])),
    Stats1 = get_stats(Node1),
    %% make sure a set of stats have valid values
    verify_nz(Stats1, [<<"cpu_nprocs">>,
                       <<"mem_total">>,
                       <<"mem_allocated">>,
                       <<"sys_logical_processors">>,
                       <<"sys_process_count">>,
                       <<"sys_thread_pool_size">>,
                       <<"sys_wordsize">>,
                       <<"ring_num_partitions">>,
                       <<"ring_creation_size">>,
                       <<"memory_total">>,
                       <<"memory_processes">>,
                       <<"memory_processes_used">>,
                       <<"memory_system">>,
                       <<"memory_atom">>,
                       <<"memory_atom_used">>,
                       <<"memory_binary">>,
                       <<"memory_code">>,
                       <<"memory_ets">>]),

    lager:info("perform 5 x PUT and a GET to increment the stats"),
    lager:info("as the stat system only does calcs for > 5 readings"),

    C = rt:httpc(Node1),
    [rt:httpc_write(C, <<"systest">>, <<X>>, <<"12345">>) || X <- lists:seq(1, 5)],
    [rt:httpc_read(C, <<"systest">>, <<X>>) || X <- lists:seq(1, 5)],

    Stats2 = get_stats(Node1),

    %% make sure the stats that were supposed to increment did
    verify_inc(Stats1, Stats2, [{<<"node_gets">>, 10},
                                {<<"node_puts">>, 5},
                                {<<"node_gets_total">>, 10},
                                {<<"node_puts_total">>, 5},
                                {<<"vnode_gets">>, 30},
                                {<<"vnode_puts">>, 15},
                                {<<"vnode_gets_total">>, 30},
                                {<<"vnode_puts_total">>, 15}]),

    %% verify that fsm times were tallied
    verify_nz(Stats2, [<<"node_get_fsm_time_mean">>,
                       <<"node_get_fsm_time_median">>,
                       <<"node_get_fsm_time_95">>,
                       <<"node_get_fsm_time_99">>,
                       <<"node_get_fsm_time_100">>,
                       <<"node_put_fsm_time_mean">>,
                       <<"node_put_fsm_time_median">>,
                       <<"node_put_fsm_time_95">>,
                       <<"node_put_fsm_time_99">>,
                       <<"node_put_fsm_time_100">>]),

    lager:info("Make PBC Connection"),
    Pid = rt:pbc(Node1),

    Stats3 = get_stats(Node1),

    rt:systest_write(Node1, 1),
    %% make sure the stats that were supposed to increment did
    verify_inc(Stats2, Stats3, [{<<"pbc_connects_total">>, 1},
                                {<<"pbc_connects">>, 1},
                                {<<"pbc_active">>, 1}]),

    lager:info("Force Read Repair"),
    rt:pbc_write(Pid, <<"testbucket">>, <<"1">>, <<"blah!">>),
    rt:pbc_set_bucket_prop(Pid, <<"testbucket">>, [{n_val, 4}]),

    Stats4 = get_stats(Node1),
    verify_inc(Stats3, Stats4, [{<<"read_repairs_total">>, 0},
                                {<<"read_repairs">>, 0}]),

    _Value = rt:pbc_read(Pid, <<"testbucket">>, <<"1">>),

    Stats5 = get_stats(Node1),

    verify_inc(Stats3, Stats5, [{<<"read_repairs_total">>, 1},
                                {<<"read_repairs">>, 1}]),

    pass.

verify_inc(Prev, Props, Keys) ->
    [begin
         Old = proplists:get_value(Key, Prev, 0),
         New = proplists:get_value(Key, Props, 0),
         lager:info("~s: ~p -> ~p (expected ~p)", [Key, Old, New, Old + Inc]),
         ?assertEqual(New, (Old + Inc))
     end || {Key, Inc} <- Keys].

verify_nz(Props, Keys) ->
    [?assertNotEqual(proplists:get_value(Key, Props, 0), 0) || Key <- Keys].

get_stats(Node) ->
    timer:sleep(10000),
    StatString = os:cmd(io_lib:format("curl -s -S ~s/stats", [rt:http_url(Node)])),
    {struct, Stats} = mochijson2:decode(StatString),
    %%lager:debug(StatString),
    Stats.
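For reference, a hedged sketch of what get_stats/1 is parsing: the /stats endpoint returns a JSON object, and mochijson2:decode/1 turns it into {struct, Proplist} with binary keys, which is why verify_nz/2 and verify_inc/3 read values with proplists:get_value/3. The numbers below are placeholders, not real readings:

    1> {struct, Stats} = mochijson2:decode(<<"{\"node_gets\":10,\"cpu_nprocs\":242}">>).
    {struct,[{<<"node_gets">>,10},{<<"cpu_nprocs">>,242}]}
    2> proplists:get_value(<<"node_gets">>, Stats, 0).
    10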