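%% -------------------------------------------------------------------
%% QuickCheck FSM riak_test: builds a cluster of 2-5 nodes, preloads a
%% freshly named bucket (under one of the n_val_* bucket types) with
%% 10-1000 keys, verifies that list_keys on every node returns exactly
%% the expected keys (optionally through a key filter), and then tears
%% the nodes back down.
%% -------------------------------------------------------------------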
-module(verify_listkeys_eqcfsm).
-compile(export_all).

-include_lib("eqc/include/eqc.hrl").
-include_lib("eqc/include/eqc_fsm.hrl").
-include_lib("eunit/include/eunit.hrl").

-behaviour(riak_test).
-export([confirm/0]).

-define(NUM_TESTS, 5).
-define(PREFIX, {x, x}).
-define(DEVS(N), lists:concat(["dev", N, "@127.0.0.1"])).
-define(DEV(N), list_to_atom(?DEVS(N))).

-record(state, {
          bucket_type = undefined,
          bucket = undefined,
          nodes_up = [],
          nodes_down = [],
          cluster_nodes = [],
          num_keys = 0,
          key_filter = undefined
         }).

%% ====================================================================
%% riak_test callback
%% ====================================================================
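%% Runs ?NUM_TESTS QuickCheck executions of prop_test/0; the test
%% passes only if every execution succeeds.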
confirm() ->
    ?assert(eqc:quickcheck(eqc:numtests(?NUM_TESTS, ?MODULE:prop_test()))),
    pass.

%% ====================================================================
%% EQC generators
%% ====================================================================
g_num_nodes() ->
    oneof([2, 3, 4, 5]).

g_num_keys() ->
    choose(10, 1000).

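%% Binding to a throwaway bool() defers the druuid:v4_str/0 call to
%% generation time, presumably so each generated command sequence gets
%% a fresh bucket name; noshrink/1 keeps EQC from shrinking it.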
g_uuid() ->
    noshrink(eqc_gen:bind(eqc_gen:bool(), fun(_) -> druuid:v4_str() end)).

g_bucket_type() ->
    oneof(bucket_types()).

g_key_filter() ->
    %% Create a key filter function. There will always be at least 10
    %% keys, because the key-count generator has a lower bound of 10.
    MatchKeys = [list_to_binary(integer_to_list(X)) || X <- lists:seq(1,10)],
    KeyFilter =
        fun(X) ->
                lists:member(X, MatchKeys)
        end,
    frequency([{4, none}, {2, KeyFilter}]).

%% ====================================================================
%% EQC Properties
%% ====================================================================
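%% Generates a command sequence from the FSM below, runs it, cleans up
%% the nodes, and checks that the run finished with ok, aggregating the
%% observed state transitions.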
prop_test() ->
    ?FORALL(Cmds, noshrink(commands(?MODULE)),
            ?WHENFAIL(
               begin
                   _ = lager:error("*********************** FAILED!!!!"
                                   "*******************")
               end,
               ?TRAPEXIT(
                  begin
                      lager:info("======================== Will run commands:"),
                      [lager:info(" Command : ~p~n", [Cmd]) || Cmd <- Cmds],
                      {H, S, Res} = run_commands(?MODULE, Cmds),
                      lager:info("======================== Ran commands"),
                      clean_nodes(S),
                      aggregate(zip(state_names(H), command_names(Cmds)),
                                equals(Res, ok))
                  end))).

%% ====================================================================
%% EQC FSM state transitions
%% ====================================================================
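%% The generated command sequences walk linearly through:
%%   building_cluster -> preloading_data -> verifying_data
%%     -> tearing_down_nodes -> stopped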
initial_state() ->
    building_cluster.

building_cluster(_S) ->
    [
     {preloading_data, {call, ?MODULE, setup_cluster, [g_num_nodes()]}}
    ].

preloading_data(S) ->
    [
     {verifying_data, {call, ?MODULE, preload_data, [g_bucket_type(), g_uuid(), hd(S#state.nodes_up),
                                                     g_num_keys(), g_key_filter()]}}
    ].

verifying_data(S) ->
    [
     {tearing_down_nodes, {call, ?MODULE, verify, [S#state.bucket_type, S#state.bucket, S#state.nodes_up,
                                                   S#state.num_keys, S#state.key_filter]}}
    ].

tearing_down_nodes(S) ->
    [
     {stopped, {call, ?MODULE, clean_nodes, [S#state.nodes_up]}}
    ].

stopped(_S) ->
    [].

%% ====================================================================
%% EQC FSM State Data
%% ====================================================================
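%% Only the first two transitions record state data: setup_cluster
%% stores the node list, and preload_data stores the bucket type,
%% bucket, key count, and key filter used later by the postconditions.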
initial_state_data() ->
    #state{}.

next_state_data(building_cluster, preloading_data, S, _, {call, _, setup_cluster, [NumNodes]}) ->
    S#state{ nodes_up = node_list(NumNodes) };
next_state_data(preloading_data, verifying_data, S, _, {call, _, preload_data,
                                                        [{BucketType, _}, Bucket, _Nodes, NumKeys, KeyFilter]}) ->
    S#state{ bucket_type = BucketType, bucket = Bucket, num_keys = NumKeys, key_filter = KeyFilter };
next_state_data(_From, _To, S, _R, _C) ->
    S.

%% ====================================================================
%% EQC FSM preconditions
%% ====================================================================
precondition(_From,_To,_S,{call,_,_,_}) ->
    true.

%% ====================================================================
%% EQC FSM postconditions
%% ====================================================================
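%% setup_cluster must return ok, and each per-node key list returned by
%% verify must match the key set predicted by expected_keys/2.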
postcondition(_From,_To,_S,{call,_,setup_cluster,_},Res) ->
    ok == Res;
postcondition(_From,_To,_S,{call,_,verify,_},{error, Reason}) ->
    lager:info("Error: ~p", [Reason]),
    false;
postcondition(_From,_To,S,{call,_,verify,_},KeyLists) ->
    ExpectedKeys = expected_keys(S#state.num_keys, S#state.key_filter),
    lists:all(fun(true) -> true; (_) -> false end,
              [assert_equal(ExpectedKeys, Keys) || Keys <- KeyLists]);
postcondition(_From,_To,_S,{call,_,_,_},_Res) ->
    true.

%% ====================================================================
%% callback functions
%% ====================================================================
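%% clean_nodes/1 is called both as an FSM command (with the list of
%% running nodes) and from prop_test/0 with the final
%% {StateName, StateData} pair, so it accepts either shape.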
clean_nodes({stopped, _S}) ->
    lager:info("Clean-up already completed.");
clean_nodes({_, S}) ->
    lager:info("Running clean_nodes with S:~p", [S]),
    clean_nodes(S#state.nodes_up);
clean_nodes([]) ->
    lager:info("clean_nodes: no cluster to clean");
clean_nodes(Nodes) ->
    lager:info("Running clean_nodes with Nodes:~p", [Nodes]),
    CleanupFun =
        fun(N) ->
                lager:info("Wiping out node ~p for good", [N]),
                rt:clean_data_dir(N)
        end,
    lager:info("======================== Taking all nodes down ~p", [Nodes]),
    rt:pmap(CleanupFun, Nodes),
    rt:teardown().

preload_data({BucketType, _}, Bucket, Node, NumKeys, _KeyFilter) ->
    lager:info("*******************[CMD] First node ~p", [Node]),
    lager:info("Writing to bucket ~p", [Bucket]),
    put_keys(Node, {BucketType, Bucket}, NumKeys).

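%% Builds a cluster of NumNodes nodes, waits for it to settle, and
%% creates/activates one bucket type per entry in bucket_types/0.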
setup_cluster(NumNodes) ->
    lager:info("Deploying cluster of size ~p", [NumNodes]),
    Nodes = rt:build_cluster(NumNodes),
    rt:wait_until_nodes_ready(Nodes),
    Node = hd(Nodes),
    rt:wait_until_transfers_complete(Nodes),
    [begin
         rt:create_and_activate_bucket_type(Node, BucketType, [{n_val, NVal}]),
         rt:wait_until_bucket_type_status(BucketType, active, Nodes)
     end || {BucketType, NVal} <- bucket_types()],
    ok.

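%% Lists, filters, and sorts the keys on every node; the postcondition
%% compares each per-node result against the expected key set.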
verify(BucketType, Bucket, Nodes, _NumKeys, KeyFilter) ->
    [list_filter_sort(Node, {BucketType, Bucket}, KeyFilter) || Node <- Nodes].

%% ====================================================================
%% Helpers
%% ====================================================================
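%% Logs any difference and returns a boolean (rather than throwing) so
%% the postcondition can combine per-node results with lists:all/2.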
assert_equal(Expected, Actual) ->
    case Expected -- Actual of
        [] -> ok;
        Diff -> lager:info("Expected -- Actual: ~p", [Diff])
    end,
    length(Actual) == length(Expected)
        andalso Actual == Expected.

bucket_types() ->
    [{<<"n_val_one">>, 1},
     {<<"n_val_two">>, 2},
     {<<"n_val_three">>, 3},
     {<<"n_val_four">>, 4},
     {<<"n_val_five">>, 5}].

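%% Recomputes the key set that put_keys/3 wrote (the integers
%% 0..NumKeys-1 as binaries) and applies the same key filter.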
expected_keys(NumKeys, FilterFun) ->
    KeysPair = {ok, [list_to_binary(["", integer_to_list(Ki)]) ||
                        Ki <- lists:seq(0, NumKeys - 1)]},
    sort_keys(filter_keys(KeysPair, FilterFun)).

filter_keys({ok, Keys}, none) ->
    Keys;
filter_keys({ok, Keys}, FilterFun) ->
    lists:filter(FilterFun, Keys);
filter_keys({error, _}=Error, _) ->
    Error.

list_filter_sort(Node, Bucket, KeyFilter) ->
    %% TODO: move the client connection into the FSM state data.
    {ok, C} = riak:client_connect(Node),
    sort_keys(filter_keys(riak_client:list_keys(Bucket, C), KeyFilter)).

node_list(NumNodes) ->
    NodesN = lists:seq(1, NumNodes),
    [?DEV(N) || N <- NodesN].

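%% Writes Num objects over protocol buffers; keys and values are the
%% integers 0..Num-1 rendered as binaries. The client is stopped in the
%% after clause even if a put fails.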
put_keys(Node, Bucket, Num) ->
    lager:info("*******************[CMD] Putting ~p keys into bucket ~p on node ~p", [Num, Bucket, Node]),
    Pid = rt:pbc(Node),
    try
        Keys = [list_to_binary(["", integer_to_list(Ki)]) || Ki <- lists:seq(0, Num - 1)],
        Vals = [list_to_binary(["", integer_to_list(Ki)]) || Ki <- lists:seq(0, Num - 1)],
        [riakc_pb_socket:put(Pid, riakc_obj:new(Bucket, Key, Val)) || {Key, Val} <- lists:zip(Keys, Vals)]
    after
        catch(riakc_pb_socket:stop(Pid))
    end.

sort_keys({error, _}=Error) ->
    Error;
sort_keys(Keys) ->
    lists:usort(Keys).