diff --git a/lib/no_slides/coverage_fsm.ex b/lib/no_slides/coverage_fsm.ex
index 5c234c4..0ac2339 100644
--- a/lib/no_slides/coverage_fsm.ex
+++ b/lib/no_slides/coverage_fsm.ex
@@ -25,6 +25,7 @@ defmodule NoSlides.CoverageFsm do
       NoSlides.Service,
       NoSlides.VNode_master,
       timeout,
+      :riak_core_coverage_plan, # specific for riak_core_ng
       %{from: from, req_id: req_id, args: args}
     }
   end
diff --git a/lib/no_slides/service.ex b/lib/no_slides/service.ex
index adc1ab9..39ef9e3 100644
--- a/lib/no_slides/service.ex
+++ b/lib/no_slides/service.ex
@@ -79,6 +79,11 @@ defmodule NoSlides.Service do
     wait_result(req_id)
   end
 
+  def clear do
+    req_id = NoSlides.CoverageFsmSupervisor.start_fsm(:clear)
+    wait_result(req_id)
+  end
+
   defp wait_result(req_id, timeout\\5000) do
     receive do
       {^req_id, {:ok, keys}} ->
diff --git a/lib/no_slides/v_node.ex b/lib/no_slides/v_node.ex
index 10acb6f..b45bfa2 100644
--- a/lib/no_slides/v_node.ex
+++ b/lib/no_slides/v_node.ex
@@ -120,6 +120,12 @@ defmodule NoSlides.VNode do
     {:reply, {ref_id, Map.values(state.data)}, state}
   end
 
+  def handle_coverage({:clear, _, _} = req, _key_spaces, {_, ref_id, _} = sender, state) do
+    Logger.debug "[handle_coverage] VNODE req: #{inspect req} sender: #{inspect sender}"
+    new_state = Map.put(state, :data, %{})
+    {:reply, {ref_id, %{}}, new_state}
+  end
+
   def handle_exit(pid, reason, state) do
     Logger.debug "[handle_exit] self: #{inspect self()} - pid: #{inspect pid} - reason: #{inspect reason} - state: #{inspect state}"
     {:noreply, state}
diff --git a/test/no_slides_test.exs b/test/no_slides_test.exs
index 57bbfe1..05c6efe 100644
--- a/test/no_slides_test.exs
+++ b/test/no_slides_test.exs
@@ -12,8 +12,17 @@ defmodule NoSlidesTest do
     assert Node.alive?()
     IO.inspect Node.list
 
+    #IO.puts "\nGive ring distribution a chance. Be patient..."
+    #Process.sleep(60000) # Crazy way to give the ring chance for some distribution. Takes a minute or so!
+
     #IO.inspect NoSlides.Service.ring_status() # local, meaningless. Fails without RiakCore running, e.g. 'mix test --no-start'
     IO.inspect RiakCluster.ring_status(hd(nodeNames)) # remote call
+
+    on_exit fn ->
+      IO.puts "\nThe test is done! Shutting down..."
+      IO.inspect RiakCluster.ring_status(hd(nodeNames))
+      # TODO: stop the slaves
+    end
 
     [nodes: nodeNames]
   end
 
@@ -23,8 +32,7 @@ defmodule NoSlidesTest do
     :pong = rc_command(hd(nodeNames), :ping)
 
     for _n <- 1..100 do
-      i = :rand.uniform(length(nodeNames)) - 1 # index of node to use
-      :pong = rc_command(Enum.at(nodeNames, i), :ping, [:rand.uniform(100_000_000)])
+      :pong = rc_command(pick_random(nodeNames), :ping, [:rand.uniform(100_000_000)])
     end
   end
 
@@ -32,7 +40,6 @@ defmodule NoSlidesTest do
     nodeNames = context.nodes
     first_node = hd(nodeNames)
-
     :ok = rc_command(first_node, :put, [:k1, :v1])
     :ok = rc_command(first_node, :put, [:k2, :v2])
     :ok = rc_command(first_node, :put, [:k3, :v3])
 
@@ -44,6 +51,42 @@ defmodule NoSlidesTest do
       :v3 = rc_command(node, :get, [:k3])
       nil = rc_command(node, :get, [:k10])
     end
+
+    # test updating value
+    :ok = rc_command(first_node, :put, [:k1, :v_new])
+    :v_new = rc_command(first_node, :get, [:k1])
+  end
+
+  test "coverage with key-value pairs in a RiakCore cluster", context do
+    nodeNames = context.nodes
+    first_node = hd(nodeNames)
+
+    # delete all keys to start with a known state
+    [] = rc_command(first_node, :clear, [])
+    [] = rc_command(first_node, :keys, [])
+    [] = rc_command(first_node, :values, [])
+
+    k_v_pairs = Enum.map(1..100, &({"k#{:rand.uniform(100_000)}", "v#{&1}"}))
+
+    Enum.each(k_v_pairs, fn({k, v}) -> rc_command(pick_random(nodeNames), :put, [k, v]) end)
+
+    actual_keys = rc_command(first_node, :keys, [])
+    actual_values = rc_command(first_node, :values, [])
+
+    assert 100 == length(actual_keys)
+    assert 100 == length(actual_values)
+    assert have_same_elements(actual_keys, Enum.map(k_v_pairs, fn({k, _v}) -> k end))
+    assert have_same_elements(actual_values, Enum.map(k_v_pairs, fn({_k, v}) -> v end))
+
+    # store should be empty after a new clear
+    [] = rc_command(first_node, :clear, [])
+    [] = rc_command(first_node, :keys, [])
+    [] = rc_command(first_node, :values, [])
+  end
+
+  defp pick_random(nodes) do
+    i = :rand.uniform(length(nodes)) - 1 # index of node to use
+    Enum.at(nodes, i)
   end
 
   defp rc_command(node, command) do rc_command(node, command, []) end
@@ -52,4 +95,7 @@ defmodule NoSlidesTest do
     :rpc.call(String.to_atom(node), NoSlides.Service, command, args)
   end
 
+  defp have_same_elements(list1, list2) do
+    list1 -- list2 == [] and list2 -- list1 == []
+  end
 end
diff --git a/test/support/riak_cluster.ex b/test/support/riak_cluster.ex
index 919f328..1a9d607 100644
--- a/test/support/riak_cluster.ex
+++ b/test/support/riak_cluster.ex
@@ -3,9 +3,7 @@ defmodule NoSlides.RiakCluster do
   ## derived from Phoenix.PubSub.Cluster and Riak_core example at https://github.com/lambdaclass/riak_core_tutorial
 
   def start_test_nodes() do
-    #assert !Node.alive?()
     start_master()
-    #assert Node.alive?()
 
     nodes = Application.get_env(:no_slides, :nodes, [])
     # start nodes serially:
@@ -51,6 +49,9 @@ defmodule NoSlides.RiakCluster do
     rpc(node, Application, :put_env, [:riak_core, :handoff_port, handoff_port])
     rpc(node, Application, :put_env, [:riak_core, :schema_dirs, ['./priv']])
 
+    rpc(node, Application, :put_env, [:riak_core, :ring_creation_size, 128])
+    rpc(node, Application, :put_env, [:riak_core, :vnode_inactivity_timeout, 1000])
+
     # start our app
     rpc(node, Application, :ensure_all_started, [:no_slides])
     {:ok, node}
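
Usage note: the new `clear/0` follows the same path as the existing `keys/0` and `values/0` coverage commands. `NoSlides.Service.clear/0` starts a coverage FSM via `NoSlides.CoverageFsmSupervisor.start_fsm(:clear)`, each vnode's `handle_coverage({:clear, _, _}, ...)` clause resets its `data` map, and the merged reply comes back as `[]` (exactly what the new test asserts). Below is a minimal sketch of exercising it over `:rpc`, assuming a cluster started by `RiakCluster.start_test_nodes/0`; the node name is hypothetical, the real names come from the `:no_slides, :nodes` config used above.

```elixir
# Hypothetical node name; the tests read the real ones from
# Application.get_env(:no_slides, :nodes, []).
node = :"node1@127.0.0.1"

# Write through one node, then wipe every vnode with the coverage command.
:ok = :rpc.call(node, NoSlides.Service, :put, [:k1, :v1])
[_ | _] = :rpc.call(node, NoSlides.Service, :keys, [])

# clear/0 returns the merged (empty) coverage result, as the test above asserts.
[] = :rpc.call(node, NoSlides.Service, :clear, [])
[] = :rpc.call(node, NoSlides.Service, :keys, [])
[] = :rpc.call(node, NoSlides.Service, :values, [])
```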