From 152d1fbc65cbb8a6f79fdba3310a3bc0dd0a1b76 Mon Sep 17 00:00:00 2001 From: Peter Menhart Date: Tue, 5 Jun 2018 18:02:21 -0400 Subject: [PATCH 1/4] Added test of a riak_core cluster * Test with several slave nodes in a cluster (configured in config/test.exs). To prevent running RiakCore on the master node (harmless, but clutters the logs), invoke with `mix test --no-start` * Fixed a few warnings * Updated riak_core.schema Notes: * Running this project on Elixir 1.6.5 with Erlang 20.3.6. I had to comment out "warnings_as_errors" in `deps/riak_ensemble/rebar.config` and in `deps/riak_core/rebar.config` * This project uses riak_core_ng v3.0.9. Later riak_core_ng commits introduced gen_fsm_compat, which fails with newer Elixir+rebar3 because of "missing erl_vsn" issue: rebar_erl_vsn is a pre-compile hook in rebar.config of gen_fsm_compat and few other projects. Error seems to be caused by Mix not handling rebar3 hooks properly. See https://github.com/elixir-lang/elixir/issues/7733 and https://github.com/Kyorai/riak_core/issues/23 This issue is not specific to erl_vsn: for example, the forked https://github.com/gpad/cuttlefish (see mix.exs) differs from the official version only by rebar.config commenting out: % {provider_hooks, [{post, [{compile, {default, escriptize}}]}]}. --- README.md | 18 +++ config/test.exs | 8 ++ lib/no_slides/get_fsm_supervisor.ex | 2 +- lib/no_slides/service.ex | 5 - lib/no_slides/write_fsm_supervisor.ex | 2 +- mix.exs | 5 + priv/riak_core.schema | 184 +++++++++++++++++++++++++- test/no_slides_test.exs | 51 ++++++- test/support/riak_cluster.ex | 95 +++++++++++++ 9 files changed, 360 insertions(+), 10 deletions(-) create mode 100644 config/test.exs create mode 100644 test/support/riak_cluster.ex diff --git a/README.md b/README.md index 4a933a2..9621c9e 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ Example application for the talk at [NoSlidesConf][0]. 
This application is a sample application with some functionalities available on riak_core. ## Erlang, Elixir, Rebar3 version + On this branch I start to use [asdf][4] and as you can find [here](./.tool-versions) I compile everything with: ``` @@ -22,6 +23,23 @@ mix deps.get mix compile ``` +Run the unit test that executes the riak_core ring on several slave nodes in a cluster. Nodes are configured in `config/test.exs`. To prevent running RiakCore on the master node (which is harmless, but clutters the logs), invoke with: +```shell +mix test --no-start +``` + +### Comments on Erlang/Elixir compatibility +(pmenhart, 2018-06-05) Hack to make the project work with erlang 20.3.6 and elixir 1.6.5: +* After `mix deps.get`, I had to comment out "warnings_as_errors" in `deps/riak_ensemble/rebar.config` and in `deps/riak_core/rebar.config`. +* Note this project is using `{:riak_core, "~> 3.0.9", hex: :riak_core_ng}`. Later riak_core_ng commits introduced gen_fsm_compat, which fails +with newer Elixir+rebar3 because of "missing erl_vsn" issue: rebar_erl_vsn is a pre-compile hook in rebar.config of gen_fsm_compat and a few other projects. +Error seems to be caused by Mix not handling rebar3 hooks properly. See e.g. https://github.com/elixir-lang/elixir/issues/7733 and https://github.com/Kyorai/riak_core/issues/23 +* This issue is not specific to erl_vsn. For example, the forked https://github.com/gpad/cuttlefish (as used in mix.exs here) differs from the official version only by rebar.config commenting out: +``` +% {provider_hooks, [{post, [{compile, {default, escriptize}}]}]}. 
+``` + + ## How to start a single node If you want run a single node you can execute in this way: diff --git a/config/test.exs b/config/test.exs new file mode 100644 index 0000000..41ef119 --- /dev/null +++ b/config/test.exs @@ -0,0 +1,8 @@ +use Mix.Config + + config :no_slides, + nodes: [ + {"node1@127.0.0.1", 8198, 8199}, + {"node2@127.0.0.1", 8298, 8299}, + {"node3@127.0.0.1", 8398, 8399} + ] diff --git a/lib/no_slides/get_fsm_supervisor.ex b/lib/no_slides/get_fsm_supervisor.ex index cfd5868..e5c0ce0 100644 --- a/lib/no_slides/get_fsm_supervisor.ex +++ b/lib/no_slides/get_fsm_supervisor.ex @@ -9,7 +9,7 @@ defmodule NoSlides.GetFsmSupervisor do Supervisor.start_link(__MODULE__, [], [name: __MODULE__]) end - def init(arg) do + def init(_arg) do children = [ worker(NoSlides.GetFsm, [], restart: :temporary) ] diff --git a/lib/no_slides/service.ex b/lib/no_slides/service.ex index 0dfe76e..adc1ab9 100644 --- a/lib/no_slides/service.ex +++ b/lib/no_slides/service.ex @@ -69,11 +69,6 @@ defmodule NoSlides.Service do end end - def ring_status() do - {:ok, ring} = :riak_core_ring_manager.get_my_ring - :riak_core_ring.pretty_print(ring, [:legend]) - end - def keys do req_id = NoSlides.CoverageFsmSupervisor.start_fsm(:keys) wait_result(req_id) diff --git a/lib/no_slides/write_fsm_supervisor.ex b/lib/no_slides/write_fsm_supervisor.ex index f9fee1f..ba5a22a 100644 --- a/lib/no_slides/write_fsm_supervisor.ex +++ b/lib/no_slides/write_fsm_supervisor.ex @@ -9,7 +9,7 @@ defmodule NoSlides.WriteFsmSupervisor do Supervisor.start_link(__MODULE__, [], [name: __MODULE__]) end - def init(arg) do + def init(_arg) do children = [ worker(NoSlides.WriteFsm, [], restart: :temporary) ] diff --git a/mix.exs b/mix.exs index 929fbb3..8f00940 100644 --- a/mix.exs +++ b/mix.exs @@ -5,6 +5,7 @@ defmodule NoSlides.Mixfile do [app: :no_slides, version: "0.1.0", elixir: "~> 1.3", + elixirc_paths: elixirc_paths(Mix.env), build_embedded: Mix.env == :prod, start_permanent: Mix.env == :prod, deps: deps()] 
@@ -18,6 +19,10 @@ defmodule NoSlides.Mixfile do mod: {NoSlides, []}] end + # Specifies which paths to compile per environment. + defp elixirc_paths(:test), do: ["lib", "test/support"] + defp elixirc_paths(_), do: ["lib"] + # Dependencies can be Hex packages: # # {:mydep, "~> 0.3.0"} diff --git a/priv/riak_core.schema b/priv/riak_core.schema index e896e3f..6880074 100644 --- a/priv/riak_core.schema +++ b/priv/riak_core.schema @@ -1,4 +1,5 @@ %%-*- mode: erlang -*- + %% @doc enable active anti-entropy subsystem {mapping, "anti_entropy", "riak_core.anti_entropy", [ {datatype, {enum, [on, off, debug]}}, @@ -295,6 +296,157 @@ end Size =< 1024 end}. +{mapping, "buckets.default.pr", "riak_core.default_bucket_props.pr", [ + {default, "0"}, + {level, advanced} +]}. + +%% Cut and paste translation screams to be rewritten as a datatype, but that's a +%% "nice to have" +{translation, + "riak_core.default_bucket_props.pr", + fun(Conf) -> + Setting = cuttlefish:conf_get("buckets.default.pr", Conf), + case Setting of + "quorum" -> quorum; + "all" -> all; + X -> + try list_to_integer(Setting) of + Int -> Int + catch + E:R -> error + end + end + end +}. + +{mapping, "buckets.default.r", "riak_core.default_bucket_props.r", [ + {default, "quorum"}, + {level, advanced} +]}. +{translation, + "riak_core.default_bucket_props.r", + fun(Conf) -> + Setting = cuttlefish:conf_get("buckets.default.r", Conf), + case Setting of + "quorum" -> quorum; + "all" -> all; + X -> + try list_to_integer(Setting) of + Int -> Int + catch + E:R -> error + end + end + end +}. + +{mapping, "buckets.default.w", "riak_core.default_bucket_props.w", [ + {default, "quorum"}, + {level, advanced} +]}. +{translation, + "riak_core.default_bucket_props.w", + fun(Conf) -> + Setting = cuttlefish:conf_get("buckets.default.w", Conf), + case Setting of + "quorum" -> quorum; + "all" -> all; + X -> + try list_to_integer(Setting) of + Int -> Int + catch + E:R -> error + end + end + end +}. 
+ +{mapping, "buckets.default.pw", "riak_core.default_bucket_props.pw", [ + {default, "0"}, + {level, advanced} +]}. +{translation, + "riak_core.default_bucket_props.pw", + fun(Conf) -> + Setting = cuttlefish:conf_get("buckets.default.pw", Conf), + case Setting of + "quorum" -> quorum; + "all" -> all; + X -> + try list_to_integer(Setting) of + Int -> Int + catch + E:R -> error + end + end + end +}. + +{mapping, "buckets.default.dw", "riak_core.default_bucket_props.dw", [ + {default, "quorum"}, + {level, advanced} +]}. +{translation, + "riak_core.default_bucket_props.dw", + fun(Conf) -> + Setting = cuttlefish:conf_get("buckets.default.dw", Conf), + case Setting of + "quorum" -> quorum; + "all" -> all; + X -> + try list_to_integer(Setting) of + Int -> Int + catch + E:R -> error + end + end + end +}. + +{mapping, "buckets.default.rw", "riak_core.default_bucket_props.rw", [ + {default, "quorum"}, + {level, advanced} +]}. +{translation, + "riak_core.default_bucket_props.rw", + fun(Conf) -> + Setting = cuttlefish:conf_get("buckets.default.rw", Conf), + case Setting of + "quorum" -> quorum; + "all" -> all; + X -> + try list_to_integer(Setting) of + Int -> Int + catch + E:R -> error + end + end + end +}. + +%% {mapping, "buckets.default.basic_quorum", "riak_core.default_bucket_props.basic_quorum", false}, +%% {mapping, "buckets.default.notfound_ok", "riak_core.default_bucket_props.notfound_ok", true} + +%% @doc whether or not siblings are allowed. +%% Note: See Vector Clocks for a discussion of sibling resolution. +{mapping, "buckets.default.siblings", "riak_core.default_bucket_props.allow_mult", [ + {datatype, {enum, [on, off]}}, + {default, on}, + {level, advanced} +]}. + +{translation, + "riak_core.default_bucket_props.allow_mult", + fun(Conf) -> + Setting = cuttlefish:conf_get("buckets.default.siblings", Conf), + case Setting of + on -> true; + off -> false; + _Default -> true + end + end}. 
+ {validator, "ring_size^2", "not a power of 2", fun(Size) -> (Size band (Size-1) =:= 0) @@ -364,7 +516,7 @@ end "can't be a local ip", fun(AddrString) -> case inet_parse:address(AddrString) of - {ok, {127, _, _, _}} -> false; + {ok, {127, 0, _, _}} -> false; {ok, _} -> true; {error, _} -> false end @@ -427,6 +579,19 @@ end {datatype, flag} ]}. +%% consistent on/off (in lieu of enabled/disabled, true/false) +{ translation, + "riak_core.dtrace_support", + fun(Conf) -> + Setting = cuttlefish:conf_get("dtrace", Conf), + case Setting of + on -> true; + off -> false; + _Default -> false + end + end +}. + %% @doc Platform-specific installation paths (substituted by rebar) {mapping, "platform_bin_dir", "riak_core.platform_bin_dir", [ {datatype, directory}, @@ -545,3 +710,20 @@ end lists:sort(lists:foldl(Fold, [], cuttlefish_variable:filter_by_prefix(["cluster", "job"], Conf))) end}. + + +%% @doc Some requests to the vnodes are handled by an asynchronous worker pool. +%% This parameter allows for tuning this pool's behaviour when it comes to dealing +%% with requests that are queued. +%% The default (fifo) will serve requests in the order they arrive at the worker +%% pool. The alternative is to serve the requests in the reverse order, dealing +%% with the most recent request first. +%% There are pros and cons for both approaches, it is best to test out what +%% works best for the desired characteristics. +%% +%% As a very rough rule of thumb: +%% - fifo will lead to lower extremes +%% - filo will lead to lower medians/mediums +{mapping, "worker.queue_strategy", "riak_core.queue_worker_strategy", + [{default, fifo}, + {datatype, {enum, [fifo, filo]}}]}. 
diff --git a/test/no_slides_test.exs b/test/no_slides_test.exs index e7598ee..57bbfe1 100644 --- a/test/no_slides_test.exs +++ b/test/no_slides_test.exs @@ -1,8 +1,55 @@ defmodule NoSlidesTest do use ExUnit.Case + alias NoSlides.RiakCluster + doctest NoSlides - test "the truth" do - assert 1 + 1 == 2 + # "setup_all" is called once per module before any test runs + setup_all do + # start the RiakCore cluster + assert !Node.alive?() + nodeNames = RiakCluster.start_test_nodes() + assert Node.alive?() + IO.inspect Node.list + + #IO.inspect NoSlides.Service.ring_status() # local, meaningless. Fails without RiakCore running, e.g. 'mix test --no-start' + IO.inspect RiakCluster.ring_status(hd(nodeNames)) # remote call + [nodes: nodeNames] + end + + + test "ping different nodes in a RiakCore cluster", context do + nodeNames = context.nodes + :pong = rc_command(hd(nodeNames), :ping) + + for _n <- 1..100 do + i = :rand.uniform(length(nodeNames)) - 1 # index of node to use + :pong = rc_command(Enum.at(nodeNames, i), :ping, [:rand.uniform(100_000_000)]) + end + end + + test "try key-value pairs in a RiakCore cluster", context do + nodeNames = context.nodes + first_node = hd(nodeNames) + + + :ok = rc_command(first_node, :put, [:k1, :v1]) + :ok = rc_command(first_node, :put, [:k2, :v2]) + :ok = rc_command(first_node, :put, [:k3, :v3]) + + # get from any of the nodes + for node <- nodeNames do + :v1 = rc_command(node, :get, [:k1]) + :v2 = rc_command(node, :get, [:k2]) + :v3 = rc_command(node, :get, [:k3]) + nil = rc_command(node, :get, [:k10]) + end end + + defp rc_command(node, command) do rc_command(node, command, []) end + + defp rc_command(node, command, args) do + :rpc.call(String.to_atom(node), NoSlides.Service, command, args) + end + end diff --git a/test/support/riak_cluster.ex b/test/support/riak_cluster.ex new file mode 100644 index 0000000..919f328 --- /dev/null +++ b/test/support/riak_cluster.ex @@ -0,0 +1,95 @@ +defmodule NoSlides.RiakCluster do + @moduledoc false + 
## derived from Phoenix.PubSub.Cluster and Riak_core example at https://github.com/lambdaclass/riak_core_tutorial + + def start_test_nodes() do + #assert !Node.alive?() + start_master() + #assert Node.alive?() + + nodes = Application.get_env(:no_slides, :nodes, []) + # start nodes serially: + #for {node, web_port, handoff_port} <- nodes, do: start_node(node, web_port, handoff_port) + # or start all nodes in parallel: + nodes + |> Enum.map(&Task.async(fn -> {node, web_port, handoff_port} = &1; start_node(node, web_port, handoff_port) end)) + |> Enum.map(&Task.await(&1, 30_000)) + + nodeNames = Enum.map(nodes, &(elem(&1, 0))) + build_cluster(nodeNames) + nodeNames + end + + def ring_status(node) do + #rpc(String.to_atom(node), :riak_core_console, :member_status, [[]]) + rpc(String.to_atom(node), NoSlides.Service, :ring_status, []) + end + + defp start_master() do + :ok = :net_kernel.monitor_nodes(true) + _ = :os.cmd('epmd -daemon') + + # Turn node into a distributed node with the given long name + :net_kernel.start([:"primary@127.0.0.1"]) + + # Allow spawned nodes to fetch all code from this node + :erl_boot_server.start([]) + allow_boot '127.0.0.1' + end + + defp start_node(node_host, web_port, handoff_port) do + {:ok, node} = :slave.start('127.0.0.1', node_name(node_host), inet_loader_args()) + data_dir = './data/#{node_host}' # single quotes for compatibility with Erlang + + add_code_paths(node) + rpc(node, Application, :load, [:lager]) + rpc(node, Application, :load, [:riak_core]) + rpc(node, Application, :put_env, [:riak_core, :ring_state_dir, data_dir]) + rpc(node, Application, :put_env, [:riak_core, :platform_data_dir, data_dir]) + + rpc(node, Application, :put_env, [:riak_core, :web_port, web_port]) + rpc(node, Application, :put_env, [:riak_core, :handoff_port, handoff_port]) + rpc(node, Application, :put_env, [:riak_core, :schema_dirs, ['./priv']]) + + # start our app + rpc(node, Application, :ensure_all_started, [:no_slides]) + {:ok, node} + end + + defp 
build_cluster(nodes) do + # join remaining nodes to the ring on the first node + [first | tail] = nodes + for node <- tail do + rpc(String.to_atom(node), :riak_core, :join, [String.to_atom(first)]) + end + end + + defp rpc(node, module, function, args) do + case :rpc.block_call(node, module, function, args) do + {:ok, val} -> {:ok, val} + :ok -> :ok + err -> IO.puts "RPC error: #{inspect err}" + end + end + + defp inet_loader_args do + to_charlist("-loader inet -hosts 127.0.0.1 -setcookie #{:erlang.get_cookie()}") + end + + defp allow_boot(host) do + {:ok, ipv4} = :inet.parse_ipv4_address(host) + :erl_boot_server.add_slave(ipv4) + end + + defp add_code_paths(node) do + rpc(node, :code, :add_paths, [:code.get_path()]) + end + + defp node_name(node_host) do + node_host + |> to_string + |> String.split("@") + |> Enum.at(0) + |> String.to_atom + end +end From 8a13e83207eb8634d6d4f365e8ba9cdb5223480d Mon Sep 17 00:00:00 2001 From: Peter Menhart Date: Wed, 6 Jun 2018 18:11:42 -0400 Subject: [PATCH 2/4] Fixed and tested coverage commands * riak_core_coverage_fsm in riak_core_ng expects additional value from init * added coverage command NoSlides.Service.clear() to remove all keys * Added unit test for all coverage commands --- lib/no_slides/coverage_fsm.ex | 1 + lib/no_slides/service.ex | 5 ++++ lib/no_slides/v_node.ex | 6 ++++ test/no_slides_test.exs | 52 +++++++++++++++++++++++++++++++++-- test/support/riak_cluster.ex | 5 ++-- 5 files changed, 64 insertions(+), 5 deletions(-) diff --git a/lib/no_slides/coverage_fsm.ex b/lib/no_slides/coverage_fsm.ex index 5c234c4..0ac2339 100644 --- a/lib/no_slides/coverage_fsm.ex +++ b/lib/no_slides/coverage_fsm.ex @@ -25,6 +25,7 @@ defmodule NoSlides.CoverageFsm do NoSlides.Service, NoSlides.VNode_master, timeout, + :riak_core_coverage_plan, # specific for riak_core_ng %{from: from, req_id: req_id, args: args} } end diff --git a/lib/no_slides/service.ex b/lib/no_slides/service.ex index adc1ab9..39ef9e3 100644 --- 
a/lib/no_slides/service.ex +++ b/lib/no_slides/service.ex @@ -79,6 +79,11 @@ defmodule NoSlides.Service do wait_result(req_id) end + def clear do + req_id = NoSlides.CoverageFsmSupervisor.start_fsm(:clear) + wait_result(req_id) + end + defp wait_result(req_id, timeout\\5000) do receive do {^req_id, {:ok, keys}} -> diff --git a/lib/no_slides/v_node.ex b/lib/no_slides/v_node.ex index 10acb6f..b45bfa2 100644 --- a/lib/no_slides/v_node.ex +++ b/lib/no_slides/v_node.ex @@ -120,6 +120,12 @@ defmodule NoSlides.VNode do {:reply, {ref_id, Map.values(state.data)}, state} end + def handle_coverage({:clear, _, _} = req, _key_spaces, {_, ref_id, _} = sender, state) do + Logger.debug "[handle_coverage] VNODE req: #{inspect req} sender: #{inspect sender}" + new_state = Map.put(state, :data, %{}) + {:reply, {ref_id, %{}}, new_state} + end + def handle_exit(pid, reason, state) do Logger.debug "[handle_exit] self: #{inspect self()} - pid: #{inspect pid} - reason: #{inspect reason} - state: #{inspect state}" {:noreply, state} diff --git a/test/no_slides_test.exs b/test/no_slides_test.exs index 57bbfe1..05c6efe 100644 --- a/test/no_slides_test.exs +++ b/test/no_slides_test.exs @@ -12,8 +12,17 @@ defmodule NoSlidesTest do assert Node.alive?() IO.inspect Node.list + #IO.puts "\nGive ring distribution a chance. Be patient..." + #Process.sleep(60000) # Crazy way to give the ring chance for some distribution. Takes a minute or so! + #IO.inspect NoSlides.Service.ring_status() # local, meaningless. Fails without RiakCore running, e.g. 'mix test --no-start' IO.inspect RiakCluster.ring_status(hd(nodeNames)) # remote call + + on_exit fn -> + IO.puts "\nThe test is done! Shutting down..." 
+ IO.inspect RiakCluster.ring_status(hd(nodeNames)) + # TODO: stop the slaves + end [nodes: nodeNames] end @@ -23,8 +32,7 @@ :pong = rc_command(hd(nodeNames), :ping) for _n <- 1..100 do - i = :rand.uniform(length(nodeNames)) - 1 # index of node to use - :pong = rc_command(Enum.at(nodeNames, i), :ping, [:rand.uniform(100_000_000)]) + :pong = rc_command(pick_random(nodeNames), :ping, [:rand.uniform(100_000_000)]) end end @@ -32,7 +40,6 @@ nodeNames = context.nodes first_node = hd(nodeNames) - :ok = rc_command(first_node, :put, [:k1, :v1]) :ok = rc_command(first_node, :put, [:k2, :v2]) :ok = rc_command(first_node, :put, [:k3, :v3]) @@ -44,6 +51,42 @@ :v3 = rc_command(node, :get, [:k3]) nil = rc_command(node, :get, [:k10]) end + + # test updating value + :ok = rc_command(first_node, :put, [:k1, :v_new]) + :v_new = rc_command(first_node, :get, [:k1]) + end + + test "coverage with key-value pairs in a RiakCore cluster", context do + nodeNames = context.nodes + first_node = hd(nodeNames) + + # delete all keys to start with a known state + [] = rc_command(first_node, :clear, []) + [] = rc_command(first_node, :keys, []) + [] = rc_command(first_node, :values, []) + + k_v_pairs = Enum.map(1..100, &({"k#{:rand.uniform(100_000)}", "v#{&1}"})) + + Enum.each(k_v_pairs, fn({k, v}) -> rc_command(pick_random(nodeNames), :put, [k, v]) end) + + actual_keys = rc_command(first_node, :keys, []) + actual_values = rc_command(first_node, :values, []) + + assert 100 == length(actual_keys) + assert 100 == length(actual_values) + assert have_same_elements(actual_keys, Enum.map(k_v_pairs, fn({k, _v}) -> k end)) + assert have_same_elements(actual_values, Enum.map(k_v_pairs, fn({_k, v}) -> v end)) + + # store should be empty after a new clear + [] = rc_command(first_node, :clear, []) + [] = rc_command(first_node, :keys, []) + [] = rc_command(first_node, :values, []) + end + + defp pick_random(nodes) do + i = 
:rand.uniform(length(nodes)) - 1 # index of node to use + Enum.at(nodes, i) end defp rc_command(node, command) do rc_command(node, command, []) end @@ -52,4 +95,7 @@ defmodule NoSlidesTest do :rpc.call(String.to_atom(node), NoSlides.Service, command, args) end + defp have_same_elements(list1, list2) do + list1 -- list2 == [] and list2 -- list1 == [] + end end diff --git a/test/support/riak_cluster.ex b/test/support/riak_cluster.ex index 919f328..1a9d607 100644 --- a/test/support/riak_cluster.ex +++ b/test/support/riak_cluster.ex @@ -3,9 +3,7 @@ defmodule NoSlides.RiakCluster do ## derived from Phoenix.PubSub.Cluster and Riak_core example at https://github.com/lambdaclass/riak_core_tutorial def start_test_nodes() do - #assert !Node.alive?() start_master() - #assert Node.alive?() nodes = Application.get_env(:no_slides, :nodes, []) # start nodes serially: @@ -51,6 +49,9 @@ defmodule NoSlides.RiakCluster do rpc(node, Application, :put_env, [:riak_core, :handoff_port, handoff_port]) rpc(node, Application, :put_env, [:riak_core, :schema_dirs, ['./priv']]) + rpc(node, Application, :put_env, [:riak_core, :ring_creation_size, 128]) + rpc(node, Application, :put_env, [:riak_core, :vnode_inactivity_timeout, 1000]) + # start our app rpc(node, Application, :ensure_all_started, [:no_slides]) {:ok, node} From 8b8a84794e1d0e55ef3286d01ebb04f2600ef04d Mon Sep 17 00:00:00 2001 From: Peter Menhart Date: Wed, 6 Jun 2018 21:47:29 -0400 Subject: [PATCH 3/4] Added Travis CI build information * See README for explanation why failing with Erlang/OTP 20 --- .travis.yml | 24 ++++++++++++++++++++++++ README.md | 3 +++ 2 files changed, 27 insertions(+) create mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..8ca8731 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,24 @@ +language: elixir +elixir: +- 1.3.4 +- 1.4.5 +- 1.5.2 +- 1.6.5 +otp_release: +- 18.3 +- 19.3 +- 20.3 +matrix: + exclude: + - elixir: 1.3.4 + otp_release: 20.3 + - elixir: 1.4.5 + 
otp_release: 20.3 + - elixir: 1.6.5 + otp_release: 18.3 + - elixir: 1.6.5 + otp_release: 19.3 +env: +- MIX_ENV=test +script: +- mix test --no-start diff --git a/README.md b/README.md index 9621c9e..2ca227d 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,6 @@ +[![Build Status](https://travis-ci.org/pmenhart/no_slides.svg?branch=riak_core_3)](https://travis-ci.org/pmenhart/no_slides) + + # NoSlides example application Example application for the talk at [NoSlidesConf][0]. This application is a sample application with some functionalities available on riak_core. From ff77559f1bac0d6b169cd7fb4729503468edb9b2 Mon Sep 17 00:00:00 2001 From: Peter Menhart <pmenhart@gmail.com> Date: Wed, 11 Jul 2018 11:32:00 -0400 Subject: [PATCH 4/4] Riak_core updated to 3.1.1 * This version uses gen_fsm_compat * Fixed rebar3 version 3.6.0 or above is needed to avoid problems compiling riak_ensemble and riak_core. 'mix local.rebar --force' will upgrade rebar3 for your Mix projects --- .travis.yml | 27 +++++++++++++-------------- README.md | 3 +++ mix.exs | 2 +- mix.lock | 23 +++++++++++++---------- 4 files changed, 30 insertions(+), 25 deletions(-) diff --git a/.travis.yml b/.travis.yml index 8ca8731..3e8c0b5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,23 +1,22 @@ language: elixir -elixir: -- 1.3.4 -- 1.4.5 -- 1.5.2 -- 1.6.5 -otp_release: -- 18.3 -- 19.3 -- 20.3 + matrix: - exclude: - - elixir: 1.3.4 - otp_release: 20.3 + include: + - elixir: 1.4.5 + otp_release: 18.3 - elixir: 1.4.5 + otp_release: 19.3 + - elixir: 1.5.2 + otp_release: 18.3 + - elixir: 1.5.2 + otp_release: 19.3 + - elixir: 1.5.2 otp_release: 20.3 - elixir: 1.6.5 - otp_release: 18.3 + otp_release: 20.3 - elixir: 1.6.5 - otp_release: 19.3 + otp_release: 21.0 + env: - MIX_ENV=test script: diff --git a/README.md b/README.md index 2ca227d..0859ec7 100644 --- a/README.md +++ b/README.md @@ -42,6 +42,9 @@ Error seems to be caused by Mix not handling rebar3 hooks properly. See e.g. 
htt % {provider_hooks, [{post, [{compile, {default, escriptize}}]}]}. ``` +(pmenhart, 2018-07-11) Rebar3 was fixed, use version 3.6.1 or up. +`{:riak_core, "~> 3.1.1", hex: :riak_core_ng}` compiles (with gen_fsm_compat) without problems mentioned above. +However, the forked version of cuttlefish is still needed. ## How to start a single node If you want run a single node you can execute in this way: diff --git a/mix.exs b/mix.exs index 8f00940..b61a3c5 100644 --- a/mix.exs +++ b/mix.exs @@ -34,7 +34,7 @@ defmodule NoSlides.Mixfile do # Type "mix help deps" for more examples and options defp deps do [ - {:riak_core, "~> 3.0.9", hex: :riak_core_ng}, + {:riak_core, "~> 3.1.1", hex: :riak_core_ng}, {:cuttlefish, git: "https://github.com/gpad/cuttlefish", branch: "develop", override: true}, {:poolboy, "~> 1.5.1", override: true}, ] diff --git a/mix.lock b/mix.lock index 6d26407..545ac71 100644 --- a/mix.lock +++ b/mix.lock @@ -1,21 +1,24 @@ -%{"basho_stats": {:hex, :basho_stats, "1.0.3", "7e1174151509c64fcc1934120ed32295e14f84daae7f84926ba2c8d3700d146c", [:rebar3], [], "hexpm"}, - "bear": {:hex, :bear, "0.8.5", "e95fca1627cd9e15baf93ce0a52aff16917baf325f0ee65b88cd715376cd2344", [:rebar3], [], "hexpm"}, +%{ + "basho_stats": {:hex, :basho_stats, "1.0.3", "7e1174151509c64fcc1934120ed32295e14f84daae7f84926ba2c8d3700d146c", [:rebar3], [], "hexpm"}, + "bear": {:hex, :bear, "0.8.7", "16264309ae5d005d03718a5c82641fcc259c9e8f09adeb6fd79ca4271168656f", [:rebar3], [], "hexpm"}, "blume": {:hex, :blume, "0.1.1", "cfb4f43688690ba81c6a79f54e4678cfd5fdedab692f277ae740ae4a3897360d", [:rebar3], [], "hexpm"}, "chash": {:hex, :chash, "0.1.2", "af02484f2640c653c4b9a8557a14ca0704989dbedb27e7ccbc442f1903a3bca7", [:rebar3], [], "hexpm"}, - "clique": {:hex, :clique, "0.3.10", "29ecba7ec61ded866197164d33f61d1cbf892ed85fc933673d7e50d0689d34fb", [:rebar3], [{:cuttlefish, "~>2.0.12", [hex: :cuttlefish, repo: "hexpm", optional: false]}], "hexpm"}, + "clique": {:hex, :clique, "0.3.12", 
"3d8d5ca3c60787ad13562bea18d34149ba8915ad08214ede91d50657198b4dc3", [:rebar3], [{:cuttlefish, "~>2.1.0", [hex: :cuttlefish, repo: "hexpm", optional: false]}], "hexpm"}, "cuttlefish": {:git, "https://github.com/gpad/cuttlefish", "674619a3e0ff3915209137d2216d9b279f723ebe", [branch: "develop"]}, "edown": {:hex, :edown, "0.8.1", "7333b6f6b7bbc736c263e9ceb8261667d7c8f7d92e251829f1a9fe8f24e7b694", [:rebar3], [], "hexpm"}, "eleveldb": {:hex, :eleveldb, "2.2.20", "1fff63a5055bbf4bf821f797ef76065882b193f5e8095f95fcd9287187773b58", [:rebar3], [], "hexpm"}, - "exometer_core": {:hex, :basho_exometer_core, "1.0.2", "dc4bc9b0b47edee0c053b16ac5b9c692e662c7e1513ffc96f4d928bc3328d65f", [:rebar3], [{:folsom, "~>0.8.3", [hex: :folsom, repo: "hexpm", optional: false]}, {:lager, "~>3.2.0", [hex: :lager, repo: "hexpm", optional: false]}, {:parse_trans, "~>2.9.0", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:setup, "~>1.7.0", [hex: :setup, repo: "hexpm", optional: false]}], "hexpm"}, - "folsom": {:hex, :folsom, "0.8.5", "94a027b56fe84feed264f9b33cb4c6ac9a801fad84b87dbda0836ce83c3b8d69", [:rebar3], [{:bear, "0.8.5", [hex: :bear, repo: "hexpm", optional: false]}], "hexpm"}, + "exometer_core": {:hex, :basho_exometer_core, "1.0.3", "e801bc203558d75645529b55429c4f3d161073a4332a020570bcadd35ddcc279", [:rebar3], [{:folsom, "~>0.8.3", [hex: :folsom, repo: "hexpm", optional: false]}, {:lager, "~>3.6.0", [hex: :lager, repo: "hexpm", optional: false]}, {:parse_trans, "~>2.9.0", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:setup, "~>1.7.0", [hex: :setup, repo: "hexpm", optional: false]}], "hexpm"}, + "folsom": {:hex, :folsom, "0.8.7", "a885f0aeee4c84270954c88a55a5a473d6b2c7493e32ffdc5765412dd555a951", [:rebar3], [{:bear, "0.8.7", [hex: :bear, repo: "hexpm", optional: false]}], "hexpm"}, + "gen_fsm_compat": {:hex, :gen_fsm_compat, "0.3.0", "5903549f67d595f58a7101154cbe0fdd46955fbfbe40813f1e53c23a970ff5f4", [:rebar3], [], "hexpm"}, "getopt": {:hex, :getopt, "1.0.1", 
"c73a9fa687b217f2ff79f68a3b637711bb1936e712b521d8ce466b29cbf7808a", [:rebar3], [], "hexpm"}, "goldrush": {:hex, :goldrush, "0.1.9", "f06e5d5f1277da5c413e84d5a2924174182fb108dabb39d5ec548b27424cd106", [:rebar3], [], "hexpm"}, "jam": {:hex, :jam, "1.0.0", "ed9b180f2f3a775e6a47ac490954976802f0638c19a393f3e86d4ba4cf890582", [:rebar3], [], "hexpm"}, - "lager": {:hex, :lager, "3.2.4", "a6deb74dae7927f46bd13255268308ef03eb206ec784a94eaf7c1c0f3b811615", [:rebar3], [{:goldrush, "0.1.9", [hex: :goldrush, repo: "hexpm", optional: false]}], "hexpm"}, + "lager": {:hex, :lager, "3.6.3", "fe78951d174616273f87f0dbc3374d1430b1952e5efc4e1c995592d30a207294", [:rebar3], [{:goldrush, "0.1.9", [hex: :goldrush, repo: "hexpm", optional: false]}], "hexpm"}, "parse_trans": {:hex, :parse_trans, "2.9.0", "3f5f7b402928fb9fd200c891e635de909045d1efac40ce3f924d3892898f85eb", [:rebar], [{:edown, "> 0.0.0", [hex: :edown, repo: "hexpm", optional: false]}], "hexpm"}, "pbkdf2": {:hex, :pbkdf2, "2.0.0", "11c23279fded5c0027ab3996cfae77805521d7ef4babde2bd7ec04a9086cf499", [:rebar3], [], "hexpm"}, "poolboy": {:hex, :poolboy, "1.5.1", "6b46163901cfd0a1b43d692657ed9d7e599853b3b21b95ae5ae0a777cf9b6ca8", [:rebar], [], "hexpm"}, - "riak_core": {:hex, :riak_core_ng, "3.0.9", "f6e27fb67c9cb9a3fb7aef38ec1bb5633624d7b78e32e73fe7a0045f3514f032", [:rebar3], [{:basho_stats, "~>1.0.3", [hex: :basho_stats, repo: "hexpm", optional: false]}, {:blume, "~>0.1.0", [hex: :blume, repo: "hexpm", optional: false]}, {:chash, "~>0.1.1", [hex: :chash, repo: "hexpm", optional: false]}, {:clique, "~>0.3.9", [hex: :clique, repo: "hexpm", optional: false]}, {:cuttlefish, "~>2.0.12", [hex: :cuttlefish, repo: "hexpm", optional: false]}, {:eleveldb, "~>2.2.20", [hex: :eleveldb, repo: "hexpm", optional: false]}, {:exometer_core, "~>1.0.2", [hex: :basho_exometer_core, repo: "hexpm", optional: false]}, {:folsom, "~>0.8.5", [hex: :folsom, repo: "hexpm", optional: false]}, {:goldrush, "~>0.1.8", [hex: :goldrush, repo: "hexpm", optional: 
false]}, {:jam, "~>1.0.0", [hex: :jam, repo: "hexpm", optional: false]}, {:lager, "~>3.2.4", [hex: :lager, repo: "hexpm", optional: false]}, {:pbkdf2, "~>2.0.0", [hex: :pbkdf2, repo: "hexpm", optional: false]}, {:poolboy, "~>0.8.2", [hex: :basho_poolboy, repo: "hexpm", optional: false]}, {:riak_ensemble, "~>2.4.0", [hex: :riak_ensemble_ng, repo: "hexpm", optional: false]}, {:riak_sysmon, "~>2.1.3", [hex: :riak_sysmon, repo: "hexpm", optional: false]}], "hexpm"}, - "riak_ensemble": {:hex, :riak_ensemble_ng, "2.4.0", "6570463bf09f1200c003de298c5d6615c418d3bf0a2df5caf469a9df2558059a", [:rebar3], [{:lager, "~>3.2.1", [hex: :lager, repo: "hexpm", optional: false]}], "hexpm"}, - "riak_sysmon": {:hex, :riak_sysmon, "2.1.5", "2331bbcaebe73ea7b2449f4c765827142476cc73d7d86999b4209ee005694d6b", [:rebar3], [], "hexpm"}, - "setup": {:hex, :setup, "1.7.0", "15df8e57c6df9755e22bfb1aef0c640bd97e9889396fdfb2a85a5536a9043674", [:rebar3], [], "hexpm"}} + "riak_core": {:hex, :riak_core_ng, "3.1.1", "098ceef293f9f232e724dd8745916f2e4ce0f14cb8edd1653e8425a14f0dd0b6", [:rebar3], [{:basho_stats, "~>1.0.3", [hex: :basho_stats, repo: "hexpm", optional: false]}, {:blume, "~>0.1.0", [hex: :blume, repo: "hexpm", optional: false]}, {:chash, "~>0.1.1", [hex: :chash, repo: "hexpm", optional: false]}, {:clique, "~>0.3.11", [hex: :clique, repo: "hexpm", optional: false]}, {:cuttlefish, "~>2.1.4", [hex: :cuttlefish, repo: "hexpm", optional: false]}, {:eleveldb, "~>2.2.20", [hex: :eleveldb, repo: "hexpm", optional: false]}, {:exometer_core, "~>1.0.3", [hex: :basho_exometer_core, repo: "hexpm", optional: false]}, {:folsom, "~>0.8.7", [hex: :folsom, repo: "hexpm", optional: false]}, {:gen_fsm_compat, "~>0.3.0", [hex: :gen_fsm_compat, repo: "hexpm", optional: false]}, {:goldrush, "~>0.1.8", [hex: :goldrush, repo: "hexpm", optional: false]}, {:lager, "~>3.6.0", [hex: :lager, repo: "hexpm", optional: false]}, {:pbkdf2, "~>2.0.0", [hex: :pbkdf2, repo: "hexpm", optional: false]}, {:poolboy, "~>0.8.4", [hex: 
:basho_poolboy, repo: "hexpm", optional: false]}, {:riak_ensemble, "~>2.4.4", [hex: :riak_ensemble_ng, repo: "hexpm", optional: false]}, {:riak_sysmon, "~>2.1.7", [hex: :riak_sysmon, repo: "hexpm", optional: false]}], "hexpm"}, + "riak_ensemble": {:hex, :riak_ensemble_ng, "2.4.4", "f9e04052f4a7faad20f008dff18d34d3552513000410ce9c5941b4f7361741e8", [:rebar3], [{:gen_fsm_compat, "~>0.3.0", [hex: :gen_fsm_compat, repo: "hexpm", optional: false]}, {:lager, "~>3.6.0", [hex: :lager, repo: "hexpm", optional: false]}], "hexpm"}, + "riak_sysmon": {:hex, :riak_sysmon, "2.1.7", "af420df0f7569e1f12bcd465745164cb6189eb93f118d5cdb3f90feb3f8bf47d", [:rebar3], [{:lager, "~>3.6.0", [hex: :lager, repo: "hexpm", optional: false]}], "hexpm"}, + "setup": {:hex, :setup, "1.7.0", "15df8e57c6df9755e22bfb1aef0c640bd97e9889396fdfb2a85a5536a9043674", [:rebar3], [], "hexpm"}, +}