
Commit 8e54499

Committed Oct 29, 2020
Copy & paste from jehovakel ex
1 parent 1aa136d commit 8e54499

27 files changed, +1323 −0 lines changed
 

‎.formatter.exs

+4
@@ -0,0 +1,4 @@
# Used by "mix format"
[
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
]

‎.gitignore

+24
@@ -0,0 +1,24 @@
# The directory Mix will write compiled artifacts to.
/_build/

# If you run "mix test --cover", coverage assets end up here.
/cover/

# The directory Mix downloads your dependencies sources to.
/deps/

# Where 3rd-party dependencies like ExDoc output generated docs.
/doc/

# Ignore .fetch files in case you like to edit your project deps locally.
/.fetch

# If the VM crashes, it generates a dump, let's ignore it too.
erl_crash.dump

# Also ignore archive artifacts (built via "mix archive.build").
*.ez

# Ignore package tarball (built via "mix hex.build").
jehovakel_ex_event_store-*.tar

‎README.md

+21
@@ -0,0 +1,21 @@
# Shared

**Jehovakel Library used with [Eventstore](https://github.com/commanded/eventstore)**

## Installation

If [available in Hex](https://hex.pm/docs/publish), the package can be installed
by adding `jehovakel_ex_event_store` to your list of dependencies in `mix.exs`:

```elixir
def deps do
  [
    {:jehovakel_ex_event_store, "~> 1.0.0", git: "https://github.com/STUDITEMPS/jehovakel_ex_event_store.git"}
  ]
end
```

Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc)
and published on [HexDocs](https://hexdocs.pm). Once published, the docs can
be found at [https://hexdocs.pm/jehovakel_ex_event_store](https://hexdocs.pm/jehovakel_ex_event_store).
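Beyond installation, a host application defines its own event store module that pulls in both the `eventstore` behaviour and this library's helpers. The sketch below is illustrative only (the `MyApp` names are assumptions, not part of this commit) and mirrors `test/support/event_store.ex` later in the diff.

```elixir
# Illustrative only: the application's event store module.
defmodule MyApp.EventStore do
  use EventStore, otp_app: :my_app
  use Shared.EventStore
end

# Supervised like any EventStore instance, e.g.:
# children = [MyApp.EventStore]
```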

‎config/config.exs

+53
@@ -0,0 +1,53 @@
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config

# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.

# You can configure your application as:
#
#     config :jehovakel_ex_event_store, key: :value
#
# and access this configuration in your application as:
#
#     Application.get_env(:jehovakel_ex_event_store, :key)
#
# You can also configure a 3rd-party app:
#
#     config :logger, level: :info
#

# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#

config :jehovakel_ex_event_store,
  ecto_repos: [Support.JehovakelExRepo]

# General Repository configuration
config :jehovakel_ex_event_store, Support.JehovakelExRepo,
  username: System.get_env("PG_USER") || System.get_env()["USER"],
  password: System.get_env("PG_PASSWORD") || "",
  port: System.get_env("PG_PORT") || "5432",
  hostname: System.get_env("PG_HOST") || "localhost",
  database: System.get_env("PG_NAME") || "jehovakel_ex_event_store_#{Mix.env()}",
  pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10"),
  migration_source: "readstore_schema_migrations"

config :eventstore, EventStore.Storage,
  serializer: EventStore.TermSerializer,
  username: System.get_env("PG_USER") || System.get_env()["USER"],
  password: System.get_env("PG_PASSWORD") || "",
  port: System.get_env("PG_PORT") || "5432",
  hostname: System.get_env("PG_HOST") || "localhost",
  database: System.get_env("PG_NAME") || "jehovakel_ex_event_store_#{Mix.env()}",
  pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")

import_config "#{Mix.env()}.exs"

‎config/dev.exs

+1
@@ -0,0 +1 @@
use Mix.Config

‎config/test.exs

+35
@@ -0,0 +1,35 @@
use Mix.Config

config :jehovakel_ex_event_store,
  ecto_repos: [Support.Repo],
  event_stores: [JehovakelEx.EventStore]

# General Repository configuration
config :jehovakel_ex_event_store, Support.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: System.get_env("PG_USER") || System.get_env()["USER"],
  password: System.get_env("PG_PASSWORD") || "",
  port: System.get_env("PG_PORT") || "5432",
  hostname: System.get_env("PG_HOST") || "localhost",
  database: System.get_env("PG_NAME") || "jehovakel_ex_#{Mix.env()}",
  # Avoid collisions with eventstore `schema_migrations` relation
  migration_source: "readstore_schema_migrations",
  pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10"),
  # Raises the DB pool checkout timeouts so the tests have more time to run through
  queue_target: 1_000,
  queue_interval: 5_000

config :jehovakel_ex_event_store, JehovakelEx.EventStore,
  serializer: EventStore.TermSerializer,
  username: System.get_env("PG_USER") || System.get_env()["USER"],
  password: System.get_env("PG_PASSWORD") || "",
  port: System.get_env("PG_PORT") || "5432",
  hostname: System.get_env("PG_HOST") || "localhost",
  database: System.get_env("PG_NAME") || "jehovakel_ex_#{Mix.env()}",
  pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10"),
  # Raises the DB pool checkout timeouts so the tests have more time to run through
  queue_target: 1_000,
  queue_interval: 5_000

# TODO: set level :error, which breaks lib/event_store/event_store_listener_test.exs
config :logger, level: :error

‎lib/event_store/appendable_event.ex

+61
@@ -0,0 +1,61 @@
defprotocol Shared.AppendableEvent do
  @doc "Returns the stream ID to which the event could be appended."
  def stream_id(event)

  @doc "Returns the stream IDs (taken from fields of the event) to which the event should additionally be linked."
  def streams_to_link(event)
end

defimpl Shared.AppendableEvent, for: Any do
  defmacro __deriving__(module, _struct, options) do
    quote do
      defimpl Shared.AppendableEvent, for: unquote(module) do
        def stream_id(event) do
          stream_id_field = Keyword.fetch!(unquote(options), :stream_id)
          stream_id = Map.fetch!(event, stream_id_field)

          unless is_binary(stream_id) do
            raise ArgumentError, "Stream ID has to be a string, got '#{inspect(stream_id)}'."
          end

          stream_id
        end

        def streams_to_link(event) do
          fields_to_link = Keyword.get(unquote(options), :streams_to_link, []) |> List.wrap()

          invalid_links =
            Enum.reduce(fields_to_link, %{}, fn field, errors ->
              field_value = Map.get(event, field)

              unless is_binary(field_value) do
                Map.put(errors, field, field_value)
              else
                errors
              end
            end)

          if map_size(invalid_links) > 0 do
            invalid_links =
              invalid_links
              |> Map.to_list()
              |> Enum.map(fn {field, value} ->
                "#{field} -> #{inspect(value)}"
              end)
              |> Enum.join(", ")

            raise ArgumentError,
                  "Streams ids to link need to be a string, got '#{invalid_links}'."
          end

          Map.take(event, fields_to_link) |> Map.values()
        end
      end
    end
  end

  # No default available
  def stream_id(_event), do: raise(ArgumentError, "Implement the Appendable Protocol")

  def streams_to_link(_event), do: []
end
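For illustration only (the struct and field names below are invented, not part of this commit): deriving the protocol for an event struct might look like the following sketch, mirroring the derive options exercised in the test module that follows.

```elixir
defmodule MyApp.InvoicePaid do
  # Hypothetical event struct: `invoice_id` feeds the primary stream,
  # `customer_id` names an additional stream the event gets linked to.
  @derive {Shared.AppendableEvent, stream_id: :invoice_id, streams_to_link: :customer_id}
  defstruct [:invoice_id, :customer_id, :amount]
end

# Shared.AppendableEvent.stream_id(%MyApp.InvoicePaid{invoice_id: "inv-1", customer_id: "cus-7"})
# #=> "inv-1"
# Shared.AppendableEvent.streams_to_link(%MyApp.InvoicePaid{invoice_id: "inv-1", customer_id: "cus-7"})
# #=> ["cus-7"]
```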
+78
@@ -0,0 +1,78 @@
defmodule Shared.AppendableEventTest do
  use ExUnit.Case, async: true
  require Protocol

  defmodule Event do
    defstruct [:a, :b, :c, :d]
  end

  describe "derive" do
    setup do
      {:ok, %{event: %Event{a: "a", b: "b", c: nil, d: %{foo: :bar}}}}
    end

    test "event id is a single field, streams_to_link optional", %{event: event} do
      Protocol.derive(Shared.AppendableEvent, Event, stream_id: :a)

      assert Shared.AppendableEvent.stream_id(event) == "a"
      assert Shared.AppendableEvent.streams_to_link(event) == []
    end

    test "event id is a single field, streams_to_link empty", %{event: event} do
      Protocol.derive(Shared.AppendableEvent, Event, stream_id: :a, streams_to_link: [])

      assert Shared.AppendableEvent.stream_id(event) == "a"
      assert Shared.AppendableEvent.streams_to_link(event) == []
    end

    test "use a single field as streams_to_link", %{event: event} do
      Protocol.derive(Shared.AppendableEvent, Event, stream_id: :a, streams_to_link: :b)

      assert Shared.AppendableEvent.stream_id(event) == "a"
      assert Shared.AppendableEvent.streams_to_link(event) == ["b"]
    end

    test "use a list as streams_to_link", %{event: event} do
      Protocol.derive(Shared.AppendableEvent, Event, stream_id: :a, streams_to_link: [:b])

      assert Shared.AppendableEvent.stream_id(event) == "a"
      assert Shared.AppendableEvent.streams_to_link(event) == ["b"]
    end

    test "Stream id needs to be present", %{event: event} do
      Protocol.derive(Shared.AppendableEvent, Event, stream_id: :c)

      assert_raise ArgumentError, "Stream ID has to be a string, got 'nil'.", fn ->
        Shared.AppendableEvent.stream_id(event)
      end
    end

    test "Stream id needs to be a string", %{event: event} do
      Protocol.derive(Shared.AppendableEvent, Event, stream_id: :d)

      assert_raise ArgumentError, "Stream ID has to be a string, got '%{foo: :bar}'.", fn ->
        Shared.AppendableEvent.stream_id(event)
      end
    end

    test "all links need to be present", %{event: event} do
      Protocol.derive(Shared.AppendableEvent, Event, stream_id: :a, streams_to_link: :c)

      assert_raise ArgumentError,
                   "Streams ids to link need to be a string, got 'c -> nil'.",
                   fn ->
                     Shared.AppendableEvent.streams_to_link(event)
                   end
    end

    test "all links need to be strings", %{event: event} do
      Protocol.derive(Shared.AppendableEvent, Event, stream_id: :a, streams_to_link: [:c, :d])

      assert_raise ArgumentError,
                   "Streams ids to link need to be a string, got 'c -> nil, d -> %{foo: :bar}'.",
                   fn ->
                     Shared.AppendableEvent.streams_to_link(event)
                   end
    end
  end
end

‎lib/event_store/event_store.ex

+77
@@ -0,0 +1,77 @@
defmodule Shared.EventStore do
  defmacro __using__(_opts \\ []) do
    quote location: :keep, generated: true do
      @event_store_backend __MODULE__
      alias Shared.EventStoreEvent
      require Logger

      def append_event(domain_events, metadata) do
        domain_events = List.wrap(domain_events)

        appended_events =
          Enum.flat_map(domain_events, fn domain_event ->
            stream_id = Shared.AppendableEvent.stream_id(domain_event)
            {:ok, appended_event} = append_event(stream_id, domain_event, metadata)
            appended_event
          end)

        {:ok, appended_events}
      end

      def append_event(
            stream_uuid,
            domain_events,
            metadata
          ) do
        persisted_events = domain_events |> EventStoreEvent.wrap_for_persistence(metadata)

        case @event_store_backend.append_to_stream(stream_uuid, :any_version, persisted_events) do
          :ok ->
            log(stream_uuid, domain_events, metadata)
            {:ok, persisted_events}

          error ->
            error
        end
      end

      def append_events(stream_uuid, domain_events, metadata),
        do: append_event(stream_uuid, domain_events, metadata)

      def all_events(stream_id \\ nil, opts \\ []) do
        {:ok, events} =
          case stream_id do
            nil -> @event_store_backend.read_all_streams_forward()
            stream_id when is_binary(stream_id) -> events_for_stream(stream_id)
          end

        if Keyword.get(opts, :unwrap, true) do
          Enum.map(events, &Shared.EventStoreEvent.unwrap/1)
        else
          events
        end
      end

      defp events_for_stream(stream_id) do
        case @event_store_backend.read_stream_forward(stream_id) do
          {:error, :stream_not_found} -> {:ok, []}
          {:ok, events} -> {:ok, events}
        end
      end

      defp log(stream_uuid, events, metadata) do
        events = List.wrap(events)

        Enum.each(events, fn event ->
          logged_event = Shared.LoggableEvent.to_log(event)

          Logger.info(fn ->
            "Appended event stream_uuid=#{stream_uuid} event=[#{logged_event}] metadata=#{
              metadata |> inspect()
            }"
          end)
        end)
      end
    end
  end
end
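A usage sketch, mirroring the tests in this commit but with invented `MyApp` module and event names (assumptions, not part of the diff): appending an event through a module that pulls in this macro, then reading it back as `{domain_event, metadata}` tuples.

```elixir
# Assuming MyApp.EventStore uses `EventStore` and `Shared.EventStore`,
# and MyApp.InvoicePaid derives Shared.AppendableEvent (see earlier sketch).
event = %MyApp.InvoicePaid{invoice_id: "inv-1", customer_id: "cus-7", amount: 100}

# The target stream comes from Shared.AppendableEvent.stream_id/1.
{:ok, _persisted} = MyApp.EventStore.append_event(event, %{requested_by: "admin"})

# all_events/1 unwraps recorded events into {event, merged_metadata} tuples.
[{%MyApp.InvoicePaid{}, %{requested_by: "admin"}}] = MyApp.EventStore.all_events("inv-1")
```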

‎lib/event_store/event_store_event.ex

+30
@@ -0,0 +1,30 @@
defmodule Shared.EventStoreEvent do
  def wrap_for_persistence(events, metadata) do
    events = List.wrap(events)
    metadata = Enum.into(metadata, %{})

    Enum.map(events, fn event ->
      %EventStore.EventData{
        data: event,
        metadata: metadata
      }
    end)
  end

  def unwrap(%EventStore.RecordedEvent{data: domain_event, metadata: metadata} = event) do
    metadata = Enum.into(metadata, %{})

    recorded_event_metadata =
      Map.take(event, [
        :event_number,
        :event_id,
        :stream_uuid,
        :stream_version,
        :correlation_id,
        :causation_id,
        :created_at
      ])

    {domain_event, Map.merge(recorded_event_metadata, metadata)}
  end
end
+24
@@ -0,0 +1,24 @@
defmodule Shared.EventStoreEventTest do
  use ExUnit.Case, async: true
  alias Shared.EventStoreEvent, as: Event

  defmodule TestEvent do
    defstruct foo: "bar"
  end

  test "wraps the event in EventStore.EventData" do
    event = %TestEvent{}

    assert [%EventStore.EventData{} = event_data] =
             Event.wrap_for_persistence([event], %{
               my: "metadata",
               causation_id: "causation_id",
               correlation_id: "correlation_id"
             })

    assert event_data.data == event
    assert event_data.metadata.correlation_id == "correlation_id"
    assert event_data.metadata.my == "metadata"
    assert event_data.metadata.causation_id == "causation_id"
  end
end
+337
@@ -0,0 +1,337 @@
defmodule Shared.EventStoreListener do
  use GenServer
  require Logger
  alias EventStore.RecordedEvent

  defmodule ErrorContext do
    defstruct error_count: 0, max_retries: 3, delay_factor: 10

    @type t :: %__MODULE__{
            error_count: integer,
            max_retries: integer,
            delay_factor: integer
          }

    def new do
      %__MODULE__{error_count: 0, max_retries: 3}
    end

    def record_error(%__MODULE__{} = context) do
      Map.update(context, :error_count, 1, fn error_count -> error_count + 1 end)
    end

    def retry?(%__MODULE__{error_count: error_count, max_retries: max_retries}) do
      error_count <= max_retries
    end

    def retry_count(%__MODULE__{error_count: error_count}) do
      error_count - 1
    end

    def delay(%__MODULE__{
          error_count: error_count,
          max_retries: max_retries,
          delay_factor: delay_factor
        })
        when error_count <= max_retries do
      # Exponential backoff
      sleep_duration = (:math.pow(2, error_count) * delay_factor) |> round()

      Process.sleep(sleep_duration)
    end
  end

  @type domain_event :: struct()
  @type metadata :: map()
  @type error_context :: ErrorContext.t()
  @type state :: map() | list()
  @type handle_result :: :ok | {:error, reason :: any()}

  @callback handle(domain_event(), metadata()) :: handle_result()
  @callback handle(domain_event(), metadata(), state()) :: handle_result()
  @callback on_error(
              error :: term(),
              failed_event :: domain_event(),
              metadata :: metadata(),
              error_context :: error_context()
            ) ::
              {:retry, error_context :: error_context()}
              | {:retry, delay :: non_neg_integer(), error_context :: error_context()}
              | :skip
              | {:stop, reason :: term()}

  @callback on_error(
              error :: term(),
              stacktrace :: list(),
              failed_event :: domain_event(),
              metadata :: metadata(),
              error_context :: error_context()
            ) ::
              {:retry, error_context :: error_context()}
              | {:retry, delay :: non_neg_integer(), error_context :: error_context()}
              | :skip
              | {:stop, reason :: term()}

  defmacro __using__(opts) do
    opts = opts || []

    quote location: :keep do
      @opts unquote(opts) || []
      @name @opts[:name] || __MODULE__

      @behaviour Shared.EventStoreListener

      # Prevent `@subscription_key` from producing a warning if it is not set.
      unless Module.get_attribute(__MODULE__, :subscription_key) do
        Module.put_attribute(__MODULE__, :subscription_key, nil)
      end

      # Adds default handle method
      @before_compile unquote(__MODULE__)

      def start_link(opts \\ []) do
        opts = Keyword.merge(@opts, opts)
        Shared.EventStoreListener.start_link(@name, __MODULE__, opts)
      end

      def child_spec(opts) do
        default = %{
          id: @name,
          start: {__MODULE__, :start_link, [opts]},
          restart: :permanent,
          type: :worker
        }

        Supervisor.child_spec(default, [])
      end
    end
  end

  def start_link(name, handler_module, opts) do
    default_opts = %{
      name: nil,
      handler_module: nil,
      subscription_key: nil,
      subscription: nil,
      start_from: :origin,
      event_store: nil
    }

    opts = Enum.into(opts, default_opts)
    opts[:event_store] || raise "Event Store(event_store: My.EventStore) configuration is missing"

    state = %{opts | handler_module: handler_module, name: name}

    GenServer.start_link(__MODULE__, state, name: name)
  end

  defmacro __before_compile__(_env) do
    quote generated: true do
      def init(state) do
        state =
          case @subscription_key do
            subscription_key when is_binary(subscription_key) and subscription_key != "" ->
              %{state | subscription_key: subscription_key}

            _ ->
              state
          end

        {:ok, state}
      end

      defoverridable init: 1

      def handle(_event, _metadata), do: :ok
      defoverridable handle: 2

      def handle(event, metadata, _state), do: handle(event, metadata)
      defoverridable handle: 3

      def on_error({:error, reason}, _event, _metadata, error_context),
        do: {:retry, error_context}

      defoverridable on_error: 4

      def on_error(error, _stacktrace, event, metadata, error_context),
        do: on_error(error, event, metadata, error_context)

      defoverridable on_error: 5
    end
  end

  @impl true
  def init(
        %{name: handler_name, handler_module: handler_module, event_store: event_store} = state
      ) do
    with {:ok, new_state} <- handler_module.init(state),
         subscription_key = new_state[:subscription_key] || subscription_key_for(handler_name),
         start_from = new_state[:start_from] || :origin,
         {:ok, subscription} <-
           event_store.subscribe_to_all_streams(
             subscription_key,
             self(),
             start_from: start_from
           ) do
      {:ok, %{new_state | subscription: subscription}}
    end
  end

  @impl true
  def handle_info({:subscribed, _subscription}, %{name: name} = state) do
    Logger.debug(fn ->
      "#{name} successfully subscribed to event store."
    end)

    {:noreply, state}
  end

  @impl true
  def handle_info({:events, events}, %{name: name} = state) do
    Logger.debug(fn -> "#{name} received events: #{inspect(events)}" end)

    try do
      Enum.each(events, fn event -> handle_event(event, state, ErrorContext.new()) end)
      {:noreply, state}
    catch
      {:error, reason} ->
        {:stop, reason, state}
    end
  end

  @impl true
  def handle_call(:get_state, _from, state) do
    {:reply, state, state}
  end

  defp handle_event(
         %RecordedEvent{} = event,
         %{name: name} = state,
         %ErrorContext{} = error_context
       ) do
    case delegate_event_to_handler(event, state) do
      :ok ->
        ack_event(event, state)

      {:ok, _} ->
        ack_event(event, state)

      {:error, reason} ->
        Logger.error(fn ->
          "#{name} failed to handle event #{inspect(event)} due to #{inspect(reason)}"
        end)

        handle_error({:error, reason}, current_stacktrace(), event, state, error_context)

      {:error, reason, stacktrace} ->
        Logger.error(fn ->
          "#{name} failed to handle event #{inspect(event)} due to #{inspect(reason)}"
        end)

        handle_error({:error, reason}, stacktrace, event, state, error_context)
    end
  end

  defp delegate_event_to_handler(
         %RecordedEvent{} = event,
         %{
           handler_module: handler_module
         } = state
       ) do
    try do
      {domain_event, metadata} = Shared.EventStoreEvent.unwrap(event)
      handler_module.handle(domain_event, metadata, state)
    rescue
      error ->
        {:error, error, __STACKTRACE__}
    end
  end

  defp handle_error(
         error,
         stacktrace,
         event,
         %{handler_module: handler_module, name: name} = state,
         context
       ) do
    %RecordedEvent{data: domain_event, metadata: metadata} = event

    case handler_module.on_error(error, stacktrace, domain_event, metadata, context) do
      {:retry, %ErrorContext{} = context} ->
        context = ErrorContext.record_error(context)

        if ErrorContext.retry?(context) do
          ErrorContext.delay(context)

          Logger.warn(fn ->
            "#{name} is retrying (#{context.error_count}/#{context.max_retries}) failed event #{
              inspect(event)
            }"
          end)

          handle_event(event, state, context)
        else
          reason =
            "#{name} is dying due to bad event after #{ErrorContext.retry_count(context)} retries #{
              inspect(error)
            }, Stacktrace: #{inspect(stacktrace)}"

          Logger.warn(reason)

          throw({:error, reason})
        end

      :skip ->
        Logger.debug(fn ->
          "#{name} is skipping event #{inspect(event)}"
        end)

        ack_event(event, state)

      {:stop, reason} ->
        reason = "#{name} has requested to stop in on_error/5 callback with #{inspect(reason)}"

        Logger.warn(reason)
        throw({:error, reason})

      error ->
        Logger.warn(fn ->
          "#{name} on_error/5 returned an invalid response #{inspect(error)}"
        end)

        throw(error)
    end
  end

  defp current_stacktrace do
    case Process.info(self(), :current_stacktrace) do
      {:current_stacktrace, stacktrace} -> stacktrace
      nil -> "Process is not alive. No stacktrace available"
    end
  end

  defp ack_event(event, %{subscription: subscription, event_store: event_store}) do
    :ok = event_store.ack(subscription, event)
  end

  @deprecated """
  Set `subscription_key` on initialization. Otherwise a change of the file name would break the subscription and all the events get processed again.
  """
  defp subscription_key_for(handler) do
    subscription_key =
      handler
      |> Atom.to_string()
      |> String.split(".")
      |> Enum.at(-1)
      |> Macro.underscore()
      |> Kernel.<>("_event_listener")

    Logger.warn(
      "Please specify a `subscription_key` on initialization for `#{handler}`. Otherwise a change of the file name would break the subscription and all the events get processed again. Default was: \"#{
        subscription_key
      }\""
    )

    subscription_key
  end
end
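A consumer sketch, illustrative only (the `MyApp` module and event names are invented); its shape follows the `ExampleConsumer` defined in the test module below.

```elixir
defmodule MyApp.InvoiceProjector do
  use Shared.EventStoreListener,
    subscription_key: "invoice_projector",
    event_store: MyApp.EventStore

  # handle/2 receives the unwrapped domain event plus the merged metadata
  # (recorded-event fields such as :stream_uuid and :created_at included).
  def handle(%MyApp.InvoicePaid{invoice_id: id}, _metadata) do
    IO.puts("invoice #{id} was paid")
    :ok
  end

  def handle(_other_event, _metadata), do: :ok
end

# Supervised alongside the event store, e.g.:
# children = [MyApp.EventStore, MyApp.InvoiceProjector]
```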
@@ -0,0 +1,114 @@
defmodule Shared.EventStoreListenerTest do
  use Support.EventStoreCase, async: false
  import ExUnit.CaptureLog

  @event %Shared.EventTest.FakeEvent{}

  defmodule EventHandlingError do
    defexception [:message]
  end

  defmodule Counter do
    use Agent

    def start_link(initial_value) do
      Agent.start_link(fn -> initial_value end, name: __MODULE__)
    end

    def increment do
      Agent.get_and_update(__MODULE__, &{&1, &1 + 1})
    end
  end

  defmodule ExampleConsumer do
    use Shared.EventStoreListener,
      subscription_key: "example_consumer",
      event_store: JehovakelEx.EventStore

    def handle(_event, %{test_pid: test_pid, raise_until: raise_until}) do
      case Counter.increment() do
        count when count <= raise_until ->
          send(test_pid, :exception_during_event_handling)
          raise EventHandlingError, "BAM BAM BAM"

        _ ->
          send(test_pid, :event_handled_successfully)
      end

      :ok
    end
  end

  setup do
    old_log_level = Logger.level()
    Logger.configure(level: :warn)

    start_supervised!(ExampleConsumer)
    {:ok, _pid} = Counter.start_link(0)

    on_exit(fn ->
      Logger.configure(level: old_log_level)
    end)

    :ok
  end

  describe "Retry" do
    test "automatically on Exception during event handling without GenServer restart" do
      capture_log([level: :warn], fn ->
        {:ok, _events} =
          JehovakelEx.EventStore.append_event(@event, %{test_pid: self(), raise_until: 0})

        assert_receive :exception_during_event_handling, 500
        assert_receive :event_handled_successfully, 500
      end)
    end

    test "does not restart Listener process" do
      capture_log([level: :warn], fn ->
        listener_pid = Process.whereis(ExampleConsumer)

        {:ok, _events} =
          JehovakelEx.EventStore.append_event(@event, %{test_pid: self(), raise_until: 0})

        assert_receive :event_handled_successfully, 500
        assert listener_pid == Process.whereis(ExampleConsumer)
      end)
    end

    test "stops EventStoreListener GenServer after 3 attempts" do
      logs =
        capture_log([level: :warn], fn ->
          listener_pid = Process.whereis(ExampleConsumer)

          {:ok, _events} =
            JehovakelEx.EventStore.append_event(@event, %{test_pid: self(), raise_until: 3})

          assert_receive :exception_during_event_handling
          assert_receive :event_handled_successfully, 500

          assert listener_pid != Process.whereis(ExampleConsumer)
        end)

      assert logs =~ "ExampleConsumer is retrying (1/3)"
      assert logs =~ "ExampleConsumer is retrying (2/3)"
      assert logs =~ "ExampleConsumer is retrying (3/3)"
      assert logs =~ "is dying due to bad event after 3 retries"
    end
  end

  test "Log Stacktrace on failing to handle exception during event handling" do
    logs =
      capture_log([level: :warn], fn ->
        {:ok, _events} =
          JehovakelEx.EventStore.append_event(@event, %{test_pid: self(), raise_until: 4})

        assert_receive :exception_during_event_handling
        assert_receive :event_handled_successfully, 500
      end)

    assert logs =~ "Stacktrace"
    assert logs =~ "BAM BAM BAM"
    assert logs =~ "Shared.EventStoreListenerTest.ExampleConsumer"
    assert logs =~ "lib/event_store/event_store_listener_test.exs"
  end
end

‎lib/event_store/event_store_test.exs

+34
@@ -0,0 +1,34 @@
defmodule Shared.EventTest do
  use Support.EventStoreCase, async: false

  @event %Shared.EventTest.FakeEvent{}
  @metadata %{meta: "data"}

  test "append event to stream" do
    assert {:ok, [%{data: @event}]} = append_event(@event, @metadata)

    assert [%EventStore.RecordedEvent{data: @event, metadata: @metadata}] =
             all_events(nil, unwrap: false)

    assert [{@event, @metadata}] = all_events()
  end

  test "append event to stream with stream_uuid" do
    assert {:ok, [%{data: @event}]} = append_event("stream_uuid", @event, @metadata)

    assert [%EventStore.RecordedEvent{data: @event, metadata: @metadata}] =
             all_events(nil, unwrap: false)

    assert [{@event, @metadata}] = all_events()
    assert [{@event, @metadata}] = all_events("stream_uuid")
  end

  test "append list of events" do
    assert {:ok, [%{data: @event}]} = append_event([@event], @metadata)

    assert [%EventStore.RecordedEvent{data: @event, metadata: @metadata}] =
             all_events(nil, unwrap: false)

    assert [{@event, @metadata}] = all_events()
  end
end
+78
@@ -0,0 +1,78 @@
defmodule Shared.LinkAppendableEvents do
  use GenServer

  def child_spec(opts) do
    start_options = %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, [opts]},
      restart: :permanent,
      type: :worker
    }

    Supervisor.child_spec(start_options, [])
  end

  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts)
  end

  # Opts are all options available for EventStore.subscribe_to_all_streams/3
  def init(opts) do
    event_store = Keyword.fetch!(opts, :event_store)

    {subscription_name, opts} = Keyword.get_and_update(opts, :subscription_name, fn _ -> :pop end)
    subscription_name = subscription_name || "__jehovakel_ex_event_store_link_appendable_events__"
    # Subscribe to events from all streams
    {:ok, subscription} = event_store.subscribe_to_all_streams(subscription_name, self(), opts)

    {:ok, %{event_store: event_store, subscription: subscription}}
  end

  # Successfully subscribed to all streams
  def handle_info({:subscribed, subscription}, %{subscription: subscription} = state) do
    {:noreply, state}
  end

  # Event notification
  def handle_info(
        {:events, events},
        %{subscription: subscription, event_store: event_store} = state
      ) do
    link_appendable_events(event_store, subscription, events)

    {:noreply, state}
  end

  defp link_appendable_events(event_store, subscription, events) do
    for event <- events do
      link_appendable_event(event_store, subscription, event)
    end
  end

  defp link_appendable_event(
         event_store,
         subscription,
         %EventStore.RecordedEvent{} = eventstore_event
       ) do
    event = eventstore_event.data

    if Shared.AppendableEvent.impl_for(event) do
      streams_to_link = Shared.AppendableEvent.streams_to_link(event)

      for stream <- streams_to_link do
        :ok = link_appendable_event(event_store, eventstore_event, stream)
      end
    end

    # Confirm receipt of received events
    :ok = event_store.ack(subscription, eventstore_event)
  end

  defp link_appendable_event(event_store, %EventStore.RecordedEvent{} = event, stream_id) do
    case event_store.link_to_stream(stream_id, :any_version, [event]) do
      :ok -> :ok
      {:error, :duplicate_event} -> :ok
      error -> error
    end
  end
end
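A wiring sketch (illustrative; the `MyApp.EventStore` name is an assumption): the linker is started under the application's supervisor next to the event store, which is what the test below does via `start_supervised!`.

```elixir
children = [
  MyApp.EventStore,
  # Links every appended event to the extra streams named by
  # Shared.AppendableEvent.streams_to_link/1.
  {Shared.LinkAppendableEvents,
   event_store: MyApp.EventStore, subscription_name: "link_appendable_events"}
]

Supervisor.start_link(children, strategy: :one_for_one)
```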
@@ -0,0 +1,55 @@
defmodule Shared.LinkAppendableEventsTest do
  use Support.EventStoreCase, async: false

  defmodule TestEvent do
    @derive {Shared.AppendableEvent, stream_id: :a, streams_to_link: :b}
    defstruct [:a, :b, :c, :d]
  end

  setup do
    event = %TestEvent{a: "id_a", b: "id_b", c: "some_value_for_c", d: :something}

    start_supervised!({Shared.LinkAppendableEvents, event_store: JehovakelEx.EventStore})

    {:ok, %{event: event}}
  end

  test "link an event to the streams defined in AppendableEvent", %{event: event} do
    assert {:ok, _event_data} = JehovakelEx.EventStore.append_event(event, _metadata = %{})

    wait_until(fn ->
      assert [{^event, metadata}] = all_events("id_a")
      assert [{^event, metadata}] = all_events("id_b")
    end)
  end

  test "link an event to the stream twice does not produce errors", %{
    event: event,
    postgrex_connection: postgrex
  } do
    assert {:ok, _event_data} = JehovakelEx.EventStore.append_event(event, _metadata = %{})

    wait_until(fn ->
      assert [{^event, metadata}] = all_events("id_a")
      assert [{^event, metadata}] = all_events("id_b")
    end)

    # Simulate linking running twice
    assert :ok = stop_supervised(Shared.LinkAppendableEvents)

    Postgrex.query!(postgrex, "DELETE FROM subscriptions WHERE true", [])

    start_supervised!(
      {Shared.LinkAppendableEvents, event_store: JehovakelEx.EventStore, subscription_name: "FOO"}
    )

    wait_until(fn ->
      assert %{num_rows: 1, rows: [[1]]} =
               Postgrex.query!(
                 postgrex,
                 "SELECT last_seen FROM subscriptions WHERE subscription_name='FOO'",
                 []
               )
    end)
  end
end

‎lib/event_store/loggable_event.ex

+22
@@ -0,0 +1,22 @@
defprotocol Shared.LoggableEvent do
  @doc "Converts an event to a loggable String"
  @fallback_to_any true
  def to_log(event)
end

defimpl Shared.LoggableEvent, for: Any do
  def to_log(%event_type{} = event) do
    event_type = event_type |> Atom.to_string() |> String.split(".") |> Enum.at(-1)

    event_data =
      event
      |> Map.from_struct()
      |> Enum.reduce("", fn {key, value}, event_as_string ->
        event_as_string <> " #{key}=" <> inspect(value)
      end)

    ~s(#{event_type}:#{event_data})
  end

  def to_log(_event), do: raise(ArgumentError, "Implement the Shared.LoggableEvent Protocol")
end
+19
@@ -0,0 +1,19 @@
defmodule Shared.LoggableEventTest do
  use ExUnit.Case, async: true
  require Protocol

  defmodule Event do
    defstruct [:a, :b, :c, :d]
  end

  defmodule Strct do
    defstruct attr: :brr
  end

  test "to_log/1" do
    event = %Event{a: "foo", b: 23, c: nil, d: %Strct{}}

    assert "Event: a=\"foo\" b=23 c=nil d=%Shared.LoggableEventTest.Strct{attr: :brr}" ==
             Shared.LoggableEvent.to_log(event)
  end
end

‎lib/event_store/migration/event.ex

+84
@@ -0,0 +1,84 @@
if Code.ensure_loaded?(Ecto) && Code.ensure_loaded?(Shared.Ecto.Term) do
  defmodule Shared.EventStore.Migration.Event do
    @moduledoc """
    Warning: this file is untested. Make sure any scripts that build on it are
    well tested locally and on staging before you let them loose on production!
    """
    use Ecto.Schema
    import Ecto.Query
    import Ecto.Changeset
    require Logger

    @primary_key {:event_id, :binary_id, autogenerate: false}
    schema "events" do
      field(:event_type, :string)
      field(:data, Shared.Ecto.Term)
      field(:metadata, Shared.Ecto.Term)
      field(:created_at, :utc_datetime)
    end

    def migrate_event(event_type_to_migrate, migration, repository)
        when is_atom(event_type_to_migrate) and is_function(migration) do
      event_type = Atom.to_string(event_type_to_migrate)

      query =
        from(
          e in Shared.EventStore.Migration.Event,
          where: e.event_type == ^event_type
        )

      anzahl_events = repository.aggregate(query, :count)

      events = repository.stream(query)

      if anzahl_events > 0 do
        Logger.info(
          "Migrating " <>
            to_string(anzahl_events) <> " events of type " <> to_string(event_type_to_migrate)
        )

        Ecto.Adapters.SQL.query!(repository, "DROP RULE no_update_events ON events")

        repository.transaction(
          fn ->
            Enum.each(events, fn event ->
              {new_data, new_metadata} = run_migration(migration, event)
              %event_module{} = new_data
              event_type = Atom.to_string(event_module)

              {:ok, migrated_at} = DateTime.now("Europe/Berlin")

              new_metadata =
                new_metadata
                |> Enum.into(%{})
                |> Map.merge(%{migrated_at: migrated_at, original_event: event.data})

              changeset =
                change(event, event_type: event_type, data: new_data, metadata: new_metadata)

              repository.update!(changeset)
            end)
          end,
          timeout: 600_000_000
        )
      end
    after
      Ecto.Adapters.SQL.query!(
        repository,
        "CREATE RULE no_update_events AS ON UPDATE TO events DO INSTEAD NOTHING"
      )
    end

    defp run_migration(migration, event) when is_function(migration, 2) do
      migration.(event.data, event.metadata)
    end

    defp run_migration(migration, event) when is_function(migration, 3) do
      migration.(event.data, event.metadata, %{
        id: event.event_id,
        type: event.event_type,
        created_at: event.created_at
      })
    end
  end
end
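An invocation sketch for `migrate_event/3` with a two-arity migration function. It is illustrative only: the event struct and repo names are invented, and it assumes Ecto plus `Shared.Ecto.Term` are available so the module above is compiled at all.

```elixir
# Rewrites every persisted MyApp.InvoicePaid event in place; the module itself
# records the original payload under :original_event and stamps :migrated_at.
Shared.EventStore.Migration.Event.migrate_event(
  MyApp.InvoicePaid,
  fn data, metadata ->
    {%{data | amount: trunc(data.amount)}, metadata}
  end,
  MyApp.Repo
)
```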

‎lib/test_helper.exs

+1
@@ -0,0 +1 @@
ExUnit.start(capture_log: true)

‎mix.exs

+69
@@ -0,0 +1,69 @@
defmodule Shared.MixProject do
  use Mix.Project

  def project do
    [
      app: :jehovakel_ex_event_store,
      version: "1.0.0",
      elixir: "~> 1.7",
      elixirc_paths: elixirc_paths(Mix.env()),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      test_paths: ["lib"],
      test_coverage: [tool: ExCoveralls],
      deps: deps(),
      aliases: aliases(),
      consolidate_protocols: Mix.env() != :test,
      name: "Jehovakel EX EventStore",
      source_url: "https://github.com/STUDITEMPS/jehovakel_ex_event_store",
      description: description(),
      package: package()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger]
    ]
  end

  # Specifies which paths to compile per environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  defp aliases do
    [
      test: ["ecto.create --quiet", "event_store.init", "ecto.migrate", "test"]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      # CQRS event store using PostgreSQL for persistence
      {:eventstore, "~> 1.0"},
      {:ecto, "~> 3.0", optional: true},
      {:ecto_sql, "~> 3.0", optional: true},
      # {:jehovakel_ex_ecto, ">= 0.0.0", optional: true, in_umbrella: true},
      {:excoveralls, ">= 0.10.5", only: :test}
    ]
  end

  defp description do
    "TODO: describe this package"
  end

  defp package do
    [
      # This option is only needed when you don't want to use the OTP application name
      name: "jehovakel_ex_event_store",
      # These are the default files included in the package
      licenses: ["MIT License"],
      links: %{
        "GitHub" => "https://github.com/STUDITEMPS/jehovakel_ex_event_store",
        "Studitemps" => "https://tech.studitemps.de"
      }
    ]
  end
end

‎mix.lock

+24
@@ -0,0 +1,24 @@
%{
  "certifi": {:hex, :certifi, "2.5.2", "b7cfeae9d2ed395695dd8201c57a2d019c0c43ecaf8b8bcb9320b40d6662f340", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm", "3b3b5f36493004ac3455966991eaf6e768ce9884693d9968055aeeeb1e575040"},
  "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], [], "hexpm", "4a0850c9be22a43af9920a71ab17c051f5f7d45c209e40269a1938832510e4d9"},
  "db_connection": {:hex, :db_connection, "2.3.0", "d56ef906956a37959bcb385704fc04035f4f43c0f560dd23e00740daf8028c49", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm", "dcc082b8f723de9a630451b49fdbd7a59b065c4b38176fb147aaf773574d4520"},
  "decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"},
  "ecto": {:hex, :ecto, "3.5.4", "73ee115deb10769c73fd2d27e19e36bc4af7c56711ad063616a86aec44f80f6f", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7f13f9c9c071bd2ca04652373ff3edd1d686364de573255096872a4abc471807"},
  "ecto_sql": {:hex, :ecto_sql, "3.5.3", "1964df0305538364b97cc4661a2bd2b6c89d803e66e5655e4e55ff1571943efd", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.5.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "d2f53592432ce17d3978feb8f43e8dc0705e288b0890caf06d449785f018061c"},
  "elixir_uuid": {:hex, :elixir_uuid, "1.2.1", "dce506597acb7e6b0daeaff52ff6a9043f5919a4c3315abb4143f0b00378c097", [:mix], [], "hexpm", "f7eba2ea6c3555cea09706492716b0d87397b88946e6380898c2889d68585752"},
  "eventstore": {:hex, :eventstore, "1.1.0", "1f5f20feb343c78cf8a38e8d288f6c7a860056c341dbe1520e6faa728c5e633f", [:mix], [{:elixir_uuid, "~> 1.2", [hex: :elixir_uuid, repo: "hexpm", optional: false]}, {:fsm, "~> 0.3", [hex: :fsm, repo: "hexpm", optional: false]}, {:gen_stage, "~> 1.0", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:highlander, "~> 0.2", [hex: :highlander, repo: "hexpm", optional: false]}, {:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: true]}, {:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15", [hex: :postgrex, repo: "hexpm", optional: false]}], "hexpm", "0b61fa4972a1521efa9027c3fd3955afac2df396783357ab2295c9d424124046"},
  "excoveralls": {:hex, :excoveralls, "0.13.3", "edc5f69218f84c2bf61b3609a22ddf1cec0fbf7d1ba79e59f4c16d42ea4347ed", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cc26f48d2f68666380b83d8aafda0fffc65dafcc8d8650358e0b61f6a99b1154"},
  "fsm": {:hex, :fsm, "0.3.1", "087aa9b02779a84320dc7a2d8464452b5308e29877921b2bde81cdba32a12390", [:mix], [], "hexpm", "fbf0d53f89e9082b326b0b5828b94b4c549ff9d1452bbfd00b4d1ac082208e96"},
  "gen_stage": {:hex, :gen_stage, "1.0.0", "51c8ae56ff54f9a2a604ca583798c210ad245f415115453b773b621c49776df5", [:mix], [], "hexpm", "1d9fc978db5305ac54e6f5fec7adf80cd893b1000cf78271564c516aa2af7706"},
  "hackney": {:hex, :hackney, "1.16.0", "5096ac8e823e3a441477b2d187e30dd3fff1a82991a806b2003845ce72ce2d84", [:rebar3], [{:certifi, "2.5.2", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.1", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.0", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.6", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "3bf0bebbd5d3092a3543b783bf065165fa5d3ad4b899b836810e513064134e18"},
  "highlander": {:hex, :highlander, "0.2.1", "e59b459f857e89daf73f2598bf2b2c0479a435481e6101ea389fd3625919b052", [:mix], [], "hexpm", "5ba19a18358803d82a923511acec8ee85fac30731c5ca056f2f934bc3d3afd9a"},
  "idna": {:hex, :idna, "6.0.1", "1d038fb2e7668ce41fbf681d2c45902e52b3cb9e9c77b55334353b222c2ee50c", [:rebar3], [{:unicode_util_compat, "0.5.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "a02c8a1c4fd601215bb0b0324c8a6986749f807ce35f25449ec9e69758708122"},
  "jason": {:hex, :jason, "1.2.2", "ba43e3f2709fd1aa1dce90aaabfd039d000469c05c56f0b8e31978e03fa39052", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "18a228f5f0058ee183f29f9eae0805c6e59d61c3b006760668d8d18ff0d12179"},
  "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
  "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"},
  "parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm", "17ef63abde837ad30680ea7f857dd9e7ced9476cdd7b0394432af4bfc241b960"},
  "postgrex": {:hex, :postgrex, "0.15.7", "724410acd48abac529d0faa6c2a379fb8ae2088e31247687b16cacc0e0883372", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "88310c010ff047cecd73d5ceca1d99205e4b1ab1b9abfdab7e00f5c9d20ef8f9"},
  "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"},
  "telemetry": {:hex, :telemetry, "0.4.2", "2808c992455e08d6177322f14d3bdb6b625fbcfd233a73505870d8738a2f4599", [:rebar3], [], "hexpm", "2d1419bd9dda6a206d7b5852179511722e2b18812310d304620c7bd92a13fcef"},
  "unicode_util_compat": {:hex, :unicode_util_compat, "0.5.0", "8516502659002cec19e244ebd90d312183064be95025a319a6c7e89f4bccd65b", [:rebar3], [], "hexpm", "d48d002e15f5cc105a696cf2f1bbb3fc72b4b770a184d8420c8db20da2674b38"},
}

‎priv/repo/migrations/.keep

Whitespace-only changes.

‎test/support/event_store.ex

+4
@@ -0,0 +1,4 @@
defmodule JehovakelEx.EventStore do
  use EventStore, otp_app: :jehovakel_ex_event_store
  use Shared.EventStore
end

‎test/support/event_store_case.ex

+48
@@ -0,0 +1,48 @@
defmodule Support.EventStoreCase do
  @moduledoc """
  Case template for tests that use the event store.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      import JehovakelEx.EventStore,
        only: [append_event: 2, append_event: 3, all_events: 2, all_events: 1, all_events: 0],
        warn: false

      def wait_until(fun), do: wait_until(500, fun)

      def wait_until(0, fun), do: fun.()

      def wait_until(timeout, fun) do
        try do
          fun.()
        rescue
          ExUnit.AssertionError ->
            :timer.sleep(100)
            wait_until(max(0, timeout - 100), fun)
        end
      end
    end
  end

  setup _tags do
    # Reset the event store
    config = EventStore.Config.parsed(JehovakelEx.EventStore, :jehovakel_ex_event_store)
    postgrex_config = EventStore.Config.default_postgrex_opts(config)
    {:ok, eventstore_connection} = Postgrex.start_link(postgrex_config)
    EventStore.Storage.Initializer.reset!(eventstore_connection)
    {:ok, _} = Application.ensure_all_started(:eventstore)

    start_supervised!(JehovakelEx.EventStore)

    on_exit(fn ->
      # Stop the eventstore application
      Application.stop(:eventstore)
      Process.exit(eventstore_connection, :shutdown)
    end)

    {:ok, %{postgrex_connection: eventstore_connection}}
  end
end

‎test/support/fake_event.ex

+21
@@ -0,0 +1,21 @@
# Because of consolidation the protocol implementation needs to live in a file
# that is compiled before the tests are executed. Check the "Consolidation"
# section in the documentation for Kernel.defprotocol/2

defmodule Shared.EventTest.FakeEvent do
  defstruct some: :default
end

defimpl Shared.LoggableEvent, for: Shared.EventTest.FakeEvent do
  def to_log(_event) do
    "FakeEvent: logging"
  end
end

defimpl Shared.AppendableEvent, for: Shared.EventTest.FakeEvent do
  def stream_id(event) do
    event.some |> Atom.to_string()
  end

  def streams_to_link(_event), do: []
end

‎test/support/repo.ex

+5
@@ -0,0 +1,5 @@
defmodule Support.Repo do
  use Ecto.Repo,
    otp_app: :jehovakel_ex_event_store,
    adapter: Ecto.Adapters.Postgres
end
