From c9068725b92137b69c2e2f3e982f2cda37c60a63 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Erik=20Hedenstro=CC=88m?= <erik@hedenstroem.com>
Date: Sat, 24 Sep 2016 00:12:49 +0200
Subject: [PATCH] Working client

---
 .gitignore            |   2 +
 config/sys.config     |  32 ++++
 rebar.config          |  63 ++++++-
 rebar.lock            |  12 ++
 src/dockerl.app.src   |  33 ++--
 src/dockerl.erl       | 381 ++++++++++++++++++++++++++++++++++++++++++
 src/dockerl_app.erl   |  26 ---
 src/dockerl_sup.erl   |  35 ----
 test/dockerl_test.erl |  78 +++++++++
 9 files changed, 583 insertions(+), 79 deletions(-)
 create mode 100644 config/sys.config
 create mode 100644 rebar.lock
 create mode 100644 src/dockerl.erl
 delete mode 100644 src/dockerl_app.erl
 delete mode 100644 src/dockerl_sup.erl
 create mode 100644 test/dockerl_test.erl

diff --git a/.gitignore b/.gitignore
index ced0c5e..e44f8a7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,7 +9,9 @@ _*
 .erlang.cookie
 ebin
 log
+var
 erl_crash.dump
 .rebar
 logs
 _build
+_checkouts
diff --git a/config/sys.config b/config/sys.config
new file mode 100644
index 0000000..772ce42
--- /dev/null
+++ b/config/sys.config
@@ -0,0 +1,32 @@
+[
+    {kernel, [
+        {start_timer, true},
+        {inet_default_listen_options, [{nodelay, true}, {sndbuf, 32768}, {recbuf, 32768}]},
+        {inet_default_connect_options, [{delay_send, true}]}
+    ]},
+    {sasl, [
+        {sasl_error_logger, {file, "log/sasl-error.log"}},
+        {errlog_type, error},
+        {error_logger_mf_dir, "log/sasl"},
+        {error_logger_mf_maxbytes, 10485760},
+        {error_logger_mf_maxfiles, 5},
+        {utc_log, true}
+    ]},
+    {lager, [
+        {colored, true},
+        {handlers, [
+            {lager_console_backend, [info, {lager_default_formatter, [color, time, " (", {module, "?"}, ".erl:", {line, "?"}, ") ", pid, " [", severity, "] ", message, "\n"]}]},
+            {lager_file_backend, [{file, "log/error.log"}, {level, error}, {size, 10485760}, {date, "$D0"}, {count, 5}]},
+            {lager_file_backend, [{file, "log/console.log"}, {level, info}, {size, 10485760}, {date, "$D0"}, {count, 5}]}
+        ]},
+        {crash_log, "log/crash.log"},
+        {crash_log_msg_size, 65536},
+        {crash_log_size, 10485760},
+        {crash_log_date, "$D0"},
+        {crash_log_count, 5},
+        {error_logger_redirect, true}
+    ]},
+    {dockerl, [
+        {docker_host, "localhost"}
+    ]}
+].
diff --git a/rebar.config b/rebar.config
index f618f3e..e05acfa 100644
--- a/rebar.config
+++ b/rebar.config
@@ -1,2 +1,61 @@
-{erl_opts, [debug_info]}.
-{deps, []}.
\ No newline at end of file
+{global_rebar_dir, ".rebar3"}.
+
+{plugins, [
+    rebar_alias,
+    {rebar_cmd, "0.2.3"}
+]}.
+
+{erl_opts, [{parse_transform, lager_transform}]}.
+
+{eunit_opts, [{report, {eunit_surefire, [{dir, "_build/test"}]}}]}.
+
+{edoc_opts, [{dir, "_build/edoc"}]}.
+
+{deps, [
+    {lager, "3.2.1"},
+    {jsx, "2.8.0"},
+    {hackney, {git, "https://github.com/benoitc/hackney.git", {branch, "master"}}}
+]}.
+
+{profiles, [
+    {test, [
+        {eunit_opts, [{report, {eunit_surefire, [{dir, "_build/test"}]}}]},
+        {erl_opts, [debug_info, nowarn_unused_vars]}
+    ]},
+    {prod, [
+        {erl_opts, [no_debug_info, warnings_as_errors]}
+    ]}
+]}.
+
+{dist_node, [
+    {setcookie, 'cookie'},
+    {sname, 'dockerl@localhost'}
+]}.
+
+{shell, [
+    {config, "config/sys.config"}
+]}.
+
+{dialyzer, [
+    {base_plt_location, global}
+]}.
+
+{alias, [
+    {test, [
+        {eunit, "--cover --application=dockerl --dir=test"},
+        {cover, "--verbose"}
+    ]},
+    {analyze, [
+        dialyzer,
+        xref
+    ]},
+    {cleanup, [
+        clean,
+        {cmd, "distclean"}
+    ]}
+]}.
+
+{commands, [
+    {distclean, "rm -rf .rebar3 _build _checkouts/*/ebin ebin log"},
+    {sync, "git fetch upstream && git merge upstream/master"}
+]}.
diff --git a/rebar.lock b/rebar.lock
new file mode 100644
index 0000000..5d3f879
--- /dev/null
+++ b/rebar.lock
@@ -0,0 +1,12 @@
+[{<<"certifi">>,{pkg,<<"certifi">>,<<"0.6.0">>},1},
+ {<<"goldrush">>,{pkg,<<"goldrush">>,<<"0.1.8">>},1},
+ {<<"hackney">>,
+  {git,"https://github.com/benoitc/hackney.git",
+       {ref,"533254f1bc615550aa6a88e30d2a1785e5e3a93d"}},
+  0},
+ {<<"idna">>,{pkg,<<"idna">>,<<"1.2.0">>},1},
+ {<<"jsx">>,{pkg,<<"jsx">>,<<"2.8.0">>},0},
+ {<<"lager">>,{pkg,<<"lager">>,<<"3.2.1">>},0},
+ {<<"metrics">>,{pkg,<<"metrics">>,<<"1.0.1">>},1},
+ {<<"mimerl">>,{pkg,<<"mimerl">>,<<"1.0.2">>},1},
+ {<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.1">>},1}].
diff --git a/src/dockerl.app.src b/src/dockerl.app.src
index 00d50e6..bcfa701 100644
--- a/src/dockerl.app.src
+++ b/src/dockerl.app.src
@@ -1,16 +1,17 @@
-{application, dockerl,
- [{description, "An OTP application"},
-  {vsn, "0.1.0"},
-  {registered, []},
-  {mod, { dockerl_app, []}},
-  {applications,
-   [kernel,
-    stdlib
-   ]},
-  {env,[]},
-  {modules, []},
-
-  {maintainers, []},
-  {licenses, []},
-  {links, []}
- ]}.
+{application, dockerl, [
+    {description, "Erlang Docker Remote API Client"},
+    {vsn, "0.1.0"},
+    {registered, []},
+    {applications, [
+        kernel,
+        stdlib,
+        sasl,
+        lager,
+        hackney
+    ]},
+    {env, []},
+    {modules, []},
+    {maintainers, []},
+    {licenses, []},
+    {links, [{"GitLab", "https://gitlab.hedenstroem.com/erlang-ninja/dockerl"}]}
+]}.
diff --git a/src/dockerl.erl b/src/dockerl.erl
new file mode 100644
index 0000000..07f5acb
--- /dev/null
+++ b/src/dockerl.erl
@@ -0,0 +1,381 @@
+%%%-------------------------------------------------------------------
+%% @doc dockerl public API
+%% @end
+%%%-------------------------------------------------------------------
+
+-module(dockerl).
+
+-behaviour(gen_server).
+
+-export([
+    info/1,
+    version/1,
+    monitor/1, monitor/2,
+
+    list_images/1, list_images/2,
+    pull_image/2, pull_image/3,
+    inspect_image/2,
+    image_history/2,
+    remove_image/2, remove_image/3,
+
+    list_containers/1, list_containers/2,
+    create_container/2, create_container/3, create_container/4,
+    inspect_container/2, inspect_container/3,
+    container_top/2, container_top/3,
+    container_logs/2,
+    container_changes/2,
+    export_container/2,
+    container_stats/2, container_stats/3,
+    resize_container_tty/4,
+    start_container/2,
+    stop_container/2, stop_container/3,
+    restart_container/2, restart_container/3,
+    kill_container/2, kill_container/3,
+    update_container/3,
+    rename_container/3,
+    pause_container/2,
+    unpause_container/2,
+    wait_container/2,
+    remove_container/2, remove_container/3
+]).
+
+-export([start_link/2, start_link/3]).
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
+
+-record(state, {
+    transport,
+    host,
+    port
+}).
+
+%%%===================================================================
+%%% Docker Remote API
+%%%===================================================================
+info(ServerRef) ->
+    gen_server:call(ServerRef, {get, <<"/info">>}).
+
+version(ServerRef) ->
+    gen_server:call(ServerRef, {get, <<"/version">>}).
+
+monitor(ServerRef) ->
+    gen_server:call(ServerRef, {stream, get, <<"/events">>}).
+monitor(ServerRef, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {stream, get, <<"/events?", QS/binary>>}).
+
+%%%===================================================================
+%%% Docker Remote API - Images
+%%%===================================================================
+
+list_images(ServerRef) ->
+    gen_server:call(ServerRef, {get, <<"/images/json">>}).
+list_images(ServerRef, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {get, <<"/images/json?", QS/binary>>}).
+
+pull_image(ServerRef, Name) ->
+    gen_server:call(ServerRef, {post, <<"/images/create?fromImage=", Name/binary>>}, 600000).
+pull_image(ServerRef, Name, Tag) ->
+    gen_server:call(ServerRef, {post, <<"/images/create?fromImage=", Name/binary, "&tag=", Tag/binary>>}, 600000).
+
+inspect_image(ServerRef, Name) ->
+    gen_server:call(ServerRef, {get, <<"/images/", Name/binary, "/json">>}).
+
+image_history(ServerRef, Name) ->
+    gen_server:call(ServerRef, {get, <<"/images/", Name/binary, "/history">>}).
+
+remove_image(ServerRef, Name) ->
+    remove_image(ServerRef, Name, #{<<"force">> => <<"0">>, <<"noprune">> => <<"0">>}).
+remove_image(ServerRef, Name, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {delete, <<"/images/", Name/binary, "?", QS/binary>>}).
+
+%%%===================================================================
+%%% Docker Remote API - Containers
+%%%===================================================================
+
+list_containers(ServerRef) ->
+    gen_server:call(ServerRef, {get, <<"/containers/json">>}).
+list_containers(ServerRef, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {get, <<"/containers/json?", QS/binary>>}).
+
+create_container(ServerRef, Image) ->
+    create_container(ServerRef, Image, #{}).
+create_container(ServerRef, Image, Opts) when is_map(Opts) ->
+    create_container(ServerRef, Image, undefined, Opts);
+create_container(ServerRef, Image, Name) ->
+    create_container(ServerRef, Image, Name, #{}).
+create_container(ServerRef, Image, undefined, Opts) ->
+    get_value(<<"Id">>, gen_server:call(ServerRef, {post, <<"/containers/create">>, Opts#{image=>Image}}, 60000));
+create_container(ServerRef, Image, Name, Opts) ->
+    get_value(<<"Id">>, gen_server:call(ServerRef, {post, <<"/containers/create?name=", Name/binary>>, Opts#{image=>Image}}, 60000)).
+
+inspect_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {get, <<"/containers/", Id/binary, "/json">>}).
+inspect_container(ServerRef, Id, Size) ->
+    BinSize = to_binary(Size),
+    gen_server:call(ServerRef, {get, <<"/containers/", Id/binary, "/json?size=", BinSize/binary>>}).
+
+container_top(ServerRef, Id) ->
+    gen_server:call(ServerRef, {get, <<"/containers/", Id/binary, "/top">>}).
+container_top(ServerRef, Id, Args) ->
+    gen_server:call(ServerRef, {get, <<"/containers/", Id/binary, "/top?ps_args=", Args/binary>>}).
+
+container_logs(ServerRef, Id) ->
+    container_logs(ServerRef, Id, #{<<"stdout">> => <<"1">>, <<"stderr">> => <<"1">>, <<"follow">> => <<"1">>}).
+container_logs(ServerRef, Id, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {stream, get, <<"/containers/", Id/binary, "/logs?", QS/binary>>}).
+
+container_changes(ServerRef, Id) ->
+    gen_server:call(ServerRef, {get, <<"/containers/", Id/binary, "/changes">>}).
+
+export_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {stream, get, <<"/containers/", Id/binary, "/export">>}).
+
+container_stats(ServerRef, Id) ->
+    gen_server:call(ServerRef, {stream, get, <<"/containers/", Id/binary, "/stats">>}).
+container_stats(ServerRef, Id, Stream) ->
+    BinStream = to_binary(Stream),
+    gen_server:call(ServerRef, {stream, get, <<"/containers/", Id/binary, "/stats?stream=", BinStream/binary>>}).
+
+resize_container_tty(ServerRef, Id, Height, Width) ->
+    QS = opts_to_qs(#{<<"h">> => Height, <<"w">> => Width}),
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/resize?", QS/binary>>}).
+
+start_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/start">>}, 60000).
+
+stop_container(ServerRef, Id) ->
+    stop_container(ServerRef, Id, 10).
+stop_container(ServerRef, Id, Time) ->
+    BTime = integer_to_binary(Time),
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/stop?t=", BTime/binary>>}, (Time + 5) * 1000).
+
+restart_container(ServerRef, Id) ->
+    restart_container(ServerRef, Id, 10). % fix: was stop_container/3 — a "restart" must hit /restart, not /stop
+restart_container(ServerRef, Id, Time) ->
+    BTime = integer_to_binary(Time),
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/restart?t=", BTime/binary>>}, (Time + 5) * 1000).
+
+kill_container(ServerRef, Id) ->
+    kill_container(ServerRef, Id, <<"SIGKILL">>). % fix: was stop_container/3, which crashes in integer_to_binary(<<"SIGKILL">>)
+kill_container(ServerRef, Id, SIGNAL) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/kill?signal=", SIGNAL/binary>>}).
+
+update_container(ServerRef, Id, Opts) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/update">>, Opts}).
+
+rename_container(ServerRef, Id, Name) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/rename?name=", Name/binary>>}).
+
+pause_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/pause">>}).
+
+unpause_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/unpause">>}).
+
+wait_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/wait">>}).
+
+remove_container(ServerRef, Id) ->
+    remove_container(ServerRef, Id, #{<<"v">> => <<"0">>, <<"force">> => <<"0">>}).
+remove_container(ServerRef, Id, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {delete, <<"/containers/", Id/binary, "?", QS/binary>>}).
+
+%%%===================================================================
+%%% API
+%%%===================================================================
+
+start_link(socket, Path) ->
+    gen_server:start_link(?MODULE, [hackney_local_tcp, Path, 0], []);
+start_link(http, URL) ->
+    gen_server:start_link(?MODULE, [hackney_tcp, URL, 80], []);
+start_link(https, URL) ->
+    gen_server:start_link(?MODULE, [hackney_ssl, URL, 443], []).
+
+start_link(socket, Path, _Port) ->
+    gen_server:start_link(?MODULE, [hackney_local_tcp, Path, 0], []);
+start_link(http, URL, Port) ->
+    gen_server:start_link(?MODULE, [hackney_tcp, URL, Port], []);
+start_link(https, URL, Port) ->
+    gen_server:start_link(?MODULE, [hackney_ssl, URL, Port], []).
+
+%%%===================================================================
+%%% gen_server callbacks
+%%%===================================================================
+
+init([Transport, Host, Port]) ->
+    {ok, #state{
+        transport = Transport,
+        host = Host,
+        port = Port
+    }}.
+
+handle_call({stream, Method, Path}, {Caller, _Tag}, State) ->
+    Pid = spawn(fun() -> stream_body(Caller, undefined) end),
+    {reply, stream_request(Pid, connect(State, [async, {stream_to, Pid}]), Method, Path, <<>>), State};
+
+handle_call({stream, Method, Path, Data}, {Caller, _Tag}, State) ->
+    Pid = spawn(fun() -> stream_body(Caller, undefined) end),
+    {reply, stream_request(Pid, connect(State, [async, {stream_to, Pid}]), Method, Path, jsx:encode(Data)), State};
+
+handle_call({Method, Path}, _From, State) ->
+    {reply, send_request(connect(State), Method, Path, <<>>), State};
+
+handle_call({Method, Path, Data}, _From, State) ->
+    {reply, send_request(connect(State), Method, Path, jsx:encode(Data)), State};
+
+handle_call(Request, From, State) ->
+    lager:warning("Unhandled call from ~p: ~p", [From, Request]),
+    {reply, ok, State}.
+
+handle_cast(stop, State) ->
+    {stop, normal, State};
+
+handle_cast(Request, State) ->
+    lager:warning("Unhandled cast: ~p", [Request]),
+    {noreply, State}.
+
+handle_info(Info, State) ->
+    lager:warning("Unhandled info: ~p", [Info]),
+    {noreply, State}.
+
+terminate(_Reason, _State) ->
+    ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+%%%===================================================================
+%%% Internal functions
+%%%===================================================================
+connect(#state{transport = Transport, host = Host, port = Port}) ->
+    hackney:connect(Transport, Host, Port).
+
+connect(#state{transport = Transport, host = Host, port = Port}, Opts) ->
+    hackney:connect(Transport, Host, Port, Opts).
+
+get_value(Key, {ok, Map}) when is_map(Map) ->
+    {ok, maps:get(Key, Map)};
+get_value(_, Value) ->
+    Value.
+
+opts_to_qs(Map) ->
+    maps:fold(
+        fun
+            (K, V, <<>>) ->
+                BinK = to_binary(K),
+                BinV = to_binary(V),
+                <<BinK/binary, "=", BinV/binary>>;
+            (K, V, QS) ->
+                BinK = to_binary(K),
+                BinV = to_binary(V),
+                <<QS/binary, "&", BinK/binary, "=", BinV/binary>>
+        end, <<>>, Map).
+
+to_binary(List) when is_list(List) ->
+    list_to_binary(List);
+to_binary(Number) when is_number(Number) ->
+    iolist_to_binary(io_lib:format("~w", [Number])); % fix: integer_to_binary/1 raised badarg for floats admitted by is_number/1
+to_binary(Atom) when is_atom(Atom) ->
+    atom_to_binary(Atom, utf8);
+to_binary(Binary) ->
+    Binary.
+
+send_request({ok, ConnRef}, Method, Path, ReqBody) ->
+    ReqHeaders = [
+        {<<"Host">>, <<"localhost">>},
+        {<<"Accept">>, <<"application/json">>},
+        {<<"Content-Type">>, <<"application/json">>}
+    ],
+    lager:info("~p ~p", [Method, Path]),
+    try hackney:send_request(ConnRef, {Method, Path, ReqHeaders, ReqBody}) of
+        {ok, 204, _Headers, _ConnRef} ->
+            ok;
+        {ok, Status, Headers, ConnRef} when 200 =< Status, Status < 300 ->
+            parse_body(read_body(infinite, ConnRef, <<>>), Headers);
+        {ok, Status, Headers, ConnRef} when 400 =< Status, Status < 500 ->
+            error_message(read_body(infinite, ConnRef, <<>>));
+        {ok, 500, _Headers, _ConnRef} ->
+            {error, server_error};
+        {ok, Status, _Headers, _ConnRef} ->
+            {error, {unexpected_status, Status}};
+        Error ->
+            Error
+    after
+        hackney:close(ConnRef)
+    end;
+send_request({error, Reason}, _Method, _Path, _ReqBody) ->
+    {error, Reason}.
+
+stream_request(Pid, {ok, ConnRef}, Method, Path, ReqBody) ->
+    ReqHeaders = [
+        {<<"Host">>, <<"localhost">>},
+        {<<"Accept">>, <<"application/json">>},
+        {<<"Content-Type">>, <<"application/json">>}
+    ],
+    case hackney:send_request(ConnRef, {Method, Path, ReqHeaders, ReqBody}) of
+        {ok, ConnRef} ->
+            {ok, Pid};
+        Error ->
+            Pid ! stop,
+            Error
+    end;
+stream_request(_Pid, {error, Reason}, _Method, _Path, _ReqBody) ->
+    {error, Reason}.
+
+
+parse_body({ok, Body}, Headers) ->
+    {ok, decode_response(proplists:get_value(<<"Content-Type">>, Headers), Body)};
+parse_body(Error, _Headers) ->
+    Error.
+
+error_message({ok, Body}) ->
+    #{<<"message">> := Message} = decode_response(<<"application/json">>, Body),
+    {error, Message};
+error_message(Error) ->
+    Error.
+
+decode_response(<<"application/json">>, Body) ->
+    case jsx:decode(Body, [return_maps, return_tail]) of
+        {with_tail, Map, <<>>} ->
+            Map;
+        {with_tail, Map, Tail} ->
+            lists:flatten([Map, decode_response(<<"application/json">>, Tail)])
+    end;
+decode_response(_ContentType, Body) ->
+    Body.
+
+read_body(MaxLength, Ref, Body) when MaxLength =:= infinite; MaxLength > byte_size(Body) ->
+    case hackney:stream_body(Ref) of
+        {ok, Data} ->
+            lager:info("~p", [Data]),
+            read_body(MaxLength, Ref, <<Body/binary, Data/binary>>);
+        done ->
+            {ok, Body};
+        {error, Reason} ->
+            {error, Reason}
+    end;
+read_body(_MaxLength, _Ref, _Acc) ->
+    {error, max_length_exceeded}.
+
+stream_body(Pid, Ref) ->
+    receive
+        {hackney_response, NewRef, {status, Status, _Reason}} when 200 =< Status, Status < 300 ->
+            stream_body(Pid, NewRef);
+        {hackney_response, NewRef, {headers, _Headers}} ->
+            stream_body(Pid, NewRef);
+        {hackney_response, NewRef, done} ->
+            Pid ! done,
+            ok;
+        {hackney_response, NewRef, Chunk} ->
+            Pid ! Chunk,
+            stream_body(Pid, NewRef);
+        stop when Ref /= undefined ->
+            hackney:close(Ref);
+        _ ->
+            ok
+    end.
diff --git a/src/dockerl_app.erl b/src/dockerl_app.erl
deleted file mode 100644
index f6faaf1..0000000
--- a/src/dockerl_app.erl
+++ /dev/null
@@ -1,26 +0,0 @@
-%%%-------------------------------------------------------------------
-%% @doc dockerl public API
-%% @end
-%%%-------------------------------------------------------------------
-
--module(dockerl_app).
-
--behaviour(application).
-
-%% Application callbacks
--export([start/2, stop/1]).
-
-%%====================================================================
-%% API
-%%====================================================================
-
-start(_StartType, _StartArgs) ->
-    dockerl_sup:start_link().
-
-%%--------------------------------------------------------------------
-stop(_State) ->
-    ok.
-
-%%====================================================================
-%% Internal functions
-%%====================================================================
diff --git a/src/dockerl_sup.erl b/src/dockerl_sup.erl
deleted file mode 100644
index 400d346..0000000
--- a/src/dockerl_sup.erl
+++ /dev/null
@@ -1,35 +0,0 @@
-%%%-------------------------------------------------------------------
-%% @doc dockerl top level supervisor.
-%% @end
-%%%-------------------------------------------------------------------
-
--module(dockerl_sup).
-
--behaviour(supervisor).
-
-%% API
--export([start_link/0]).
-
-%% Supervisor callbacks
--export([init/1]).
-
--define(SERVER, ?MODULE).
-
-%%====================================================================
-%% API functions
-%%====================================================================
-
-start_link() ->
-    supervisor:start_link({local, ?SERVER}, ?MODULE, []).
-
-%%====================================================================
-%% Supervisor callbacks
-%%====================================================================
-
-%% Child :: {Id,StartFunc,Restart,Shutdown,Type,Modules}
-init([]) ->
-    {ok, { {one_for_all, 0, 1}, []} }.
-
-%%====================================================================
-%% Internal functions
-%%====================================================================
diff --git a/test/dockerl_test.erl b/test/dockerl_test.erl
new file mode 100644
index 0000000..25665fa
--- /dev/null
+++ b/test/dockerl_test.erl
@@ -0,0 +1,78 @@
+-module(dockerl_test).
+
+-include_lib("eunit/include/eunit.hrl").
+
+
+%%%====================================================================
+%%% Test descriptions
+%%%====================================================================
+rpc_test_() ->
+    {
+        setup,
+        fun start/0,
+        fun stop/1,
+        fun instantiator/1
+    }.
+
+%%%====================================================================
+%%% Setup functions
+%%%====================================================================
+start() ->
+    lager:start(),
+    hackney:start(),
+    {ok, Pid} = dockerl:start_link(socket, <<"/var/run/docker.sock">>),
+    Pid.
+
+stop(Pid) ->
+    MRef = erlang:monitor(process, Pid),
+    gen_server:cast(Pid, stop),
+    receive {'DOWN', MRef, _, _, _} -> ok end.
+
+instantiator(Pid) ->
+    [
+        info(Pid),
+        version(Pid),
+        {timeout, 60, pull_image(Pid)},
+        {timeout, 60, start_container(Pid)},
+        remove_image(Pid)
+    ].
+
+%%%====================================================================
+%%% Tests
+%%%====================================================================
+info(Pid) ->
+    ?_assertMatch({ok, #{}}, dockerl:info(Pid)).
+
+version(Pid) ->
+    ?_assertMatch({ok, #{}}, dockerl:version(Pid)).
+
+pull_image(Pid) ->
+    ?_assertMatch({ok, _}, dockerl:pull_image(Pid, <<"nginx:latest">>)).
+
+remove_image(Pid) ->
+    ?_assertMatch({ok, _}, dockerl:remove_image(Pid, <<"nginx:latest">>)).
+
+start_container(Pid) ->
+    fun() ->
+        {ok, Id} = dockerl:create_container(Pid, <<"foobar">>),
+        ?assertMatch(ok, dockerl:start_container(Pid, Id)),
+        {ok, Stream} = dockerl:container_logs(Pid, Id),
+        loop(Stream),
+        ?assertMatch(ok, dockerl:stop_container(Pid, Id)),
+        ?assertMatch(ok, dockerl:remove_container(Pid, Id))
+    end.
+
+%%%===================================================================
+%%% Internal functions
+%%%===================================================================
+loop(Stream) ->
+    receive
+        done ->
+            ok;
+        Msg ->
+            lager:info("~p", [Msg]),
+            loop(Stream)
+    after
+        3000 ->
+            Stream ! stop
+    end.
-- 
GitLab