diff --git a/.gitignore b/.gitignore
index ced0c5eae8945e02b2fdc8015a268d98715f079c..e44f8a7fa9e805c690c3294078682ee71e68a948 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,7 +9,9 @@ _*
 .erlang.cookie
 ebin
 log
+var
 erl_crash.dump
 .rebar
 logs
 _build
+_checkouts
diff --git a/config/sys.config b/config/sys.config
new file mode 100644
index 0000000000000000000000000000000000000000..772ce429ae897cc441d1bec6477094bca9f9aaed
--- /dev/null
+++ b/config/sys.config
@@ -0,0 +1,32 @@
+[
+    {kernel, [
+        {start_timer, true},
+        {inet_default_listen_options, [{nodelay, true}, {sndbuf, 32768}, {recbuf, 32768}]},
+        {inet_default_connect_options, [{delay_send, true}]}
+    ]},
+    {sasl, [
+        {sasl_error_logger, {file, "log/sasl-error.log"}},
+        {errlog_type, error},
+        {error_logger_mf_dir, "log/sasl"},
+        {error_logger_mf_maxbytes, 10485760},
+        {error_logger_mf_maxfiles, 5},
+        {utc_log, true}
+    ]},
+    {lager, [
+        {colored, true},
+        {handlers, [
+            {lager_console_backend, [info, {lager_default_formatter, [color, time, " (", {module, "?"}, ".erl:", {line, "?"}, ") ", pid, " [", severity, "] ", message, "\n"]}]},
+            {lager_file_backend, [{file, "log/error.log"}, {level, error}, {size, 10485760}, {date, "$D0"}, {count, 5}]},
+            {lager_file_backend, [{file, "log/console.log"}, {level, info}, {size, 10485760}, {date, "$D0"}, {count, 5}]}
+        ]},
+        {crash_log, "log/crash.log"},
+        {crash_log_msg_size, 65536},
+        {crash_log_size, 10485760},
+        {crash_log_date, "$D0"},
+        {crash_log_count, 5},
+        {error_logger_redirect, true}
+    ]},
+    {dockerl, [
+        {docker_host, "localhost"}
+    ]}
+].
diff --git a/rebar.config b/rebar.config
index f618f3e4089e1a3c9f585596dac27187393af8d4..e05acfa2d43a7bdd3f708a9dc8e3df1f1125bf9d 100644
--- a/rebar.config
+++ b/rebar.config
@@ -1,2 +1,61 @@
-{erl_opts, [debug_info]}.
-{deps, []}.
\ No newline at end of file
+{global_rebar_dir, ".rebar3"}.
+
+{plugins, [
+    rebar_alias,
+    {rebar_cmd, "0.2.3"}
+]}.
+
+{erl_opts, [{parse_transform, lager_transform}]}.
+
+{eunit_opts, [{report, {eunit_surefire, [{dir, "_build/test"}]}}]}.
+
+{edoc_opts, [{dir, "_build/edoc"}]}.
+
+{deps, [
+    {lager, "3.2.1"},
+    {jsx, "2.8.0"},
+    {hackney, {git, "https://github.com/benoitc/hackney.git", {branch, "master"}}}
+]}.
+
+{profiles, [
+    {test, [
+        {eunit_opts, [{report, {eunit_surefire, [{dir, "_build/test"}]}}]},
+        {erl_opts, [debug_info, nowarn_unused_vars]}
+    ]},
+    {prod, [
+        {erl_opts, [no_debug_info, warnings_as_errors]}
+    ]}
+]}.
+
+{dist_node, [
+    {setcookie, 'cookie'},
+    {sname, 'dockerl@localhost'}
+]}.
+
+{shell, [
+    {config, "config/sys.config"}
+]}.
+
+{dialyzer, [
+    {base_plt_location, global}
+]}.
+
+{alias, [
+    {test, [
+        {eunit, "--cover --application=dockerl --dir=test"},
+        {cover, "--verbose"}
+    ]},
+    {analyze, [
+        dialyzer,
+        xref
+    ]},
+    {cleanup, [
+        clean,
+        {cmd, "distclean"}
+    ]}
+]}.
+
+{commands, [
+    {distclean, "rm -rf .rebar3 _build _checkouts/*/ebin ebin log"},
+    {sync, "git fetch upstream && git merge upstream/master"}
+]}.
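Note: the {dockerl, [{docker_host, "localhost"}]} entry that config/sys.config introduces above is not read anywhere in this change; dockerl:start_link/2,3 (added below in src/dockerl.erl) takes the host explicitly. A minimal sketch of how a caller could bridge the two, assuming the daemon listens on the conventional TCP port 2375 (the port and the wiring are illustrative, not part of this patch):

    %% Illustrative only: pick up the host configured under the dockerl
    %% application environment in config/sys.config, falling back to
    %% "localhost" when it is not set, and hand it to the client.
    Host = application:get_env(dockerl, docker_host, "localhost"),
    {ok, Pid} = dockerl:start_link(http, Host, 2375).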
diff --git a/rebar.lock b/rebar.lock
new file mode 100644
index 0000000000000000000000000000000000000000..5d3f879729e8fa8162f40fff736e6444f8b3dc75
--- /dev/null
+++ b/rebar.lock
@@ -0,0 +1,12 @@
+[{<<"certifi">>,{pkg,<<"certifi">>,<<"0.6.0">>},1},
+ {<<"goldrush">>,{pkg,<<"goldrush">>,<<"0.1.8">>},1},
+ {<<"hackney">>,
+  {git,"https://github.com/benoitc/hackney.git",
+       {ref,"533254f1bc615550aa6a88e30d2a1785e5e3a93d"}},
+  0},
+ {<<"idna">>,{pkg,<<"idna">>,<<"1.2.0">>},1},
+ {<<"jsx">>,{pkg,<<"jsx">>,<<"2.8.0">>},0},
+ {<<"lager">>,{pkg,<<"lager">>,<<"3.2.1">>},0},
+ {<<"metrics">>,{pkg,<<"metrics">>,<<"1.0.1">>},1},
+ {<<"mimerl">>,{pkg,<<"mimerl">>,<<"1.0.2">>},1},
+ {<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.1">>},1}].
diff --git a/src/dockerl.app.src b/src/dockerl.app.src
index 00d50e62c4ef53f40f4b0447e2cded0593372fd2..bcfa7011693e426dce5a49065f4beb634471bc09 100644
--- a/src/dockerl.app.src
+++ b/src/dockerl.app.src
@@ -1,16 +1,17 @@
-{application, dockerl,
- [{description, "An OTP application"},
-  {vsn, "0.1.0"},
-  {registered, []},
-  {mod, { dockerl_app, []}},
-  {applications,
-   [kernel,
-    stdlib
-   ]},
-  {env,[]},
-  {modules, []},
-
-  {maintainers, []},
-  {licenses, []},
-  {links, []}
- ]}.
+{application, dockerl, [
+    {description, "Erlang Docker Remote API Client"},
+    {vsn, "0.1.0"},
+    {registered, []},
+    {applications, [
+        kernel,
+        stdlib,
+        sasl,
+        lager,
+        hackney
+    ]},
+    {env, []},
+    {modules, []},
+    {maintainers, []},
+    {licenses, []},
+    {links, [{"GitLab", "https://gitlab.hedenstroem.com/erlang-ninja/dockerl"}]}
+]}.
diff --git a/src/dockerl.erl b/src/dockerl.erl
new file mode 100644
index 0000000000000000000000000000000000000000..07f5acb4bbfc0e78ddc2e03a2f2f5b7aa724e96c
--- /dev/null
+++ b/src/dockerl.erl
@@ -0,0 +1,381 @@
+%%%-------------------------------------------------------------------
+%% @doc dockerl public API
+%% @end
+%%%-------------------------------------------------------------------
+
+-module(dockerl).
+
+-behaviour(gen_server).
+
+-export([
+    info/1,
+    version/1,
+    monitor/1, monitor/2,
+
+    list_images/1, list_images/2,
+    pull_image/2, pull_image/3,
+    inspect_image/2,
+    image_history/2,
+    remove_image/2, remove_image/3,
+
+    list_containers/1, list_containers/2,
+    create_container/2, create_container/3, create_container/4,
+    inspect_container/2, inspect_container/3,
+    container_top/2, container_top/3,
+    container_logs/2,
+    container_changes/2,
+    export_container/2,
+    container_stats/2, container_stats/3,
+    resize_container_tty/4,
+    start_container/2,
+    stop_container/2, stop_container/3,
+    restart_container/2, restart_container/3,
+    kill_container/2, kill_container/3,
+    update_container/3,
+    rename_container/3,
+    pause_container/2,
+    unpause_container/2,
+    wait_container/2,
+    remove_container/2, remove_container/3
+]).
+
+-export([start_link/2, start_link/3]).
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
+
+-record(state, {
+    transport,
+    host,
+    port
+}).
+
+%%%===================================================================
+%%% Docker Remote API
+%%%===================================================================
+info(ServerRef) ->
+    gen_server:call(ServerRef, {get, <<"/info">>}).
+
+version(ServerRef) ->
+    gen_server:call(ServerRef, {get, <<"/version">>}).
+
+monitor(ServerRef) ->
+    gen_server:call(ServerRef, {stream, get, <<"/events">>}).
+monitor(ServerRef, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {stream, get, <<"/events?", QS/binary>>}).
+
+%%%===================================================================
+%%% Docker Remote API - Images
+%%%===================================================================
+
+list_images(ServerRef) ->
+    gen_server:call(ServerRef, {get, <<"/images/json">>}).
+list_images(ServerRef, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {get, <<"/images/json?", QS/binary>>}).
+
+pull_image(ServerRef, Name) ->
+    gen_server:call(ServerRef, {post, <<"/images/create?fromImage=", Name/binary>>}, 600000).
+pull_image(ServerRef, Name, Tag) ->
+    gen_server:call(ServerRef, {post, <<"/images/create?fromImage=", Name/binary, "&tag=", Tag/binary>>}, 600000).
+
+inspect_image(ServerRef, Name) ->
+    gen_server:call(ServerRef, {get, <<"/images/", Name/binary, "/json">>}).
+
+image_history(ServerRef, Name) ->
+    gen_server:call(ServerRef, {get, <<"/images/", Name/binary, "/history">>}).
+
+remove_image(ServerRef, Name) ->
+    remove_image(ServerRef, Name, #{<<"force">> => <<"0">>, <<"noprune">> => <<"0">>}).
+remove_image(ServerRef, Name, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {delete, <<"/images/", Name/binary, "?", QS/binary>>}).
+
+%%%===================================================================
+%%% Docker Remote API - Containers
+%%%===================================================================
+
+list_containers(ServerRef) ->
+    gen_server:call(ServerRef, {get, <<"/containers/json">>}).
+list_containers(ServerRef, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {get, <<"/containers/json?", QS/binary>>}).
+
+create_container(ServerRef, Image) ->
+    create_container(ServerRef, Image, #{}).
+create_container(ServerRef, Image, Opts) when is_map(Opts) ->
+    create_container(ServerRef, Image, undefined, Opts);
+create_container(ServerRef, Image, Name) ->
+    create_container(ServerRef, Image, Name, #{}).
+create_container(ServerRef, Image, undefined, Opts) ->
+    get_value(<<"Id">>, gen_server:call(ServerRef, {post, <<"/containers/create">>, Opts#{image=>Image}}, 60000));
+create_container(ServerRef, Image, Name, Opts) ->
+    get_value(<<"Id">>, gen_server:call(ServerRef, {post, <<"/containers/create?name=", Name/binary>>, Opts#{image=>Image}}, 60000)).
+
+inspect_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {get, <<"/containers/", Id/binary, "/json">>}).
+inspect_container(ServerRef, Id, Size) ->
+    BinSize = to_binary(Size),
+    gen_server:call(ServerRef, {get, <<"/containers/", Id/binary, "/json?size=", BinSize/binary>>}).
+
+container_top(ServerRef, Id) ->
+    gen_server:call(ServerRef, {get, <<"/containers/", Id/binary, "/top">>}).
+container_top(ServerRef, Id, Args) ->
+    gen_server:call(ServerRef, {get, <<"/containers/", Id/binary, "/top?ps_args=", Args/binary>>}).
+
+container_logs(ServerRef, Id) ->
+    container_logs(ServerRef, Id, #{<<"stdout">> => <<"1">>, <<"stderr">> => <<"1">>, <<"follow">> => <<"1">>}).
+container_logs(ServerRef, Id, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {stream, get, <<"/containers/", Id/binary, "/logs?", QS/binary>>}).
+
+container_changes(ServerRef, Id) ->
+    gen_server:call(ServerRef, {get, <<"/containers/", Id/binary, "/changes">>}).
+
+export_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {stream, get, <<"/containers/", Id/binary, "/export">>}).
+
+container_stats(ServerRef, Id) ->
+    gen_server:call(ServerRef, {stream, get, <<"/containers/", Id/binary, "/stats">>}).
+container_stats(ServerRef, Id, Stream) ->
+    BinStream = to_binary(Stream),
+    gen_server:call(ServerRef, {stream, get, <<"/containers/", Id/binary, "/stats?stream=", BinStream/binary>>}).
+
+resize_container_tty(ServerRef, Id, Height, Width) ->
+    QS = opts_to_qs(#{<<"h">> => Height, <<"w">> => Width}),
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/resize?", QS/binary>>}).
+
+start_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/start">>}, 60000).
+
+stop_container(ServerRef, Id) ->
+    stop_container(ServerRef, Id, 10).
+stop_container(ServerRef, Id, Time) ->
+    BTime = integer_to_binary(Time),
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/stop?t=", BTime/binary>>}, (Time + 5) * 1000).
+
+restart_container(ServerRef, Id) ->
+    restart_container(ServerRef, Id, 10).
+restart_container(ServerRef, Id, Time) ->
+    BTime = integer_to_binary(Time),
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/restart?t=", BTime/binary>>}, (Time + 5) * 1000).
+
+kill_container(ServerRef, Id) ->
+    kill_container(ServerRef, Id, <<"SIGKILL">>).
+kill_container(ServerRef, Id, SIGNAL) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/kill?signal=", SIGNAL/binary>>}).
+
+update_container(ServerRef, Id, Opts) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/update">>, Opts}).
+
+rename_container(ServerRef, Id, Name) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/rename?name=", Name/binary>>}).
+
+pause_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/pause">>}).
+
+unpause_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/unpause">>}).
+
+wait_container(ServerRef, Id) ->
+    gen_server:call(ServerRef, {post, <<"/containers/", Id/binary, "/wait">>}).
+
+remove_container(ServerRef, Id) ->
+    remove_container(ServerRef, Id, #{<<"v">> => <<"0">>, <<"force">> => <<"0">>}).
+remove_container(ServerRef, Id, Opts) ->
+    QS = opts_to_qs(Opts),
+    gen_server:call(ServerRef, {delete, <<"/containers/", Id/binary, "?", QS/binary>>}).
+
+%%%===================================================================
+%%% API
+%%%===================================================================
+
+start_link(socket, Path) ->
+    gen_server:start_link(?MODULE, [hackney_local_tcp, Path, 0], []);
+start_link(http, URL) ->
+    gen_server:start_link(?MODULE, [hackney_tcp, URL, 80], []);
+start_link(https, URL) ->
+    gen_server:start_link(?MODULE, [hackney_ssl, URL, 443], []).
+
+start_link(socket, Path, _Port) ->
+    gen_server:start_link(?MODULE, [hackney_local_tcp, Path, 0], []);
+start_link(http, URL, Port) ->
+    gen_server:start_link(?MODULE, [hackney_tcp, URL, Port], []);
+start_link(https, URL, Port) ->
+    gen_server:start_link(?MODULE, [hackney_ssl, URL, Port], []).
+
+%%%===================================================================
+%%% gen_server callbacks
+%%%===================================================================
+
+init([Transport, Host, Port]) ->
+    {ok, #state{
+        transport = Transport,
+        host = Host,
+        port = Port
+    }}.
+
+handle_call({stream, Method, Path}, {Caller, _Tag}, State) ->
+    Pid = spawn(fun() -> stream_body(Caller, undefined) end),
+    {reply, stream_request(Pid, connect(State, [async, {stream_to, Pid}]), Method, Path, <<>>), State};
+
+handle_call({stream, Method, Path, Data}, {Caller, _Tag}, State) ->
+    Pid = spawn(fun() -> stream_body(Caller, undefined) end),
+    {reply, stream_request(Pid, connect(State, [async, {stream_to, Pid}]), Method, Path, jsx:encode(Data)), State};
+
+handle_call({Method, Path}, _From, State) ->
+    {reply, send_request(connect(State), Method, Path, <<>>), State};
+
+handle_call({Method, Path, Data}, _From, State) ->
+    {reply, send_request(connect(State), Method, Path, jsx:encode(Data)), State};
+
+handle_call(Request, From, State) ->
+    lager:warning("Unhandled call from ~p: ~p", [From, Request]),
+    {reply, ok, State}.
+
+handle_cast(stop, State) ->
+    {stop, normal, State};
+
+handle_cast(Request, State) ->
+    lager:warning("Unhandled cast: ~p", [Request]),
+    {noreply, State}.
+
+handle_info(Info, State) ->
+    lager:warning("Unhandled info: ~p", [Info]),
+    {noreply, State}.
+
+terminate(_Reason, _State) ->
+    ok.
+
+code_change(_OldVsn, State, _Extra) ->
+    {ok, State}.
+
+%%%===================================================================
+%%% Internal functions
+%%%===================================================================
+connect(#state{transport = Transport, host = Host, port = Port}) ->
+    hackney:connect(Transport, Host, Port).
+
+connect(#state{transport = Transport, host = Host, port = Port}, Opts) ->
+    hackney:connect(Transport, Host, Port, Opts).
+
+get_value(Key, {ok, Map}) when is_map(Map) ->
+    {ok, maps:get(Key, Map)};
+get_value(_, Value) ->
+    Value.
+
+opts_to_qs(Map) ->
+    maps:fold(
+        fun
+            (K, V, <<>>) ->
+                BinK = to_binary(K),
+                BinV = to_binary(V),
+                <<BinK/binary, "=", BinV/binary>>;
+            (K, V, QS) ->
+                BinK = to_binary(K),
+                BinV = to_binary(V),
+                <<QS/binary, "&", BinK/binary, "=", BinV/binary>>
+        end, <<>>, Map).
+
+to_binary(List) when is_list(List) ->
+    list_to_binary(List);
+to_binary(Number) when is_number(Number) ->
+    integer_to_binary(Number);
+to_binary(Atom) when is_atom(Atom) ->
+    atom_to_binary(Atom, utf8);
+to_binary(Binary) ->
+    Binary.
+
+send_request({ok, ConnRef}, Method, Path, ReqBody) ->
+    ReqHeaders = [
+        {<<"Host">>, <<"localhost">>},
+        {<<"Accept">>, <<"application/json">>},
+        {<<"Content-Type">>, <<"application/json">>}
+    ],
+    lager:info("~p ~p", [Method, Path]),
+    try hackney:send_request(ConnRef, {Method, Path, ReqHeaders, ReqBody}) of
+        {ok, 204, _Headers, _ConnRef} ->
+            ok;
+        {ok, Status, Headers, ConnRef} when 200 =< Status, Status < 300 ->
+            parse_body(read_body(infinite, ConnRef, <<>>), Headers);
+        {ok, Status, _Headers, ConnRef} when 400 =< Status, Status < 500 ->
+            error_message(read_body(infinite, ConnRef, <<>>));
+        {ok, 500, _Headers, _ConnRef} ->
+            {error, server_error};
+        {ok, Status, _Headers, _ConnRef} ->
+            {error, {unexpected_status, Status}};
+        Error ->
+            Error
+    after
+        hackney:close(ConnRef)
+    end;
+send_request({error, Reason}, _Method, _Path, _ReqBody) ->
+    {error, Reason}.
+
+stream_request(Pid, {ok, ConnRef}, Method, Path, ReqBody) ->
+    ReqHeaders = [
+        {<<"Host">>, <<"localhost">>},
+        {<<"Accept">>, <<"application/json">>},
+        {<<"Content-Type">>, <<"application/json">>}
+    ],
+    case hackney:send_request(ConnRef, {Method, Path, ReqHeaders, ReqBody}) of
+        {ok, ConnRef} ->
+            {ok, Pid};
+        Error ->
+            Pid ! stop,
+            Error
+    end;
+stream_request(_Pid, {error, Reason}, _Method, _Path, _ReqBody) ->
+    {error, Reason}.
+
+
+parse_body({ok, Body}, Headers) ->
+    {ok, decode_response(proplists:get_value(<<"Content-Type">>, Headers), Body)};
+parse_body(Error, _Headers) ->
+    Error.
+
+error_message({ok, Body}) ->
+    #{<<"message">> := Message} = decode_response(<<"application/json">>, Body),
+    {error, Message};
+error_message(Error) ->
+    Error.
+
+decode_response(<<"application/json">>, Body) ->
+    case jsx:decode(Body, [return_maps, return_tail]) of
+        {with_tail, Map, <<>>} ->
+            Map;
+        {with_tail, Map, Tail} ->
+            lists:flatten([Map, decode_response(<<"application/json">>, Tail)])
+    end;
+decode_response(_ContentType, Body) ->
+    Body.
+
+read_body(MaxLength, Ref, Body) when MaxLength =:= infinite; MaxLength > byte_size(Body) ->
+    case hackney:stream_body(Ref) of
+        {ok, Data} ->
+            lager:info("~p", [Data]),
+            read_body(MaxLength, Ref, <<Body/binary, Data/binary>>);
+        done ->
+            {ok, Body};
+        {error, Reason} ->
+            {error, Reason}
+    end;
+read_body(_MaxLength, _Ref, _Acc) ->
+    {error, max_length_exceeded}.
+
+stream_body(Pid, Ref) ->
+    receive
+        {hackney_response, NewRef, {status, Status, _Reason}} when 200 =< Status, Status < 300 ->
+            stream_body(Pid, NewRef);
+        {hackney_response, NewRef, {headers, _Headers}} ->
+            stream_body(Pid, NewRef);
+        {hackney_response, _NewRef, done} ->
+            Pid ! done,
+            ok;
+        {hackney_response, NewRef, Chunk} ->
+            Pid ! Chunk,
+            stream_body(Pid, NewRef);
+        stop when Ref /= undefined ->
+            hackney:close(Ref);
+        _ ->
+            ok
+    end.
diff --git a/src/dockerl_app.erl b/src/dockerl_app.erl
deleted file mode 100644
index f6faaf1645355ffb4be2c169489f617c9defee9a..0000000000000000000000000000000000000000
--- a/src/dockerl_app.erl
+++ /dev/null
@@ -1,26 +0,0 @@
-%%%-------------------------------------------------------------------
-%% @doc dockerl public API
-%% @end
-%%%-------------------------------------------------------------------
-
--module(dockerl_app).
-
--behaviour(application).
-
-%% Application callbacks
--export([start/2, stop/1]).
-
-%%====================================================================
-%% API
-%%====================================================================
-
-start(_StartType, _StartArgs) ->
-    dockerl_sup:start_link().
-
-%%--------------------------------------------------------------------
-stop(_State) ->
-    ok.
-
-%%====================================================================
-%% Internal functions
-%%====================================================================
diff --git a/src/dockerl_sup.erl b/src/dockerl_sup.erl
deleted file mode 100644
index 400d346363953979a9e0208f6b233caac6c90030..0000000000000000000000000000000000000000
--- a/src/dockerl_sup.erl
+++ /dev/null
@@ -1,35 +0,0 @@
-%%%-------------------------------------------------------------------
-%% @doc dockerl top level supervisor.
-%% @end
-%%%-------------------------------------------------------------------
-
--module(dockerl_sup).
-
--behaviour(supervisor).
-
-%% API
--export([start_link/0]).
-
-%% Supervisor callbacks
--export([init/1]).
-
--define(SERVER, ?MODULE).
-
-%%====================================================================
-%% API functions
-%%====================================================================
-
-start_link() ->
-    supervisor:start_link({local, ?SERVER}, ?MODULE, []).
-
-%%====================================================================
-%% Supervisor callbacks
-%%====================================================================
-
-%% Child :: {Id,StartFunc,Restart,Shutdown,Type,Modules}
-init([]) ->
-    {ok, { {one_for_all, 0, 1}, []} }.
-
-%%====================================================================
-%% Internal functions
-%%====================================================================
diff --git a/test/dockerl_test.erl b/test/dockerl_test.erl
new file mode 100644
index 0000000000000000000000000000000000000000..25665fa10a3b6596fd91bad4b10d9e7a06b93dcd
--- /dev/null
+++ b/test/dockerl_test.erl
@@ -0,0 +1,78 @@
+-module(dockerl_test).
+
+-include_lib("eunit/include/eunit.hrl").
+
+
+%%%====================================================================
+%%% Test descriptions
+%%%====================================================================
+rpc_test_() ->
+    {
+        setup,
+        fun start/0,
+        fun stop/1,
+        fun instantiator/1
+    }.
+
+%%%====================================================================
+%%% Setup functions
+%%%====================================================================
+start() ->
+    lager:start(),
+    hackney:start(),
+    {ok, Pid} = dockerl:start_link(socket, <<"/var/run/docker.sock">>),
+    Pid.
+
+stop(Pid) ->
+    MRef = erlang:monitor(process, Pid),
+    gen_server:cast(Pid, stop),
+    receive {'DOWN', MRef, _, _, _} -> ok end.
+
+instantiator(Pid) ->
+    [
+        info(Pid),
+        version(Pid),
+        {timeout, 60, pull_image(Pid)},
+        {timeout, 60, start_container(Pid)},
+        remove_image(Pid)
+    ].
+
+%%%====================================================================
+%%% Tests
+%%%====================================================================
+info(Pid) ->
+    ?_assertMatch({ok, #{}}, dockerl:info(Pid)).
+
+version(Pid) ->
+    ?_assertMatch({ok, #{}}, dockerl:version(Pid)).
+
+pull_image(Pid) ->
+    ?_assertMatch({ok, _}, dockerl:pull_image(Pid, <<"nginx:latest">>)).
+
+remove_image(Pid) ->
+    ?_assertMatch({ok, _}, dockerl:remove_image(Pid, <<"nginx:latest">>)).
+
+start_container(Pid) ->
+    fun() ->
+        {ok, Id} = dockerl:create_container(Pid, <<"nginx:latest">>),
+        ?assertMatch(ok, dockerl:start_container(Pid, Id)),
+        {ok, Stream} = dockerl:container_logs(Pid, Id),
+        loop(Stream),
+        ?assertMatch(ok, dockerl:stop_container(Pid, Id)),
+        ?assertMatch(ok, dockerl:remove_container(Pid, Id))
+    end.
+
+%%%===================================================================
+%%% Internal functions
+%%%===================================================================
+loop(Stream) ->
+    receive
+        done ->
+            ok;
+        Msg ->
+            lager:info("~p", [Msg]),
+            loop(Stream)
+    after
+        3000 ->
+            Stream ! stop
+    end.
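The eunit suite above doubles as usage documentation for the new module. For completeness, a minimal interactive sketch of the same flow outside eunit, assuming a local Docker daemon on /var/run/docker.sock and that the lager and hackney applications are already running (for example under rebar3 shell, which loads config/sys.config):

    %% Minimal usage sketch mirroring test/dockerl_test.erl.
    {ok, Pid} = dockerl:start_link(socket, <<"/var/run/docker.sock">>),
    {ok, _Info} = dockerl:info(Pid),
    {ok, _} = dockerl:pull_image(Pid, <<"nginx">>, <<"latest">>),
    {ok, Id} = dockerl:create_container(Pid, <<"nginx:latest">>),
    ok = dockerl:start_container(Pid, Id),
    %% container_logs/2 returns the pid of a streaming process that forwards
    %% log chunks to the caller and sends 'done' when the stream ends;
    %% sending it 'stop' shuts the stream down.
    {ok, Stream} = dockerl:container_logs(Pid, Id),
    Stream ! stop,
    ok = dockerl:stop_container(Pid, Id),
    ok = dockerl:remove_container(Pid, Id).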