%%% Copyright (c) 2017, NORDUnet A/S.
%%% See LICENSE for licensing information.

%%% Helpers shared by the merge processes: HTTP requests against the
%%% plop API of other nodes (sendlog, missingentries, sendentry) and
%%% helpers for reading JSON state files named in the application
%%% configuration.

-module(merge_util).
-export([sendlog/4, sendentries/3, missingentries/1]).
-export([request/2, request/4]).
-export([readfile/1, nfetched/0]).

%% Perform an HTTP request and JSON-decode the response body. Returns
%% {Result, PropList} where Result is the <<"result">> member of the
%% decoded object. Throws {request_error, ...} on transport errors and
%% failed requests.
request(DebugTag, URL) ->
    request(DebugTag, URL, [], <<>>).

request(DebugTag, URL, Headers, RequestBody) ->
    case plop_httputil:request(DebugTag, URL, Headers, RequestBody) of
        {error, Err} ->
            throw({request_error, request, DebugTag, Err});
        {failure, {none, StatusCode, none}, _RespHeaders, _Body} ->
            throw({request_error, failure, DebugTag, StatusCode});
        {success, {_, StatusCode, _}, _, Body} when StatusCode == 200 ->
            case (catch mochijson2:decode(Body)) of
                {error, Err} ->
                    throw({request_error, decode, DebugTag, Err});
                {struct, PropList} ->
                    {proplists:get_value(<<"result">>, PropList), PropList}
            end
    end.

%% Send Hashes, whose first element has index Start in the log, to the
%% node's sendlog endpoint, at most Chunksize hashes per request.
sendlog(NodeAddress, Start, Hashes, Chunksize) ->
    sendlog_chunk(NodeAddress, Start,
                  lists:split(min(Chunksize, length(Hashes)), Hashes),
                  Chunksize).

sendlog_chunk(_, _, {[], _}, _) ->
    ok;
sendlog_chunk(NodeAddress, Start, {Chunk, Rest}, Chunksize) ->
    ok = sendlog_request(NodeAddress, Start, Chunk),
    sendlog_chunk(NodeAddress, Start + length(Chunk),
                  lists:split(min(Chunksize, length(Rest)), Rest),
                  Chunksize).

sendlog_request(NodeAddress, Start, Hashes) ->
    DebugTag = io_lib:format("sendlog ~B:~B", [Start, length(Hashes)]),
    URL = NodeAddress ++ "sendlog",
    Headers = [{"Content-Type", "text/json"}],
    EncodedHashes = [base64:encode(H) || H <- Hashes],
    RequestBody = list_to_binary(mochijson2:encode({[{"start", Start},
                                                     {"hashes", EncodedHashes}]})),
    case request(DebugTag, URL, Headers, RequestBody) of
        {<<"ok">>, _} ->
            ok;
        Err ->
            throw({request_error, result, DebugTag, Err})
    end.

%% Ask the node which entries it is still missing. Returns {ok, Entries}.
missingentries(NodeAddress) ->
    DebugTag = "missingentries",
    URL = NodeAddress ++ "missingentries",
    case request(DebugTag, URL) of
        {<<"ok">>, PropList} ->
            {ok, proplists:get_value(<<"entries">>, PropList)};
        Err ->
            throw({request_error, result, DebugTag, Err})
    end.

%% Send the log entries for the given leaf hashes to the node's
%% sendentry endpoint, at most Chunksize entries per request.
sendentries(NodeAddress, Hashes, Chunksize) ->
    {ChunkOfHashes, RestOfHashes} =
        lists:split(min(Chunksize, length(Hashes)), Hashes),
    sendentries_chunk(NodeAddress, {ChunkOfHashes, RestOfHashes}, Chunksize).

sendentries_chunk(_, {[], _}, _) ->
    ok;
sendentries_chunk(NodeAddress, {Chunk, Rest}, Chunksize) ->
    HashesAndEntries =
        lists:zip(Chunk, [db:entry_for_leafhash(H) || H <- Chunk]),
    ok = sendentries_request(NodeAddress, HashesAndEntries),
    sendentries_chunk(NodeAddress,
                      lists:split(min(Chunksize, length(Rest)), Rest),
                      Chunksize).

sendentries_request(NodeAddress, HashesAndEntries) ->
    DebugTag = io_lib:format("sendentry ~B", [length(HashesAndEntries)]),
    URL = NodeAddress ++ "sendentry",
    Headers = [{"Content-Type", "text/json"}],
    L = mochijson2:encode([[{"entry", base64:encode(E)},
                            {"treeleafhash", base64:encode(H)}] ||
                              {H, E} <- HashesAndEntries]),
    RequestBody = list_to_binary(L),
    case request(DebugTag, URL, Headers, RequestBody) of
        {<<"ok">>, _} ->
            ok;
        Err ->
            throw({request_error, result, DebugTag, Err})
    end.

%% Read and JSON-decode the file named by the plop application
%% environment variable FileInConfig. Returns noentry if the variable
%% is unset or the file doesn't exist.
readfile(FileInConfig) ->
    case application:get_env(plop, FileInConfig) of
        {ok, File} ->
            case atomic:readfile(File) of
                noentry ->
                    noentry;
                Contents ->
                    mochijson2:decode(Contents)
            end;
        undefined ->
            noentry
    end.

%% Number of entries fetched so far according to the fetched file,
%% after verifying that its hash matches the logorder entry at the
%% recorded index.
nfetched() ->
    {Index, Hash} = read_fetched(),
    case Index >= 0 of
        true ->
            ok = verify_logorder_entry(Index, binary_to_list(Hash));
        false ->
            ok
    end,
    Index + 1.

%% Read the fetched file. Returns {Index, Hash}, or {-1, <<>>} if the
%% file doesn't exist.
read_fetched() ->
    case merge_util:readfile(fetched_path) of
        noentry ->
            {-1, <<>>};
        {struct, PropList} ->
            {proplists:get_value(<<"index">>, PropList),
             proplists:get_value(<<"hash">>, PropList)}
    end.

%% Check that logorder[Index] matches HashAsString. Logs an error and
%% returns mismatch otherwise.
verify_logorder_entry(Index, HashAsString) ->
    case hex:bin_to_hexstr(index:get(logorder, Index)) of
        HashAsString ->
            ok;
        Mismatch ->
            lager:error("hash in fetched file ~p doesn't match logorder[~B]=~p",
                        [HashAsString, Index, Mismatch]),
            mismatch
    end.
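
%% Usage sketch (illustrative only, not part of the original module):
%% a merge process could push its view of the log to another node
%% roughly like this. The node address, the chunk sizes and the base64
%% decoding of the returned missing entries are assumptions made for
%% the example, not something this module mandates.
%%
%%   NodeAddress = "https://frontend.example.net/plop/v1/frontend/",
%%   ok = merge_util:sendlog(NodeAddress, 0, LeafHashes, 1000),
%%   {ok, Missing} = merge_util:missingentries(NodeAddress),
%%   ok = merge_util:sendentries(NodeAddress,
%%                               [base64:decode(H) || H <- Missing],
%%                               100).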