%%% Copyright (c) 2014-2015, NORDUnet A/S.
%%% See LICENSE for licensing information.

%%% @doc Frontend node API

-module(frontend).

%% API (URL)
-export([request/4]).

-define(APPURL_PLOP_FRONTEND, "plop/v1/frontend").
-define(APPURL_PLOP_MERGE, "plop/v1/merge").

%% @doc Dispatch an HTTP API request to the frontend node.
%% Arguments: HTTP method (get|post), application URL prefix, the last
%% path component, and the raw request body (post) or query (get).
%% Returns {HttpStatus, Headers, Body} via success/1 or html/2.

%% Store a single entry (leaf hash -> log entry), synced to disk.
request(post, ?APPURL_PLOP_FRONTEND, "sendentry", Input) ->
    case decode_json(Input) of
        {error, E} ->
            html("sendentry: bad input:", E);
        {struct, PropList} ->
            LogEntry = base64:decode(proplists:get_value(<<"entry">>, PropList)),
            TreeLeafHash = base64:decode(proplists:get_value(<<"treeleafhash">>,
                                                             PropList)),
            ok = db:add_entry_sync(TreeLeafHash, LogEntry),
            success({[{result, <<"ok">>}]})
    end;
%% Write (or verify, if already present) a consecutive run of leaf
%% hashes into the index, starting at position "start".
request(post, ?APPURL_PLOP_FRONTEND, "sendlog", Input) ->
    case decode_json(Input) of
        {error, E} ->
            %% Fixed label: previously said "sendentry".
            html("sendlog: bad input:", E);
        {struct, PropList} ->
            Start = proplists:get_value(<<"start">>, PropList),
            Hashes = lists:map(fun (S) -> base64:decode(S) end,
                               proplists:get_value(<<"hashes">>, PropList)),
            write_or_verify_index(Start, Hashes)
    end;
%% Accept a new Signed Tree Head after verifying that all entries up
%% to the new tree size are present and consistent.
request(post, ?APPURL_PLOP_FRONTEND, "sendsth", Input) ->
    case decode_json(Input) of
        {error, E} ->
            %% Fixed label: previously said "sendentry".
            html("sendsth: bad input:", E);
        {struct, PropList} ->
            OldSize = db:size(),
            Treesize = proplists:get_value(<<"tree_size">>, PropList),
            Timestamp = proplists:get_value(<<"timestamp">>, PropList),
            RootHash = base64:decode(proplists:get_value(<<"sha256_root_hash">>,
                                                         PropList)),
            Signature = base64:decode(
                          proplists:get_value(<<"tree_head_signature">>,
                                              PropList)),
            Indexsize = db:indexsize(),
            if
                Treesize < OldSize ->
                    html("Size is older than current size", OldSize);
                Treesize == 0, OldSize == 0 ->
                    %% Empty tree: nothing to check, just verify and save.
                    lager:debug("both old and new size is 0, saving sth"),
                    verify_and_save_sth(Treesize, Timestamp, RootHash,
                                        Signature, PropList);
                Treesize > Indexsize ->
                    html("Has too few entries", Indexsize);
                true ->
                    lager:debug("old size: ~p new size: ~p",
                                [OldSize, Treesize]),
                    Errors = check_entries(Treesize),
                    case Errors of
                        [] ->
                            ht:load_tree(Treesize - 1),
                            verify_and_save_sth(Treesize, Timestamp, RootHash,
                                                Signature, PropList);
                        _ ->
                            html("Database not complete", Errors)
                    end
            end
    end;
%% Report the current number of entries in the database.
request(get, ?APPURL_PLOP_FRONTEND, "currentposition", _Query) ->
    Size = db:size(),
    success({[{result, <<"ok">>}, {position, Size}]});
%% Report up to 100000 leaf hashes in the index that have no stored entry.
request(get, ?APPURL_PLOP_FRONTEND, "missingentries", _Query) ->
    Size = db:size(),
    Missing = fetchmissingentries(Size, 100000),
    lager:debug("missingentries: ~p", [Missing]),
    success({[{result, <<"ok">>},
              {entries, lists:map(fun (Entry) -> base64:encode(Entry) end,
                                  Missing)}]});
%% Merge-node entry submission: accepts either a JSON list of entries
%% (stored nosync, then one final sync) or a single entry (stored sync).
request(post, ?APPURL_PLOP_MERGE, "sendentry", Input) ->
    case decode_json(Input) of
        {error, E} ->
            html("sendentry: bad input:", E);
        Entries when is_list(Entries) ->
            %% lists:foreach instead of lists:map: result was discarded.
            lists:foreach(
              fun ({struct, PropList}) ->
                      LogEntry = base64:decode(
                                   proplists:get_value(<<"entry">>, PropList)),
                      TreeLeafHash = base64:decode(
                                       proplists:get_value(<<"treeleafhash">>,
                                                           PropList)),
                      ok = db:add_entry_nosync(TreeLeafHash, LogEntry)
              end, Entries),
            ok = db:sync_entry_db(),
            success({[{result, <<"ok">>}]});
        {struct, PropList} ->
            LogEntry = base64:decode(proplists:get_value(<<"entry">>, PropList)),
            TreeLeafHash = base64:decode(proplists:get_value(<<"treeleafhash">>,
                                                             PropList)),
            ok = db:add_entry_sync(TreeLeafHash, LogEntry),
            success({[{result, <<"ok">>}]})
    end;
%% Merge-node variant of sendlog; same semantics as the frontend one.
request(post, ?APPURL_PLOP_MERGE, "sendlog", Input) ->
    case decode_json(Input) of
        {error, E} ->
            %% Fixed label: previously said "sendentry".
            html("sendlog: bad input:", E);
        {struct, PropList} ->
            Start = proplists:get_value(<<"start">>, PropList),
            Hashes = lists:map(fun (S) -> base64:decode(S) end,
                               proplists:get_value(<<"hashes">>, PropList)),
            write_or_verify_index(Start, Hashes)
    end;
%% Verify all unverified entries up to "tree_size" and return our own
%% root hash for that size so the merge node can compare.
request(post, ?APPURL_PLOP_MERGE, "verifyroot", Input) ->
    case decode_json(Input) of
        {error, E} ->
            %% Fixed label: previously said "sendentry".
            html("verifyroot: bad input:", E);
        {struct, PropList} ->
            OldSize = db:verifiedsize(),
            Treesize = proplists:get_value(<<"tree_size">>, PropList),
            Indexsize = db:indexsize(),
            if
                Treesize > Indexsize ->
                    html("Has too few entries", Indexsize);
                true ->
                    NewEntries = get_new_entries(OldSize, Treesize),
                    lager:debug("old size: ~p new size: ~p entries: ~p",
                                [OldSize, Treesize, NewEntries]),
                    Errors = check_entries_noreverse(NewEntries, OldSize,
                                                     Treesize - 1),
                    case Errors of
                        [] ->
                            ht:load_tree(Treesize - 1),
                            RootHash = ht:root(Treesize - 1),
                            success({[{result, <<"ok">>},
                                      {root_hash, base64:encode(RootHash)}]});
                        _ ->
                            html("Database not complete", Errors)
                    end
            end
    end;
%% Report the current verified size.
request(get, ?APPURL_PLOP_MERGE, "verifiedsize", _Query) ->
    Size = db:verifiedsize(),
    success({[{result, <<"ok">>}, {size, Size}]});
%% Advance the verified size (never past the index size; a smaller
%% value than the current one is silently accepted as a no-op).
request(post, ?APPURL_PLOP_MERGE, "setverifiedsize", Input) ->
    case decode_json(Input) of
        {error, E} ->
            html("setverifiedsize: bad input:", E);
        {struct, PropList} ->
            OldSize = db:verifiedsize(),
            Treesize = proplists:get_value(<<"size">>, PropList),
            Indexsize = db:indexsize(),
            if
                Treesize > Indexsize ->
                    html("Has too few entries", Indexsize);
                OldSize > Treesize ->
                    success({[{result, <<"ok">>}]});
                true ->
                    db:set_verifiedsize(Treesize),
                    success({[{result, <<"ok">>}]})
            end
    end;
%% Merge-node variant of missingentries, scanning from the verified size.
request(get, ?APPURL_PLOP_MERGE, "missingentries", _Query) ->
    Size = db:verifiedsize(),
    Missing = fetchmissingentries(Size, 100000),
    lager:debug("missingentries: ~p", [Missing]),
    success({[{result, <<"ok">>},
              {entries, lists:map(fun (Entry) -> base64:encode(Entry) end,
                                  Missing)}]}).

%% @private Decode a JSON request body, returning the decoded term or
%% {error, Reason} on invalid input.
%% Bug fix: the old code did `case (catch mochijson2:decode(Input)) of
%% {error, E} -> ...` -- but mochijson2:decode/1 raises an error on bad
%% input, which old-style `catch` turns into {'EXIT', Reason}.  The
%% {error, E} pattern therefore never matched and malformed input
%% crashed the request (case_clause) instead of producing the intended
%% 400 response.  try/catch restores the intended behavior.
decode_json(Input) ->
    try
        mochijson2:decode(Input)
    catch
        _Class:Reason ->
            {error, Reason}
    end.

%% @doc Verify a submitted STH signature and compare its root hash with
%% our own tree's root for the same size; save the STH when both check
%% out, otherwise return a 400 response describing the mismatch.
verify_and_save_sth(Treesize, Timestamp, RootHash, Signature, PropList) ->
    OwnRootHash = ht:root(Treesize - 1),
    case {plop:verify_sth(Treesize, Timestamp, RootHash, Signature),
          OwnRootHash} of
        {true, RootHash} ->
            ok = plop:save_sth({struct, PropList}),
            success({[{result, <<"ok">>}]});
        {false, RootHash} ->
            html("Verification failed", hex:bin_to_hexstr(RootHash));
        _ ->
            html("Root hash not the same", hex:bin_to_hexstr(OwnRootHash))
    end.

%% @doc Leaf hashes for indices OldSize..Treesize-1; [] when the range
%% is empty.  Deliberately no clause for OldSize > Treesize: that is a
%% bug at the call site and should crash (function_clause).
get_new_entries(OldSize, Treesize) when OldSize < Treesize ->
    db:leafhash_for_indices(OldSize, Treesize - 1);
get_new_entries(OldSize, Treesize) when OldSize == Treesize ->
    [].
%% @doc Verify all entries from the last sendsth-verified position up
%% to Treesize - 1, committing the entry-hash store afterwards.
%% Returns [] on success or a list of per-entry errors.
check_entries(Treesize) ->
    First = db:sendsth_verified() + 1,
    Last = Treesize - 1,
    lager:debug("Top level checking entries ~p-~p", [First, Last]),
    Result = check_entries_chunked(First, Last),
    db:commit_entryhash(),
    Result.

%% @doc Verify entries First..Last in chunks (size from app env,
%% default 1000), stopping at the first chunk that reports errors.
check_entries_chunked(First, Last) ->
    lager:debug("Checking entries ~p-~p", [First, Last]),
    ChunkSize = application:get_env(plop, check_entries_chunksize, 1000),
    ChunkLast = min(First + ChunkSize - 1, Last),
    case check_entries_onechunk(First, ChunkLast) of
        [] when ChunkLast == Last ->
            [];
        [] ->
            check_entries_chunked(ChunkLast + 1, Last);
        Errors ->
            Errors
    end.

%% @doc Verify one chunk of entries: write the hash->index reverse
%% mappings (nosync), check each entry, and on full success sync the
%% reverse index and advance the sendsth-verified marker to Last.
check_entries_onechunk(First, Last) ->
    Chunk = get_new_entries(First, Last + 1),
    lager:debug("Checking chunk ~p-~p: ~p", [First, Last, Chunk]),
    Indexed = lists:zip(Chunk, lists:seq(First, Last)),
    lists:foreach(fun ({LeafHash, Idx}) ->
                          ok = db:indexforhash_nosync(LeafHash, Idx)
                  end,
                  Indexed),
    case check_entries_int(Chunk, First, Last) of
        [] ->
            ok = db:indexforhash_dosync(),
            case Chunk of
                [] ->
                    %% Empty chunk: nothing verified, nothing to record.
                    none;
                _ ->
                    %% lists:last(Chunk) is the hash at index Last.
                    db:set_sendsth_verified(Last, lists:last(Chunk))
            end,
            [];
        Errors ->
            Errors
    end.

%% @doc Run check_entry/2 over every {hash, index} pair; collect the
%% non-ok results (in reverse order of occurrence, as before).
check_entries_int(Entries, Start, End) ->
    Checked = [check_entry(LeafHash, Idx)
               || {LeafHash, Idx} <- lists:zip(Entries, lists:seq(Start, End))],
    lists:reverse([R || R <- Checked, R =/= ok]).

%% @doc Like check_entries_int/3 but without touching the reverse
%% index, optionally fanned out over parallel tasks (app env,
%% default 1).
check_entries_noreverse(Entries, Start, End) ->
    Tasks = application:get_env(plop, check_entries_parallel_tasks, 1),
    Results = util:parallel_map(
                fun ({LeafHash, Idx}) ->
                        check_entry_noreverse(LeafHash, Idx)
                end,
                lists:zip(Entries, lists:seq(Start, End)),
                Tasks),
    lists:reverse([R || R <- Results, R =/= ok]).

%% @doc Compute an entry's entry hash via the {Module, Function}
%% configured in the plop application environment.
entryhash_from_entry(Entry) ->
    {ok, {M, F}} = application:get_env(plop, entryhash_from_entry),
    M:F(Entry).

%% @doc Verify an entry via the {Module, Function} configured in the
%% plop application environment; expected to return {ok, LeafHash} or
%% {error, Reason}.
verify_entry(Entry) ->
    {ok, {M, F}} = application:get_env(plop, verify_entry),
    M:F(Entry).
%% @doc Check the entry stored under LeafHash: it must be found at the
%% same Index and its contents must hash back to LeafHash.  On success
%% the entry hash is recorded in the entry-hash store.
%% Returns ok | {notfound, Index} | {error, differentleafhash} |
%% {error, verificationfailed}.
check_entry(LeafHash, Index) ->
    case plop:get_by_leaf_hash(LeafHash) of
        notfound ->
            {notfound, Index};
        {Index, LeafHash, Entry} ->
            case verify_leafhash(LeafHash, Entry) of
                ok ->
                    %% add_entryhash returns ok|differ -- both are OK here.
                    db:add_entryhash(LeafHash, entryhash_from_entry(Entry)),
                    ok;
                Error ->
                    Error
            end
    end.

%% @doc Like check_entry/2 but looks the entry up directly (no reverse
%% index round trip) and does not record the entry hash.
check_entry_noreverse(LeafHash, Index) ->
    case plop:entry_for_leafhash(LeafHash) of
        notfound ->
            {notfound, Index};
        Entry ->
            verify_leafhash(LeafHash, Entry)
    end.

%% @private Verify that Entry's contents hash to LeafHash.
%% Returns ok | {error, differentleafhash} | {error, verificationfailed},
%% logging the failure reason in the latter two cases.
verify_leafhash(LeafHash, Entry) ->
    case verify_entry(Entry) of
        {ok, LeafHash} ->
            ok;
        {ok, DifferentLeafHash} ->
            lager:error("leaf hash not correct: filename is ~p and contents are ~p",
                        [hex:bin_to_hexstr(LeafHash),
                         hex:bin_to_hexstr(DifferentLeafHash)]),
            {error, differentleafhash};
        {error, Reason} ->
            lager:error("verification failed: ~p", [Reason]),
            {error, verificationfailed}
    end.

%% @doc Refill the prefetch cache when it is empty: fetch a batch of up
%% to 1001 leaf hashes starting at Index, falling back to a single
%% lookup; noentry when the index has nothing at Index.  A non-empty
%% cache is returned unchanged.
prefetchindices(Index, []) ->
    case db:leafhash_for_indices(Index, Index + 1000) of
        noentry ->
            case db:leafhash_for_index(Index) of
                noentry ->
                    noentry;
                LeafHash ->
                    [LeafHash]
            end;
        LeafHashes ->
            LeafHashes
    end;
prefetchindices(_Index, Cached) ->
    Cached.

-spec fetchmissingentries(non_neg_integer(), non_neg_integer()) ->
          [binary() | noentry].
%% @doc Leaf hashes (in index order) of up to MaxEntries entries from
%% Index onwards that are missing from the entry database.
fetchmissingentries(Index, MaxEntries) ->
    lists:reverse(fetchmissingentries(Index, [], [], MaxEntries)).

-spec fetchmissingentries(non_neg_integer(), [binary() | noentry], [binary()],
                          non_neg_integer()) -> [binary() | noentry].
%% @doc Worker for fetchmissingentries/2: walk the index from Index,
%% accumulating (newest first; caller reverses) leaf hashes that have
%% no stored entry, until MaxEntries misses are collected or the index
%% runs out.  PrefetchList is a cache of leaf hashes fetched ahead by
%% prefetchindices/2.
fetchmissingentries(_Index, Acc, _PrefetchList, 0) ->
    Acc;
fetchmissingentries(Index, Acc, PrefetchList, MaxEntries) ->
    lager:debug("index ~p", [Index]),
    case prefetchindices(Index, PrefetchList) of
        noentry ->
            %% End of index reached: stop.
            Acc;
        [Hash|PrefetchRest] ->
            case db:entry_for_leafhash(Hash) of
                noentry ->
                    %% Entry missing: record its hash and count it.
                    lager:debug("didn't find hash ~p", [Hash]),
                    fetchmissingentries(Index + 1, [Hash | Acc], PrefetchRest,
                                        MaxEntries - 1);
                _ ->
                    fetchmissingentries(Index + 1, Acc, PrefetchRest,
                                        MaxEntries)
            end
    end.

%% @doc Write Hashes into the index at positions Start..Start+len-1,
%% or -- when the index already covers that range -- verify that the
%% stored hashes are identical.  Returns a success response, or exits
%% via util:exit_with_error/3 on a content mismatch.
write_or_verify_index(Start, Hashes) ->
    End = Start + length(Hashes) - 1,
    case db:indexsize() of
        IndexSize when IndexSize > End ->
            %% Range already written: compare instead of overwriting.
            FetchedHashes = db:leafhash_for_indices(Start, End),
            case FetchedHashes of
                Hashes ->
                    success({[{result, <<"ok">>}]});
                _ ->
                    util:exit_with_error(invalid, index,
                                         "Written content not the" ++
                                             " same as old content")
            end;
        _ ->
            %% New range: write each mapping nosync, then one sync.
            Indices = lists:seq(Start, End),
            lists:foreach(fun ({Hash, Index}) ->
                                  ok = db:add_index_nosync_noreverse(Hash,
                                                                     Index)
                          end,
                          lists:zip(Hashes, Indices)),
            ok = db:index_sync(),
            success({[{result, <<"ok">>}]})
    end.

%% Private functions.

%% @doc Build a 400 Bad Request response: Text via ~s followed by the
%% offending term via ~p.
%% NOTE(review): the format string below contains bare newlines where
%% HTML markup (e.g. <html><body> wrapper tags) appears to have been
%% stripped from this copy of the file -- confirm against the upstream
%% source before editing the literal.
html(Text, Input) ->
    {400, [{"Content-Type", "text/html"}],
     io_lib:format(
       "

~n" ++ "~s~n" ++ "~p~n" ++ "~n", [Text, Input])}.

%% @doc Build a 200 OK response with Data encoded as JSON.
%% NOTE(review): "text/json" is nonstandard (application/json is the
%% registered type) -- left unchanged since clients may depend on it.
success(Data) ->
    {200, [{"Content-Type", "text/json"}], mochijson2:encode(Data)}.