summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorLinus Nordberg <linus@nordu.net>2016-07-16 17:26:54 +0200
committerLinus Nordberg <linus@nordu.net>2016-07-16 17:26:54 +0200
commit374900dca397ba8fe38fc028e9eb657feb5ce073 (patch)
tree708bfe4081bf50961bab983e0e4a610cd7ac1355
parentcde186313b20e46be41736c9ac506674fa4f2d23 (diff)
WIP
NOTE: tests don't work -- SCT's don't validate
-rw-r--r--Makefile1
-rw-r--r--reltool.config4
-rw-r--r--src/catlfish.erl1
-rw-r--r--src/dns.erl168
-rw-r--r--src/v1.erl17
-rw-r--r--tools/certtools.py13
-rw-r--r--tools/precerttools.py2
-rwxr-xr-xtools/testcase1.py127
8 files changed, 219 insertions, 114 deletions
diff --git a/Makefile b/Makefile
index 58dc408..30563ff 100644
--- a/Makefile
+++ b/Makefile
@@ -37,6 +37,7 @@ tests-prepare:
mkdir $(INSTDIR)/tests/mergedb-secondary/chains
touch $(INSTDIR)/tests/mergedb-secondary/logorder
printf 0 > $(INSTDIR)/tests/mergedb-secondary/verifiedsize
+ cp test/testdata/dnssec/trust_anchors $(INSTDIR)/tests/
mkdir $(INSTDIR)/tests/known_roots
cp tools/testcerts/roots/* $(INSTDIR)/tests/known_roots
@for machine in $(MACHINES); do \
diff --git a/reltool.config b/reltool.config
index 9fa5e7f..2a1b615 100644
--- a/reltool.config
+++ b/reltool.config
@@ -1,4 +1,8 @@
%% -*- mode: erlang -*-
+
+%% TODO: Be more specific with which files are copied. Case in point:
+%% catlfish-0.9.0-dev.ez contains catlfish-0.9.0-dev/src/.~/.
+
{sys, [
{erts, [{mod_cond, derived}, {app_file, strip}]},
{app_file, strip},
diff --git a/src/catlfish.erl b/src/catlfish.erl
index 711deaa..27563d1 100644
--- a/src/catlfish.erl
+++ b/src/catlfish.erl
@@ -124,6 +124,7 @@ add_to_db(Type, LeafCert, CertChain, EntryHash) ->
leaf_type = timestamped_entry,
entry = TSE}),
MTLHash = ht:leaf_hash(MTLText),
+ lager:debug("LeafCert len: ~p", [byte_size(LeafCert)]),
LogEntry = pack_entry(Type, MTLText, LeafCert, CertChain),
ok = plop:add(LogEntry, MTLHash, EntryHash),
{TSE, MTLHash}.
diff --git a/src/dns.erl b/src/dns.erl
index f327a8f..46cda38 100644
--- a/src/dns.erl
+++ b/src/dns.erl
@@ -5,11 +5,13 @@
-export([decode_rrset/1, decode_rr/1, encode_rrset/1, encode_rr/1,
canonicalize/1, validate/1]).
+-include_lib("eunit/include/eunit.hrl").
+
-record(rr, {name :: list(), % List of name labels.
- type :: non_neg_integer(),
- class :: binary(),
- ttl :: integer(),
- rdata :: binary()}).
+ type = 0 :: non_neg_integer(),
+ class = <<>> :: binary(),
+ ttl = 0 :: integer(),
+ rdata = <<>> :: binary()}).
-type rr() :: #rr{}.
-spec decode_name_label(binary()) -> tuple().
@@ -95,6 +97,7 @@ encode_rrset([H|T], Acc) ->
encode_rrset(T, [encode_rr(H) | Acc]).
%% Canonicalise a single RR according to RFC4034 section 6.2.
+-spec canonicalize_rr_form(rr(), rr()) -> rr().
canonicalize_rr_form(RR, RRSIG) ->
%% 1. Expand domain name -- a label with a length field >= 0xC0 is
%% a two octet pointer, which we can't expand (since we don't have
@@ -113,22 +116,72 @@ canonicalize_rr_form(RR, RRSIG) ->
RR#rr{name = LCName, ttl = OrigTTL}.
-%% Canonicalise an RRset with DNSKEY, DS, and RRSIG records according
-%% to RFC4034 section 6. Records of other types are removed. Duplicate
-%% records are removed.
-canonicalize(RRset) ->
+isValidType(#rr{type = Type}) ->
+ case Type of
+ 43 -> true; % DS
+ 46 -> true; % RRSIG
+ 48 -> true; % DNSKEY
+ _ -> false
+ end.
+
+%% Sort RR's within the same RRset, remove duplicate RR's and any RR's
+%% not of the types DNSKEY, DS or RRSIG.
+canonicalize_rr_ordering(RRs) ->
+ L1 = lists:takewhile(fun isValidType/1, RRs),
+ %%RRsets = splitOnName(L1),
+ %%SortedRRsets = lists:map(fun(L) -> lists:usort(fun cmpRR/2, L) end, L1),
+ %%OneList = lists:append(SortedRRsets),
+ lists:usort(fun cmpRR/2, L1).
+
+%% Canonicalise a list of RR's of the types DNSKEY, DS, and RRSIG
+%% according to RFC4034 section 6. Records of other types are
+%% removed. Duplicate records are removed.
+-spec canonicalize(list()) -> list().
+canonicalize(RRs) ->
%% 6.1 owner name order
- RRset61 = RRset, % TODO
+ RRs61 = RRs, % TODO
%% 6.2 RR form
- [DS, RRSIG | Rest] = RRset61,
+ [DS, RRSIG | Rest] = RRs61,
C14N_DS = canonicalize_rr_form(DS, RRSIG),
- RRset62 = [C14N_DS, RRSIG | Rest],
+ RRs62 = [C14N_DS, RRSIG | Rest],
%% 6.3 RR ordering (and dropping duplicates)
- RRset63 = RRset62,
+ RRs63 = canonicalize_rr_ordering(RRs62),
+
+ RRs63.
+
+%% cmpRR(A, B) when A#rr.type =< B#rr.type,
+%% A#rr.class =< B#rr.class,
+%% A#rr.ttl =< B#rr.ttl,
+%% A#rr.rdata =< B#rr.rdata ->
+%% cmpRRname(A#rr.name, B#rr.name);
+%% cmpRR(_, _) ->
+%% false.
+
+%% @doc
+-spec cmpRR(rr(), rr()) -> boolean().
+cmpRR(A, B) ->
+ case cmpRRname(A#rr.name, B#rr.name) of
+ equal ->
+ ?debugFmt("~p == ~p, next", [A#rr.name, B#rr.name]),
+ case A#rr.type == B#rr.type of
+ false -> A#rr.type =< B#rr.type;
+ true -> case A#rr.class == B#rr.class of
+ false -> A#rr.class =< B#rr.class;
+ true -> case A#rr.ttl == B#rr.ttl of
+ false -> A#rr.ttl =< B#rr.ttl;
+ true -> A#rr.rdata =< B#rr.rdata
+ end
+ end
+ end;
+ NameCmp -> NameCmp
+ end.
- RRset63.
+cmpRRname(A, B) when A == B -> equal;
+cmpRRname(A, B) when length(A) < length(B) -> true;
+cmpRRname(A, B) when length(A) > length(B) -> false;
+cmpRRname(A, B) -> A =< B.
%% Is the RR set valid for our needs from a DNS point of view? If so,
%% return the signature inception time of the RRSIG covering the DS
@@ -150,3 +203,92 @@ validate(RRsetBin) ->
%% TODO: Add unit tests.
+
+-define(TV_RR1_inbin,
+<<7,101,120,97,109,112,108,101,3,99,111,109,0,0,43,0,1,0,0,14,16,0,36,82,106,
+ 13,2,89,208,13,15,120,173,192,134,9,41,169,35,70,122,194,189,203,240,40,210,
+ 4,171,20,30,135,63,107,184,116,61,213,134,7,101,120,97,109,112,108,101,3,99,
+ 111,109,0,0,46,0,1,0,0,14,16,0,87,0,43,13,2,0,0,14,16,87,22,3,11,87,3,142,11,
+ 80,81,3,99,111,109,0,6,228,88,59,0,197,54,50,211,112,165,110,118,14,215,62,
+ 255,210,31,169,117,192,113,47,232,31,111,175,28,118,31,225,190,139,249,250,
+ 244,69,217,9,111,122,75,130,10,159,190,71,241,184,230,58,126,189,225,42,29,
+ 195,7,217,85,233,231,155,0,0,48,0,1,0,0,14,16,0,68,1,0,3,13,209,167,133,117,
+ 137,124,191,163,201,10,151,19,139,232,224,244,203,106,201,233,28,167,30,177,
+ 84,53,125,127,85,116,219,50,35,216,117,50,127,240,195,143,219,193,12,65,95,5,
+ 16,116,0,141,5,83,66,213,40,91,22,196,101,145,127,109,68,210,7,101,120,97,
+ 109,112,108,101,3,99,111,109,0,0,43,0,1,0,0,14,16,0,36,82,106,13,2,89,208,13,
+ 15,120,173,192,134,9,41,169,35,70,122,194,189,203,240,40,210,4,171,20,30,135,
+ 63,107,184,116,61,213,134,0,0,48,0,1,0,0,14,16,0,68,1,0,3,13,209,167,133,117,
+ 137,124,191,163,201,10,151,19,139,232,224,244,203,106,201,233,28,167,30,177,
+ 84,53,125,127,85,116,219,50,35,216,117,50,127,240,195,143,219,193,12,65,95,5,
+ 16,116,0,141,5,83,66,213,40,91,22,196,101,145,127,109,68,210>>).
+-define(TV_RR1_in, decode_rrset(?TV_RR1_inbin)).
+-define(TV_RR1_out, []).
+
+gen_rrset(Name) ->
+ gen_rrset(Name, 0, 0, <<>>).
+gen_rrset(Name, Type) ->
+ gen_rrset(Name, Type, 0, <<>>).
+gen_rrset(Name, Type, TTL, RDATA) ->
+ N = string:tokens(Name, "."),
+ RRs = [#rr{name = N, type = Type, class = <<0, 1>>, ttl = TTL, rdata = RDATA},
+ #rr{name = N, type = 46, class = <<0, 1>>, ttl = TTL,
+ rdata = <<0,0,0,0, TTL:32/integer>>}],
+ %%?debugFmt("gen_rrset: ~p", [RRs]),
+ RRs.
+
+basic_test_() ->
+ {ok, SingleRRbin} = file:read_file("test/testdata/dnssec/testrrsets/single-record"),
+ [SingleRR | _] = decode_rrset(SingleRRbin),
+ [G | _] = gen_rrset("example.com", 43, 3600,
+ <<82,106,13,2,89,208,13,15,120,173,192,134,9,41,
+ 169,35,70,122,194,189,203,240,40,210,4,171,20,
+ 30,135,63,107,184,116,61,213,134>>),
+ [?_assertMatch(#rr{name = ["example", "com"],
+ type = 43, class = <<0,1>>, ttl = 3600}, SingleRR),
+ ?_assertEqual(G, SingleRR)].
+
+-define(TV_R1, gen_rrset("example.com", 43, 3600, <<>>)).
+-define(TV_R2, gen_rrset("com", 43, 3600, <<>>)).
+-define(TV_R3, gen_rrset("com", 43, 300, <<>>)).
+
+c14n_test_() ->
+ [
+ %% Reverse order, names.
+ ?_assertEqual(lists:append([?TV_R2, ?TV_R1]),
+ canonicalize(lists:append([?TV_R1, ?TV_R2]))),
+ %% Remove duplicate R1.
+ ?_assertEqual(lists:append([?TV_R2, ?TV_R1]),
+ canonicalize(lists:append([?TV_R1, ?TV_R2, ?TV_R1]))),
+ %% Reverse order, TTL.
+ ?_assertEqual(lists:append([?TV_R3, ?TV_R2]),
+ canonicalize(lists:append([?TV_R2, ?TV_R3]))),
+ %% Sorting and removing.
+ %%?_assertEqual(lists:append([?TV_R3, ?TV_R2, ?TV_R1]),
+ %% canonicalize(lists:append([?TV_R2, ?TV_R3, ?TV_R1, ?TV_R3]))),
+ ?_assert(true)].
+
+sorting_test_() ->
+ [
+ ?_assertEqual(lists:append([
+ gen_rrset("example", 43),
+ gen_rrset("a.example", 43),
+ gen_rrset("yljkjljk.a.example", 43),
+ gen_rrset("Z.a.example", 43),
+ gen_rrset("zABC.a.EXAMPLE", 43),
+ gen_rrset("z.example", 43),
+ gen_rrset("\001.z.example", 43),
+ gen_rrset("*.z.example", 43),
+ gen_rrset("\200.z.example", 43)]),
+ canonicalize(
+ lists:append([
+ gen_rrset("\001.z.example", 43),
+ gen_rrset("a.example", 43),
+ gen_rrset("example", 43),
+ gen_rrset("Z.a.example", 43),
+ gen_rrset("zABC.a.EXAMPLE", 43),
+ gen_rrset("\200.z.example", 43),
+ gen_rrset("z.example", 43),
+ gen_rrset("*.z.example", 43),
+ gen_rrset("yljkjljk.a.example", 43)])))
+ ].
diff --git a/src/v1.erl b/src/v1.erl
index c1b07e6..d16b5c6 100644
--- a/src/v1.erl
+++ b/src/v1.erl
@@ -129,13 +129,20 @@ request(_Method, _App, _Fun, _) ->
none.
%% Private functions.
+-define(INPUT_TRIMLEN, 80).
err400(Text, Input) ->
+ InputTrimmed = case Input of
+ I when is_binary(I) ->
+ binary_part(I, 0, ?INPUT_TRIMLEN);
+ I when is_list(I) ->
+ lists:sublist(I, ?INPUT_TRIMLEN)
+ end,
{400, [{"Content-Type", "text/html"}],
io_lib:format(
"<html><body><p>~n" ++
"~s~n" ++
"~p~n" ++
- "</body></html>~n", [Text, Input])}.
+ "</body></html>~n", [Text, InputTrimmed])}.
success(Data) ->
{200, [{"Content-Type", "text/json"}], mochijson2:encode(Data)}.
@@ -165,9 +172,13 @@ add_rr_chain(Input) ->
add_chain_helper(Data) ->
case dnssecport:validate(Data) of
- {valid, [DS | Chain]} ->
+ {valid, ChainBin} ->
lager:debug("succesful DNSSEC validation"),
- success(catlfish:add_chain(DS, Chain, normal));
+ {DS, Chain} = lists:split(2, dns:decode_rrset(ChainBin)),
+ %%lager:debug("DS: ~p~nChain: ~p", [DS, Chain]),
+ success(catlfish:add_chain(dns:encode_rrset(DS),
+ [dns:encode_rrset(Chain)], % FIXME
+ normal));
{invalid, Reason} ->
lager:debug("DNSSEC validation failed with ~p", [Reason]),
err400(io_lib:format("add-rr-chain: invalid DS record: ~p",
diff --git a/tools/certtools.py b/tools/certtools.py
index 9296401..ec4177c 100644
--- a/tools/certtools.py
+++ b/tools/certtools.py
@@ -137,7 +137,7 @@ def pyopenssl_https_get(url):
return response_lines[len(response_lines) - 1]
def get_sth(baseurl):
- result = urlopen(baseurl + "open/gaol/v1/get-sth").read()
+ result = urlopen(baseurl + "dt/v1/get-sth").read()
return json.loads(result)
def get_proof_by_hash(baseurl, hash, tree_size):
@@ -145,7 +145,7 @@ def get_proof_by_hash(baseurl, hash, tree_size):
params = urllib.urlencode({"hash":base64.b64encode(hash),
"tree_size":tree_size})
result = \
- urlopen(baseurl + "open/gaol/v1/get-proof-by-hash?" + params).read()
+ urlopen(baseurl + "dt/v1/get-proof-by-hash?" + params).read()
return json.loads(result)
except urllib2.HTTPError, e:
print "ERROR:", e.read()
@@ -156,7 +156,7 @@ def get_consistency_proof(baseurl, tree_size1, tree_size2):
params = urllib.urlencode({"first":tree_size1,
"second":tree_size2})
result = \
- urlopen(baseurl + "open/gaol/v1/get-sth-consistency?" + params).read()
+ urlopen(baseurl + "dt/v1/get-sth-consistency?" + params).read()
return json.loads(result)["consistency"]
except urllib2.HTTPError, e:
print "ERROR:", e.read()
@@ -179,7 +179,8 @@ def unpack_tls_array(packed_data, length_len):
def add_chain(baseurl, submission):
try:
- result = urlopen(baseurl + "open/gaol/v1/add-blob", json.dumps(submission)).read()
+ result = urlopen(baseurl + "dt/v1/add-rr-chain",
+ json.dumps(submission)).read()
return json.loads(result)
except urllib2.HTTPError, e:
print "ERROR", e.code,":", e.read()
@@ -196,7 +197,7 @@ def add_chain(baseurl, submission):
def add_prechain(baseurl, submission):
try:
- result = urlopen(baseurl + "open/gaol/v1/add-pre-chain",
+ result = urlopen(baseurl + "dt/v1/add-pre-chain",
json.dumps(submission)).read()
return json.loads(result)
except urllib2.HTTPError, e:
@@ -215,7 +216,7 @@ def add_prechain(baseurl, submission):
def get_entries(baseurl, start, end):
params = urllib.urlencode({"start":start, "end":end})
try:
- result = urlopen(baseurl + "open/gaol/v1/get-entries?" + params).read()
+ result = urlopen(baseurl + "dt/v1/get-entries?" + params).read()
return json.loads(result)
except urllib2.HTTPError, e:
print "ERROR:", e.read()
diff --git a/tools/precerttools.py b/tools/precerttools.py
index 13ac572..9687e28 100644
--- a/tools/precerttools.py
+++ b/tools/precerttools.py
@@ -3,7 +3,7 @@
import sys
import hashlib
-import rfc2459
+import rfc2459 # debian package python-pyasn1-modules
from pyasn1.type import univ, tag
from pyasn1.codec.der import encoder, decoder
diff --git a/tools/testcase1.py b/tools/testcase1.py
index 1a294d9..ec85d85 100755
--- a/tools/testcase1.py
+++ b/tools/testcase1.py
@@ -13,23 +13,19 @@ import struct
import hashlib
import itertools
from certtools import *
+from dnstools import c14n_dsrr, unpack_rrset
baseurls = [sys.argv[1]]
logpublickeyfile = sys.argv[2]
cacertfile = sys.argv[3]
-certfiles = ["../tools/testcerts/cert1.txt", "../tools/testcerts/cert2.txt",
- "../tools/testcerts/cert3.txt", "../tools/testcerts/cert4.txt",
- "../tools/testcerts/cert5.txt"]
+RRfiles = ["../test/testdata/dnssec/testrrsets/req-basic"]
-def get_blob_from_file(filename):
- return [open(filename, 'r').read()]
+def get_rrset_from_file(filename):
+ return open(filename, 'r').read()
-cc1 = get_blob_from_file(certfiles[0])
-cc2 = get_blob_from_file(certfiles[1])
-cc3 = get_blob_from_file(certfiles[2])
-cc4 = get_blob_from_file(certfiles[3])
-cc5 = get_blob_from_file(certfiles[4])
+# TODO: Add more tests, like 4 more would be good.
+rr1 = get_rrset_from_file(RRfiles[0])
create_ssl_context(cafile=cacertfile)
@@ -76,15 +72,18 @@ def print_and_check_tree_size(expected, baseurl):
tree_size = sth["tree_size"]
assert_equal(tree_size, expected, "tree size", quiet=True)
-def do_add_chain(chain, baseurl):
+def do_add_chain(ignore1, ignore2): assert 0, "use do_add_rr() instead" # FIXME: remove
+
+def do_add_rr(rrset, baseurl):
global failures
- blob = ''.join(chain)
try:
- result = add_chain(baseurl, {"blob":base64.b64encode(blob)})
+ result = add_chain(baseurl, {"chain":base64.b64encode(rrset)})
except ValueError, e:
print_error("%s", e)
try:
- signed_entry = pack_cert(blob)
+ print "result:", result
+ signed_entry = pack_cert(c14n_dsrr(rrset))
+ print "signed_entry:", repr(signed_entry)
check_sct_signature(baseurl, signed_entry, result, publickey=logpublickey)
print_success("signature check succeeded")
except AssertionError, e:
@@ -95,8 +94,8 @@ def do_add_chain(chain, baseurl):
return result
def get_and_validate_proof(timestamp, chain, leaf_index, nentries, baseurl):
- blob = ''.join(chain)
- merkle_tree_leaf = pack_mtl(timestamp, blob)
+ dsrr = c14n_dsrr(chain)
+ merkle_tree_leaf = pack_mtl(timestamp, dsrr)
leaf_hash = get_leaf_hash(merkle_tree_leaf)
sth = get_sth(baseurl)
proof = get_proof_by_hash(baseurl, leaf_hash, sth["tree_size"])
@@ -109,7 +108,7 @@ def get_and_validate_proof(timestamp, chain, leaf_index, nentries, baseurl):
root_hash = base64.b64decode(sth["sha256_root_hash"])
assert_equal(root_hash, calc_root_hash, "verified root hash", nodata=True, quiet=True)
- get_and_check_entry(timestamp, blob, leaf_index, baseurl)
+ get_and_check_entry(timestamp, chain, leaf_index, baseurl)
def get_and_validate_consistency_proof(sth1, sth2, size1, size2, baseurl):
consistency_proof = [base64.decodestring(entry) for entry in get_consistency_proof(baseurl, size1, size2)]
@@ -121,15 +120,17 @@ def get_and_validate_consistency_proof(sth1, sth2, size1, size2, baseurl):
def get_and_check_entry(timestamp, chain, leaf_index, baseurl):
- blob = ''.join(chain)
entries = get_entries(baseurl, leaf_index, leaf_index)
assert_equal(len(entries), 1, "get_entries", quiet=True)
fetched_entry = entries["entries"][0]
- merkle_tree_leaf = pack_mtl(timestamp, blob)
+ merkle_tree_leaf = pack_mtl(timestamp, c14n_dsrr(chain))
leaf_input = base64.decodestring(fetched_entry["leaf_input"])
- extra_data = base64.decodestring(fetched_entry["extra_data"])
assert_equal(leaf_input, merkle_tree_leaf, "entry", nodata=True, quiet=True)
- assert_equal(extra_data, '\x00\x00\x00', "extra_data", quiet=True)
+ extra_data = base64.decodestring(fetched_entry["extra_data"])
+ chain_fetched = unpack_rrset(decode_certificate_chain(extra_data)[0])
+ chain_submitted = unpack_rrset(chain)[1:]
+ # FIXME: Might not have submitted trust anchors.
+ assert_equal(chain_fetched, chain_submitted, "chain", quiet=True)
def merge():
return subprocess.call(["../tools/merge", "--config", "../test/catlfish-test.cfg",
@@ -141,9 +142,9 @@ assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
for baseurl in baseurls:
print_and_check_tree_size(0, baseurl)
-testgroup("cert1")
+testgroup("rr1")
-result1 = do_add_chain(cc1, baseurls[0])
+result1 = do_add_rr(rr1, baseurls[0])
mergeresult = merge()
assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
@@ -154,7 +155,7 @@ for baseurl in baseurls:
print_and_check_tree_size(1, baseurl)
size_sth[1] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
-result2 = do_add_chain(cc1, baseurls[0])
+result2 = do_add_rr(rr1, baseurls[0])
assert_equal(result2["timestamp"], result1["timestamp"], "timestamp")
@@ -167,79 +168,23 @@ size1_v2_sth = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
assert_equal(size_sth[1], size1_v2_sth, "sth", nodata=True)
-# TODO: add invalid cert and check that it generates an error
-# and that treesize still is 1
-
-get_and_validate_proof(result1["timestamp"], cc1, 0, 0, baseurls[0])
-
-testgroup("cert2")
-
-result3 = do_add_chain(cc2, baseurls[0])
-
-mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
-
-for baseurl in baseurls:
- print_and_check_tree_size(2, baseurl)
-size_sth[2] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
-
-get_and_validate_proof(result1["timestamp"], cc1, 0, 1, baseurls[0])
-get_and_validate_proof(result3["timestamp"], cc2, 1, 1, baseurls[0])
-
-testgroup("cert3")
-
-result4 = do_add_chain(cc3, baseurls[0])
-
-mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
-
-for baseurl in baseurls:
- print_and_check_tree_size(3, baseurl)
-size_sth[3] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
-
-get_and_validate_proof(result1["timestamp"], cc1, 0, 2, baseurls[0])
-get_and_validate_proof(result3["timestamp"], cc2, 1, 2, baseurls[0])
-get_and_validate_proof(result4["timestamp"], cc3, 2, 1, baseurls[0])
-
-testgroup("cert4")
-
-result5 = do_add_chain(cc4, baseurls[0])
+# TODO: add an invalid chain and check that it generates an error and
+# that treesize still is 1
-mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
-
-for baseurl in baseurls:
- print_and_check_tree_size(4, baseurl)
-size_sth[4] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
-
-get_and_validate_proof(result1["timestamp"], cc1, 0, 2, baseurls[0])
-get_and_validate_proof(result3["timestamp"], cc2, 1, 2, baseurls[0])
-get_and_validate_proof(result4["timestamp"], cc3, 2, 2, baseurls[0])
-get_and_validate_proof(result5["timestamp"], cc4, 3, 2, baseurls[0])
-
-testgroup("cert5")
-
-result6 = do_add_chain(cc5, baseurls[0])
-
-mergeresult = merge()
-assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
-
-for baseurl in baseurls:
- print_and_check_tree_size(5, baseurl)
-size_sth[5] = base64.b64decode(get_sth(baseurls[0])["sha256_root_hash"])
+get_and_validate_proof(result1["timestamp"], rr1, 0, 0, baseurls[0])
-get_and_validate_proof(result1["timestamp"], cc1, 0, 3, baseurls[0])
-get_and_validate_proof(result3["timestamp"], cc2, 1, 3, baseurls[0])
-get_and_validate_proof(result4["timestamp"], cc3, 2, 3, baseurls[0])
-get_and_validate_proof(result5["timestamp"], cc4, 3, 3, baseurls[0])
-get_and_validate_proof(result6["timestamp"], cc5, 4, 1, baseurls[0])
+testgroup("proofs")
mergeresult = merge()
assert_equal(mergeresult, 0, "merge", quiet=True, fatal=True)
-for first_size in range(1, 5):
- for second_size in range(first_size + 1, 6):
- get_and_validate_consistency_proof(size_sth[first_size], size_sth[second_size], first_size, second_size, baseurls[0])
+for first_size in range(1, 1):
+ for second_size in range(first_size + 1, 2):
+ get_and_validate_consistency_proof(size_sth[first_size],
+ size_sth[second_size],
+ first_size,
+ second_size,
+ baseurls[0])
print "-------"
if failures: