From fe36969835c2f3be14e90a1ac7632fd4c638afaa Mon Sep 17 00:00:00 2001 From: josef Date: Tue, 25 Aug 2015 16:19:10 +0200 Subject: experimental python auditor added --- tools/josef_experimental.py | 68 +++++++++++++++++++++++++++++++++++++ tools/josef_experimental_auditor.py | 68 +++++++++++++++++++++++++++++++++++++ 2 files changed, 136 insertions(+) create mode 100755 tools/josef_experimental.py create mode 100755 tools/josef_experimental_auditor.py (limited to 'tools') diff --git a/tools/josef_experimental.py b/tools/josef_experimental.py new file mode 100755 index 0000000..da3f31e --- /dev/null +++ b/tools/josef_experimental.py @@ -0,0 +1,68 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +import time +from certtools import get_sth, get_consistency_proof, check_sth_signature, get_public_key_from_file, verify_consistency_proof + + +base_urls = ["https://plausible.ct.nordu.net/", + "https://ct1.digicert-ct.com/log/", + "https://ct.izenpe.com/", + "https://log.certly.io/", + "https://ct.googleapis.com/aviator/", + "https://ct.googleapis.com/pilot/", + "https://ct.googleapis.com/rocketeer/", + ] + +logkeys = {} +logkeys["https://plausible.ct.nordu.net/"] = get_public_key_from_file("../../plausible-logkey.pem") +logkeys["https://ct.googleapis.com/rocketeer/"] = get_public_key_from_file("../../rocketeer-logkey.pem") +logkeys["https://ct.googleapis.com/aviator/"] = get_public_key_from_file("../../aviator-logkey.pem") +logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pilot-logkey.pem") +logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem") +logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem") +logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem") +count = 0 +old_sth = {} + +# Get initial sth +for base_url in base_urls: + + old_sth[base_url] = get_sth(base_url) + # print old_sth[base_url]["sha256_root_hash"] + print str(count) + ": Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) + + try: + check_sth_signature(base_url, old_sth[base_url], logkeys[base_url]) + except: + print "Could not verify signature!!" + + +while True: + time.sleep(60) + count += 1 + for base_url in base_urls: + new_sth = get_sth(base_url) + print str(count) + ": Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) + try: + check_sth_signature(base_url, new_sth, logkeys[base_url]) + except: + print "Could not verify signature!!" + + if old_sth[base_url]["tree_size"] != new_sth["tree_size"]: + print "Wohoo, new STH! Checking..." 
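Note: get_sth() above wraps the RFC 6962 get-sth endpoint (section 4.3). A minimal standalone fetch, using only the standard library, looks roughly like this — the field names are fixed by the RFC, and the log URL is just one of the logs polled above:

    import json, urllib2

    def fetch_sth(base_url):
        # RFC 6962 4.3: the log returns tree_size, timestamp (milliseconds),
        # sha256_root_hash and tree_head_signature (the last two base64-encoded).
        return json.loads(urllib2.urlopen(base_url + "ct/v1/get-sth").read())

    sth = fetch_sth("https://ct.googleapis.com/pilot/")
    print sth["tree_size"], sth["sha256_root_hash"]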
+ try: + consistency_proof = get_consistency_proof(base_url, old_sth[base_url]["tree_size"], new_sth["tree_size"] ) + # print consistency_proof + print verify_consistency_proof(consistency_proof, old_sth[base_url]["tree_size"], new_sth["tree_size"], old_sth[base_url]["sha256_root_hash"]) + except: + print consistency_proof + finally: + old_sth[base_url] = new_sth + + + + + + + diff --git a/tools/josef_experimental_auditor.py b/tools/josef_experimental_auditor.py new file mode 100755 index 0000000..da3f31e --- /dev/null +++ b/tools/josef_experimental_auditor.py @@ -0,0 +1,68 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +import time +from certtools import get_sth, get_consistency_proof, check_sth_signature, get_public_key_from_file, verify_consistency_proof + + +base_urls = ["https://plausible.ct.nordu.net/", + "https://ct1.digicert-ct.com/log/", + "https://ct.izenpe.com/", + "https://log.certly.io/", + "https://ct.googleapis.com/aviator/", + "https://ct.googleapis.com/pilot/", + "https://ct.googleapis.com/rocketeer/", + ] + +logkeys = {} +logkeys["https://plausible.ct.nordu.net/"] = get_public_key_from_file("../../plausible-logkey.pem") +logkeys["https://ct.googleapis.com/rocketeer/"] = get_public_key_from_file("../../rocketeer-logkey.pem") +logkeys["https://ct.googleapis.com/aviator/"] = get_public_key_from_file("../../aviator-logkey.pem") +logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pilot-logkey.pem") +logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem") +logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem") +logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem") +count = 0 +old_sth = {} + +# Get initial sth +for base_url in base_urls: + + old_sth[base_url] = get_sth(base_url) + # print old_sth[base_url]["sha256_root_hash"] + print str(count) + ": Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) + + try: + check_sth_signature(base_url, old_sth[base_url], logkeys[base_url]) + except: + print "Could not verify signature!!" + + +while True: + time.sleep(60) + count += 1 + for base_url in base_urls: + new_sth = get_sth(base_url) + print str(count) + ": Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) + try: + check_sth_signature(base_url, new_sth, logkeys[base_url]) + except: + print "Could not verify signature!!" + + if old_sth[base_url]["tree_size"] != new_sth["tree_size"]: + print "Wohoo, new STH! Checking..." 
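A note on the error handling above: if get_consistency_proof() itself raises, the bare except clause prints consistency_proof, a name that was never bound in that iteration (or, worse, a stale proof left over from a previous log), so the handler can fail with a NameError of its own. A safer shape, sketched with hypothetical placeholders old_size and new_size for the two tree sizes:

    try:
        proof = get_consistency_proof(base_url, old_size, new_size)
    except Exception, err:
        print "get-consistency-proof failed:", err
    else:
        decoded_proof = [base64.b64decode(p) for p in proof]
        # verify decoded_proof as in the next revision of this script

The next commit also fixes the deeper problem: the proof nodes come back base64-encoded and must be decoded before verify_consistency_proof() sees them.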
+ try: + consistency_proof = get_consistency_proof(base_url, old_sth[base_url]["tree_size"], new_sth["tree_size"] ) + # print consistency_proof + print verify_consistency_proof(consistency_proof, old_sth[base_url]["tree_size"], new_sth["tree_size"], old_sth[base_url]["sha256_root_hash"]) + except: + print consistency_proof + finally: + old_sth[base_url] = new_sth + + + + + + + -- cgit v1.1 From 45ab48e10763c5e29a7e49c2abe1656798e0e774 Mon Sep 17 00:00:00 2001 From: josef Date: Wed, 26 Aug 2015 12:21:59 +0200 Subject: verifying consistency proofs working --- tools/josef_experimental.py | 33 ++++++++++++++++++++++----------- tools/josef_experimental_auditor.py | 33 ++++++++++++++++++++++----------- 2 files changed, 44 insertions(+), 22 deletions(-) (limited to 'tools') diff --git a/tools/josef_experimental.py b/tools/josef_experimental.py index da3f31e..dc1dc7e 100755 --- a/tools/josef_experimental.py +++ b/tools/josef_experimental.py @@ -2,6 +2,7 @@ # -*- coding: utf-8 -*- import time +import base64 from certtools import get_sth, get_consistency_proof, check_sth_signature, get_public_key_from_file, verify_consistency_proof @@ -22,15 +23,14 @@ logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pi logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem") logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem") logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem") -count = 0 old_sth = {} # Get initial sth +print time.strftime("%H:%M:%S", time.gmtime()) for base_url in base_urls: old_sth[base_url] = get_sth(base_url) - # print old_sth[base_url]["sha256_root_hash"] - print str(count) + ": Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) + print "Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) try: check_sth_signature(base_url, old_sth[base_url], logkeys[base_url]) @@ -39,24 +39,35 @@ for base_url in base_urls: while True: - time.sleep(60) - count += 1 + time.sleep(1*60-4) + print time.strftime("%H:%M:%S", time.gmtime()) for base_url in base_urls: new_sth = get_sth(base_url) - print str(count) + ": Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) + print "Received STH from " + base_url + ", timestamp: " + str(new_sth["timestamp"]) + ", size: " + str(new_sth["tree_size"]) try: check_sth_signature(base_url, new_sth, logkeys[base_url]) except: print "Could not verify signature!!" - if old_sth[base_url]["tree_size"] != new_sth["tree_size"]: + if old_sth[base_url]["tree_size"]!= new_sth["tree_size"]: print "Wohoo, new STH! Checking..." try: + # Hashes are base64 encoded from the server and needs to be decoded before checking proofs. 
consistency_proof = get_consistency_proof(base_url, old_sth[base_url]["tree_size"], new_sth["tree_size"] ) - # print consistency_proof - print verify_consistency_proof(consistency_proof, old_sth[base_url]["tree_size"], new_sth["tree_size"], old_sth[base_url]["sha256_root_hash"]) - except: - print consistency_proof + decoded_consistency_proof = [] + for item in consistency_proof: + decoded_consistency_proof.append(base64.b64decode(item)) + res = verify_consistency_proof(decoded_consistency_proof, old_sth[base_url]["tree_size"], new_sth["tree_size"], old_sth[base_url]["sha256_root_hash"]) + + if old_sth[base_url]["sha256_root_hash"] != str(base64.b64encode(res[0])): + print "Verification of old hash failed!!!" + print old_sth[base_url]["sha256_root_hash"], str(base64.b64encode(res[0])) + if new_sth["sha256_root_hash"] != str(base64.b64encode(res[1])): + print "Verification of new hash failed!!!" + print new_sth["sha256_root_hash"], str(base64.b64encode(res[1])) + + except Exception, err: + print Exception, err finally: old_sth[base_url] = new_sth diff --git a/tools/josef_experimental_auditor.py b/tools/josef_experimental_auditor.py index da3f31e..dc1dc7e 100755 --- a/tools/josef_experimental_auditor.py +++ b/tools/josef_experimental_auditor.py @@ -2,6 +2,7 @@ # -*- coding: utf-8 -*- import time +import base64 from certtools import get_sth, get_consistency_proof, check_sth_signature, get_public_key_from_file, verify_consistency_proof @@ -22,15 +23,14 @@ logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pi logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem") logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem") logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem") -count = 0 old_sth = {} # Get initial sth +print time.strftime("%H:%M:%S", time.gmtime()) for base_url in base_urls: old_sth[base_url] = get_sth(base_url) - # print old_sth[base_url]["sha256_root_hash"] - print str(count) + ": Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) + print "Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) try: check_sth_signature(base_url, old_sth[base_url], logkeys[base_url]) @@ -39,24 +39,35 @@ for base_url in base_urls: while True: - time.sleep(60) - count += 1 + time.sleep(1*60-4) + print time.strftime("%H:%M:%S", time.gmtime()) for base_url in base_urls: new_sth = get_sth(base_url) - print str(count) + ": Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) + print "Received STH from " + base_url + ", timestamp: " + str(new_sth["timestamp"]) + ", size: " + str(new_sth["tree_size"]) try: check_sth_signature(base_url, new_sth, logkeys[base_url]) except: print "Could not verify signature!!" - if old_sth[base_url]["tree_size"] != new_sth["tree_size"]: + if old_sth[base_url]["tree_size"]!= new_sth["tree_size"]: print "Wohoo, new STH! Checking..." try: + # Hashes are base64 encoded from the server and needs to be decoded before checking proofs. 
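The pattern above is worth spelling out: the log's JSON carries hashes base64-encoded, while certtools works on raw 32-byte digests, so proofs are decoded on the way in and the computed roots re-encoded before comparing against sha256_root_hash. The round trip is lossless:

    import base64, hashlib

    raw = hashlib.sha256("example").digest()   # 32 raw bytes, as certtools returns
    b64 = base64.b64encode(raw)                # the form the log's JSON carries
    assert base64.b64decode(b64) == raw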
consistency_proof = get_consistency_proof(base_url, old_sth[base_url]["tree_size"], new_sth["tree_size"] ) - # print consistency_proof - print verify_consistency_proof(consistency_proof, old_sth[base_url]["tree_size"], new_sth["tree_size"], old_sth[base_url]["sha256_root_hash"]) - except: - print consistency_proof + decoded_consistency_proof = [] + for item in consistency_proof: + decoded_consistency_proof.append(base64.b64decode(item)) + res = verify_consistency_proof(decoded_consistency_proof, old_sth[base_url]["tree_size"], new_sth["tree_size"], old_sth[base_url]["sha256_root_hash"]) + + if old_sth[base_url]["sha256_root_hash"] != str(base64.b64encode(res[0])): + print "Verification of old hash failed!!!" + print old_sth[base_url]["sha256_root_hash"], str(base64.b64encode(res[0])) + if new_sth["sha256_root_hash"] != str(base64.b64encode(res[1])): + print "Verification of new hash failed!!!" + print new_sth["sha256_root_hash"], str(base64.b64encode(res[1])) + + except Exception, err: + print Exception, err finally: old_sth[base_url] = new_sth -- cgit v1.1 From 4b93c3cf8c0ef58bf67e1e37b163f3aa68325635 Mon Sep 17 00:00:00 2001 From: josef Date: Thu, 27 Aug 2015 10:00:02 +0200 Subject: creating more user-friendly tool --- tools/josef_auditor.py | 136 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 136 insertions(+) create mode 100755 tools/josef_auditor.py (limited to 'tools') diff --git a/tools/josef_auditor.py b/tools/josef_auditor.py new file mode 100755 index 0000000..4cb0d04 --- /dev/null +++ b/tools/josef_auditor.py @@ -0,0 +1,136 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +import time +import base64 +import argparse +from pympler.asizeof import asizeof +from certtools import * + + +base_urls = ["https://plausible.ct.nordu.net/", + "https://ct1.digicert-ct.com/log/", + "https://ct.izenpe.com/", + "https://log.certly.io/", + "https://ct.googleapis.com/aviator/", + "https://ct.googleapis.com/pilot/", + "https://ct.googleapis.com/rocketeer/", + ] + +logkeys = {} +logkeys["https://plausible.ct.nordu.net/"] = get_public_key_from_file("../../plausible-logkey.pem") +logkeys["https://ct.googleapis.com/rocketeer/"] = get_public_key_from_file("../../rocketeer-logkey.pem") +logkeys["https://ct.googleapis.com/aviator/"] = get_public_key_from_file("../../aviator-logkey.pem") +logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pilot-logkey.pem") +logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem") +logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem") +logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem") + +parser = argparse.ArgumentParser(description="") +parser.add_argument('--audit', action='store_true', help="run lightweight auditor ensuring consistency in STH") +parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH") + + +def reduce_leafs_to_root(layer0): + if len(layer0) == 0: + return [[hashlib.sha256().digest()]] + current_layer = layer0 + while len(current_layer) > 1: + current_layer = next_merkle_layer(current_layer) + return current_layer + +# Get STH and verify signature +def fetch_all_sth(): + sths = {} + for base_url in base_urls: + try: + sths[base_url] = get_sth(base_url) + except: + print "Failed to retrieve STH from " + base_url + continue + + try: + check_sth_signature(base_url, sths[base_url], logkeys[base_url]) + except: + print "Could not verify signature from " + 
base_url + "!!!" + continue + return sths + + +def verify_consistency(old, new): + for url in old: + # try: + if old[url]["tree_size"]!= new[url]["tree_size"]: + consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"] ) + decoded_consistency_proof = [] + for item in consistency_proof: + decoded_consistency_proof.append(base64.b64decode(item)) + res = verify_consistency_proof(decoded_consistency_proof, old[url]["tree_size"], new[url]["tree_size"], old[url]["sha256_root_hash"]) + + if old[url]["sha256_root_hash"] != str(base64.b64encode(res[0])): + print "Verification of old hash failed!!!" + print old[url]["sha256_root_hash"], str(base64.b64encode(res[0])) + elif new[url]["sha256_root_hash"] != str(base64.b64encode(res[1])): + print "Verification of new hash failed!!!" + print new[url]["sha256_root_hash"], str(base64.b64encode(res[1])) + else: + print time.strftime("%H:%M:%S", time.gmtime()) + " New STH from " + url + ", timestamp: " + str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK." + + # except: + # print "ERROR: Could not verify consistency for " + url + + +def fetch_and_build_tree(old_sth, base_url): + print "Getting all entries from " + base_url + + sth = old_sth[base_url] + + entries = [] + leafs = [] + + while len(leafs) < sth["tree_size"]: + pre_size = len(leafs) + entries = get_entries(base_url, len(leafs), sth["tree_size"])["entries"] + + for item in entries: + leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"]))) + print "Got entries " + str(pre_size) + " to " + str(len(leafs)) + " (total leaf size: " + str(asizeof(leafs)/1024/1024) + "MB)" + + + # tree = build_merkle_tree(leafs) + root = base64.b64encode(reduce_leafs_to_root(leafs)[0]) + # print "Tree size: " + str(asizeof(tree)/1024/1024) + "MB" + # print len(leafs) + + if root == sth["sha256_root_hash"]: + print "Verifying root hashes...OK." + else: + print "ERROR: Failed to verify root hashes!" + print "STH root: " + sth["sha256_root_hash"] + print "Tree root: " + root + + +def main(args): + print "Started " + time.strftime("%H:%M:%S", time.gmtime()) + old_sth = fetch_all_sth() + + if args.build_sth: + fetch_and_build_tree(old_sth, base_urls[2]) + + if args.audit: + print "Running auditor for " +str(len(base_urls)) + " logs..." + + while True: + time.sleep(1*60-4) + # print time.strftime("%H:%M:%S", time.gmtime()) + " checking for updates..." 
+ new_sth = fetch_all_sth() + verify_consistency(old_sth, new_sth) + old_sth = new_sth + + + + + +if __name__ == '__main__': + main(parser.parse_args()) + -- cgit v1.1 From 3ce32ce3b55a118ed95b88751d16a9f5b11c9789 Mon Sep 17 00:00:00 2001 From: josef Date: Thu, 27 Aug 2015 12:05:47 +0200 Subject: significantly reduced memory usage while building trees --- tools/josef_auditor.py | 76 +++++++++++++++++++++++++++++++++++--------------- 1 file changed, 53 insertions(+), 23 deletions(-) (limited to 'tools') diff --git a/tools/josef_auditor.py b/tools/josef_auditor.py index 4cb0d04..4e60f3c 100755 --- a/tools/josef_auditor.py +++ b/tools/josef_auditor.py @@ -31,13 +31,46 @@ parser.add_argument('--audit', action='store_true', help="run lightweight audito parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH") -def reduce_leafs_to_root(layer0): - if len(layer0) == 0: - return [[hashlib.sha256().digest()]] - current_layer = layer0 - while len(current_layer) > 1: - current_layer = next_merkle_layer(current_layer) - return current_layer +# def reduce_leafs_to_root(layer0): +# if len(layer0) == 0: +# return [[hashlib.sha256().digest()]] +# current_layer = layer0 +# while len(current_layer) > 1: +# current_layer = next_merkle_layer(current_layer) +# return current_layer + +def reduce_layer(layer): + new_layer = [] + while len(layer) > 1: + e1 = layer.pop(0) + e2 = layer.pop(0) + new_layer.append(internal_hash((e1,e2))) + return new_layer + +def reduce_tree(entries, layers): + if len(entries) == 0 and layers is []: + return [[hashlib.sha256().digest()]] + + layer_idx = 0 + layers[layer_idx] += entries + + while len(layers[layer_idx]) > 1: + if len(layers) == layer_idx + 1: + layers.append([]) + + layers[layer_idx + 1] += reduce_layer(layers[layer_idx]) + layer_idx += 1 + return layers + +def reduce_subtree_to_root(layers): + while len(layers) > 1: + layers[1] += next_merkle_layer(layers[0]) + del layers[0] + + if len(layers[0]) > 1: + return next_merkle_layer(layers[0]) + return layers[0] + # Get STH and verify signature def fetch_all_sth(): @@ -81,26 +114,23 @@ def verify_consistency(old, new): def fetch_and_build_tree(old_sth, base_url): - print "Getting all entries from " + base_url - sth = old_sth[base_url] + subtree = [[]] + idx = 0 - entries = [] - leafs = [] - - while len(leafs) < sth["tree_size"]: - pre_size = len(leafs) - entries = get_entries(base_url, len(leafs), sth["tree_size"])["entries"] + print "Getting all entries from " + base_url + while idx < sth["tree_size"]: + pre_size = idx + entries = get_entries(base_url, idx, sth["tree_size"])["entries"] + new_leafs = [] for item in entries: - leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"]))) - print "Got entries " + str(pre_size) + " to " + str(len(leafs)) + " (total leaf size: " + str(asizeof(leafs)/1024/1024) + "MB)" - + new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"]))) + idx += len(new_leafs) + print "Got entries " + str(pre_size) + " to " + str(idx) + " (tree size: " + str(asizeof(subtree)) + " B)" + subtree = reduce_tree(new_leafs, subtree) - # tree = build_merkle_tree(leafs) - root = base64.b64encode(reduce_leafs_to_root(leafs)[0]) - # print "Tree size: " + str(asizeof(tree)/1024/1024) + "MB" - # print len(leafs) + root = base64.b64encode(reduce_subtree_to_root(subtree)[0]) if root == sth["sha256_root_hash"]: print "Verifying root hashes...OK." @@ -115,6 +145,7 @@ def main(args): old_sth = fetch_all_sth() if args.build_sth: + print "Building trees from entries. 
This may take a while, go get coffee or something..." fetch_and_build_tree(old_sth, base_urls[2]) if args.audit: @@ -122,7 +153,6 @@ def main(args): while True: time.sleep(1*60-4) - # print time.strftime("%H:%M:%S", time.gmtime()) + " checking for updates..." new_sth = fetch_all_sth() verify_consistency(old_sth, new_sth) old_sth = new_sth -- cgit v1.1 From c0a6e244f5105894a1723c42991a895437397766 Mon Sep 17 00:00:00 2001 From: josef Date: Fri, 28 Aug 2015 08:34:58 +0200 Subject: inclusion proofs and buxfixes --- tools/certtools.py | 5 +- tools/josef_auditor.py | 23 ++-- tools/josef_experimental_auditor.py | 248 ++++++++++++++++++++++++++++++------ 3 files changed, 221 insertions(+), 55 deletions(-) (limited to 'tools') diff --git a/tools/certtools.py b/tools/certtools.py index 307a728..6dd97c4 100644 --- a/tools/certtools.py +++ b/tools/certtools.py @@ -436,7 +436,8 @@ def internal_hash(pair): hash.update(struct.pack(">b", 1)) hash.update(pair[0]) hash.update(pair[1]) - return hash.digest() + digest = hash.digest() + return digest def chunks(l, n): return [l[i:i+n] for i in range(0, len(l), n)] @@ -700,10 +701,8 @@ def nodes_for_index(pos, treesize): nodes = [] level = 0 pos ^= 1 - #print pos, level while level < height: pos_level0 = pos * (2 ** level) - #print pos, level if pos_level0 < treesize: nodes.append((pos, level)) pos >>= 1 diff --git a/tools/josef_auditor.py b/tools/josef_auditor.py index 4e60f3c..38c0ba6 100755 --- a/tools/josef_auditor.py +++ b/tools/josef_auditor.py @@ -4,7 +4,7 @@ import time import base64 import argparse -from pympler.asizeof import asizeof +# from pympler.asizeof import asizeof from certtools import * @@ -31,14 +31,6 @@ parser.add_argument('--audit', action='store_true', help="run lightweight audito parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH") -# def reduce_leafs_to_root(layer0): -# if len(layer0) == 0: -# return [[hashlib.sha256().digest()]] -# current_layer = layer0 -# while len(current_layer) > 1: -# current_layer = next_merkle_layer(current_layer) -# return current_layer - def reduce_layer(layer): new_layer = [] while len(layer) > 1: @@ -77,9 +69,10 @@ def fetch_all_sth(): sths = {} for base_url in base_urls: try: - sths[base_url] = get_sth(base_url) + sths[base_url] = get_sth(base_url) except: print "Failed to retrieve STH from " + base_url + sths[base_url] = None continue try: @@ -92,7 +85,7 @@ def fetch_all_sth(): def verify_consistency(old, new): for url in old: - # try: + if old[url] is not None: if old[url]["tree_size"]!= new[url]["tree_size"]: consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"] ) decoded_consistency_proof = [] @@ -109,8 +102,6 @@ def verify_consistency(old, new): else: print time.strftime("%H:%M:%S", time.gmtime()) + " New STH from " + url + ", timestamp: " + str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK." 
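The reduce_tree()/reduce_subtree_to_root() pair introduced here replaces keeping every leaf in memory: layer 0 absorbs each batch, complete pairs are hashed upward immediately, and only the unpaired right edge of each layer (a handful of hashes, logarithmic in tree size) survives between batches — which is the point of this commit. A toy driver mirroring fetch_and_build_tree(), with made-up leaf data:

    import base64, hashlib

    # stand-in leaves; the script derives real ones with get_leaf_hash()
    leaves = [hashlib.sha256(chr(0) + str(i)).digest() for i in xrange(1000)]

    subtree = [[]]
    for start in xrange(0, len(leaves), 64):        # feed the tree in batches
        subtree = reduce_tree(leaves[start:start + 64], subtree)
    root = base64.b64encode(reduce_subtree_to_root(subtree)[0])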
- # except: - # print "ERROR: Could not verify consistency for " + url def fetch_and_build_tree(old_sth, base_url): @@ -127,7 +118,7 @@ def fetch_and_build_tree(old_sth, base_url): for item in entries: new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"]))) idx += len(new_leafs) - print "Got entries " + str(pre_size) + " to " + str(idx) + " (tree size: " + str(asizeof(subtree)) + " B)" + print "Got entries " + str(pre_size) + " to " + str(idx) #+ " (tree size: " + str(asizeof(subtree)) + " B)" subtree = reduce_tree(new_leafs, subtree) root = base64.b64encode(reduce_subtree_to_root(subtree)[0]) @@ -146,7 +137,9 @@ def main(args): if args.build_sth: print "Building trees from entries. This may take a while, go get coffee or something..." - fetch_and_build_tree(old_sth, base_urls[2]) + # for url in base_urls: + # fetch_and_build_tree(old_sth, url) + fetch_and_build_tree(old_sth, base_urls[0]) if args.audit: print "Running auditor for " +str(len(base_urls)) + " logs..." diff --git a/tools/josef_experimental_auditor.py b/tools/josef_experimental_auditor.py index dc1dc7e..e3f0ca1 100755 --- a/tools/josef_experimental_auditor.py +++ b/tools/josef_experimental_auditor.py @@ -3,7 +3,9 @@ import time import base64 -from certtools import get_sth, get_consistency_proof, check_sth_signature, get_public_key_from_file, verify_consistency_proof +import argparse +# from pympler.asizeof import asizeof +from certtools import * base_urls = ["https://plausible.ct.nordu.net/", @@ -23,57 +25,229 @@ logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pi logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem") logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem") logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem") -old_sth = {} -# Get initial sth -print time.strftime("%H:%M:%S", time.gmtime()) -for base_url in base_urls: - - old_sth[base_url] = get_sth(base_url) - print "Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) - - try: - check_sth_signature(base_url, old_sth[base_url], logkeys[base_url]) - except: - print "Could not verify signature!!" 
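The get_entries() change above fixes an off-by-one: in RFC 6962 get-entries, start and end are 0-based and inclusive, so for a tree of size N the last valid index is N-1. The surrounding loop is also deliberately defensive — a log may return fewer entries than requested, so the cursor only advances by what actually came back:

    idx = 0
    while idx < sth["tree_size"]:
        # end index is inclusive, hence tree_size - 1
        batch = get_entries(base_url, idx, sth["tree_size"] - 1)["entries"]
        idx += len(batch)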
- - -while True: - time.sleep(1*60-4) - print time.strftime("%H:%M:%S", time.gmtime()) +parser = argparse.ArgumentParser(description="") +parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH") +parser.add_argument('--audit2', action='store_true', help="run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries") +parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH") +parser.add_argument('--verify-index', default=None, help="Verify a specific index in all logs" ) +parser.add_argument('--verify-hash', action='store_true', help="Verify a specific index in all logs" ) + + +def reduce_layer(layer): + new_layer = [] + while len(layer) > 1: + e1 = layer.pop(0) + e2 = layer.pop(0) + new_layer.append(internal_hash((e1,e2))) + return new_layer + +def reduce_tree(entries, layers): + if len(entries) == 0 and layers is []: + return [[hashlib.sha256().digest()]] + + layer_idx = 0 + layers[layer_idx] += entries + + while len(layers[layer_idx]) > 1: + if len(layers) == layer_idx + 1: + layers.append([]) + + layers[layer_idx + 1] += reduce_layer(layers[layer_idx]) + layer_idx += 1 + return layers + +def reduce_subtree_to_root(layers): + while len(layers) > 1: + if len(layers[1]) == 0: + layers[1] = layers[0] + else: + layers[1] += next_merkle_layer(layers[0]) + del layers[0] + + if len(layers[0]) > 1: + return next_merkle_layer(layers[0]) + return layers[0] + +def fetch_all_sth(): + sths = {} for base_url in base_urls: - new_sth = get_sth(base_url) - print "Received STH from " + base_url + ", timestamp: " + str(new_sth["timestamp"]) + ", size: " + str(new_sth["tree_size"]) try: - check_sth_signature(base_url, new_sth, logkeys[base_url]) + sths[base_url] = get_sth(base_url) except: - print "Could not verify signature!!" + print "Failed to retrieve STH from " + base_url + continue - if old_sth[base_url]["tree_size"]!= new_sth["tree_size"]: - print "Wohoo, new STH! Checking..." - try: - # Hashes are base64 encoded from the server and needs to be decoded before checking proofs. - consistency_proof = get_consistency_proof(base_url, old_sth[base_url]["tree_size"], new_sth["tree_size"] ) + try: + check_sth_signature(base_url, sths[base_url], logkeys[base_url]) + except: + print "Could not verify signature from " + base_url + "!!!" + continue + return sths + +def verify_consistency(old, new): + for url in old: + # try: + if old[url]["tree_size"]!= new[url]["tree_size"]: + consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"]) decoded_consistency_proof = [] for item in consistency_proof: decoded_consistency_proof.append(base64.b64decode(item)) - res = verify_consistency_proof(decoded_consistency_proof, old_sth[base_url]["tree_size"], new_sth["tree_size"], old_sth[base_url]["sha256_root_hash"]) + res = verify_consistency_proof(decoded_consistency_proof, old[url]["tree_size"], new[url]["tree_size"], old[url]["sha256_root_hash"]) - if old_sth[base_url]["sha256_root_hash"] != str(base64.b64encode(res[0])): + if old[url]["sha256_root_hash"] != str(base64.b64encode(res[0])): print "Verification of old hash failed!!!" - print old_sth[base_url]["sha256_root_hash"], str(base64.b64encode(res[0])) - if new_sth["sha256_root_hash"] != str(base64.b64encode(res[1])): + print old[url]["sha256_root_hash"], str(base64.b64encode(res[0])) + elif new[url]["sha256_root_hash"] != str(base64.b64encode(res[1])): print "Verification of new hash failed!!!" 
- print new_sth["sha256_root_hash"], str(base64.b64encode(res[1])) - - except Exception, err: - print Exception, err - finally: - old_sth[base_url] = new_sth + print new[url]["sha256_root_hash"], str(base64.b64encode(res[1])) + else: + print time.strftime("%H:%M:%S", time.gmtime()) + " New STH from " + url + ", timestamp: " + str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK." + # except: + # print "ERROR: Could not verify consistency for " + url +def verify_inclusion_all(old, new): + for url in old: + try: + if old[url]["tree_size"]!= new[url]["tree_size"]: + entries = get_entries(url, old[url]["tree_size"]-1, new[url]["tree_size"] -1)["entries"] + success = True + for i in entries: + h = get_leaf_hash(base64.b64decode(i["leaf_input"])) + if not verify_inclusion_by_hash(url, h): + success = False + + if success: + print "Verifying inclusion for " + str(len(entries)) + " new entries in " + url + " ...OK" + else: + print "ERROR: Failed to prove inclusion of all new entries in " + url + except: + print "ERROR: Failed to prove inclusion of all new entries in " + url + + +def fetch_and_build_tree(old_sth, base_url): + sth = old_sth[base_url] + subtree = [[]] + idx = 0 + + print "Getting all entries from " + base_url + while idx < sth["tree_size"]: + pre_size = idx + entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"] + + new_leafs = [] + for item in entries: + new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"]))) + idx += len(new_leafs) + print "Got entries " + str(pre_size) + " to " + str(idx) #+ " (tree size: " + str(asizeof(subtree)) + " B)" + subtree = reduce_tree(new_leafs, subtree) + + root = base64.b64encode(reduce_subtree_to_root(subtree)[0]) + + if root == sth["sha256_root_hash"]: + print "Verifying root hashes...OK." + else: + print "ERROR: Failed to verify root hashes!" + print "STH root: " + sth["sha256_root_hash"] + print "Tree root: " + root + +def verify_inclusion_by_hash(base_url, leaf_hash): + try: + tmp_sth = get_sth(base_url) + proof = get_proof_by_hash(base_url, leaf_hash, tmp_sth["tree_size"]) + + decoded_inclusion_proof = [] + for item in proof["audit_path"]: + decoded_inclusion_proof.append(base64.b64decode(item)) + + root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash)) + + if tmp_sth["sha256_root_hash"] == root: + # print "Verifying inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url + "...OK." + return True + else: + print "ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url + return False + except: + print "ERROR: Could not prove inclusion for hashed entry in " + base_url + return False +def verify_inclusion_by_index(base_url, index): + try: + tmp_sth = get_sth(base_url) + proof = get_proof_by_index(base_url, index, tmp_sth["tree_size"]) + decoded_inclusion_proof = [] + for item in proof["audit_path"]: + decoded_inclusion_proof.append(base64.b64decode(item)) + root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, index, tmp_sth["tree_size"], get_leaf_hash(base64.b64decode(proof["leaf_input"])))) + if tmp_sth["sha256_root_hash"] == root: + print "Verifying inclusion for entry " + str(index) + " in " + base_url + "...OK." 
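Condensed, the inclusion check above does the following (all names from certtools; leaf_hash is assumed to already hold the entry's RFC 6962 leaf hash):

    sth = get_sth(base_url)
    proof = get_proof_by_hash(base_url, leaf_hash, sth["tree_size"])
    path = [base64.b64decode(p) for p in proof["audit_path"]]
    root = verify_inclusion_proof(path, proof["leaf_index"],
                                  sth["tree_size"], leaf_hash)
    print sth["sha256_root_hash"] == base64.b64encode(root)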
+ else: + print "ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url + except: + print "ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url + +def get_proof_by_index(baseurl, index, tree_size): + try: + params = urllib.urlencode({"leaf_index":index, + "tree_size":tree_size}) + result = \ + urlopen(baseurl + "ct/v1/get-entry-and-proof?" + params).read() + return json.loads(result) + except urllib2.HTTPError, e: + print "ERROR:", e.read() + sys.exit(0) + + +def main(args): + print "Started " + time.strftime("%H:%M:%S", time.gmtime()) + if args.verify_index is None and not args.build_sth and not args.audit and not args.audit2 and not args.verify_hash: + print "Nothing to do." + return + else: + sth = fetch_all_sth() + + if args.verify_index is not None: + for url in base_urls: + verify_inclusion_by_index(url, int(args.verify_index)) + + if args.verify_hash: + idx = 1337 + url = base_urls[0] + entries = get_entries(url, idx, idx)["entries"] + h = get_leaf_hash(base64.b64decode(entries[0]["leaf_input"])) + verify_inclusion_by_hash(url, h) + + if args.build_sth: + print "Building trees from entries. This may take a while, go get coffee or something..." + fetch_and_build_tree(sth, base_urls[2]) + + if args.audit: + print "Running auditor for " +str(len(base_urls)) + " logs..." + while True: + time.sleep(1*60-4) + new_sth = fetch_all_sth() + verify_consistency(sth, new_sth) + sth = new_sth + + if args.audit2: + print "Running auditor2 for " +str(len(base_urls)) + " logs..." + while True: + time.sleep(1*60-4) + new_sth = fetch_all_sth() + verify_consistency(sth, new_sth) + verify_inclusion_all(sth, new_sth) + sth = new_sth + + + print "Done. Exiting..." + + + +if __name__ == '__main__': + main(parser.parse_args()) -- cgit v1.1 From 61a9fe4088539cacf3858ff130e136abad48ffbf Mon Sep 17 00:00:00 2001 From: josef Date: Mon, 31 Aug 2015 14:59:47 +0200 Subject: bugfixes. 
Working on nagiosification --- tools/josef_auditor.py | 459 +++++++++++++++++++++++------- tools/josef_experimental_auditor.py | 551 ++++++++++++++++++++++++------------ 2 files changed, 714 insertions(+), 296 deletions(-) (limited to 'tools') diff --git a/tools/josef_auditor.py b/tools/josef_auditor.py index 38c0ba6..454c90b 100755 --- a/tools/josef_auditor.py +++ b/tools/josef_auditor.py @@ -1,21 +1,30 @@ #!/usr/bin/python -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import time +import datetime import base64 import argparse -# from pympler.asizeof import asizeof +import errno from certtools import * +NAGIOS_OK = 0 +NAGIOS_WARN = 1 +NAGIOS_CRIT = 2 +NAGIOS_UNKNOWN = 3 + +DEFAULT_CUR_FILE = 'all-sth.json' base_urls = ["https://plausible.ct.nordu.net/", - "https://ct1.digicert-ct.com/log/", - "https://ct.izenpe.com/", - "https://log.certly.io/", - "https://ct.googleapis.com/aviator/", - "https://ct.googleapis.com/pilot/", - "https://ct.googleapis.com/rocketeer/", - ] + "https://ct1.digicert-ct.com/log/", + "https://ct.izenpe.com/", + "https://log.certly.io/", + "https://ct.googleapis.com/aviator/", + "https://ct.googleapis.com/pilot/", + "https://ct.googleapis.com/rocketeer/", + "https://ct.ws.symantec.com/", + "https://ctlog.api.venafi.com/", + ] logkeys = {} logkeys["https://plausible.ct.nordu.net/"] = get_public_key_from_file("../../plausible-logkey.pem") @@ -24,136 +33,366 @@ logkeys["https://ct.googleapis.com/aviator/"] = get_public_key_from_file("../../ logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pilot-logkey.pem") logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem") logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem") +logkeys["https://ct.ws.symantec.com/"] = get_public_key_from_file("../../symantec-logkey.pem") +logkeys["https://ctlog.api.venafi.com/"] = get_public_key_from_file("../../venafi-logkey.pem") logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem") parser = argparse.ArgumentParser(description="") -parser.add_argument('--audit', action='store_true', help="run lightweight auditor ensuring consistency in STH") +parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH") +parser.add_argument('--audit2', action='store_true', help="run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries") +parser.add_argument('--audit3', action='store_true', help="continously run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries") parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH") - +parser.add_argument('--verify-index', default=None, help="Verify a specific index in all logs" ) +# parser.add_argument('--verify-hash', action='store_true', help="Verify an entry hash in all logs" ) +parser.add_argument('--roots', action='store_true', help="Check accepted root certificates for all logs" ) +parser.add_argument('--cur-sth', + metavar='file', + default=DEFAULT_CUR_FILE, + help="File containing current STH (default=%s)" % DEFAULT_CUR_FILE) + +timings = {} +errors = [] + +class UTC(datetime.tzinfo): + def utcoffset(self, dt): + return datetime.timedelta(hours=0) + def dst(self, dt): + return datetime.timedelta(0) def reduce_layer(layer): - new_layer = [] - while len(layer) > 1: - e1 = layer.pop(0) - e2 = layer.pop(0) - new_layer.append(internal_hash((e1,e2))) - return new_layer + 
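The NAGIOS_* constants encode the standard Nagios plugin protocol: the monitoring host inspects only the process exit status — 0 OK, 1 WARNING, 2 CRITICAL, 3 UNKNOWN — plus whatever the plugin printed. A hypothetical helper showing how the accumulated errors list maps onto that (nagios_exit is not in the patch; the __main__ block below does the same inline):

    import sys

    def nagios_exit(errors):
        if not errors:
            print "Everything OK."
            sys.exit(NAGIOS_OK)      # exit status 0
        for e in errors:
            print e
        sys.exit(NAGIOS_WARN)        # exit status 1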
new_layer = [] + while len(layer) > 1: + e1 = layer.pop(0) + e2 = layer.pop(0) + new_layer.append(internal_hash((e1,e2))) + return new_layer def reduce_tree(entries, layers): - if len(entries) == 0 and layers is []: - return [[hashlib.sha256().digest()]] + if len(entries) == 0 and layers is []: + return [[hashlib.sha256().digest()]] - layer_idx = 0 - layers[layer_idx] += entries + layer_idx = 0 + layers[layer_idx] += entries - while len(layers[layer_idx]) > 1: - if len(layers) == layer_idx + 1: - layers.append([]) + while len(layers[layer_idx]) > 1: + if len(layers) == layer_idx + 1: + layers.append([]) - layers[layer_idx + 1] += reduce_layer(layers[layer_idx]) - layer_idx += 1 - return layers + layers[layer_idx + 1] += reduce_layer(layers[layer_idx]) + layer_idx += 1 + return layers def reduce_subtree_to_root(layers): - while len(layers) > 1: - layers[1] += next_merkle_layer(layers[0]) - del layers[0] - - if len(layers[0]) > 1: - return next_merkle_layer(layers[0]) - return layers[0] + while len(layers) > 1: + if len(layers[1]) == 0: + layers[1] = layers[0] + else: + layers[1] += next_merkle_layer(layers[0]) + del layers[0] + if len(layers[0]) > 1: + return next_merkle_layer(layers[0]) + return layers[0] -# Get STH and verify signature def fetch_all_sth(): - sths = {} - for base_url in base_urls: - try: - sths[base_url] = get_sth(base_url) - except: - print "Failed to retrieve STH from " + base_url - sths[base_url] = None - continue - - try: - check_sth_signature(base_url, sths[base_url], logkeys[base_url]) - except: - print "Could not verify signature from " + base_url + "!!!" - continue - return sths - + sths = {} + for base_url in base_urls: + # Fetch STH + try: + sths[base_url] = get_sth(base_url) + except: + sths[base_url] = None + error_str = time.strftime('%H:%M:%S') + " ERROR: Failed to retrieve STH from " + base_url + print error_str + errors.append(error_str) + continue + + # Check signature on the STH + try: + check_sth_signature(base_url, sths[base_url], logkeys[base_url]) + except: + error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url + "!!!" 
+ print error_str + errors.append(error_str) + continue + + return sths + +def verify_progress(old, new): + print "Verifying progress" + for url in new: + if new and old and new[url] and old[url]: + if new[url]["tree_size"] == old[url]["tree_size"]: + if old[url]["sha256_root_hash"] != new[url]["sha256_root_hash"]: + errors.append(time.strftime('%H:%M:%S') + " CRITICAL: root hash is different for same tree size in " + url) + # print "tree size:", newsth["tree_size"], + # print "old hash:", b64_to_b16(oldsth["sha256_root_hash"]) + # print "new hash:", b64_to_b16(newsth["sha256_root_hash"]) + # sys.exit(NAGIOS_CRIT) + # TODO + elif new[url]["tree_size"] < old[url]["tree_size"]: + # if not args.allow_lag: + errors.append(time.strftime('%H:%M:%S') + " CRITICAL: new tree smaller than previous tree (%d < %d)" % \ + (new[url]["tree_size"], old[url]["tree_size"])) + # sys.exit(NAGIOS_CRIT) + if new[url]: + age = time.time() - new[url]["timestamp"]/1000 + sth_time = datetime.datetime.fromtimestamp(new[url]['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S") + # roothash = b64_to_b16(sth['sha256_root_hash']) + roothash = new[url]['sha256_root_hash'] + if age > 24 * 3600: + errors.append(time.strftime('%H:%M:%S') + " CRITICAL: %s is older than 24h: %s UTC" % (url, sth_time)) + elif age > 12 * 3600: + errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 12h: %s UTC" % (url, sth_time)) + elif age > 6 * 3600: + errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 6h: %s UTC" % (url, sth_time)) + # elif age > 2 * 3600: + # errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 2h: %s UTC" % (url, sth_time)) def verify_consistency(old, new): - for url in old: - if old[url] is not None: - if old[url]["tree_size"]!= new[url]["tree_size"]: - consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"] ) - decoded_consistency_proof = [] - for item in consistency_proof: - decoded_consistency_proof.append(base64.b64decode(item)) - res = verify_consistency_proof(decoded_consistency_proof, old[url]["tree_size"], new[url]["tree_size"], old[url]["sha256_root_hash"]) - - if old[url]["sha256_root_hash"] != str(base64.b64encode(res[0])): - print "Verification of old hash failed!!!" - print old[url]["sha256_root_hash"], str(base64.b64encode(res[0])) - elif new[url]["sha256_root_hash"] != str(base64.b64encode(res[1])): - print "Verification of new hash failed!!!" - print new[url]["sha256_root_hash"], str(base64.b64encode(res[1])) - else: - print time.strftime("%H:%M:%S", time.gmtime()) + " New STH from " + url + ", timestamp: " + str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK." - - + for url in old: + # try: + if old[url] and new[url] and old[url]["tree_size"]!= new[url]["tree_size"]: + consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"]) + decoded_consistency_proof = [] + for item in consistency_proof: + decoded_consistency_proof.append(base64.b64decode(item)) + res = verify_consistency_proof(decoded_consistency_proof, old[url]["tree_size"], new[url]["tree_size"], old[url]["sha256_root_hash"]) + + if old[url]["sha256_root_hash"] != str(base64.b64encode(res[0])): + print time.strftime('%H:%M:%S') + " Verification of old hash failed! 
" + old[url]["sha256_root_hash"], str(base64.b64encode(res[0])) + errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + old[url]["tree_size"]) + elif new[url]["sha256_root_hash"] != str(base64.b64encode(res[1])): + print time.strftime('%H:%M:%S') + " Verification of new hash failed! " + new[url]["sha256_root_hash"], str(base64.b64encode(res[1])) + errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + new[url]["tree_size"]) + else: + print time.strftime("%H:%M:%S") + " New STH from " + url + ", timestamp: " + \ + str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK." + + # except: + # print "ERROR: Could not verify consistency for " + url + +def verify_inclusion_all(old, new): + for url in old: + try: + if old[url] and new[url]: + if old[url]["tree_size"]!= new[url]["tree_size"]: + entries = get_entries(url, old[url]["tree_size"], new[url]["tree_size"] -1)["entries"] + success = True + for i in entries: + h = get_leaf_hash(base64.b64decode(i["leaf_input"])) + if not verify_inclusion_by_hash(url, h): + success = False + + if success: + print time.strftime("%H:%M:%S") + " Verifying inclusion for " + str(len(entries)) + " new entries in " + url + " ...OK" + else: + print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url + errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url) + except: + print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url + errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url) def fetch_and_build_tree(old_sth, base_url): - sth = old_sth[base_url] - subtree = [[]] - idx = 0 - - print "Getting all entries from " + base_url - while idx < sth["tree_size"]: - pre_size = idx - entries = get_entries(base_url, idx, sth["tree_size"])["entries"] - - new_leafs = [] - for item in entries: - new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"]))) - idx += len(new_leafs) - print "Got entries " + str(pre_size) + " to " + str(idx) #+ " (tree size: " + str(asizeof(subtree)) + " B)" - subtree = reduce_tree(new_leafs, subtree) - - root = base64.b64encode(reduce_subtree_to_root(subtree)[0]) - - if root == sth["sha256_root_hash"]: - print "Verifying root hashes...OK." - else: - print "ERROR: Failed to verify root hashes!" - print "STH root: " + sth["sha256_root_hash"] - print "Tree root: " + root + sth = old_sth[base_url] + subtree = [[]] + idx = 0 + + res_strings = [""] + + print time.strftime('%H:%M:%S') + " Getting all entries from " + base_url + while idx < sth["tree_size"]: + pre_size = idx + entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"] + + new_leafs = [] + for item in entries: + new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"]))) + idx += len(new_leafs) + print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url + subtree = reduce_tree(new_leafs, subtree) + + root = base64.b64encode(reduce_subtree_to_root(subtree)[0]) + + if root == sth["sha256_root_hash"]: + print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK." + res_strings.append("STH for " + base_url + " built successfully.") + else: + print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! 
STH root: " + sth["sha256_root_hash"] + ", Tree root: " + root + res_strings.append(time.strftime('%H:%M:%S') + " " + base_url + " Failed! STH root: " + sth["sha256_root_hash"] + " Calculated root: " + root) + errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hash for " + base_url + ", tre size " + sth["tree_size"]) + + for item in res_strings: + print item + "\n" + +def verify_inclusion_by_hash(base_url, leaf_hash): + try: + tmp_sth = get_sth(base_url) + proof = get_proof_by_hash(base_url, leaf_hash, tmp_sth["tree_size"]) + + decoded_inclusion_proof = [] + for item in proof["audit_path"]: + decoded_inclusion_proof.append(base64.b64decode(item)) + + root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash)) + + if tmp_sth["sha256_root_hash"] == root: + # print "Verifying inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url + "...OK." + return True + else: + print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url + errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url) + return False + except: + print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url + errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url) + return False + +def verify_inclusion_by_index(base_url, index): + try: + tmp_sth = get_sth(base_url) + proof = get_proof_by_index(base_url, index, tmp_sth["tree_size"]) + + decoded_inclusion_proof = [] + for item in proof["audit_path"]: + decoded_inclusion_proof.append(base64.b64decode(item)) + + root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, index, tmp_sth["tree_size"], get_leaf_hash(base64.b64decode(proof["leaf_input"])))) + + if tmp_sth["sha256_root_hash"] == root: + print time.strftime('%H:%M:%S') + " Verifying inclusion for entry " + str(index) + " in " + base_url + "...OK." + else: + print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url + errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url) + except: + print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url + errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url) + +def get_proof_by_index(baseurl, index, tree_size): + try: + params = urllib.urlencode({"leaf_index":index, + "tree_size":tree_size}) + result = \ + urlopen(baseurl + "ct/v1/get-entry-and-proof?" 
+ params).read() + return json.loads(result) + except urllib2.HTTPError, e: + print "ERROR:", e.read() + sys.exit(0) + +def get_all_roots(base_url): + # print "Fetching roots from " + base_url + result = urlopen(base_url + "ct/v1/get-roots").read() + certs = json.loads(result)["certificates"] + print time.strftime('%H:%M:%S') + " Received " + str(len(certs)) + " certs from " + base_url + + for accepted_cert in certs: + subject = get_cert_info(base64.decodestring(accepted_cert))["subject"] + issuer = get_cert_info(base64.decodestring(accepted_cert))["issuer"] + if subject == issuer: + root_cert = base64.decodestring(accepted_cert) + print get_cert_info(root_cert)["subject"] + +def print_errors(errors): + print "Encountered " + str(len(errors)) + " errors:" + for item in errors: + print item + +def print_timings(timings): + for item in timings: + m,s = divmod(timings[item]["longest"], 60) + h,m = divmod(m, 60) + print item + " last seen " + datetime.datetime.fromtimestamp(int(timings[item]["last"])/1000).strftime('%Y-%m-%d %H:%M:%S') \ + + " longest between two STH: " + str(int(h)) + "h " + str(int(m)) + "m "# + str(int(s)) + "s." + + +def read_sth(fn): + try: + f = open(fn) + except IOError, e: + if e.errno == errno.ENOENT: + return None + raise e + return json.loads(f.read()) + + +def write_file(fn, sth): + tempname = fn + ".new" + open(tempname, 'w').write(json.dumps(sth)) + mv_file(tempname, fn) def main(args): - print "Started " + time.strftime("%H:%M:%S", time.gmtime()) - old_sth = fetch_all_sth() + + # print time.strftime("%H:%M:%S") + " Starting..." + if args.verify_index is None and not args.build_sth and not args.audit and not args.audit2 \ + and not args.audit3 and not args.verify_hash and not args.roots: + + print time.strftime('%H:%M:%S') + " Nothing to do." + return + else: + sth = fetch_all_sth() + + if args.verify_index is not None: + for url in base_urls: + verify_inclusion_by_index(url, int(args.verify_index)) + + + if args.roots: + print time.strftime('%H:%M:%S') + " Getting accepted Root Certs from all logs..." + for url in base_urls: + get_all_roots(url) + + + if args.build_sth: + print time.strftime('%H:%M:%S') + " Building trees from entries. This may take a while, go get coffee or something..." + for base_url in base_urls: + fetch_and_build_tree(sth, base_url) + # fetch_and_build_tree(sth, base_urls[2]) + + if args.audit: + print time.strftime('%H:%M:%S') + " Running auditor1 for " +str(len(base_urls)) + " logs..." + old_sth = read_sth(args.cur_sth) + if old_sth: + verify_consistency(old_sth, sth) + else: + print "No old sth found..." + write_file(args.cur_sth, sth) + + + if args.audit3: + print time.strftime('%H:%M:%S') + " Running auditor3 for " +str(len(base_urls)) + " logs..." + while True: + time.sleep(30) + new_sth = fetch_all_sth() + verify_consistency(sth, new_sth) + verify_inclusion_all(sth, new_sth) + sth = new_sth + + if args.audit2: + print time.strftime('%H:%M:%S') + " Running auditor2 for " +str(len(base_urls)) + " logs..." + old_sth = read_sth(args.cur_sth) + verify_progress(old_sth, sth) + if old_sth: + verify_consistency(old_sth, sth) + verify_inclusion_all(old_sth, sth) + write_file(args.cur_sth, sth) + + + +if __name__ == '__main__': + main(parser.parse_args()) + if len(errors) == 0: + print time.strftime('%H:%M:%S') + " Everything OK." + sys.exit(NAGIOS_OK) + else: + print_errors(errors) + sys.exit(NAGIOS_WARN) - if args.build_sth: - print "Building trees from entries. This may take a while, go get coffee or something..." 
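write_file() above goes through a temporary name plus mv_file() (assumed to come from certtools) rather than writing all-sth.json in place: a rename is atomic on POSIX filesystems, so a crash mid-write can never leave the next Nagios run reading a truncated STH file. A self-contained equivalent, under a hypothetical name:

    import json, os

    def write_file_atomic(fn, sth):
        tmp = fn + ".new"
        with open(tmp, "w") as f:
            f.write(json.dumps(sth))
        os.rename(tmp, fn)           # atomic replacement on POSIX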
- # for url in base_urls: - # fetch_and_build_tree(old_sth, url) - fetch_and_build_tree(old_sth, base_urls[0]) - if args.audit: - print "Running auditor for " +str(len(base_urls)) + " logs..." - while True: - time.sleep(1*60-4) - new_sth = fetch_all_sth() - verify_consistency(old_sth, new_sth) - old_sth = new_sth -if __name__ == '__main__': - main(parser.parse_args()) diff --git a/tools/josef_experimental_auditor.py b/tools/josef_experimental_auditor.py index e3f0ca1..b7d3bc6 100755 --- a/tools/josef_experimental_auditor.py +++ b/tools/josef_experimental_auditor.py @@ -1,21 +1,30 @@ #!/usr/bin/python -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- import time +import datetime import base64 import argparse -# from pympler.asizeof import asizeof +import errno from certtools import * +NAGIOS_OK = 0 +NAGIOS_WARN = 1 +NAGIOS_CRIT = 2 +NAGIOS_UNKNOWN = 3 + +DEFAULT_CUR_FILE = 'all-sth.json' base_urls = ["https://plausible.ct.nordu.net/", - "https://ct1.digicert-ct.com/log/", - "https://ct.izenpe.com/", - "https://log.certly.io/", - "https://ct.googleapis.com/aviator/", - "https://ct.googleapis.com/pilot/", - "https://ct.googleapis.com/rocketeer/", - ] + "https://ct1.digicert-ct.com/log/", + "https://ct.izenpe.com/", + "https://log.certly.io/", + "https://ct.googleapis.com/aviator/", + "https://ct.googleapis.com/pilot/", + "https://ct.googleapis.com/rocketeer/", + "https://ct.ws.symantec.com/", + "https://ctlog.api.venafi.com/", + ] logkeys = {} logkeys["https://plausible.ct.nordu.net/"] = get_public_key_from_file("../../plausible-logkey.pem") @@ -24,172 +33,258 @@ logkeys["https://ct.googleapis.com/aviator/"] = get_public_key_from_file("../../ logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pilot-logkey.pem") logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem") logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem") +logkeys["https://ct.ws.symantec.com/"] = get_public_key_from_file("../../symantec-logkey.pem") +logkeys["https://ctlog.api.venafi.com/"] = get_public_key_from_file("../../venafi-logkey.pem") logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem") parser = argparse.ArgumentParser(description="") parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH") parser.add_argument('--audit2', action='store_true', help="run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries") +parser.add_argument('--audit3', action='store_true', help="continously run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries") parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH") parser.add_argument('--verify-index', default=None, help="Verify a specific index in all logs" ) -parser.add_argument('--verify-hash', action='store_true', help="Verify a specific index in all logs" ) - +# parser.add_argument('--verify-hash', action='store_true', help="Verify an entry hash in all logs" ) +parser.add_argument('--roots', action='store_true', help="Check accepted root certificates for all logs" ) +parser.add_argument('--cur-sth', + metavar='file', + default=DEFAULT_CUR_FILE, + help="File containing current STH (default=%s)" % DEFAULT_CUR_FILE) + +timings = {} +errors = [] + +class UTC(datetime.tzinfo): + def utcoffset(self, dt): + return datetime.timedelta(hours=0) + def dst(self, dt): + return datetime.timedelta(0) def 
reduce_layer(layer): - new_layer = [] - while len(layer) > 1: - e1 = layer.pop(0) - e2 = layer.pop(0) - new_layer.append(internal_hash((e1,e2))) - return new_layer + new_layer = [] + while len(layer) > 1: + e1 = layer.pop(0) + e2 = layer.pop(0) + new_layer.append(internal_hash((e1,e2))) + return new_layer def reduce_tree(entries, layers): - if len(entries) == 0 and layers is []: - return [[hashlib.sha256().digest()]] + if len(entries) == 0 and layers is []: + return [[hashlib.sha256().digest()]] - layer_idx = 0 - layers[layer_idx] += entries + layer_idx = 0 + layers[layer_idx] += entries - while len(layers[layer_idx]) > 1: - if len(layers) == layer_idx + 1: - layers.append([]) + while len(layers[layer_idx]) > 1: + if len(layers) == layer_idx + 1: + layers.append([]) - layers[layer_idx + 1] += reduce_layer(layers[layer_idx]) - layer_idx += 1 - return layers + layers[layer_idx + 1] += reduce_layer(layers[layer_idx]) + layer_idx += 1 + return layers def reduce_subtree_to_root(layers): - while len(layers) > 1: - if len(layers[1]) == 0: - layers[1] = layers[0] - else: - layers[1] += next_merkle_layer(layers[0]) - del layers[0] + while len(layers) > 1: + if len(layers[1]) == 0: + layers[1] = layers[0] + else: + layers[1] += next_merkle_layer(layers[0]) + del layers[0] - if len(layers[0]) > 1: - return next_merkle_layer(layers[0]) - return layers[0] + if len(layers[0]) > 1: + return next_merkle_layer(layers[0]) + return layers[0] def fetch_all_sth(): - sths = {} - for base_url in base_urls: - try: - sths[base_url] = get_sth(base_url) - except: - print "Failed to retrieve STH from " + base_url - continue - - try: - check_sth_signature(base_url, sths[base_url], logkeys[base_url]) - except: - print "Could not verify signature from " + base_url + "!!!" - continue - return sths + sths = {} + for base_url in base_urls: + # Fetch STH + try: + sths[base_url] = get_sth(base_url) + except: + sths[base_url] = None + error_str = time.strftime('%H:%M:%S') + " ERROR: Failed to retrieve STH from " + base_url + print error_str + errors.append(error_str) + continue + + # Check signature on the STH + try: + check_sth_signature(base_url, sths[base_url], logkeys[base_url]) + except: + error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url + "!!!" 
+ print error_str + errors.append(error_str) + continue + + # Add timing info + # try: + # if base_url not in timings: + # timings[base_url] = {"last":sths[base_url]["timestamp"], "longest":0} + # else: + # then = datetime.datetime.fromtimestamp(int(timings[base_url]["last"])/1000) + # now = datetime.datetime.fromtimestamp(int(sths[base_url]["timestamp"])/1000) + # tdelta = now - then + + # timings[base_url]["last"] = sths[base_url]["timestamp"] + + # if tdelta.total_seconds() > timings[base_url]["longest"]: + # timings[base_url]["longest"] = tdelta.total_seconds() + + # except Exception, err: + # print Exception, err + # print time.strftime('%H:%M:%S') + "ERROR: Failed to set TIME info for STH" + + return sths + +def verify_progress(old, new): + print "Verifying progress" + for url in new: + if new and old and new[url] and old[url]: + if new[url]["tree_size"] == old[url]["tree_size"]: + if old[url]["sha256_root_hash"] != new[url]["sha256_root_hash"]: + errors.append(time.strftime('%H:%M:%S') + " CRITICAL: root hash is different for same tree size in " + url) + # print "tree size:", newsth["tree_size"], + # print "old hash:", b64_to_b16(oldsth["sha256_root_hash"]) + # print "new hash:", b64_to_b16(newsth["sha256_root_hash"]) + # sys.exit(NAGIOS_CRIT) + # TODO + elif new[url]["tree_size"] < old[url]["tree_size"]: + # if not args.allow_lag: + errors.append(time.strftime('%H:%M:%S') + " CRITICAL: new tree smaller than previous tree (%d < %d)" % \ + (new[url]["tree_size"], old[url]["tree_size"])) + # sys.exit(NAGIOS_CRIT) + if new[url]: + age = time.time() - new[url]["timestamp"]/1000 + sth_time = datetime.datetime.fromtimestamp(new[url]['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S") + # roothash = b64_to_b16(sth['sha256_root_hash']) + roothash = new[url]['sha256_root_hash'] + if age > 24 * 3600: + errors.append(time.strftime('%H:%M:%S') + " CRITICAL: %s is older than 24h: %s UTC" % (url, sth_time)) + elif age > 12 * 3600: + errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 12h: %s UTC" % (url, sth_time)) + elif age > 6 * 3600: + errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 6h: %s UTC" % (url, sth_time)) + # elif age > 2 * 3600: + # errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 2h: %s UTC" % (url, sth_time)) def verify_consistency(old, new): - for url in old: - # try: - if old[url]["tree_size"]!= new[url]["tree_size"]: - consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"]) - decoded_consistency_proof = [] - for item in consistency_proof: - decoded_consistency_proof.append(base64.b64decode(item)) - res = verify_consistency_proof(decoded_consistency_proof, old[url]["tree_size"], new[url]["tree_size"], old[url]["sha256_root_hash"]) - - if old[url]["sha256_root_hash"] != str(base64.b64encode(res[0])): - print "Verification of old hash failed!!!" - print old[url]["sha256_root_hash"], str(base64.b64encode(res[0])) - elif new[url]["sha256_root_hash"] != str(base64.b64encode(res[1])): - print "Verification of new hash failed!!!" - print new[url]["sha256_root_hash"], str(base64.b64encode(res[1])) - else: - print time.strftime("%H:%M:%S", time.gmtime()) + " New STH from " + url + ", timestamp: " + str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK." 
-
-	# except:
-	#	print "ERROR: Could not verify consistency for " + url
+	for url in old:
+	# try:
+		if old[url] and new[url] and old[url]["tree_size"]!= new[url]["tree_size"]:
+			consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"])
+			decoded_consistency_proof = []
+			for item in consistency_proof:
+				decoded_consistency_proof.append(base64.b64decode(item))
+			res = verify_consistency_proof(decoded_consistency_proof, old[url]["tree_size"], new[url]["tree_size"], old[url]["sha256_root_hash"])
+
+			if old[url]["sha256_root_hash"] != str(base64.b64encode(res[0])):
+				print time.strftime('%H:%M:%S') + " Verification of old hash failed! " + old[url]["sha256_root_hash"], str(base64.b64encode(res[0]))
+				errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + str(old[url]["tree_size"]))
+			elif new[url]["sha256_root_hash"] != str(base64.b64encode(res[1])):
+				print time.strftime('%H:%M:%S') + " Verification of new hash failed! " + new[url]["sha256_root_hash"], str(base64.b64encode(res[1]))
+				errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + str(new[url]["tree_size"]))
+			else:
+				print time.strftime("%H:%M:%S") + " New STH from " + url + ", timestamp: " + \
+				str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK."
+
+	# except:
+	#	print "ERROR: Could not verify consistency for " + url

 def verify_inclusion_all(old, new):
-	for url in old:
-		try:
-			if old[url]["tree_size"]!= new[url]["tree_size"]:
-				entries = get_entries(url, old[url]["tree_size"]-1, new[url]["tree_size"] -1)["entries"]
-				success = True
-				for i in entries:
-					h = get_leaf_hash(base64.b64decode(i["leaf_input"]))
-					if not verify_inclusion_by_hash(url, h):
-						success = False
-
-				if success:
-					print "Verifying inclusion for " + str(len(entries)) + " new entries in " + url + " ...OK"
-				else:
-					print "ERROR: Failed to prove inclusion of all new entries in " + url
-		except:
-			print "ERROR: Failed to prove inclusion of all new entries in " + url
-
+	for url in old:
+		try:
+			if old[url] and new[url]:
+				if old[url]["tree_size"]!= new[url]["tree_size"]:
+					entries = get_entries(url, old[url]["tree_size"], new[url]["tree_size"] -1)["entries"]
+					success = True
+					for i in entries:
+						h = get_leaf_hash(base64.b64decode(i["leaf_input"]))
+						if not verify_inclusion_by_hash(url, h):
+							success = False
+
+					if success:
+						print time.strftime("%H:%M:%S") + " Verifying inclusion for " + str(len(entries)) + " new entries in " + url + " ...OK"
+					else:
+						print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
+						errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
+		except:
+			print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
+			errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)

 def fetch_and_build_tree(old_sth, base_url):
-	sth = old_sth[base_url]
-	subtree = [[]]
-	idx = 0
-
-	print "Getting all entries from " + base_url
-	while idx < sth["tree_size"]:
-		pre_size = idx
-		entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"]
-
-		new_leafs = []
-		for item in entries:
-			new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
-		idx += len(new_leafs)
-		print "Got entries " + str(pre_size) + " to " + str(idx) #+ " (tree size: " + str(asizeof(subtree)) + " B)"
-		subtree = reduce_tree(new_leafs, subtree)
-
-	root = base64.b64encode(reduce_subtree_to_root(subtree)[0])
-
-	if root == sth["sha256_root_hash"]:
-		print "Verifying root hashes...OK."
-	else:
-		print "ERROR: Failed to verify root hashes!"
-		print "STH root: " + sth["sha256_root_hash"]
-		print "Tree root: " + root
+	sth = old_sth[base_url]
+	subtree = [[]]
+	idx = 0
+
+	res_strings = [""]
+
+	print time.strftime('%H:%M:%S') + " Getting all entries from " + base_url
+	while idx < sth["tree_size"]:
+		pre_size = idx
+		entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"]
+
+		new_leafs = []
+		for item in entries:
+			new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
+		idx += len(new_leafs)
+		print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url
+		subtree = reduce_tree(new_leafs, subtree)
+
+	root = base64.b64encode(reduce_subtree_to_root(subtree)[0])
+
+	if root == sth["sha256_root_hash"]:
+		print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK."
+		res_strings.append("STH for " + base_url + " built successfully.")
+	else:
+		print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! STH root: " + sth["sha256_root_hash"] + ", Tree root: " + root
+		res_strings.append(time.strftime('%H:%M:%S') + " " + base_url + " Failed! STH root: " + sth["sha256_root_hash"] + " Calculated root: " + root)
+		errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hash for " + base_url + ", tree size " + str(sth["tree_size"]))
+
+	for item in res_strings:
+		print item + "\n"

 def verify_inclusion_by_hash(base_url, leaf_hash):
-	try:
-		tmp_sth = get_sth(base_url)
-		proof = get_proof_by_hash(base_url, leaf_hash, tmp_sth["tree_size"])
-
-		decoded_inclusion_proof = []
-		for item in proof["audit_path"]:
-			decoded_inclusion_proof.append(base64.b64decode(item))
-
-		root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash))
-
-		if tmp_sth["sha256_root_hash"] == root:
-			# print "Verifying inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url + "...OK."
-			return True
-		else:
-			print "ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url
-			return False
-	except:
-		print "ERROR: Could not prove inclusion for hashed entry in " + base_url
-		return False
+	try:
+		tmp_sth = get_sth(base_url)
+		proof = get_proof_by_hash(base_url, leaf_hash, tmp_sth["tree_size"])
+
+		decoded_inclusion_proof = []
+		for item in proof["audit_path"]:
+			decoded_inclusion_proof.append(base64.b64decode(item))
+
+		root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash))
+
+		if tmp_sth["sha256_root_hash"] == root:
+			# print "Verifying inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url + "...OK."
+ return True + else: + print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url + errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url) + return False + except: + print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url + errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url) + return False def verify_inclusion_by_index(base_url, index): - try: - tmp_sth = get_sth(base_url) - proof = get_proof_by_index(base_url, index, tmp_sth["tree_size"]) + try: + tmp_sth = get_sth(base_url) + proof = get_proof_by_index(base_url, index, tmp_sth["tree_size"]) - decoded_inclusion_proof = [] - for item in proof["audit_path"]: - decoded_inclusion_proof.append(base64.b64decode(item)) + decoded_inclusion_proof = [] + for item in proof["audit_path"]: + decoded_inclusion_proof.append(base64.b64decode(item)) - root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, index, tmp_sth["tree_size"], get_leaf_hash(base64.b64decode(proof["leaf_input"])))) + root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, index, tmp_sth["tree_size"], get_leaf_hash(base64.b64decode(proof["leaf_input"])))) - if tmp_sth["sha256_root_hash"] == root: - print "Verifying inclusion for entry " + str(index) + " in " + base_url + "...OK." - else: - print "ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url - except: - print "ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url + if tmp_sth["sha256_root_hash"] == root: + print time.strftime('%H:%M:%S') + " Verifying inclusion for entry " + str(index) + " in " + base_url + "...OK." + else: + print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url + errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url) + except: + print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url + errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url) def get_proof_by_index(baseurl, index, tree_size): try: @@ -202,52 +297,136 @@ def get_proof_by_index(baseurl, index, tree_size): print "ERROR:", e.read() sys.exit(0) +def get_all_roots(base_url): + # print "Fetching roots from " + base_url + result = urlopen(base_url + "ct/v1/get-roots").read() + certs = json.loads(result)["certificates"] + print time.strftime('%H:%M:%S') + " Received " + str(len(certs)) + " certs from " + base_url + + for accepted_cert in certs: + subject = get_cert_info(base64.decodestring(accepted_cert))["subject"] + issuer = get_cert_info(base64.decodestring(accepted_cert))["issuer"] + if subject == issuer: + root_cert = base64.decodestring(accepted_cert) + print get_cert_info(root_cert)["subject"] + +def print_errors(errors): + print "Encountered " + str(len(errors)) + " errors:" + for item in errors: + print item + +def print_timings(timings): + for item in timings: + m,s = divmod(timings[item]["longest"], 60) + h,m = divmod(m, 60) + print item + " last seen " + datetime.datetime.fromtimestamp(int(timings[item]["last"])/1000).strftime('%Y-%m-%d %H:%M:%S') \ + + " longest between two STH: " + str(int(h)) + "h " + str(int(m)) + "m "# + str(int(s)) + "s." 
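The read_sth and write_file helpers that follow persist the most recent set of STHs to disk; write_file serialises to a ".new" temp file and renames it into place with mv_file (presumably the helper pulled in by certtools' star import), so a crash mid-write cannot leave a truncated JSON cache behind. A minimal standard-library sketch of the same pattern, assuming source and target live on the same POSIX filesystem (where os.rename is atomic):

    import json
    import os

    def write_sth_atomic(fn, sth):
        # Serialise to a sibling temp file, then atomically swap it into place.
        tempname = fn + ".new"
        with open(tempname, 'w') as f:
            f.write(json.dumps(sth))
        os.rename(tempname, fn)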
+ + +def read_sth(fn): + try: + f = open(fn) + except IOError, e: + if e.errno == errno.ENOENT: + return None + raise e + return json.loads(f.read()) + -def main(args): - print "Started " + time.strftime("%H:%M:%S", time.gmtime()) - if args.verify_index is None and not args.build_sth and not args.audit and not args.audit2 and not args.verify_hash: - print "Nothing to do." - return - else: - sth = fetch_all_sth() - - if args.verify_index is not None: - for url in base_urls: - verify_inclusion_by_index(url, int(args.verify_index)) - - if args.verify_hash: - idx = 1337 - url = base_urls[0] - entries = get_entries(url, idx, idx)["entries"] - h = get_leaf_hash(base64.b64decode(entries[0]["leaf_input"])) - verify_inclusion_by_hash(url, h) - - if args.build_sth: - print "Building trees from entries. This may take a while, go get coffee or something..." - fetch_and_build_tree(sth, base_urls[2]) - - if args.audit: - print "Running auditor for " +str(len(base_urls)) + " logs..." - while True: - time.sleep(1*60-4) - new_sth = fetch_all_sth() - verify_consistency(sth, new_sth) - sth = new_sth - - if args.audit2: - print "Running auditor2 for " +str(len(base_urls)) + " logs..." - while True: - time.sleep(1*60-4) - new_sth = fetch_all_sth() - verify_consistency(sth, new_sth) - verify_inclusion_all(sth, new_sth) - sth = new_sth - - - print "Done. Exiting..." +def write_file(fn, sth): + tempname = fn + ".new" + open(tempname, 'w').write(json.dumps(sth)) + mv_file(tempname, fn) +def main(args): + + # print time.strftime("%H:%M:%S") + " Starting..." + if args.verify_index is None and not args.build_sth and not args.audit and not args.audit2 \ + and not args.audit3 and not args.verify_hash and not args.roots: + + print time.strftime('%H:%M:%S') + " Nothing to do." + return + else: + sth = fetch_all_sth() + + if args.verify_index is not None: + for url in base_urls: + verify_inclusion_by_index(url, int(args.verify_index)) + + # if args.verify_hash: + # idx = 1337 + # url = base_urls[0] + # entries = get_entries(url, idx, idx)["entries"] + # h = get_leaf_hash(base64.b64decode(entries[0]["leaf_input"])) + # verify_inclusion_by_hash(url, h) + + if args.roots: + print time.strftime('%H:%M:%S') + " Getting accepted Root Certs from all logs..." + for url in base_urls: + get_all_roots(url) + + + if args.build_sth: + print time.strftime('%H:%M:%S') + " Building trees from entries. This may take a while, go get coffee or something..." + for base_url in base_urls: + fetch_and_build_tree(sth, base_url) + # fetch_and_build_tree(sth, base_urls[2]) + + if args.audit: + print time.strftime('%H:%M:%S') + " Running auditor1 for " +str(len(base_urls)) + " logs..." + old_sth = read_sth(args.cur_sth) + if old_sth: + verify_consistency(old_sth, sth) + else: + print "No old sth found..." + write_file(args.cur_sth, sth) + + + if args.audit3: + print time.strftime('%H:%M:%S') + " Running auditor3 for " +str(len(base_urls)) + " logs..." + while True: + time.sleep(30) + new_sth = fetch_all_sth() + verify_consistency(sth, new_sth) + verify_inclusion_all(sth, new_sth) + sth = new_sth + + if args.audit2: + print time.strftime('%H:%M:%S') + " Running auditor2 for " +str(len(base_urls)) + " logs..." + old_sth = read_sth(args.cur_sth) + # print "Verifying progress..." + verify_progress(old_sth, sth) + if old_sth: + print "Verifying consistency..." + verify_consistency(old_sth, sth) + print "Verifying inclusion..." 
+			verify_inclusion_all(old_sth, sth)
+		write_file(args.cur_sth, sth)
+
+
 if __name__ == '__main__':
-	main(parser.parse_args())
+	# try:
+	main(parser.parse_args())
+	if len(errors) == 0:
+		print time.strftime('%H:%M:%S') + " Everything OK."
+		sys.exit(NAGIOS_OK)
+	else:
+		# print "errors found!"
+		print_errors(errors)
+		sys.exit(NAGIOS_WARN)
+	# except:
+	#	pass
+	# finally:
+	#	# print_timings(timings)
+	#	print_errors(errors)
+
+
+
+
+
+
+
--
cgit v1.1


From 54e38a8c5ac97bffdd1fba995375f8e80727b6e5 Mon Sep 17 00:00:00 2001
From: josef
Date: Tue, 1 Sep 2015 14:11:22 +0200
Subject: nagios compatible auditor

---
 tools/josef_nagios_auditor.py | 347 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 347 insertions(+)
 create mode 100755 tools/josef_nagios_auditor.py

(limited to 'tools')

diff --git a/tools/josef_nagios_auditor.py b/tools/josef_nagios_auditor.py
new file mode 100755
index 0000000..6e36568
--- /dev/null
+++ b/tools/josef_nagios_auditor.py
@@ -0,0 +1,347 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+import time
+import datetime
+import base64
+import argparse
+import errno
+from certtools import *
+
+NAGIOS_OK = 0
+NAGIOS_WARN = 1
+NAGIOS_CRIT = 2
+NAGIOS_UNKNOWN = 3
+
+errors = []
+
+parser = argparse.ArgumentParser(description="")
+parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH")
+parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH")
+parser.add_argument('--baseurl', required=True, help="Base URL for CT log")
+parser.add_argument('--sthfile',
+					required=True,
+					metavar='file',
+					help="File containing current STH")
+parser.add_argument('--keyfile',
+					metavar='file',
+					required=True,
+					help="File containing the log's public key")
+
+class UTC(datetime.tzinfo):
+	def utcoffset(self, dt):
+		return datetime.timedelta(hours=0)
+	def dst(self, dt):
+		return datetime.timedelta(0)
+
+def reduce_layer(layer):
+	new_layer = []
+	while len(layer) > 1:
+		e1 = layer.pop(0)
+		e2 = layer.pop(0)
+		new_layer.append(internal_hash((e1,e2)))
+	return new_layer
+
+def reduce_tree(entries, layers):
+	if len(entries) == 0 and layers is []:
+		return [[hashlib.sha256().digest()]]
+
+	layer_idx = 0
+	layers[layer_idx] += entries
+
+	while len(layers[layer_idx]) > 1:
+		if len(layers) == layer_idx + 1:
+			layers.append([])
+
+		layers[layer_idx + 1] += reduce_layer(layers[layer_idx])
+		layer_idx += 1
+	return layers
+
+def reduce_subtree_to_root(layers):
+	while len(layers) > 1:
+		if len(layers[1]) == 0:
+			layers[1] = layers[0]
+		else:
+			layers[1] += next_merkle_layer(layers[0])
+		del layers[0]
+
+	if len(layers[0]) > 1:
+		return next_merkle_layer(layers[0])
+	return layers[0]
+
+def get_and_verify_sth(url, key):
+	try:
+		sth = get_sth(url)
+	except:
+		print time.strftime('%H:%M:%S') + " ERROR: Failed to retrieve STH from " + url
+		sys.exit(NAGIOS_CRIT)
+
+	# Check signature on the STH
+	try:
+		check_sth_signature(url, sth, key)
+		# write_file("plausible-sth.json", tmp_sth)
+	except:
+		error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + url
+		print error_str
+		sys.exit(NAGIOS_CRIT)
+	return sth
+
+def fetch_all_sth():
+	sths = {}
+	for base_url in base_urls:
+		# Fetch STH
+		try:
+			sths[base_url] = get_sth(base_url)
+		except:
+			sths[base_url] = None
+			error_str = time.strftime('%H:%M:%S') + " ERROR: Failed to retrieve STH from " + base_url
+			print error_str
+			errors.append(error_str)
+			continue
+
+		# Check signature on the STH
+		try:
+			check_sth_signature(base_url, sths[base_url], logkeys[base_url])
+		except:
+			error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url
+			print error_str
+			errors.append(error_str)
+			continue
+
+		# Add timing info
+		# try:
+		#	if base_url not in timings:
+		#		timings[base_url] = {"last":sths[base_url]["timestamp"], "longest":0}
+		#	else:
+		#		then = datetime.datetime.fromtimestamp(int(timings[base_url]["last"])/1000)
+		#		now = datetime.datetime.fromtimestamp(int(sths[base_url]["timestamp"])/1000)
+		#		tdelta = now - then
+
+		#		timings[base_url]["last"] = sths[base_url]["timestamp"]
+
+		#		if tdelta.total_seconds() > timings[base_url]["longest"]:
+		#			timings[base_url]["longest"] = tdelta.total_seconds()
+
+		# except Exception, err:
+		#	print Exception, err
+		#	print time.strftime('%H:%M:%S') + " ERROR: Failed to set TIME info for STH"
+
+	return sths
+
+def verify_progress(url, old, new):
+	if old and new:
+		if new["tree_size"] == old["tree_size"]:
+			if old["sha256_root_hash"] != new["sha256_root_hash"]:
+				print time.strftime('%H:%M:%S') + " CRITICAL: root hash is different for same tree size in " + url
+				sys.exit(NAGIOS_CRIT)
+
+		elif new["tree_size"] < old["tree_size"]:
+			print time.strftime('%H:%M:%S') + " CRITICAL: new tree smaller than previous tree (%d < %d)" % \
+				(new["tree_size"], old["tree_size"])
+			sys.exit(NAGIOS_WARN)
+	if new:
+		age = time.time() - new["timestamp"]/1000
+		sth_time = datetime.datetime.fromtimestamp(new['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
+		roothash = new['sha256_root_hash']
+		if age > 24 * 3600:
+			print time.strftime('%H:%M:%S') + " CRITICAL: %s is older than 24h: %s UTC" % (url, sth_time)
+			sys.exit(NAGIOS_CRIT)
+		elif age > 12 * 3600:
+			print time.strftime('%H:%M:%S') + " WARNING: %s is older than 12h: %s UTC" % (url, sth_time)
+			sys.exit(NAGIOS_WARN)
+		elif age > 6 * 3600:
+			print time.strftime('%H:%M:%S') + " WARNING: %s is older than 6h: %s UTC" % (url, sth_time)
+			sys.exit(NAGIOS_WARN)
+
+def verify_consistency(url, old, new):
+	if old and new:
+		try:
+			if old["tree_size"]!= new["tree_size"]:
+				consistency_proof = get_consistency_proof(url, old["tree_size"], new["tree_size"])
+				decoded_consistency_proof = []
+				for item in consistency_proof:
+					decoded_consistency_proof.append(base64.b64decode(item))
+				res = verify_consistency_proof(decoded_consistency_proof, old["tree_size"], new["tree_size"], old["sha256_root_hash"])
+
+				if old["sha256_root_hash"] != str(base64.b64encode(res[0])):
+					print time.strftime('%H:%M:%S') + " Verification of old hash failed! " + old["sha256_root_hash"] + str(base64.b64encode(res[0]))
+					sys.exit(NAGIOS_CRIT)
+					# errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + old[url]["tree_size"])
+				elif new["sha256_root_hash"] != str(base64.b64encode(res[1])):
+					print time.strftime('%H:%M:%S') + " Verification of new hash failed! " + new["sha256_root_hash"] + str(base64.b64encode(res[1]))
+					sys.exit(NAGIOS_CRIT)
+					# errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify consistency for " + url + ", tree size " + new[url]["tree_size"])
+				else:
+					print time.strftime("%H:%M:%S") + " New STH from " + url + ", timestamp: " + \
+						str(new["timestamp"]) + ", size: " + str(new["tree_size"]) + "...OK."
+
+		except:
+			print time.strftime('%H:%M:%S') + " ERROR: Could not verify consistency for " + url
+			sys.exit(NAGIOS_CRIT)
+
+def verify_inclusion_all(url, old, new):
+	if old and new:
+		try:
+			if old["tree_size"]!= new["tree_size"]:
+				entries = get_entries(url, old["tree_size"], new["tree_size"] -1)["entries"]
+				success = True
+				for i in entries:
+					h = get_leaf_hash(base64.b64decode(i["leaf_input"]))
+					if not verify_inclusion_by_hash(url, h):
+						success = False
+
+				if not success:
+					print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
+					sys.exit(NAGIOS_CRIT)
+					# errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
+		except:
+			print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
+			# errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
+			sys.exit(NAGIOS_CRIT)
+
+def fetch_and_build_tree(old_sth, base_url):
+	sth = old_sth[base_url]
+	subtree = [[]]
+	idx = 0
+
+	res_strings = [""]
+
+	print time.strftime('%H:%M:%S') + " Getting all entries from " + base_url
+	while idx < sth["tree_size"]:
+		pre_size = idx
+		entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"]
+
+		new_leafs = []
+		for item in entries:
+			new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
+		idx += len(new_leafs)
+		print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url
+		subtree = reduce_tree(new_leafs, subtree)
+
+	root = base64.b64encode(reduce_subtree_to_root(subtree)[0])
+
+	if root == sth["sha256_root_hash"]:
+		print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK."
+		res_strings.append("STH for " + base_url + " built successfully.")
+	else:
+		print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! STH root: " + sth["sha256_root_hash"] + ", Tree root: " + root
+		res_strings.append(time.strftime('%H:%M:%S') + " " + base_url + " Failed! STH root: " + sth["sha256_root_hash"] + " Calculated root: " + root)
+		errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hash for " + base_url + ", tree size " + str(sth["tree_size"]))
+
+	for item in res_strings:
+		print item + "\n"
+
+def verify_inclusion_by_hash(base_url, leaf_hash):
+	try:
+		tmp_sth = get_sth(base_url)
+		proof = get_proof_by_hash(base_url, leaf_hash, tmp_sth["tree_size"])
+
+		decoded_inclusion_proof = []
+		for item in proof["audit_path"]:
+			decoded_inclusion_proof.append(base64.b64decode(item))
+
+		root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash))
+
+		if tmp_sth["sha256_root_hash"] == root:
+			# print "Verifying inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url + "...OK."
+			return True
+		else:
+			print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url
+			errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url)
+			return False
+	except:
+		print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url
+		errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url)
+		return False
+
+def verify_inclusion_by_index(base_url, index):
+	try:
+		tmp_sth = get_sth(base_url)
+		proof = get_proof_by_index(base_url, index, tmp_sth["tree_size"])
+
+		decoded_inclusion_proof = []
+		for item in proof["audit_path"]:
+			decoded_inclusion_proof.append(base64.b64decode(item))
+
+		root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, index, tmp_sth["tree_size"], get_leaf_hash(base64.b64decode(proof["leaf_input"]))))
+
+		if tmp_sth["sha256_root_hash"] == root:
+			print time.strftime('%H:%M:%S') + " Verifying inclusion for entry " + str(index) + " in " + base_url + "...OK."
+		else:
+			print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url
+			errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url)
+	except:
+		print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url
+		errors.append(time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(index) + " in " + base_url)
+
+def get_proof_by_index(baseurl, index, tree_size):
+	try:
+		params = urllib.urlencode({"leaf_index":index,
+								   "tree_size":tree_size})
+		result = \
+			urlopen(baseurl + "ct/v1/get-entry-and-proof?" + params).read()
+		return json.loads(result)
+	except urllib2.HTTPError, e:
+		print "ERROR:", e.read()
+		sys.exit(NAGIOS_UNKNOWN)
+
+def get_all_roots(base_url):
+	# print "Fetching roots from " + base_url
+	result = urlopen(base_url + "ct/v1/get-roots").read()
+	certs = json.loads(result)["certificates"]
+	print time.strftime('%H:%M:%S') + " Received " + str(len(certs)) + " certs from " + base_url
+
+	for accepted_cert in certs:
+		subject = get_cert_info(base64.decodestring(accepted_cert))["subject"]
+		issuer = get_cert_info(base64.decodestring(accepted_cert))["issuer"]
+		if subject == issuer:
+			root_cert = base64.decodestring(accepted_cert)
+			print get_cert_info(root_cert)["subject"]
+
+def read_sth(fn):
+	try:
+		f = open(fn)
+	except IOError, e:
+		if e.errno == errno.ENOENT:
+			return None
+		raise e
+	return json.loads(f.read())
+
+def write_file(fn, sth):
+	tempname = fn + ".new"
+	open(tempname, 'w').write(json.dumps(sth))
+	mv_file(tempname, fn)
+
+
+def main(args):
+	try:
+		log_key = get_public_key_from_file(args.keyfile)
+	except:
+		print time.strftime('%H:%M:%S') + " ERROR: Failed to load keyfile " + args.keyfile
+		sys.exit(NAGIOS_WARN)
+
+	old_sth = read_sth(args.sthfile)
+	new_sth = get_and_verify_sth(args.baseurl, log_key)
+	write_file(args.sthfile, new_sth)
+
+	verify_progress(args.baseurl, old_sth, new_sth)
+
+	verify_consistency(args.baseurl, old_sth, new_sth)
+
+	verify_inclusion_all(args.baseurl, old_sth, new_sth)
+
+	print "Everything OK from " + args.baseurl
+	sys.exit(NAGIOS_OK)
+
+
+if __name__ == '__main__':
+	main(parser.parse_args())
+
+
+
+
+
+
+
--
cgit v1.1


From e71fab5e9f9a9b0b7a298acec5c85c188f7fe58f Mon Sep 17 00:00:00 2001
From: josef
Date: Tue, 1 Sep 2015 14:13:26 +0200
Subject: updates

---
 tools/josef_auditor.py              |  2 +-
 tools/josef_experimental_auditor.py | 34 +++++++++++++++++++++++++++++++---
 2 files changed, 32 insertions(+), 4 deletions(-)

(limited to 'tools')

diff --git a/tools/josef_auditor.py b/tools/josef_auditor.py
index 454c90b..710e3da 100755
--- a/tools/josef_auditor.py
+++ b/tools/josef_auditor.py
@@ -111,7 +111,7 @@ def fetch_all_sth():
 		try:
 			check_sth_signature(base_url, sths[base_url], logkeys[base_url])
 		except:
-			error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url + "!!!"
+ error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url print error_str errors.append(error_str) continue diff --git a/tools/josef_experimental_auditor.py b/tools/josef_experimental_auditor.py index b7d3bc6..135bb46 100755 --- a/tools/josef_experimental_auditor.py +++ b/tools/josef_experimental_auditor.py @@ -41,9 +41,11 @@ parser = argparse.ArgumentParser(description="") parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH") parser.add_argument('--audit2', action='store_true', help="run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries") parser.add_argument('--audit3', action='store_true', help="continously run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries") +parser.add_argument('--audit4', action='store_true', help="run one check on one server") parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH") parser.add_argument('--verify-index', default=None, help="Verify a specific index in all logs" ) # parser.add_argument('--verify-hash', action='store_true', help="Verify an entry hash in all logs" ) +parser.add_argument('--host', default=None, help="Base URL for CT log") parser.add_argument('--roots', action='store_true', help="Check accepted root certificates for all logs" ) parser.add_argument('--cur-sth', metavar='file', @@ -111,7 +113,7 @@ def fetch_all_sth(): try: check_sth_signature(base_url, sths[base_url], logkeys[base_url]) except: - error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url + "!!!" + error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url print error_str errors.append(error_str) continue @@ -343,10 +345,12 @@ def main(args): # print time.strftime("%H:%M:%S") + " Starting..." if args.verify_index is None and not args.build_sth and not args.audit and not args.audit2 \ - and not args.audit3 and not args.verify_hash and not args.roots: + and not args.audit3 and not args.audit4 and not args.roots: print time.strftime('%H:%M:%S') + " Nothing to do." return + elif args.audit4: + pass else: sth = fetch_all_sth() @@ -404,7 +408,31 @@ def main(args): verify_inclusion_all(old_sth, sth) write_file(args.cur_sth, sth) - + # Experimental for plausible + nagios + if args.audit4: + base_url = base_urls[0] + old_sth = read_sth("plausible-sth.json") + print "Running auditor4 for " + base_url + try: + tmp_sth = get_sth(base_url) + except: + # sths[base_url] = None + error_str = time.strftime('%H:%M:%S') + " ERROR: Failed to retrieve STH from " + base_url + print error_str + errors.append(error_str) + sys.exit(NAGIOS_WARN) + + # Check signature on the STH + try: + check_sth_signature(base_url, tmp_sth, logkeys[base_url]) + write_file("plausible-sth.json", tmp_sth) + except: + error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url + print error_str + errors.append(error_str) + sys.exit(NAGIOS_CRIT) + sys.exit(NAGIOS_OK) + if __name__ == '__main__': # try: -- cgit v1.1 From c44dc8533ece4e000162cae6fd6c6fa376b94602 Mon Sep 17 00:00:00 2001 From: josef Date: Tue, 1 Sep 2015 14:50:33 +0200 Subject: experimental... 
--- tools/josef_experimental.py | 111 ++++++++++++++++++++++++-------------------- 1 file changed, 61 insertions(+), 50 deletions(-) (limited to 'tools') diff --git a/tools/josef_experimental.py b/tools/josef_experimental.py index dc1dc7e..7f79788 100755 --- a/tools/josef_experimental.py +++ b/tools/josef_experimental.py @@ -3,13 +3,69 @@ import time import base64 -from certtools import get_sth, get_consistency_proof, check_sth_signature, get_public_key_from_file, verify_consistency_proof +import urllib +import urllib2 +import sys +# from pympler.asizeof import asizeof +from certtools import * + +def reduce_leafs_to_root(layer0): + if len(layer0) == 0: + return [[hashlib.sha256().digest()]] + current_layer = layer0 + while len(current_layer) > 1: + current_layer = next_merkle_layer(current_layer) + return current_layer + +def reduce_layer(layer): + new_layer = [] + while len(layer) > 1: + e1 = layer.pop(0) + e2 = layer.pop(0) + new_layer.append(internal_hash((e1,e2))) + return new_layer + +def reduce_tree(entries, layers): + if len(entries) == 0 and layers is []: + return [[hashlib.sha256().digest()]] + + layer_idx = 0 + layers[layer_idx] += entries + + while len(layers[layer_idx]) > 1: + if len(layers) == layer_idx + 1: + layers.append([]) + + layers[layer_idx + 1] += reduce_layer(layers[layer_idx]) + layer_idx += 1 + return layers + +def reduce_subtree_to_root(layers): + while len(layers) > 1: + layers[1] += next_merkle_layer(layers[0]) + del layers[0] + + if len(layers[0]) > 1: + return next_merkle_layer(layers[0]) + return layers[0] + +def get_proof_by_index(baseurl, index, tree_size): + try: + params = urllib.urlencode({"leaf_index":index, + "tree_size":tree_size}) + result = \ + urlopen(baseurl + "ct/v1/get-entry-and-proof?" + params).read() + return json.loads(result) + except urllib2.HTTPError, e: + print "ERROR:", e.read() + sys.exit(1) base_urls = ["https://plausible.ct.nordu.net/", "https://ct1.digicert-ct.com/log/", "https://ct.izenpe.com/", "https://log.certly.io/", + "https://ctlog.api.venafi.com/", "https://ct.googleapis.com/aviator/", "https://ct.googleapis.com/pilot/", "https://ct.googleapis.com/rocketeer/", @@ -23,57 +79,12 @@ logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pi logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem") logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem") logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem") -old_sth = {} - -# Get initial sth -print time.strftime("%H:%M:%S", time.gmtime()) -for base_url in base_urls: - - old_sth[base_url] = get_sth(base_url) - print "Received STH from " + base_url + ", timestamp: " + str(old_sth[base_url]["timestamp"]) + ", size: " + str(old_sth[base_url]["tree_size"]) - - try: - check_sth_signature(base_url, old_sth[base_url], logkeys[base_url]) - except: - print "Could not verify signature!!" - - -while True: - time.sleep(1*60-4) - print time.strftime("%H:%M:%S", time.gmtime()) - for base_url in base_urls: - new_sth = get_sth(base_url) - print "Received STH from " + base_url + ", timestamp: " + str(new_sth["timestamp"]) + ", size: " + str(new_sth["tree_size"]) - try: - check_sth_signature(base_url, new_sth, logkeys[base_url]) - except: - print "Could not verify signature!!" - - if old_sth[base_url]["tree_size"]!= new_sth["tree_size"]: - print "Wohoo, new STH! Checking..." 
- try: - # Hashes are base64 encoded from the server and needs to be decoded before checking proofs. - consistency_proof = get_consistency_proof(base_url, old_sth[base_url]["tree_size"], new_sth["tree_size"] ) - decoded_consistency_proof = [] - for item in consistency_proof: - decoded_consistency_proof.append(base64.b64decode(item)) - res = verify_consistency_proof(decoded_consistency_proof, old_sth[base_url]["tree_size"], new_sth["tree_size"], old_sth[base_url]["sha256_root_hash"]) - - if old_sth[base_url]["sha256_root_hash"] != str(base64.b64encode(res[0])): - print "Verification of old hash failed!!!" - print old_sth[base_url]["sha256_root_hash"], str(base64.b64encode(res[0])) - if new_sth["sha256_root_hash"] != str(base64.b64encode(res[1])): - print "Verification of new hash failed!!!" - print new_sth["sha256_root_hash"], str(base64.b64encode(res[1])) - - except Exception, err: - print Exception, err - finally: - old_sth[base_url] = new_sth - - +logkeys["https://ctlog.api.venafi.com/"] = get_public_key_from_file("../../venafi-logkey.pem") +from Crypto import Signature +sth = get_sth(base_urls[4]) +print sth -- cgit v1.1
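Taken together, the scripts in this series all follow the same audit cycle: fetch an STH, check its signature against the log's public key, and, once the tree has grown, fetch and verify a consistency proof between the cached and the new tree head. A condensed sketch of one such pass, using the same certtools helpers relied on throughout (the log URL and key path are illustrative, and all error handling is elided):

    import base64
    from certtools import (get_sth, check_sth_signature, get_public_key_from_file,
                           get_consistency_proof, verify_consistency_proof)

    url = "https://plausible.ct.nordu.net/"
    key = get_public_key_from_file("../../plausible-logkey.pem")

    old = get_sth(url)
    check_sth_signature(url, old, key)
    # ... some time later ...
    new = get_sth(url)
    check_sth_signature(url, new, key)

    if new["tree_size"] > old["tree_size"]:
        # Proof hashes arrive base64-encoded and must be decoded first.
        proof = [base64.b64decode(p) for p in
                 get_consistency_proof(url, old["tree_size"], new["tree_size"])]
        old_root, new_root = verify_consistency_proof(
            proof, old["tree_size"], new["tree_size"], old["sha256_root_hash"])
        # Both reconstructed roots must match the signed tree heads.
        assert old["sha256_root_hash"] == base64.b64encode(old_root)
        assert new["sha256_root_hash"] == base64.b64encode(new_root)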