author     Josef Gustafsson <josef.gson@gmail.com>  2015-09-07 09:08:15 +0200
committer  Josef Gustafsson <josef.gson@gmail.com>  2015-09-07 09:08:15 +0200
commit     1fbeb7f1af0d0b7e98b246dfb06fd0525abd23f1 (patch)
tree       fc3fa19b20dfdab4a51752069e0190fc1ae84b5d
parent     4596485adffb636c014362bc982ee0b952c77f26 (diff)
auditing working, starting with monitor design
-rwxr-xr-x  tools/josef_auditor.py               205
-rwxr-xr-x  tools/josef_experimental_auditor.py   64
-rwxr-xr-x  tools/josef_nagios_auditor.py           2
3 files changed, 126 insertions, 145 deletions
diff --git a/tools/josef_auditor.py b/tools/josef_auditor.py
index 710e3da..29032d1 100755
--- a/tools/josef_auditor.py
+++ b/tools/josef_auditor.py
@@ -15,7 +15,8 @@ NAGIOS_UNKNOWN = 3
DEFAULT_CUR_FILE = 'all-sth.json'
-base_urls = ["https://plausible.ct.nordu.net/",
+base_urls = [
+ "https://plausible.ct.nordu.net/",
"https://ct1.digicert-ct.com/log/",
"https://ct.izenpe.com/",
"https://log.certly.io/",
@@ -23,27 +24,16 @@ base_urls = ["https://plausible.ct.nordu.net/",
"https://ct.googleapis.com/pilot/",
"https://ct.googleapis.com/rocketeer/",
"https://ct.ws.symantec.com/",
- "https://ctlog.api.venafi.com/",
+ # "https://ctlog.api.venafi.com/",
]
-
-logkeys = {}
-logkeys["https://plausible.ct.nordu.net/"] = get_public_key_from_file("../../plausible-logkey.pem")
-logkeys["https://ct.googleapis.com/rocketeer/"] = get_public_key_from_file("../../rocketeer-logkey.pem")
-logkeys["https://ct.googleapis.com/aviator/"] = get_public_key_from_file("../../aviator-logkey.pem")
-logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pilot-logkey.pem")
-logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem")
-logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem")
-logkeys["https://ct.ws.symantec.com/"] = get_public_key_from_file("../../symantec-logkey.pem")
-logkeys["https://ctlog.api.venafi.com/"] = get_public_key_from_file("../../venafi-logkey.pem")
-logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem")
-
parser = argparse.ArgumentParser(description="")
parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH")
parser.add_argument('--audit2', action='store_true', help="run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
parser.add_argument('--audit3', action='store_true', help="continuously run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
+parser.add_argument('--audit4', action='store_true', help="run one check on one server")
parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH")
parser.add_argument('--verify-index', default=None, help="Verify a specific index in all logs" )
-# parser.add_argument('--verify-hash', action='store_true', help="Verify an entry hash in all logs" )
+parser.add_argument('--host', default=None, help="Base URL for CT log")
parser.add_argument('--roots', action='store_true', help="Check accepted root certificates for all logs" )
parser.add_argument('--cur-sth',
metavar='file',
@@ -109,7 +99,8 @@ def fetch_all_sth():
# Check signature on the STH
try:
- check_sth_signature(base_url, sths[base_url], logkeys[base_url])
+ # check_sth_signature(base_url, sths[base_url], logkeys[base_url])
+ check_sth_signature(base_url, sths[base_url], None)
except:
error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url
print error_str
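
The removed logkeys table preloaded every key at import time, so one missing .pem file aborted the whole script; the new code passes None and leaves key handling to check_sth_signature. A hedged sketch of lazy loading along the same lines (paths taken from the removed table; lookup_log_key is hypothetical, not part of this commit):

    KEY_FILES = {
        "https://plausible.ct.nordu.net/": "../../plausible-logkey.pem",
        "https://ct.googleapis.com/pilot/": "../../pilot-logkey.pem",
        # ... one entry per monitored log, as in the removed table above
    }
    _key_cache = {}

    def lookup_log_key(base_url):
        # Load on first use so a missing key file only breaks its own log.
        if base_url not in _key_cache:
            _key_cache[base_url] = get_public_key_from_file(KEY_FILES[base_url])
        return _key_cache[base_url]
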
@@ -120,38 +111,34 @@ def fetch_all_sth():
def verify_progress(old, new):
print "Verifying progress"
- for url in new:
- if new and old and new[url] and old[url]:
- if new[url]["tree_size"] == old[url]["tree_size"]:
- if old[url]["sha256_root_hash"] != new[url]["sha256_root_hash"]:
- errors.append(time.strftime('%H:%M:%S') + " CRITICAL: root hash is different for same tree size in " + url)
- # print "tree size:", newsth["tree_size"],
- # print "old hash:", b64_to_b16(oldsth["sha256_root_hash"])
- # print "new hash:", b64_to_b16(newsth["sha256_root_hash"])
- # sys.exit(NAGIOS_CRIT)
- # TODO
- elif new[url]["tree_size"] < old[url]["tree_size"]:
- # if not args.allow_lag:
- errors.append(time.strftime('%H:%M:%S') + " CRITICAL: new tree smaller than previous tree (%d < %d)" % \
- (new[url]["tree_size"], old[url]["tree_size"]))
- # sys.exit(NAGIOS_CRIT)
- if new[url]:
- age = time.time() - new[url]["timestamp"]/1000
- sth_time = datetime.datetime.fromtimestamp(new[url]['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
- # roothash = b64_to_b16(sth['sha256_root_hash'])
- roothash = new[url]['sha256_root_hash']
- if age > 24 * 3600:
- errors.append(time.strftime('%H:%M:%S') + " CRITICAL: %s is older than 24h: %s UTC" % (url, sth_time))
- elif age > 12 * 3600:
- errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 12h: %s UTC" % (url, sth_time))
- elif age > 6 * 3600:
- errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 6h: %s UTC" % (url, sth_time))
- # elif age > 2 * 3600:
- # errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 2h: %s UTC" % (url, sth_time))
+ try:
+ for url in new:
+ if new and old and new[url] and old[url]:
+ if new[url]["tree_size"] == old[url]["tree_size"]:
+ if old[url]["sha256_root_hash"] != new[url]["sha256_root_hash"]:
+ errors.append(time.strftime('%H:%M:%S') + " CRITICAL: root hash is different for same tree size in " + url)
+ elif new[url]["tree_size"] < old[url]["tree_size"]:
+ errors.append(time.strftime('%H:%M:%S') + " CRITICAL: new tree smaller than previous tree (%d < %d)" % \
+ (new[url]["tree_size"], old[url]["tree_size"]))
+ if new[url]:
+ age = time.time() - new[url]["timestamp"]/1000
+ sth_time = datetime.datetime.fromtimestamp(new[url]['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
+ roothash = new[url]['sha256_root_hash']
+ if age > 24 * 3600:
+ errors.append(time.strftime('%H:%M:%S') + " CRITICAL: %s is older than 24h: %s UTC" % (url, sth_time))
+ elif age > 12 * 3600:
+ errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 12h: %s UTC" % (url, sth_time))
+ elif age > 6 * 3600:
+ errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 6h: %s UTC" % (url, sth_time))
+ # elif age > 2 * 3600:
+ # errors.append(time.strftime('%H:%M:%S') + " WARNING: %s is older than 2h: %s UTC" % (url, sth_time))
+ except:
+ print time.strftime('%H:%M:%S') + " ERROR: Failed to verify progress for " + url
+
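
Wrapping the whole loop in one try/except means the first malformed STH aborts progress checks for every remaining log, and if the failure happens before the loop binds url, the except handler's print itself raises NameError. A sketch of scoping the handler to a single log instead (alternative pattern, not what this commit does):

    def verify_progress_one(url, old, new):
        try:
            if old.get(url) and new.get(url):
                if (new[url]["tree_size"] == old[url]["tree_size"] and
                        old[url]["sha256_root_hash"] != new[url]["sha256_root_hash"]):
                    errors.append(time.strftime('%H:%M:%S') +
                                  " CRITICAL: root hash differs for same tree size in " + url)
                elif new[url]["tree_size"] < old[url]["tree_size"]:
                    errors.append(time.strftime('%H:%M:%S') +
                                  " CRITICAL: new tree smaller than previous tree in " + url)
        except Exception:
            print time.strftime('%H:%M:%S') + " ERROR: Failed to verify progress for " + url
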
def verify_consistency(old, new):
for url in old:
- # try:
+ try:
if old[url] and new[url] and old[url]["tree_size"]!= new[url]["tree_size"]:
consistency_proof = get_consistency_proof(url, old[url]["tree_size"], new[url]["tree_size"])
decoded_consistency_proof = []
@@ -169,15 +156,20 @@ def verify_consistency(old, new):
print time.strftime("%H:%M:%S") + " New STH from " + url + ", timestamp: " + \
str(new[url]["timestamp"]) + ", size: " + str(new[url]["tree_size"]) + "...OK."
- # except:
- # print "ERROR: Could not verify consistency for " + url
+ except:
+ print "ERROR: Could not verify consistency for " + url
def verify_inclusion_all(old, new):
for url in old:
try:
if old[url] and new[url]:
if old[url]["tree_size"]!= new[url]["tree_size"]:
- entries = get_entries(url, old[url]["tree_size"], new[url]["tree_size"] -1)["entries"]
+ entries = []
+
+ while len(entries) + old[url]["tree_size"]!= new[url]["tree_size"]:
+ entries += get_entries(url, str(int(old[url]["tree_size"]) + len(entries)), new[url]["tree_size"] -1)["entries"]
+ print "Got " + str(len(entries)) + " entries..."
+
success = True
for i in entries:
h = get_leaf_hash(base64.b64decode(i["leaf_input"]))
@@ -194,36 +186,41 @@ def verify_inclusion_all(old, new):
errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
def fetch_and_build_tree(old_sth, base_url):
- sth = old_sth[base_url]
- subtree = [[]]
- idx = 0
-
- res_strings = [""]
+ try:
+ sth = old_sth[base_url]
+ subtree = [[]]
+ idx = 0
+
+ res_strings = [""]
+
+ print time.strftime('%H:%M:%S') + " Getting all entries from " + base_url
+ while idx < sth["tree_size"]:
+ pre_size = idx
+ entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"]
+
+ new_leafs = []
+ for item in entries:
+ new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
+ idx += len(new_leafs)
+ print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url
+ subtree = reduce_tree(new_leafs, subtree)
+
+ root = base64.b64encode(reduce_subtree_to_root(subtree)[0])
+
+ if root == sth["sha256_root_hash"]:
+ print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK."
+ res_strings.append("STH for " + base_url + " built successfully.")
+ else:
+ print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! STH root: " + sth["sha256_root_hash"] + ", Tree root: " + root
+ res_strings.append(time.strftime('%H:%M:%S') + " " + base_url + " Failed! STH root: " + sth["sha256_root_hash"] + " Calculated root: " + root)
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hash for " + base_url + ", tre size " + sth["tree_size"])
- print time.strftime('%H:%M:%S') + " Getting all entries from " + base_url
- while idx < sth["tree_size"]:
- pre_size = idx
- entries = get_entries(base_url, idx, sth["tree_size"]-1)["entries"]
-
- new_leafs = []
- for item in entries:
- new_leafs.append(get_leaf_hash(base64.b64decode(item["leaf_input"])))
- idx += len(new_leafs)
- print time.strftime('%H:%M:%S') + " Got entries " + str(pre_size) + " to " + str(idx) + " from " + base_url
- subtree = reduce_tree(new_leafs, subtree)
-
- root = base64.b64encode(reduce_subtree_to_root(subtree)[0])
-
- if root == sth["sha256_root_hash"]:
- print time.strftime('%H:%M:%S') + " Verifying root hashes for " + base_url + "...OK."
- res_strings.append("STH for " + base_url + " built successfully.")
- else:
- print time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hashes! STH root: " + sth["sha256_root_hash"] + ", Tree root: " + root
- res_strings.append(time.strftime('%H:%M:%S') + " " + base_url + " Failed! STH root: " + sth["sha256_root_hash"] + " Calculated root: " + root)
- errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to verify root hash for " + base_url + ", tre size " + sth["tree_size"])
+ for item in res_strings:
+ print item + "\n"
- for item in res_strings:
- print item + "\n"
+ except:
+ print time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url
+ errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to build STH for " + base_url)
def verify_inclusion_by_hash(base_url, leaf_hash):
try:
@@ -237,7 +234,6 @@ def verify_inclusion_by_hash(base_url, leaf_hash):
root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash))
if tmp_sth["sha256_root_hash"] == root:
- # print "Verifying inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url + "...OK."
return True
else:
print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url
@@ -280,7 +276,6 @@ def get_proof_by_index(baseurl, index, tree_size):
sys.exit(0)
def get_all_roots(base_url):
- # print "Fetching roots from " + base_url
result = urlopen(base_url + "ct/v1/get-roots").read()
certs = json.loads(result)["certificates"]
print time.strftime('%H:%M:%S') + " Received " + str(len(certs)) + " certs from " + base_url
@@ -325,10 +320,12 @@ def main(args):
# print time.strftime("%H:%M:%S") + " Starting..."
if args.verify_index is None and not args.build_sth and not args.audit and not args.audit2 \
- and not args.audit3 and not args.verify_hash and not args.roots:
+ and not args.audit3 and not args.audit4 and not args.roots:
print time.strftime('%H:%M:%S') + " Nothing to do."
return
+ elif args.audit4:
+ pass
else:
sth = fetch_all_sth()
@@ -336,7 +333,6 @@ def main(args):
for url in base_urls:
verify_inclusion_by_index(url, int(args.verify_index))
-
if args.roots:
print time.strftime('%H:%M:%S') + " Getting accepted Root Certs from all logs..."
for url in base_urls:
@@ -371,22 +367,55 @@ def main(args):
if args.audit2:
print time.strftime('%H:%M:%S') + " Running auditor2 for " +str(len(base_urls)) + " logs..."
old_sth = read_sth(args.cur_sth)
+ # print "Verifying progress..."
verify_progress(old_sth, sth)
if old_sth:
+ print "Verifying consistency..."
verify_consistency(old_sth, sth)
+ print "Verifying inclusion..."
verify_inclusion_all(old_sth, sth)
write_file(args.cur_sth, sth)
-
+ # Experimental for plausible + nagios
+ if args.audit4:
+ base_url = base_urls[0]
+ old_sth = read_sth("plausible-sth.json")
+ print "Running auditor4 for " + base_url
+ try:
+ tmp_sth = get_sth(base_url)
+ except:
+ error_str = time.strftime('%H:%M:%S') + " ERROR: Failed to retrieve STH from " + base_url
+ print error_str
+ errors.append(error_str)
+ sys.exit(NAGIOS_WARN)
-if __name__ == '__main__':
- main(parser.parse_args())
- if len(errors) == 0:
- print time.strftime('%H:%M:%S') + " Everything OK."
+ # Check signature on the STH
+ try:
+ check_sth_signature(base_url, tmp_sth, None)
+ write_file("plausible-sth.json", tmp_sth)
+ except:
+ error_str = time.strftime('%H:%M:%S') + " ERROR: Could not verify signature from " + base_url
+ print error_str
+ errors.append(error_str)
+ sys.exit(NAGIOS_CRIT)
sys.exit(NAGIOS_OK)
- else:
- print_errors(errors)
- sys.exit(NAGIOS_WARN)
+
+
+if __name__ == '__main__':
+ # try:
+ main(parser.parse_args())
+ if len(errors) == 0:
+ print time.strftime('%H:%M:%S') + " Everything OK."
+ sys.exit(NAGIOS_OK)
+ else:
+ # print "errors found!"
+ print_errors(errors)
+ sys.exit(NAGIOS_WARN)
+ # except:
+ # pass
+ # finally:
+ # # print_timings(timings)
+ # print_errors(errors)
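
The new --audit4 path turns the script into a one-shot Nagios probe for the first log in base_urls: exit 1 (WARNING) if the STH cannot be fetched, exit 2 (CRITICAL) if its signature does not verify, exit 0 (OK) otherwise, with the last good STH cached in plausible-sth.json. How a scheduler consumes that status (illustrative invocation; the exit-code semantics are the standard Nagios ones defined at the top of the file):

    import subprocess, sys

    status = subprocess.call([sys.executable, "tools/josef_auditor.py", "--audit4"])
    print {0: "OK", 1: "WARNING", 2: "CRITICAL"}.get(status, "UNKNOWN")
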
diff --git a/tools/josef_experimental_auditor.py b/tools/josef_experimental_auditor.py
index 57ef9cb..9268d03 100755
--- a/tools/josef_experimental_auditor.py
+++ b/tools/josef_experimental_auditor.py
@@ -16,36 +16,24 @@ NAGIOS_UNKNOWN = 3
DEFAULT_CUR_FILE = 'all-sth.json'
base_urls = [
- "https://plausible.ct.nordu.net/",
- "https://ct1.digicert-ct.com/log/",
- "https://ct.izenpe.com/",
- "https://log.certly.io/",
- "https://ct.googleapis.com/aviator/",
- "https://ct.googleapis.com/pilot/",
- "https://ct.googleapis.com/rocketeer/",
+ # "https://plausible.ct.nordu.net/",
+ # "https://ct1.digicert-ct.com/log/",
+ # "https://ct.izenpe.com/",
+ # "https://log.certly.io/",
+ # "https://ct.googleapis.com/aviator/",
+ # "https://ct.googleapis.com/pilot/",
+ # "https://ct.googleapis.com/rocketeer/",
"https://ct.ws.symantec.com/",
# "https://ctlog.api.venafi.com/",
]
-
-# logkeys = {}
-# logkeys["https://plausible.ct.nordu.net/"] = get_public_key_from_file("../../plausible-logkey.pem")
-# logkeys["https://ct.googleapis.com/rocketeer/"] = get_public_key_from_file("../../rocketeer-logkey.pem")
-# logkeys["https://ct.googleapis.com/aviator/"] = get_public_key_from_file("../../aviator-logkey.pem")
-# logkeys["https://ct.googleapis.com/pilot/"] = get_public_key_from_file("../../pilot-logkey.pem")
-# logkeys["https://log.certly.io/"] = get_public_key_from_file("../../certly-logkey.pem")
-# logkeys["https://ct.izenpe.com/"] = get_public_key_from_file("../../izenpe-logkey.pem")
-# logkeys["https://ct.ws.symantec.com/"] = get_public_key_from_file("../../symantec-logkey.pem")
-# logkeys["https://ctlog.api.venafi.com/"] = get_public_key_from_file("../../venafi-logkey.pem")
-# logkeys["https://ct1.digicert-ct.com/log/"] = get_public_key_from_file("../../digicert-logkey.pem")
-
parser = argparse.ArgumentParser(description="")
parser.add_argument('--audit', action='store_true', help="run lightweight auditor verifying consistency in STH")
+parser.add_argument('--monitor', action='store_true', help="run full monitoring for all logs")
parser.add_argument('--audit2', action='store_true', help="run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
parser.add_argument('--audit3', action='store_true', help="continuously run medium-weight auditor verifying consistency in STH and inclusion proofs of new entries")
parser.add_argument('--audit4', action='store_true', help="run one check on one server")
parser.add_argument('--build-sth', action='store_true', help="get all entries and construct STH")
parser.add_argument('--verify-index', default=None, help="Verify a specific index in all logs" )
-# parser.add_argument('--verify-hash', action='store_true', help="Verify an entry hash in all logs" )
parser.add_argument('--host', default=None, help="Base URL for CT log")
parser.add_argument('--roots', action='store_true', help="Check accepted root certificates for all logs" )
parser.add_argument('--cur-sth',
@@ -120,24 +108,6 @@ def fetch_all_sth():
errors.append(error_str)
continue
- # Add timing info
- # try:
- # if base_url not in timings:
- # timings[base_url] = {"last":sths[base_url]["timestamp"], "longest":0}
- # else:
- # then = datetime.datetime.fromtimestamp(int(timings[base_url]["last"])/1000)
- # now = datetime.datetime.fromtimestamp(int(sths[base_url]["timestamp"])/1000)
- # tdelta = now - then
-
- # timings[base_url]["last"] = sths[base_url]["timestamp"]
-
- # if tdelta.total_seconds() > timings[base_url]["longest"]:
- # timings[base_url]["longest"] = tdelta.total_seconds()
-
- # except Exception, err:
- # print Exception, err
- # print time.strftime('%H:%M:%S') + "ERROR: Failed to set TIME info for STH"
-
return sths
def verify_progress(old, new):
@@ -148,20 +118,12 @@ def verify_progress(old, new):
if new[url]["tree_size"] == old[url]["tree_size"]:
if old[url]["sha256_root_hash"] != new[url]["sha256_root_hash"]:
errors.append(time.strftime('%H:%M:%S') + " CRITICAL: root hash is different for same tree size in " + url)
- # print "tree size:", newsth["tree_size"],
- # print "old hash:", b64_to_b16(oldsth["sha256_root_hash"])
- # print "new hash:", b64_to_b16(newsth["sha256_root_hash"])
- # sys.exit(NAGIOS_CRIT)
- # TODO
elif new[url]["tree_size"] < old[url]["tree_size"]:
- # if not args.allow_lag:
errors.append(time.strftime('%H:%M:%S') + " CRITICAL: new tree smaller than previous tree (%d < %d)" % \
(new[url]["tree_size"], old[url]["tree_size"]))
- # sys.exit(NAGIOS_CRIT)
if new[url]:
age = time.time() - new[url]["timestamp"]/1000
sth_time = datetime.datetime.fromtimestamp(new[url]['timestamp'] / 1000, UTC()).strftime("%Y-%m-%d %H:%M:%S")
- # roothash = b64_to_b16(sth['sha256_root_hash'])
roothash = new[url]['sha256_root_hash']
if age > 24 * 3600:
errors.append(time.strftime('%H:%M:%S') + " CRITICAL: %s is older than 24h: %s UTC" % (url, sth_time))
@@ -273,7 +235,6 @@ def verify_inclusion_by_hash(base_url, leaf_hash):
root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash))
if tmp_sth["sha256_root_hash"] == root:
- # print "Verifying inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url + "...OK."
return True
else:
print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url
@@ -316,7 +277,6 @@ def get_proof_by_index(baseurl, index, tree_size):
sys.exit(0)
def get_all_roots(base_url):
- # print "Fetching roots from " + base_url
result = urlopen(base_url + "ct/v1/get-roots").read()
certs = json.loads(result)["certificates"]
print time.strftime('%H:%M:%S') + " Received " + str(len(certs)) + " certs from " + base_url
@@ -374,13 +334,6 @@ def main(args):
for url in base_urls:
verify_inclusion_by_index(url, int(args.verify_index))
- # if args.verify_hash:
- # idx = 1337
- # url = base_urls[0]
- # entries = get_entries(url, idx, idx)["entries"]
- # h = get_leaf_hash(base64.b64decode(entries[0]["leaf_input"]))
- # verify_inclusion_by_hash(url, h)
-
if args.roots:
print time.strftime('%H:%M:%S') + " Getting accepted Root Certs from all logs..."
for url in base_urls:
@@ -432,7 +385,6 @@ def main(args):
try:
tmp_sth = get_sth(base_url)
except:
- # sths[base_url] = None
error_str = time.strftime('%H:%M:%S') + " ERROR: Failed to retrieve STH from " + base_url
print error_str
errors.append(error_str)
diff --git a/tools/josef_nagios_auditor.py b/tools/josef_nagios_auditor.py
index db68bbe..cbfdff2 100755
--- a/tools/josef_nagios_auditor.py
+++ b/tools/josef_nagios_auditor.py
@@ -319,7 +319,7 @@ def main(args):
if not args.no_inclusion:
verify_inclusion_all(args.baseurl, old_sth, new_sth)
- print "Everything OK from " + args.baseurl
+ print "Everything OK from " + args.baseurl + " Tree size: " + str(new_sth["tree_size"])
sys.exit(NAGIOS_OK)