author    josef <josef.gson@gmail.com>    2015-10-19 12:08:03 +0200
committer josef <josef.gson@gmail.com>    2015-10-19 12:08:03 +0200
commit    cc06b25e43deeee70418a1ee9f6271ed9fed0c83 (patch)
tree      c60aa3cf27c70a2fd5f9ff1301ff35b210e769ab
parent    db5fd75ac73f81721904b94408b7984482791179 (diff)
Add script for inclusion checking by submitting to all logs and checking timestamp age
-rwxr-xr-x  monitor/josef_experimental.py |  22
-rw-r--r--  monitor/josef_lib.py          |  20
-rwxr-xr-x  monitor/josef_monitor.py      |  46
-rwxr-xr-x  monitor/josef_mover.py        | 134
4 files changed, 128 insertions(+), 94 deletions(-)
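
The idea behind this commit, in brief: instead of fetching an inclusion proof, re-submit an entry's chain to a log and look at the timestamp of the SCT that comes back. A fresh timestamp means the log just created a new entry; an old one means the log already had it. Below is a minimal sketch of that check, assuming the josef_lib helpers (extract_original_entry, add_chain, add_prechain) behave as they are used elsewhere in this patch; probably_included and max_age are illustrative names, not part of the patch:

    import base64
    import time

    def probably_included(item, log_url, max_age=300):
        # extract_original_entry / add_chain / add_prechain assumed from josef_lib
        entry = extract_original_entry(item)   # entry[0]: chain, entry[2]: precert flag
        submission = [base64.b64encode(e) for e in entry[0]]
        if entry[2]:
            res = add_prechain(log_url, {"chain": submission})
        else:
            res = add_chain(log_url, {"chain": submission})
        # SCT timestamps are in milliseconds. An SCT noticeably older than
        # "now" was issued earlier, i.e. the entry was already in the log.
        return time.time() - res["timestamp"] / 1000.0 > max_age
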
diff --git a/monitor/josef_experimental.py b/monitor/josef_experimental.py
index d64012d..cef06a6 100755
--- a/monitor/josef_experimental.py
+++ b/monitor/josef_experimental.py
@@ -70,24 +70,24 @@ def update_roots(log):
 if __name__ == '__main__':
-    dbdir = "tmpdb/"
+    # dbdir = "tmpdb/"
     # entry = get_entries(ctlogs[0]["url"], 1,1)["entries"]
     # print extract_original_entry(entry[0])
-    for url in [CTLOGS[6]["url"]]:
+    # for url in [CTLOGS[6]["url"]]:
     # for url in [CTLOGS[0]["url"],CTLOGS[5]["url"],CTLOGS[6]["url"]]:
     # for log in CTLOGS:
     #     url = log["url"]
     # url = CTLOGS[1]["url"]
-        entries = get_entries(url, 3638637,3638637)["entries"]
+    # entries = get_entries(url, 3638637,3638637)["entries"]
     # entries = get_entries(url, first, last)["entries"]
-        tmp_cert_data = []
-        for item in entries:
-            tmp_data = check_domain(item, url)
-            entry_hash = get_leaf_hash(base64.b64decode(item["leaf_input"]))
-            if tmp_data:
-                tmp_data["leaf_hash"] = base64.b64encode(entry_hash)
-                tmp_cert_data.append(tmp_data)
-            print tmp_data
+    # tmp_cert_data = []
+    # for item in entries:
+    #     tmp_data = check_domain(item, url)
+    #     entry_hash = get_leaf_hash(base64.b64decode(item["leaf_input"]))
+    #     if tmp_data:
+    #         tmp_data["leaf_hash"] = base64.b64encode(entry_hash)
+    #         tmp_cert_data.append(tmp_data)
+    #     print tmp_data
     # new_leafs.append(entry_hash)
     # if self.dbdir:/
diff --git a/monitor/josef_lib.py b/monitor/josef_lib.py
index 0f64ef6..db9dad3 100644
--- a/monitor/josef_lib.py
+++ b/monitor/josef_lib.py
@@ -48,6 +48,26 @@ def get_all_roots(base_url):
         root_cert = base64.decodestring(accepted_cert)
     return certs
+def verify_inclusion_by_hash(base_url, leaf_hash):
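+    # Fetch the log's current STH, request an inclusion proof for leaf_hash,
+    # and check that the proof hashes up to the signed root hash.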
+    try:
+        tmp_sth = get_sth(base_url)
+        proof = get_proof_by_hash(base_url, leaf_hash, tmp_sth["tree_size"])
+
+        decoded_inclusion_proof = []
+        for item in proof["audit_path"]:
+            decoded_inclusion_proof.append(base64.b64decode(item))
+
+        root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash))
+
+        if tmp_sth["sha256_root_hash"] == root:
+            return True
+        else:
+            # print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url
+            return False
+    except:
+        # print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url
+        return False
+
 def check_domain(raw_entry, log=None):
     orig_entry = extract_original_entry(raw_entry)
     try:
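
For context, a usage sketch for the new helper, following the calling pattern of check_inclusion_all further down in this patch (the log choice and index 100 are arbitrary):

    import base64

    url = CTLOGS[0]["url"]
    e = get_entries(url, 100, 100)["entries"][0]
    h = get_leaf_hash(base64.b64decode(e["leaf_input"]))
    print "Included:", verify_inclusion_by_hash(url, h)
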
diff --git a/monitor/josef_monitor.py b/monitor/josef_monitor.py
index 3896493..b8ebd52 100755
--- a/monitor/josef_monitor.py
+++ b/monitor/josef_monitor.py
@@ -309,53 +309,9 @@ class ctlog:
             self.rollback()
 
-# def verify_inclusion_all(old, new):
-#     for url in old:
-#         try:
-#             if old[url] and new[url]:
-#                 if old[url]["tree_size"] != new[url]["tree_size"]:
-#                     entries = []
-
-#                     while len(entries) + old[url]["tree_size"] != new[url]["tree_size"]:
-#                         entries += get_entries(url, str(int(old[url]["tree_size"]) + len(entries)), new[url]["tree_size"] - 1)["entries"]
-#                         print "Got " + str(len(entries)) + " entries..."
-
-#                     success = True
-#                     for i in entries:
-#                         h = get_leaf_hash(base64.b64decode(i["leaf_input"]))
-#                         if not verify_inclusion_by_hash(url, h):
-#                             success = False
-
-#                     if success:
-#                         print time.strftime("%H:%M:%S") + " Verifying inclusion for " + str(len(entries)) + " new entries in " + url + " ...OK"
-#                     else:
-#                         print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
-#                         errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
-#         except:
-#             print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
-#             errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
-
-
-
-# def verify_inclusion_by_hash(base_url, leaf_hash):
-#     try:
-#         tmp_sth = get_sth(base_url)
-#         proof = get_proof_by_hash(base_url, leaf_hash, tmp_sth["tree_size"])
-#         decoded_inclusion_proof = []
-#         for item in proof["audit_path"]:
-#             decoded_inclusion_proof.append(base64.b64decode(item))
-
-#         root = base64.b64encode(verify_inclusion_proof(decoded_inclusion_proof, proof["leaf_index"], tmp_sth["tree_size"], leaf_hash))
-#         if tmp_sth["sha256_root_hash"] == root:
-#             return True
-#         else:
-#             print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for entry " + str(proof["leaf_index"]) + " in " + base_url
-#             return False
-#     except:
-#         print time.strftime('%H:%M:%S') + " ERROR: Could not prove inclusion for hashed entry in " + base_url
-#         return False
+
 # def verify_inclusion_by_index(base_url, index):
 #     try:
diff --git a/monitor/josef_mover.py b/monitor/josef_mover.py
index 34680d8..436e6b4 100755
--- a/monitor/josef_mover.py
+++ b/monitor/josef_mover.py
@@ -16,14 +16,19 @@ def print_reply(rep, entry):
     log_id = rep["id"]
-    for log in ctlogs:
+    for log in CTLOGS:
         if str(log_id) == log["id"]:
             l = log
             break
     else:
         l = {"name" : "Log not found" + str(log_id)}
-    print "Time:", t
+    s = "Time: " + str(t)
+    if is_new_timestamp(rep["timestamp"]):
+        print s, "(NEW)"
+    else:
+        print s, "(OLD)"
+
     if entry[2]:
         print "Type: Precert"
@@ -42,43 +47,96 @@ def print_reply(rep, entry):
     print ""
+def is_new_timestamp(ts):
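+    # An SCT issued more than MAX_TIMEDIFF seconds ago is treated as old,
+    # i.e. the log already had this entry when it was submitted.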
+    MAX_TIMEDIFF = 300  # 5 min, allows for some clock skew
+    ts_time = datetime.datetime.utcfromtimestamp(ts / 1000.0)
+    delta_time = datetime.datetime.utcnow() - ts_time
+    # total_seconds() rather than .seconds, which wraps at one day
+    if delta_time.total_seconds() > MAX_TIMEDIFF:
+        return False
+    else:
+        return True
+
+def check_inclusion_all(first, last, source, dest):
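+    # For every entry in [first, last] of each source log, report which
+    # destination logs can prove inclusion of its leaf hash.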
+    for s_log in source:
+        url = s_log["url"]
+        entries = []
+        while len(entries) + first != last + 1:
+            entries += get_entries(url, str(first + len(entries)), last)["entries"]
+            # print "Got " + str(len(entries)) + " entries..."
+
+        for e in entries:
+            inclusions = []
+            print base64.b64decode(e["leaf_input"])
+            h = get_leaf_hash(base64.b64decode(e["leaf_input"]))
+            for log in dest:
+                url = log["url"]
+                if verify_inclusion_by_hash(url, h):
+                    inclusions.append(log["name"])
+            print "Entry found in " + str(len(inclusions)) + " logs: ", inclusions
+            # success = False
+
+            # if success:
+            #     print time.strftime("%H:%M:%S") + " Verifying inclusion for " + str(len(entries)) + " new entries in " + url + " ...OK"
+            # else:
+            #     print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
+            #     errors.append(time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url)
+            # except:
+            #     print time.strftime('%H:%M:%S') + " ERROR: Failed to prove inclusion of all new entries in " + url
+
+def move_entry(first, last, source, dest):
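+    # Re-submit each entry from the source logs to every destination log;
+    # an old SCT timestamp in the reply means the entry was already there.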
+    # print entries
+    for s_log in source:
+        entries = get_entries(s_log["url"], first, last)["entries"]
+        print "\n\nSource: " + s_log["name"] + "\n"
+        for i in range(len(entries)):
+            # for item in entries:
+            item = entries[i]
+            inclusions = []
+            for d_log in dest:
+                print "Log: " + d_log["name"]
+                try:
+                    entry = extract_original_entry(item)
+                    if entry[2]:
+                        precert = True
+                    else:
+                        precert = False
+                    submission = []
+
+                    for e in entry[0]:
+                        submission.append(base64.b64encode(e))
+
+                    if precert:
+                        res = add_prechain(d_log["url"], {"chain" : submission})
+                    else:
+                        res = add_chain(d_log["url"], {"chain" : submission})
+                    print_reply(res, entry)
+
+                    if not is_new_timestamp(res["timestamp"]):
+                        inclusions.append(d_log["name"])
+
+                    # time.sleep(5)
+                except KeyboardInterrupt:
+                    sys.exit()
+                except:
+                    print "FAILED!\n"
+            print s_log["name"] + "[" + str(first + i) + "] found in " + str(len(inclusions)) + " logs: ", inclusions
+
+
+if __name__ == "__main__":
+    source = [CTLOGS[0]]
+    dests = CTLOGS
+    # source = ctlogs
+    # dests = ctlogs
+
+    first = 100
+    last = 101
+
+    move_entry(first, last, source, dests)
+    # check_inclusion_all(first, last, source, dests)
+
-source = [ctlogs[0]]
-dests = [ctlogs[0]]
-# source = ctlogs
-# dests = ctlogs
-
-first = 0
-last = 1
-
-# print entries
-for s_log in source:
-    entries = get_entries(s_log["url"], first, last)["entries"]
-    print "\n\nSource: " + s_log["name"] + "\n"
-    for d_log in dests:
-        for item in entries:
-            print "Log: " + d_log["name"]
-            try:
-                entry = extract_original_entry(item)
-                if entry[2]:
-                    precert = True
-                else:
-                    precert = False
-                submission = []
-
-                for e in entry[0]:
-                    submission.append(base64.b64encode(e))
-
-                if entry[2]:
-                    res = add_prechain(d_log["url"], {"chain" : submission})
-                else:
-                    res = add_chain(d_log["url"], {"chain" : submission})
-                print_reply(res, entry)
-                # time.sleep(5)
-            except KeyboardInterrupt:
-                sys.exit()
-            except:
-                print "FAILED!\n"