summaryrefslogtreecommitdiff
path: root/tools
diff options
context:
space:
mode:
authorLinus Nordberg <linus@nordu.net>2015-09-22 22:54:21 +0200
committerLinus Nordberg <linus@nordu.net>2015-11-10 12:48:46 +0100
commitab327d88f7a8f458b6150efd6b21b5615210e571 (patch)
tree63821c66612ef758d9c22eb539db3e7fff0ab421 /tools
parenta1d900e34bc73ea58c4bd1b83b5023343b2b8f01 (diff)
Lint nits.
Diffstat (limited to 'tools')
-rw-r--r--tools/mergetools.py94
1 file changed, 56 insertions(+), 38 deletions(-)
diff --git a/tools/mergetools.py b/tools/mergetools.py
index c474796..7644dac 100644
--- a/tools/mergetools.py
+++ b/tools/mergetools.py
@@ -19,7 +19,8 @@ def get_logorder(filename):
return [parselogrow(row.rstrip()) for row in f]
def read_chain_open(chainsdir, filename):
- path = chainsdir + "/" + filename[0:2] + "/" + filename[2:4] + "/" + filename[4:6]
+ path = chainsdir + "/" + \
+ filename[0:2] + "/" + filename[2:4] + "/" + filename[4:6]
f = open(path + "/" + filename, "r")
return f
@@ -35,55 +36,55 @@ def read_chain(chainsdir, key):
def tlv_decode(data):
(length,) = struct.unpack(">I", data[0:4])
- type = data[4:8]
+ dtype = data[4:8]
value = data[8:length]
rest = data[length:]
- return (type, value, rest)
+ return (dtype, value, rest)
-def tlv_encode(type, value):
- assert(len(type) == 4)
- data = struct.pack(">I", len(value) + 8) + type + value
+def tlv_encode(dtype, value):
+ assert len(dtype) == 4
+ data = struct.pack(">I", len(value) + 8) + dtype + value
return data
def tlv_decodelist(data):
l = []
while len(data):
- (type, value, rest) = tlv_decode(data)
- l.append((type, value))
+ (dtype, value, rest) = tlv_decode(data)
+ l.append((dtype, value))
data = rest
return l
def tlv_encodelist(l):
data = ""
- for (type, value) in l:
- data += tlv_encode(type, value)
+ for (dtype, value) in l:
+ data += tlv_encode(dtype, value)
return data
def unwrap_entry(entry):
ploplevel = tlv_decodelist(entry)
- assert(len(ploplevel) == 2)
+ assert len(ploplevel) == 2
(ploptype, plopdata) = ploplevel[0]
(plopchecksumtype, plopchecksum) = ploplevel[1]
- assert(ploptype == "PLOP")
- assert(plopchecksumtype == "S256")
+ assert ploptype == "PLOP"
+ assert plopchecksumtype == "S256"
computedchecksum = hashlib.sha256(plopdata).digest()
- assert(computedchecksum == plopchecksum)
+ assert computedchecksum == plopchecksum
return plopdata
def wrap_entry(entry):
return tlv_encodelist([("PLOP", entry),
("S256", hashlib.sha256(entry).digest())])
-def verify_entry(verifycert, entry, hash):
+def verify_entry(verifycert, entry, ehash):
packed = unwrap_entry(entry)
unpacked = tlv_decodelist(packed)
(mtltype, mtl) = unpacked[0]
- assert hash == get_leaf_hash(mtl)
+ assert ehash == get_leaf_hash(mtl)
assert mtltype == "MTL1"
s = struct.pack(">I", len(packed)) + packed
try:
verifycert.stdin.write(s)
- except IOError, e:
+ except IOError:
sys.stderr.write("merge: unable to write to verifycert process: ")
while 1:
line = verifycert.stdout.readline()
@@ -110,7 +111,7 @@ def write_chain(key, value, chainsdir, hashed_dir=True):
+ filename[0:2] + "/" + filename[2:4] + "/" + filename[4:6]
try:
os.makedirs(path)
- except Exception, e:
+ except Exception:
pass
else:
path = chainsdir
@@ -145,14 +146,16 @@ def get_new_entries(node, baseurl, own_key, paths):
def get_entries(node, baseurl, own_key, paths, hashes):
try:
- params = urllib.urlencode({"hash":[base64.b64encode(hash) for \
- hash in hashes]}, doseq=True)
+ params = urllib.urlencode({"hash":[base64.b64encode(ehash) for \
+ ehash in hashes]}, doseq=True)
result = http_request(baseurl + "plop/v1/storage/getentry?" + params,
key=own_key, verifynode=node,
publickeydir=paths["publickeys"])
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
- entries = dict([(base64.b64decode(entry["hash"]), base64.b64decode(entry["entry"])) for entry in parsed_result[u"entries"]])
+ entries = dict([(base64.b64decode(entry["hash"]),
+ base64.b64decode(entry["entry"])) for \
+ entry in parsed_result[u"entries"]])
assert len(entries) == len(hashes)
assert set(entries.keys()) == set(hashes)
return entries
@@ -164,7 +167,9 @@ def get_entries(node, baseurl, own_key, paths, hashes):
def get_curpos(node, baseurl, own_key, paths):
try:
- result = http_request(baseurl + "plop/v1/frontend/currentposition", key=own_key, verifynode=node, publickeydir=paths["publickeys"])
+ result = http_request(baseurl + "plop/v1/frontend/currentposition",
+ key=own_key, verifynode=node,
+ publickeydir=paths["publickeys"])
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
return parsed_result[u"position"]
@@ -176,7 +181,9 @@ def get_curpos(node, baseurl, own_key, paths):
def get_verifiedsize(node, baseurl, own_key, paths):
try:
- result = http_request(baseurl + "plop/v1/merge/verifiedsize", key=own_key, verifynode=node, publickeydir=paths["publickeys"])
+ result = http_request(baseurl + "plop/v1/merge/verifiedsize",
+ key=own_key, verifynode=node,
+ publickeydir=paths["publickeys"])
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
return parsed_result[u"size"]
@@ -190,7 +197,8 @@ def get_verifiedsize(node, baseurl, own_key, paths):
def sendlog(node, baseurl, own_key, paths, submission):
try:
result = http_request(baseurl + "plop/v1/frontend/sendlog",
- json.dumps(submission), key=own_key, verifynode=node, publickeydir=paths["publickeys"])
+ json.dumps(submission), key=own_key,
+ verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
except urllib2.HTTPError, e:
print >>sys.stderr, "ERROR: sendlog", e.read()
@@ -208,7 +216,8 @@ def sendlog(node, baseurl, own_key, paths, submission):
def backup_sendlog(node, baseurl, own_key, paths, submission):
try:
result = http_request(baseurl + "plop/v1/merge/sendlog",
- json.dumps(submission), key=own_key, verifynode=node, publickeydir=paths["publickeys"])
+ json.dumps(submission), key=own_key,
+ verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
except urllib2.HTTPError, e:
print >>sys.stderr, "ERROR: sendlog", e.read()
@@ -223,36 +232,40 @@ def backup_sendlog(node, baseurl, own_key, paths, submission):
sys.stderr.flush()
raise e
-def sendentry(node, baseurl, own_key, paths, entry, hash):
+def sendentry(node, baseurl, own_key, paths, entry, ehash):
try:
- result = http_request(baseurl + "plop/v1/frontend/sendentry",
- json.dumps({"entry":base64.b64encode(entry), "treeleafhash":base64.b64encode(hash)}), key=own_key,
- verifynode=node, publickeydir=paths["publickeys"])
+ result = http_request(
+ baseurl + "plop/v1/frontend/sendentry",
+ json.dumps({"entry":base64.b64encode(entry),
+ "treeleafhash":base64.b64encode(ehash)}),
+ key=own_key, verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
except urllib2.HTTPError, e:
print >>sys.stderr, "ERROR: sendentry", e.read()
sys.exit(1)
except ValueError, e:
print >>sys.stderr, "==== FAILED REQUEST ===="
- print >>sys.stderr, hash
+ print >>sys.stderr, ehash
print >>sys.stderr, "======= RESPONSE ======="
print >>sys.stderr, result
print >>sys.stderr, "========================"
sys.stderr.flush()
raise e
-def sendentry_merge(node, baseurl, own_key, paths, entry, hash):
+def sendentry_merge(node, baseurl, own_key, paths, entry, ehash):
try:
- result = http_request(baseurl + "plop/v1/merge/sendentry",
- json.dumps({"entry":base64.b64encode(entry), "treeleafhash":base64.b64encode(hash)}), key=own_key,
- verifynode=node, publickeydir=paths["publickeys"])
+ result = http_request(
+ baseurl + "plop/v1/merge/sendentry",
+ json.dumps({"entry":base64.b64encode(entry),
+ "treeleafhash":base64.b64encode(ehash)}),
+ key=own_key, verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
except urllib2.HTTPError, e:
print >>sys.stderr, "ERROR: sendentry", e.read()
sys.exit(1)
except ValueError, e:
print >>sys.stderr, "==== FAILED REQUEST ===="
- print >>sys.stderr, hash
+ print >>sys.stderr, ehash
print >>sys.stderr, "======= RESPONSE ======="
print >>sys.stderr, result
print >>sys.stderr, "========================"
@@ -262,7 +275,8 @@ def sendentry_merge(node, baseurl, own_key, paths, entry, hash):
def sendsth(node, baseurl, own_key, paths, submission):
try:
result = http_request(baseurl + "plop/v1/frontend/sendsth",
- json.dumps(submission), key=own_key, verifynode=node, publickeydir=paths["publickeys"])
+ json.dumps(submission), key=own_key,
+ verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
except urllib2.HTTPError, e:
print >>sys.stderr, "ERROR: sendsth", e.read()
@@ -314,7 +328,9 @@ def setverifiedsize(node, baseurl, own_key, paths, treesize):
def get_missingentries(node, baseurl, own_key, paths):
try:
- result = http_request(baseurl + "plop/v1/frontend/missingentries", key=own_key, verifynode=node, publickeydir=paths["publickeys"])
+ result = http_request(baseurl + "plop/v1/frontend/missingentries",
+ key=own_key, verifynode=node,
+ publickeydir=paths["publickeys"])
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
return parsed_result[u"entries"]
@@ -326,7 +342,9 @@ def get_missingentries(node, baseurl, own_key, paths):
def get_missingentriesforbackup(node, baseurl, own_key, paths):
try:
- result = http_request(baseurl + "plop/v1/merge/missingentries", key=own_key, verifynode=node, publickeydir=paths["publickeys"])
+ result = http_request(baseurl + "plop/v1/merge/missingentries",
+ key=own_key, verifynode=node,
+ publickeydir=paths["publickeys"])
parsed_result = json.loads(result)
if parsed_result.get(u"result") == u"ok":
return parsed_result[u"entries"]