summaryrefslogtreecommitdiff
path: root/tools/mergetools.py
diff options
context:
space:
mode:
authorLinus Nordberg <linus@nordu.net>2016-07-11 15:00:29 +0200
committerLinus Nordberg <linus@nordu.net>2016-07-11 15:00:29 +0200
commit3d4a9fdd338713c2f63da2b92940904762878d98 (patch)
tree2e8ee7375619d507f0f206be2c713aa12d17f048 /tools/mergetools.py
parent1a36628401658def9ab9595f7cbcf72b8cb4eb6a (diff)
parentbbf254d6d7f1708503f425c0eb8926af1b715b9c (diff)
Merge remote-tracking branch 'refs/remotes/map/python-requests-chunked'
Diffstat (limited to 'tools/mergetools.py')
-rw-r--r--tools/mergetools.py102
1 files changed, 33 insertions, 69 deletions
diff --git a/tools/mergetools.py b/tools/mergetools.py
index 3dbe517..ec4fd2a 100644
--- a/tools/mergetools.py
+++ b/tools/mergetools.py
@@ -6,11 +6,10 @@ import base64
import hashlib
import sys
import struct
-import urllib
-import urllib2
import json
import yaml
import argparse
+import requests
from certtools import get_leaf_hash, http_request, get_leaf_hash
def parselogrow(row):
@@ -172,18 +171,15 @@ def get_new_entries(node, baseurl, own_key, paths):
entry in parsed_result[u"entries"]]
print >>sys.stderr, "ERROR: fetchnewentries", parsed_result
sys.exit(1)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: fetchnewentries", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: fetchnewentries", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: fetchnewentries", e.response
sys.exit(1)
def get_entries(node, baseurl, own_key, paths, hashes):
try:
- params = urllib.urlencode({"hash":[base64.b64encode(ehash) for \
- ehash in hashes]}, doseq=True)
- result = http_request(baseurl + "plop/v1/storage/getentry?" + params,
+ params = {"hash":[base64.b64encode(ehash) for ehash in hashes]}
+ result = http_request(baseurl + "plop/v1/storage/getentry",
+ params=params,
key=own_key, verifynode=node,
publickeydir=paths["publickeys"])
parsed_result = json.loads(result)
@@ -196,11 +192,8 @@ def get_entries(node, baseurl, own_key, paths, hashes):
return entries
print >>sys.stderr, "ERROR: getentry", parsed_result
sys.exit(1)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: getentry", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: getentry", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: getentry", e.request.url, e.response
sys.exit(1)
def get_curpos(node, baseurl, own_key, paths):
@@ -213,11 +206,8 @@ def get_curpos(node, baseurl, own_key, paths):
return parsed_result[u"position"]
print >>sys.stderr, "ERROR: currentposition", parsed_result
sys.exit(1)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: currentposition", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: currentposition", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: currentposition", e.response
sys.exit(1)
def get_verifiedsize(node, baseurl, own_key, paths):
@@ -230,11 +220,8 @@ def get_verifiedsize(node, baseurl, own_key, paths):
return parsed_result[u"size"]
print >>sys.stderr, "ERROR: verifiedsize", parsed_result
sys.exit(1)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: verifiedsize", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: verifiedsize", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: verifiedsize", e.response
sys.exit(1)
@@ -244,11 +231,8 @@ def sendlog(node, baseurl, own_key, paths, submission):
json.dumps(submission), key=own_key,
verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: sendlog", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: sendlog", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: sendlog", e.response
sys.stderr.flush()
return None
except ValueError, e:
@@ -266,11 +250,8 @@ def backup_sendlog(node, baseurl, own_key, paths, submission):
json.dumps(submission), key=own_key,
verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: backup_sendlog", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: backup_sendlog", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: backup_sendlog", e.response
sys.stderr.flush()
return None
except ValueError, e:
@@ -290,11 +271,8 @@ def sendentry(node, baseurl, own_key, paths, entry, ehash):
"treeleafhash":base64.b64encode(ehash)}),
key=own_key, verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: sendentry", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: sendentry", e.read()
+ except requests.exceptions.HTTPError, e:
+        print >>sys.stderr, "ERROR: sendentry", e.response
sys.exit(1)
except ValueError, e:
print >>sys.stderr, "==== FAILED REQUEST ===="
@@ -308,19 +286,17 @@ def sendentry(node, baseurl, own_key, paths, entry, ehash):
def sendentry_merge(node, baseurl, own_key, paths, entry, ehash):
return sendentries_merge(node, baseurl, own_key, paths, [(ehash, entry)])
-def sendentries_merge(node, baseurl, own_key, paths, entries):
+def sendentries_merge(node, baseurl, own_key, paths, entries, session=None):
try:
json_entries = [{"entry":base64.b64encode(entry), "treeleafhash":base64.b64encode(hash)} for hash, entry in entries]
result = http_request(
baseurl + "plop/v1/merge/sendentry",
json.dumps(json_entries),
- key=own_key, verifynode=node, publickeydir=paths["publickeys"])
+ key=own_key, verifynode=node, publickeydir=paths["publickeys"],
+ session=session)
return json.loads(result)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: sendentry_merge", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: sendentry_merge", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: sendentry_merge", e.response
sys.exit(1)
except ValueError, e:
print >>sys.stderr, "==== FAILED REQUEST ===="
@@ -337,8 +313,8 @@ def sendsth(node, baseurl, own_key, paths, submission):
json.dumps(submission), key=own_key,
verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: sendsth", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: sendsth", e.response
sys.exit(1)
except ValueError, e:
print >>sys.stderr, "==== FAILED REQUEST ===="
@@ -355,11 +331,8 @@ def verifyroot(node, baseurl, own_key, paths, treesize):
json.dumps({"tree_size":treesize}), key=own_key,
verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: verifyroot", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: verifyroot", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: verifyroot", e.response
sys.exit(1)
except ValueError, e:
print >>sys.stderr, "==== FAILED REQUEST ===="
@@ -376,11 +349,8 @@ def setverifiedsize(node, baseurl, own_key, paths, treesize):
json.dumps({"size":treesize}), key=own_key,
verifynode=node, publickeydir=paths["publickeys"])
return json.loads(result)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: setverifiedsize", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: setverifiedsize", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: setverifiedsize", e.response
sys.exit(1)
except ValueError, e:
print >>sys.stderr, "==== FAILED REQUEST ===="
@@ -401,11 +371,8 @@ def get_missingentries(node, baseurl, own_key, paths):
return parsed_result[u"entries"]
print >>sys.stderr, "ERROR: missingentries", parsed_result
sys.exit(1)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: missingentries", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: missingentries", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: missingentries", e.response
sys.exit(1)
def get_missingentriesforbackup(node, baseurl, own_key, paths):
@@ -418,11 +385,8 @@ def get_missingentriesforbackup(node, baseurl, own_key, paths):
return parsed_result[u"entries"]
print >>sys.stderr, "ERROR: missingentriesforbackup", parsed_result
sys.exit(1)
- except urllib2.URLError, e:
- print >>sys.stderr, "ERROR: missingentriesforbackup", e.reason
- sys.exit(1)
- except urllib2.HTTPError, e:
- print >>sys.stderr, "ERROR: missingentriesforbackup", e.read()
+ except requests.exceptions.HTTPError, e:
+ print >>sys.stderr, "ERROR: missingentriesforbackup", e.response
sys.exit(1)
def chunks(l, n):