author | Linus Nordberg <linus@nordu.net> | 2015-09-25 19:49:39 +0200 |
---|---|---|
committer | Linus Nordberg <linus@nordu.net> | 2015-11-10 12:48:47 +0100 |
commit | 87e5dd2bfd293f229bab472e946ef12580facf6c (patch) | |
tree | 30f1fb6331d8db74aa454fb06be1debcdac7cd7f /tools/mergetools.py | |
parent | 5825db44c73510e71e34f020e3efd03bd49ffd0c (diff) |
Add a test for when merge backup fails.
Also, avoid tracebacks on ECONNREFUSED by catching urllib2.URLError.
Diffstat (limited to 'tools/mergetools.py')
-rw-r--r-- | tools/mergetools.py | 40 |
1 file changed, 38 insertions, 2 deletions
diff --git a/tools/mergetools.py b/tools/mergetools.py
index 7b674de..89ba7b2 100644
--- a/tools/mergetools.py
+++ b/tools/mergetools.py
@@ -172,6 +172,9 @@ def get_new_entries(node, baseurl, own_key, paths):
                 entry in parsed_result[u"entries"]]
         print >>sys.stderr, "ERROR: fetchnewentries", parsed_result
         sys.exit(1)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: fetchnewentries", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
         print >>sys.stderr, "ERROR: fetchnewentries", e.read()
         sys.exit(1)
@@ -193,6 +196,9 @@ def get_entries(node, baseurl, own_key, paths, hashes):
             return entries
         print >>sys.stderr, "ERROR: getentry", parsed_result
         sys.exit(1)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: getentry", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
         print >>sys.stderr, "ERROR: getentry", e.read()
         sys.exit(1)
@@ -207,6 +213,9 @@ def get_curpos(node, baseurl, own_key, paths):
             return parsed_result[u"position"]
         print >>sys.stderr, "ERROR: currentposition", parsed_result
         sys.exit(1)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: currentposition", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
         print >>sys.stderr, "ERROR: currentposition", e.read()
         sys.exit(1)
@@ -221,6 +230,9 @@ def get_verifiedsize(node, baseurl, own_key, paths):
             return parsed_result[u"size"]
         print >>sys.stderr, "ERROR: verifiedsize", parsed_result
         sys.exit(1)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: verifiedsize", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
         print >>sys.stderr, "ERROR: verifiedsize", e.read()
         sys.exit(1)
@@ -232,6 +244,9 @@ def sendlog(node, baseurl, own_key, paths, submission):
                               json.dumps(submission), key=own_key,
                               verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: sendlog", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
         print >>sys.stderr, "ERROR: sendlog", e.read()
         sys.stderr.flush()
@@ -251,8 +266,11 @@ def backup_sendlog(node, baseurl, own_key, paths, submission):
                               json.dumps(submission), key=own_key,
                               verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: backup_sendlog", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: sendlog", e.read()
+        print >>sys.stderr, "ERROR: backup_sendlog", e.read()
         sys.stderr.flush()
         return None
     except ValueError, e:
@@ -272,6 +290,9 @@ def sendentry(node, baseurl, own_key, paths, entry, ehash):
                                           "treeleafhash":base64.b64encode(ehash)}),
                               key=own_key, verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: sendentry", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
         print >>sys.stderr, "ERROR: sendentry", e.read()
         sys.exit(1)
@@ -292,8 +313,11 @@ def sendentry_merge(node, baseurl, own_key, paths, entry, ehash):
                                           "treeleafhash":base64.b64encode(ehash)}),
                               key=own_key, verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: sendentry_merge", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
-        print >>sys.stderr, "ERROR: sendentry", e.read()
+        print >>sys.stderr, "ERROR: sendentry_merge", e.read()
         sys.exit(1)
     except ValueError, e:
         print >>sys.stderr, "==== FAILED REQUEST ===="
@@ -328,6 +352,9 @@ def verifyroot(node, baseurl, own_key, paths, treesize):
                               json.dumps({"tree_size":treesize}), key=own_key, verifynode=node,
                               publickeydir=paths["publickeys"])
         return json.loads(result)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: verifyroot", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
         print >>sys.stderr, "ERROR: verifyroot", e.read()
         sys.exit(1)
@@ -346,6 +373,9 @@ def setverifiedsize(node, baseurl, own_key, paths, treesize):
                               json.dumps({"size":treesize}), key=own_key,
                               verifynode=node, publickeydir=paths["publickeys"])
         return json.loads(result)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: setverifiedsize", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
         print >>sys.stderr, "ERROR: setverifiedsize", e.read()
         sys.exit(1)
@@ -368,6 +398,9 @@ def get_missingentries(node, baseurl, own_key, paths):
             return parsed_result[u"entries"]
         print >>sys.stderr, "ERROR: missingentries", parsed_result
         sys.exit(1)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: missingentries", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
         print >>sys.stderr, "ERROR: missingentries", e.read()
         sys.exit(1)
@@ -382,6 +415,9 @@ def get_missingentriesforbackup(node, baseurl, own_key, paths):
             return parsed_result[u"entries"]
         print >>sys.stderr, "ERROR: missingentriesforbackup", parsed_result
         sys.exit(1)
+    except urllib2.URLError, e:
+        print >>sys.stderr, "ERROR: missingentriesforbackup", e.reason
+        sys.exit(1)
     except urllib2.HTTPError, e:
         print >>sys.stderr, "ERROR: missingentriesforbackup", e.read()
         sys.exit(1)
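Background on the commit message's ECONNREFUSED point: a refused connection is raised by the socket layer and surfaces from urllib2.urlopen() as urllib2.URLError, whose .reason attribute carries the underlying socket error, so catching URLError turns an unhandled traceback into a one-line error message. The sketch below is a minimal standalone illustration of that pattern, not code from this repository; the fetch() helper and the URL are invented for the example, and it keeps the Python 2 syntax used by mergetools.py.

```python
# Minimal sketch (Python 2) of catching urllib2.URLError so that a refused
# connection (ECONNREFUSED) produces an error message instead of a traceback.
import sys
import urllib2

def fetch(url):
    try:
        return urllib2.urlopen(url, timeout=5).read()
    except urllib2.HTTPError, e:
        # HTTPError is a subclass of URLError; catching it first keeps
        # HTTP status errors (404, 500, ...) separate from connection errors.
        print >>sys.stderr, "ERROR: fetch", e.code, e.read()
        sys.exit(1)
    except urllib2.URLError, e:
        # e.reason is typically a socket.error such as
        # "[Errno 111] Connection refused" when nothing listens on the port.
        print >>sys.stderr, "ERROR: fetch", e.reason
        sys.exit(1)

if __name__ == '__main__':
    # Placeholder URL; port 9 (discard) is usually closed, so this normally
    # exercises the URLError branch.
    print fetch("http://127.0.0.1:9/")
```

Since urllib2.HTTPError derives from urllib2.URLError, the order of the except clauses determines which handler sees an HTTP status error; the sketch lists the more specific HTTPError first for that reason.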