author     Magnus Ahltorp <map@kth.se>    2017-03-02 01:09:53 +0100
committer  Magnus Ahltorp <map@kth.se>    2017-03-02 01:09:53 +0100
commit     ee64f188609bb1a29a8371ac84c05202be41deac (patch)
tree       33035e32a499bb997e239337c9a15cb5c34036f4
parent     e67187c38e4e5a97ab971554d8acf9466e9775c2 (diff)
Handle unreachable storage nodes
-rwxr-xr-x  tools/merge_fetch.py  23
1 file changed, 15 insertions, 8 deletions
diff --git a/tools/merge_fetch.py b/tools/merge_fetch.py
index 42a3089..e71d3f1 100755
--- a/tools/merge_fetch.py
+++ b/tools/merge_fetch.py
@@ -38,14 +38,17 @@ def merge_fetch(args, config, localconfig):
     entries_to_fetch = {}
 
     for storagenode in storagenodes:
-        print >>sys.stderr, "getting new entries from", storagenode["name"]
-        sys.stderr.flush()
-        new_entries_per_node[storagenode["name"]] = \
-            set(get_new_entries(storagenode["name"],
-                                "https://%s/" % storagenode["address"],
-                                own_key, paths))
-        new_entries.update(new_entries_per_node[storagenode["name"]])
-        entries_to_fetch[storagenode["name"]] = []
+        try:
+            print >>sys.stderr, "getting new entries from", storagenode["name"]
+            sys.stderr.flush()
+            new_entries_per_node[storagenode["name"]] = \
+                set(get_new_entries(storagenode["name"],
+                                    "https://%s/" % storagenode["address"],
+                                    own_key, paths))
+            new_entries.update(new_entries_per_node[storagenode["name"]])
+            entries_to_fetch[storagenode["name"]] = []
+        except requests.exceptions.ConnectionError:
+            pass
     timing_point(timing, "get new entries")
 
     new_entries -= certsinlog
@@ -54,6 +57,8 @@ def merge_fetch(args, config, localconfig):
 
     for ehash in new_entries:
         for storagenode in storagenodes:
+            if storagenode["name"] not in new_entries_per_node:
+                continue
             if ehash in new_entries_per_node[storagenode["name"]]:
                 entries_to_fetch[storagenode["name"]].append(ehash)
                 break
@@ -64,6 +69,8 @@ def merge_fetch(args, config, localconfig):
 
     added_entries = 0
     for storagenode in storagenodes:
+        if storagenode["name"] not in entries_to_fetch:
+            continue
         print >>sys.stderr, "getting %d entries from %s:" % \
             (len(entries_to_fetch[storagenode["name"]]), storagenode["name"]),
         sys.stderr.flush()
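
The pattern this patch applies generalizes beyond merge_fetch.py: wrap each per-node network call in try/except requests.exceptions.ConnectionError, record results only for nodes that actually answered, and guard every later dictionary lookup with a "not in" check so an unreachable node is skipped instead of raising KeyError. Below is a minimal standalone sketch of that pattern, not the catlfish code itself; the node list, the fetchnewentries URL path and the JSON response shape are illustrative assumptions.

# Sketch only: demonstrates skipping unreachable storage nodes.
# Node addresses, the URL path and the response format are made up.
import sys
import requests

storagenodes = [
    {"name": "storage-1", "address": "storage-1.example.com"},
    {"name": "storage-2", "address": "storage-2.example.com"},
]

new_entries_per_node = {}

for storagenode in storagenodes:
    try:
        # ConnectionError covers DNS failures, refused connections and
        # connect timeouts; any of these marks the node as unreachable.
        response = requests.get("https://%s/fetchnewentries"
                                % storagenode["address"], timeout=30)
        new_entries_per_node[storagenode["name"]] = \
            set(response.json()["entries"])
    except requests.exceptions.ConnectionError:
        sys.stderr.write("%s unreachable, skipping\n" % storagenode["name"])

# Later passes only touch nodes that answered, mirroring the
# "not in" guards added by the patch.
for storagenode in storagenodes:
    if storagenode["name"] not in new_entries_per_node:
        continue
    sys.stderr.write("%s returned %d entries\n"
                     % (storagenode["name"],
                        len(new_entries_per_node[storagenode["name"]])))

With this structure a single dead node costs only its own entries for the current round; the merge continues with whatever the reachable nodes returned, which is what the "not in" checks added in the second and third hunks arrange.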