From e298a8d12ea6f205330031beb6d572aad9d27ee7 Mon Sep 17 00:00:00 2001
From: Linus Nordberg <linus@nordu.net>
Date: Fri, 2 Dec 2016 17:39:04 +0100
Subject: Don't crash in merge_fetch when there's a logorder file.

The 'logorder' file stores hashes hex-encoded. When they are read into
the in-memory logorder list, they are decoded to binary, so the rest of
the code must treat logorder entries as binary hashes and hex-encode
them only at output boundaries (log messages and the 'fetched' file).
---
 tools/merge_fetch.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

(limited to 'tools/merge_fetch.py')

diff --git a/tools/merge_fetch.py b/tools/merge_fetch.py
index af05209..5478f9e 100755
--- a/tools/merge_fetch.py
+++ b/tools/merge_fetch.py
@@ -188,7 +188,7 @@ def process_worker_message(name, msg, fetch_dict, fetch_set, chainsdb, newentry,
         logging.info("FETCHED from %s: %s", name, hexencode(ehash))
         chainsdb.add(ehash, entry)
         newentry.append(ehash) # Writing to logorderfile after loop.
-        logorder.append(hexencode(ehash))
+        logorder.append(ehash)
         entries_in_log.add(ehash)
         if ehash in fetch_dict:
             del fetch_dict[ehash]
@@ -212,8 +212,9 @@ def merge_fetch_parallel(args, config, localconfig):
 
     currentsizefilecontent = ""
     # Entries in log, kept in both a set and a list.
-    logorder = get_logorder(logorderfile) # Hashes are hexencoded.
-    entries_in_log = set(logorder)        # Hashes are binary.
+    logorder = get_logorder(logorderfile)
+    entries_in_log = set(logorder)
+
     # Entries to fetch, kept in both a set and a dict. The dict is
     # keyed on hashes (binary) and contains randomised lists of nodes
     # to fetch from. Note that the dict keeps entries until they're
@@ -268,9 +269,10 @@ def merge_fetch_parallel(args, config, localconfig):
             last_hash = ''
         else:
             last_hash = logorder[logsize - 1]
-        newcontent = {"index": logsize - 1, "hash": last_hash}
+        newcontent = {"index": logsize - 1, "hash": hexencode(last_hash)}
         if newcontent != currentsizefilecontent:
-            logging.info("updating 'fetched' file: %d %s", logsize - 1, last_hash)
+            logging.info("updating 'fetched' file: %d %s", logsize - 1,
+                         hexencode(last_hash))
             currentsizefilecontent = newcontent
             write_file(currentsizefile, currentsizefilecontent)
 
-- 
cgit v1.1