# Copyright (c) 2015, NORDUnet A/S.
# See LICENSE for licensing information.

import base64
import errno
import hashlib
import json
import os
import struct
import sys
import urllib
import urllib2

from certtools import get_leaf_hash, http_request

def parselogrow(row):
    """Decode one base16 (hex) line from the log order file; either case is accepted."""
    return base64.b16decode(row, casefold=True)

def get_logorder(filename):
    """Read the log order file (one b16-encoded hash per line).

    Returns the list of decoded (binary) hashes in file order.
    """
    # Use a context manager so the file handle is closed deterministically;
    # the original left it to garbage collection.
    with open(filename, "r") as f:
        return [parselogrow(row.rstrip()) for row in f]

def read_chain_open(chainsdir, filename):
    """Open (read mode) the chain file for filename inside its fan-out directory.

    Files are fanned out over three directory levels named after the
    first six characters of the filename.
    """
    subdir = "/".join((chainsdir, filename[0:2], filename[2:4], filename[4:6]))
    return open(subdir + "/" + filename, "r")

def read_chain(chainsdir, key):
    """Read and return the chain data stored under the given binary key.

    The uppercase hex filename is tried first; on IOError the lowercase
    spelling is tried instead.
    """
    filename = base64.b16encode(key).upper()
    try:
        f = read_chain_open(chainsdir, filename)
    except IOError:
        f = read_chain_open(chainsdir, filename.lower())
    try:
        return f.read()
    finally:
        f.close()

def tlv_decode(data):
    """Decode one TLV record from the front of data.

    Wire format: 4-byte big-endian total length (header included),
    4-byte type tag, then the value.  Returns the tuple
    (type, value, bytes-remaining-after-this-record).
    """
    (length,) = struct.unpack(">I", data[:4])
    dtype, value = data[4:8], data[8:length]
    return (dtype, value, data[length:])

def tlv_encode(dtype, value):
    """Encode one TLV record: big-endian length header, 4-byte type tag, value."""
    assert len(dtype) == 4
    header = struct.pack(">I", 8 + len(value))
    return header + dtype + value

def tlv_decodelist(data):
    """Decode a concatenation of TLV records into a list of (type, value) pairs."""
    records = []
    while data:
        (dtype, value, data) = tlv_decode(data)
        records.append((dtype, value))
    return records

def tlv_encodelist(l):
    """Concatenate the TLV encodings of every (type, value) pair in l."""
    return "".join(tlv_encode(dtype, value) for (dtype, value) in l)

def unwrap_entry(entry):
    """Strip the PLOP/S256 wrapping from a stored entry.

    The entry must decode to exactly two TLV records: the payload
    ("PLOP") followed by its SHA-256 digest ("S256").  The digest is
    verified before the payload is returned.
    """
    records = tlv_decodelist(entry)
    assert len(records) == 2
    (payloadtype, payload) = records[0]
    (checksumtype, checksum) = records[1]
    assert payloadtype == "PLOP"
    assert checksumtype == "S256"
    assert hashlib.sha256(payload).digest() == checksum
    return payload

def wrap_entry(entry):
    """Wrap an entry as a PLOP TLV record followed by its S256 checksum record."""
    checksum = hashlib.sha256(entry).digest()
    return tlv_encodelist([("PLOP", entry), ("S256", checksum)])

def verify_entry(verifycert, entry, ehash):
    """Check a wrapped entry against its expected leaf hash, then submit
    it to the external `verifycert` subprocess for verification.

    verifycert -- object with `stdin`/`stdout` pipe attributes speaking a
                  length-prefixed protocol (4-byte big-endian length,
                  then payload) -- assumed from usage here; confirm
                  against the caller that spawns the process.
    entry      -- PLOP/S256-wrapped entry (see wrap_entry/unwrap_entry).
    ehash      -- expected Merkle tree leaf hash of the "MTL1" payload.

    Exits the process (status 1) on any verification failure.
    """
    packed = unwrap_entry(entry)
    unpacked = tlv_decodelist(packed)
    # First TLV record must be the Merkle tree leaf ("MTL1"), and its
    # computed leaf hash must match what the caller expects.
    (mtltype, mtl) = unpacked[0]
    assert ehash == get_leaf_hash(mtl)
    assert mtltype == "MTL1"
    # Length-prefix the payload for the verifycert pipe protocol.
    s = struct.pack(">I", len(packed)) + packed
    try:
        verifycert.stdin.write(s)
    except IOError:
        # Writing failed (subprocess presumably dead): relay whatever it
        # says on stdout to stderr, then exit once its output runs dry.
        sys.stderr.write("merge: unable to write to verifycert process: ")
        while 1:
            line = verifycert.stdout.readline()
            if line:
                sys.stderr.write(line)
            else:
                sys.exit(1)
    # Response: 4-byte big-endian length, then a blob whose first byte is
    # an error code (0 = success), followed by any error text.
    result_length_packed = verifycert.stdout.read(4)
    (result_length,) = struct.unpack(">I", result_length_packed)
    result = verifycert.stdout.read(result_length)
    assert len(result) == result_length
    (error_code,) = struct.unpack("B", result[0:1])
    if error_code != 0:
        print >>sys.stderr, result[1:]
        sys.exit(1)

def hexencode(key):
    """Return the lowercase base16 (hex) encoding of key."""
    encoded = base64.b16encode(key)
    return encoded.lower()

def write_chain(key, value, chainsdir, hashed_dir=True):
    """Write an entry's chain data under its hex-encoded key.

    With hashed_dir=True (default) the file is placed in a three-level
    fan-out directory named after the first six hex digits of the key
    (the layout read_chain_open expects); otherwise it goes directly
    into chainsdir.
    """
    filename = hexencode(key)
    if hashed_dir:
        path = chainsdir + "/" \
          + filename[0:2] + "/" + filename[2:4] + "/" + filename[4:6]
        try:
            os.makedirs(path)
        except OSError as e:
            # The fan-out directory usually exists already; tolerate only
            # that case.  The original swallowed every exception, which
            # hid real failures (permissions, disk full) until the
            # open() below failed with a less helpful error.
            if e.errno != errno.EEXIST:
                raise
    else:
        path = chainsdir
    # Close deterministically even if the write fails.
    with open(path + "/" + filename, "w") as f:
        f.write(value)

def add_to_logorder(logorderfile, key):
    """Append the hex encoding of key as a new line at the end of the log order file."""
    with open(logorderfile, "a") as f:
        f.write(hexencode(key) + "\n")

def fsync_logorder(logorderfile):
    """Force the log order file's contents out to stable storage."""
    with open(logorderfile, "a") as f:
        os.fsync(f.fileno())

def get_new_entries(node, baseurl, own_key, paths):
    """Fetch and b64-decode all new entries from a storage node.

    Exits the process (status 1) on an HTTP error or a non-ok reply.
    """
    try:
        result = http_request(baseurl + "plop/v1/storage/fetchnewentries",
                              key=own_key, verifynode=node,
                              publickeydir=paths["publickeys"])
        reply = json.loads(result)
        if reply.get(u"result") != u"ok":
            print >>sys.stderr, "ERROR: fetchnewentries", reply
            sys.exit(1)
        return [base64.b64decode(entry) for entry in reply[u"entries"]]
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: fetchnewentries", e.read()
        sys.exit(1)

def get_entries(node, baseurl, own_key, paths, hashes):
    """Fetch the entries for the given leaf hashes from a storage node.

    Returns a dict mapping each requested (binary) hash to its entry
    data; the reply must cover exactly the requested hashes.  Exits the
    process (status 1) on an HTTP error or a non-ok reply.
    """
    try:
        params = urllib.urlencode(
            {"hash": [base64.b64encode(ehash) for ehash in hashes]},
            doseq=True)
        result = http_request(baseurl + "plop/v1/storage/getentry?" + params,
                              key=own_key, verifynode=node,
                              publickeydir=paths["publickeys"])
        reply = json.loads(result)
        if reply.get(u"result") != u"ok":
            print >>sys.stderr, "ERROR: getentry", reply
            sys.exit(1)
        entries = dict((base64.b64decode(item["hash"]),
                        base64.b64decode(item["entry"]))
                       for item in reply[u"entries"])
        assert len(entries) == len(hashes)
        assert set(entries.keys()) == set(hashes)
        return entries
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: getentry", e.read()
        sys.exit(1)

def get_curpos(node, baseurl, own_key, paths):
    """Ask a frontend node for its current position; exit (status 1) on error."""
    try:
        result = http_request(baseurl + "plop/v1/frontend/currentposition",
                              key=own_key, verifynode=node,
                              publickeydir=paths["publickeys"])
        reply = json.loads(result)
        if reply.get(u"result") != u"ok":
            print >>sys.stderr, "ERROR: currentposition", reply
            sys.exit(1)
        return reply[u"position"]
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: currentposition", e.read()
        sys.exit(1)

def get_verifiedsize(node, baseurl, own_key, paths):
    """Ask a merge node for its verified size; exit (status 1) on error."""
    try:
        result = http_request(baseurl + "plop/v1/merge/verifiedsize",
                              key=own_key, verifynode=node,
                              publickeydir=paths["publickeys"])
        reply = json.loads(result)
        if reply.get(u"result") != u"ok":
            print >>sys.stderr, "ERROR: verifiedsize", reply
            sys.exit(1)
        return reply[u"size"]
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: verifiedsize", e.read()
        sys.exit(1)


def sendlog(node, baseurl, own_key, paths, submission):
    """POST a sendlog submission to a frontend node.

    Returns the parsed JSON reply, or None on an HTTP error.  A reply
    that is not valid JSON is dumped to stderr and the ValueError is
    re-raised.
    """
    try:
        result = http_request(baseurl + "plop/v1/frontend/sendlog",
                              json.dumps(submission), key=own_key,
                              verifynode=node, publickeydir=paths["publickeys"])
        return json.loads(result)
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: sendlog", e.read()
        sys.stderr.flush()
        return None
    except ValueError as e:
        for item in ("==== FAILED REQUEST ====", submission,
                     "======= RESPONSE =======", result,
                     "========================"):
            print >>sys.stderr, item
        sys.stderr.flush()
        raise e

def backup_sendlog(node, baseurl, own_key, paths, submission):
    """POST a sendlog submission to a merge (backup) node.

    Returns the parsed JSON reply, or None on an HTTP error.  A reply
    that is not valid JSON is dumped to stderr and the ValueError is
    re-raised.
    """
    try:
        result = http_request(baseurl + "plop/v1/merge/sendlog",
                              json.dumps(submission), key=own_key,
                              verifynode=node, publickeydir=paths["publickeys"])
        return json.loads(result)
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: sendlog", e.read()
        sys.stderr.flush()
        return None
    except ValueError as e:
        for item in ("==== FAILED REQUEST ====", submission,
                     "======= RESPONSE =======", result,
                     "========================"):
            print >>sys.stderr, item
        sys.stderr.flush()
        raise e

def sendentry(node, baseurl, own_key, paths, entry, ehash):
    """POST one entry and its leaf hash to a frontend node.

    Returns the parsed JSON reply.  Exits (status 1) on an HTTP error;
    a reply that is not valid JSON is dumped to stderr and the
    ValueError is re-raised.
    """
    try:
        result = http_request(
            baseurl + "plop/v1/frontend/sendentry",
            json.dumps({"entry":base64.b64encode(entry),
                        "treeleafhash":base64.b64encode(ehash)}),
            key=own_key, verifynode=node, publickeydir=paths["publickeys"])
        return json.loads(result)
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: sendentry", e.read()
        sys.exit(1)
    except ValueError as e:
        for item in ("==== FAILED REQUEST ====", ehash,
                     "======= RESPONSE =======", result,
                     "========================"):
            print >>sys.stderr, item
        sys.stderr.flush()
        raise e

def sendentry_merge(node, baseurl, own_key, paths, entry, ehash):
    """POST one entry and its leaf hash to a merge (backup) node.

    Returns the parsed JSON reply.  Exits (status 1) on an HTTP error;
    a reply that is not valid JSON is dumped to stderr and the
    ValueError is re-raised.
    """
    try:
        result = http_request(
            baseurl + "plop/v1/merge/sendentry",
            json.dumps({"entry":base64.b64encode(entry),
                        "treeleafhash":base64.b64encode(ehash)}),
            key=own_key, verifynode=node, publickeydir=paths["publickeys"])
        return json.loads(result)
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: sendentry", e.read()
        sys.exit(1)
    except ValueError as e:
        for item in ("==== FAILED REQUEST ====", ehash,
                     "======= RESPONSE =======", result,
                     "========================"):
            print >>sys.stderr, item
        sys.stderr.flush()
        raise e

def sendsth(node, baseurl, own_key, paths, submission):
    """POST an STH submission to a frontend node.

    Returns the parsed JSON reply.  Exits (status 1) on an HTTP error;
    a reply that is not valid JSON is dumped to stderr and the
    ValueError is re-raised.
    """
    try:
        result = http_request(baseurl + "plop/v1/frontend/sendsth",
                              json.dumps(submission), key=own_key,
                              verifynode=node, publickeydir=paths["publickeys"])
        return json.loads(result)
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: sendsth", e.read()
        sys.exit(1)
    except ValueError as e:
        for item in ("==== FAILED REQUEST ====", submission,
                     "======= RESPONSE =======", result,
                     "========================"):
            print >>sys.stderr, item
        sys.stderr.flush()
        raise e

def verifyroot(node, baseurl, own_key, paths, treesize):
    """Ask a merge node to verify its root at the given tree size.

    Returns the parsed JSON reply.  Exits (status 1) on an HTTP error;
    a reply that is not valid JSON is dumped to stderr and the
    ValueError is re-raised.
    """
    try:
        result = http_request(baseurl + "plop/v1/merge/verifyroot",
                              json.dumps({"tree_size":treesize}), key=own_key,
                              verifynode=node, publickeydir=paths["publickeys"])
        return json.loads(result)
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: verifyroot", e.read()
        sys.exit(1)
    except ValueError as e:
        for item in ("==== FAILED REQUEST ====", treesize,
                     "======= RESPONSE =======", result,
                     "========================"):
            print >>sys.stderr, item
        sys.stderr.flush()
        raise e

def setverifiedsize(node, baseurl, own_key, paths, treesize):
    """Tell a merge node its new verified size.

    Returns the parsed JSON reply.  Exits (status 1) on an HTTP error;
    a reply that is not valid JSON is dumped to stderr and the
    ValueError is re-raised.
    """
    try:
        result = http_request(baseurl + "plop/v1/merge/setverifiedsize",
                              json.dumps({"size":treesize}), key=own_key,
                              verifynode=node, publickeydir=paths["publickeys"])
        return json.loads(result)
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: setverifiedsize", e.read()
        sys.exit(1)
    except ValueError as e:
        for item in ("==== FAILED REQUEST ====", treesize,
                     "======= RESPONSE =======", result,
                     "========================"):
            print >>sys.stderr, item
        sys.stderr.flush()
        raise e

def get_missingentries(node, baseurl, own_key, paths):
    """Fetch the list of entries a frontend node is missing; exit (status 1) on error."""
    try:
        result = http_request(baseurl + "plop/v1/frontend/missingentries",
                              key=own_key, verifynode=node,
                              publickeydir=paths["publickeys"])
        reply = json.loads(result)
        if reply.get(u"result") != u"ok":
            print >>sys.stderr, "ERROR: missingentries", reply
            sys.exit(1)
        return reply[u"entries"]
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: missingentries", e.read()
        sys.exit(1)

def get_missingentriesforbackup(node, baseurl, own_key, paths):
    """Fetch the list of entries a merge (backup) node is missing; exit (status 1) on error."""
    try:
        result = http_request(baseurl + "plop/v1/merge/missingentries",
                              key=own_key, verifynode=node,
                              publickeydir=paths["publickeys"])
        reply = json.loads(result)
        if reply.get(u"result") != u"ok":
            print >>sys.stderr, "ERROR: missingentriesforbackup", reply
            sys.exit(1)
        return reply[u"entries"]
    except urllib2.HTTPError as e:
        print >>sys.stderr, "ERROR: missingentriesforbackup", e.read()
        sys.exit(1)

def chunks(l, n):
    """Split list l into consecutive slices of at most n items each."""
    return [l[start:start + n] for start in range(0, len(l), n)]