Example #1
def get_stream_hash(hex_stream_name, key, hex_suggested_file_name, blob_infos):
    h = get_lbry_hash_obj()
    h.update(hex_stream_name)
    h.update(key)
    h.update(hex_suggested_file_name)
    blobs_hashsum = get_lbry_hash_obj()
    for blob in blob_infos:
        blobs_hashsum.update(get_blob_hashsum(blob))
    h.update(blobs_hashsum.digest())
    return h.hexdigest()
Example #2
    def validate(self):
        log.debug("Trying to validate stream descriptor for %s",
                  str(self.raw_info['stream_name']))
        try:
            hex_stream_name = self.raw_info['stream_name']
            key = self.raw_info['key']
            hex_suggested_file_name = self.raw_info['suggested_file_name']
            stream_hash = self.raw_info['stream_hash']
            blobs = self.raw_info['blobs']
        except KeyError as e:
            raise InvalidStreamDescriptorError("Missing '%s'" % (e.args[0]))
        for c in hex_suggested_file_name:
            if c not in '0123456789abcdef':
                raise InvalidStreamDescriptorError(
                    "Suggested file name is not a hex-encoded string")
        h = get_lbry_hash_obj()
        h.update(hex_stream_name)
        h.update(key)
        h.update(hex_suggested_file_name)

        def get_blob_hashsum(b):
            length = b['length']
            if length != 0:
                blob_hash = b['blob_hash']
            else:
                blob_hash = None
            blob_num = b['blob_num']
            iv = b['iv']
            blob_hashsum = get_lbry_hash_obj()
            if length != 0:
                blob_hashsum.update(blob_hash)
            blob_hashsum.update(str(blob_num))
            blob_hashsum.update(iv)
            blob_hashsum.update(str(length))
            return blob_hashsum.digest()

        blobs_hashsum = get_lbry_hash_obj()
        for blob in blobs:
            blobs_hashsum.update(get_blob_hashsum(blob))
        if blobs[-1]['length'] != 0:
            raise InvalidStreamDescriptorError(
                "Does not end with a zero-length blob.")
        h.update(blobs_hashsum.digest())
        if h.hexdigest() != stream_hash:
            raise InvalidStreamDescriptorError(
                "Stream hash does not match stream metadata")
        log.debug("It is validated")
        return defer.succeed(True)
Example #3
    def _create_and_add_blob(self, should_announce=False):
        # create and add blob to blob manager
        data_len = random.randint(1, 1000)
        data = b''.join(
            random.choice(string.ascii_lowercase).encode()
            for _ in range(data_len))

        hashobj = get_lbry_hash_obj()
        hashobj.update(data)
        out = hashobj.hexdigest()
        blob_hash = out

        # create new blob
        yield self.bm.setup()
        blob = yield self.bm.get_blob(blob_hash, len(data))

        writer, finished_d = yield blob.open_for_writing(self.peer)
        yield writer.write(data)
        yield self.bm.blob_completed(blob, should_announce)

        # check to see if blob is there
        self.assertTrue(os.path.isfile(os.path.join(self.blob_dir, blob_hash)))
        blobs = yield self.bm.get_all_verified_blobs()
        self.assertTrue(blob_hash in blobs)
        defer.returnValue(blob_hash)
Example #4
 def _make_stream_hash(self):
     hashsum = get_lbry_hash_obj()
     hashsum.update(hexlify(self.name))
     hashsum.update(hexlify(self.key))
     hashsum.update(hexlify(self.suggested_file_name))
     hashsum.update(self._get_blobs_hashsum())
     self.stream_hash = hashsum.hexdigest()
Example #5
    def _create_and_add_blob(self):
        # create and add blob to blob manager
        data_len = random.randint(1, 1000)
        data = ''.join(
            random.choice(string.lowercase) for _ in range(data_len))

        hashobj = get_lbry_hash_obj()
        hashobj.update(data)
        out = hashobj.hexdigest()
        blob_hash = out

        # create new blob
        yield self.bm.setup()
        blob = yield self.bm.get_blob(blob_hash, len(data))

        finished_d, write, cancel = yield blob.open_for_writing(self.peer)
        yield write(data)
        yield self.bm.blob_completed(blob)
        yield self.bm.add_blob_to_upload_history(blob_hash, 'test', len(data))

        # check to see if blob is there
        self.assertTrue(os.path.isfile(os.path.join(self.blob_dir, blob_hash)))
        blobs = yield self.bm.get_all_verified_blobs()
        self.assertTrue(blob_hash in blobs)
        defer.returnValue(blob_hash)
Example #6
def generate_id(num=None):
    h = get_lbry_hash_obj()
    if num is not None:
        h.update(str(num))
    else:
        h.update(str(random.getrandbits(512)))
    return h.digest()
Example #7
 def __init__(self, length_getter, finished_cb):
     self.write_handle = BytesIO()
     self.length_getter = length_getter
     self.finished_cb = finished_cb
     self.finished_cb_d = None
     self._hashsum = get_lbry_hash_obj()
     self.len_so_far = 0
Example #8
def generate_id(num=None):
    h = get_lbry_hash_obj()
    if num is not None:
        h.update(str(num))
    else:
        h.update(str(random.getrandbits(512)))
    return h.digest()
Example #9
 def close(self):
     hashsum = cryptoutils.get_lbry_hash_obj()
     buffer = self.data.getvalue()
     hashsum.update(buffer)
     with open(hashsum.hexdigest(), 'w') as fout:
         fout.write(buffer)
     return defer.succeed(True)
Example #10
    def _blob_finished(self, blob_info):
        log.debug("In blob_finished")
        log.debug("length: %s", str(blob_info.length))
        sig_hash = get_lbry_hash_obj()
        sig_hash.update(self.stream_hash)
        if blob_info.length != 0:
            sig_hash.update(blob_info.blob_hash)
        sig_hash.update(str(blob_info.blob_num))
        sig_hash.update(str(blob_info.revision))
        sig_hash.update(blob_info.iv)
        sig_hash.update(str(blob_info.length))
        signature = sign_with_pass_phrase(sig_hash.digest(),
                                          self.secret_pass_phrase)
        blob_info.signature = signature
        self.finished_blob_hashes[blob_info.blob_num] = blob_info.blob_hash
        if self.delete_after_num is not None:
            self._delete_old_blobs(blob_info.blob_num)
        d = self.stream_info_manager.add_blobs_to_stream(
            self.stream_hash, [blob_info])

        def log_add_error(err):
            log.error(
                "An error occurred adding a blob info to the stream info manager: %s",
                err.getErrorMessage())
            return err

        d.addErrback(log_add_error)
        log.debug("returning from blob_finished")
        return d
Example #11
 def _make_stream_hash(self):
     hashsum = get_lbry_hash_obj()
     hashsum.update(binascii.hexlify(self.name))
     hashsum.update(binascii.hexlify(self.key))
     hashsum.update(binascii.hexlify(self.suggested_file_name))
     hashsum.update(self._get_blobs_hashsum())
     self.stream_hash = hashsum.hexdigest()
Example #12
 def __init__(self, length_getter, finished_cb):
     self.write_handle = BytesIO()
     self.length_getter = length_getter
     self.finished_cb = finished_cb
     self.finished_cb_d = None
     self._hashsum = get_lbry_hash_obj()
     self.len_so_far = 0
Example #13
 def make_stream_hash():
     hashsum = get_lbry_hash_obj()
     hashsum.update(binascii.hexlify(self.name))
     hashsum.update(get_pub_key(self.secret_pass_phrase))
     hashsum.update(binascii.hexlify(self.key))
     self.stream_hash = hashsum.hexdigest()
     return self.stream_hash
Example #14
 def make_stream_hash():
     hashsum = get_lbry_hash_obj()
     hashsum.update(binascii.hexlify(self.name))
     hashsum.update(get_pub_key(self.secret_pass_phrase))
     hashsum.update(binascii.hexlify(self.key))
     self.stream_hash = hashsum.hexdigest()
     return self.stream_hash
Example #15
    def _blob_finished(self, blob_info):
        log.debug("In blob_finished")
        log.debug("length: %s", str(blob_info.length))
        sig_hash = get_lbry_hash_obj()
        sig_hash.update(self.stream_hash)
        if blob_info.length != 0:
            sig_hash.update(blob_info.blob_hash)
        sig_hash.update(str(blob_info.blob_num))
        sig_hash.update(str(blob_info.revision))
        sig_hash.update(blob_info.iv)
        sig_hash.update(str(blob_info.length))
        signature = sign_with_pass_phrase(sig_hash.digest(), self.secret_pass_phrase)
        blob_info.signature = signature
        self.finished_blob_hashes[blob_info.blob_num] = blob_info.blob_hash
        if self.delete_after_num is not None:
            self._delete_old_blobs(blob_info.blob_num)
        d = self.stream_info_manager.add_blobs_to_stream(self.stream_hash, [blob_info])

        def log_add_error(err):
            log.error("An error occurred adding a blob info to the stream info manager: %s", err.getErrorMessage())
            return err

        d.addErrback(log_add_error)
        log.debug("returning from blob_finished")
        return d
Example #16
def generate_id(num=None):
    h = get_lbry_hash_obj()
    if num is not None:
        h.update(str(num))
    else:
        h.update(str(random.getrandbits(512)))
    # digest() returns raw bytes that may not be ASCII (hexdigest() returns only ASCII characters)
    return h.digest()
Example #17
def get_stream_hash(hex_stream_name, key, hex_suggested_file_name, blob_infos):
    h = get_lbry_hash_obj()
    h.update(hex_stream_name)
    h.update(key)
    h.update(hex_suggested_file_name)
    blobs_hashsum = get_lbry_hash_obj()
    sorted_blob_infos = sorted(blob_infos, key=lambda x: x['blob_num'])
    for blob in sorted_blob_infos:
        blobs_hashsum.update(get_blob_hashsum(blob))
    if sorted_blob_infos[-1]['length'] != 0:
        raise InvalidStreamDescriptorError(
            "Does not end with a zero-length blob.")
    if 'blob_hash' in sorted_blob_infos[-1]:
        raise InvalidStreamDescriptorError(
            "Stream terminator blob should not have a hash")
    h.update(blobs_hashsum.digest())
    return h.hexdigest()
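A brief usage sketch of the blob list this function expects, inferred from the dict keys it reads (every concrete value below is hypothetical): each entry carries 'blob_num', 'iv' and 'length', non-empty blobs also carry 'blob_hash', and the list ends with a zero-length terminator without a hash.

# Hypothetical inputs: a hex-encoded stream name / suggested file name, a key,
# and a blob list ordered by blob_num ending in a zero-length terminator that
# carries no 'blob_hash'.
hex_name = '6578616d706c652e6d7034'   # hex encoding of 'example.mp4'
example_blob_infos = [
    {'blob_num': 0, 'blob_hash': 'aa' * 48, 'iv': '00' * 16, 'length': 2097152},
    {'blob_num': 1, 'blob_hash': 'bb' * 48, 'iv': '01' * 16, 'length': 65536},
    {'blob_num': 2, 'iv': '02' * 16, 'length': 0},  # stream terminator
]
stream_hash = get_stream_hash(hex_name, '11' * 32, hex_name, example_blob_infos)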
Example #18
 def __init__(self, blob_dir):
     self.blob_dir = blob_dir
     self.buffer = BytesIO()
     self._is_open = True
     self._hashsum = get_lbry_hash_obj()
     self.len_so_far = 0
     self.blob_hash = None
     self.length = None
Example #19
    def validate(self):
        log.debug("Trying to validate stream descriptor for %s", str(self.raw_info['stream_name']))
        try:
            hex_stream_name = self.raw_info['stream_name']
            key = self.raw_info['key']
            hex_suggested_file_name = self.raw_info['suggested_file_name']
            stream_hash = self.raw_info['stream_hash']
            blobs = self.raw_info['blobs']
        except KeyError as e:
            raise InvalidStreamDescriptorError("Missing '%s'" % (e.args[0]))
        for c in hex_suggested_file_name:
            if c not in '0123456789abcdef':
                raise InvalidStreamDescriptorError(
                    "Suggested file name is not a hex-encoded string")
        h = get_lbry_hash_obj()
        h.update(hex_stream_name)
        h.update(key)
        h.update(hex_suggested_file_name)

        def get_blob_hashsum(b):
            length = b['length']
            if length != 0:
                blob_hash = b['blob_hash']
            else:
                blob_hash = None
            blob_num = b['blob_num']
            iv = b['iv']
            blob_hashsum = get_lbry_hash_obj()
            if length != 0:
                blob_hashsum.update(blob_hash)
            blob_hashsum.update(str(blob_num))
            blob_hashsum.update(iv)
            blob_hashsum.update(str(length))
            return blob_hashsum.digest()

        blobs_hashsum = get_lbry_hash_obj()
        for blob in blobs:
            blobs_hashsum.update(get_blob_hashsum(blob))
        if blobs[-1]['length'] != 0:
            raise InvalidStreamDescriptorError("Does not end with a zero-length blob.")
        h.update(blobs_hashsum.digest())
        if h.hexdigest() != stream_hash:
            raise InvalidStreamDescriptorError("Stream hash does not match stream metadata")
        log.debug("It is validated")
        return defer.succeed(True)
Example #20
 def _get_blobs_hashsum(self):
     blobs_hashsum = get_lbry_hash_obj()
     for blob_info in sorted(self.blob_infos, key=lambda b_i: b_i.blob_num):
         length = blob_info.length
         if length != 0:
             blob_hash = blob_info.blob_hash
         else:
             blob_hash = None
         blob_num = blob_info.blob_num
         iv = blob_info.iv
         blob_hashsum = get_lbry_hash_obj()
         if length != 0:
             blob_hashsum.update(blob_hash)
         blob_hashsum.update(str(blob_num))
         blob_hashsum.update(iv)
         blob_hashsum.update(str(length))
         blobs_hashsum.update(blob_hashsum.digest())
     return blobs_hashsum.digest()
Example #21
 def _get_blobs_hashsum(self):
     blobs_hashsum = get_lbry_hash_obj()
     for blob_info in sorted(self.blob_infos, key=lambda b_i: b_i.blob_num):
         length = blob_info.length
         if length != 0:
             blob_hash = blob_info.blob_hash
         else:
             blob_hash = None
         blob_num = blob_info.blob_num
         iv = blob_info.iv
         blob_hashsum = get_lbry_hash_obj()
         if length != 0:
             blob_hashsum.update(blob_hash)
         blob_hashsum.update(str(blob_num))
         blob_hashsum.update(iv)
         blob_hashsum.update(str(length))
         blobs_hashsum.update(blob_hashsum.digest())
     return blobs_hashsum.digest()
Example #22
 def get_blob_hashsum(b):
     length = b['length']
     if length != 0:
         blob_hash = b['blob_hash']
     else:
         blob_hash = None
     blob_num = b['blob_num']
     iv = b['iv']
     blob_hashsum = get_lbry_hash_obj()
     if length != 0:
         blob_hashsum.update(blob_hash)
     blob_hashsum.update(str(blob_num))
     blob_hashsum.update(iv)
     blob_hashsum.update(str(length))
     return blob_hashsum.digest()
Example #23
def get_blob_hashsum(b):
    length = b['length']
    if length != 0:
        blob_hash = b['blob_hash']
    else:
        blob_hash = None
    blob_num = b['blob_num']
    iv = b['iv']
    blob_hashsum = get_lbry_hash_obj()
    if length != 0:
        blob_hashsum.update(blob_hash)
    blob_hashsum.update(str(blob_num))
    blob_hashsum.update(iv)
    blob_hashsum.update(str(length))
    return blob_hashsum.digest()
Example #24
 def check_blob(blob_hash, blob_length, verified_time):
     file_path = os.path.join(self.blob_dir, blob_hash)
     if os.path.isfile(file_path):
         if verified_time >= os.path.getctime(file_path):
             return ALREADY_VERIFIED
         else:
             h = get_lbry_hash_obj()
             len_so_far = 0
             # read in binary mode and make sure the handle is closed
             with open(file_path, 'rb') as f:
                 while True:
                     data = f.read(2**12)
                     if not data:
                         break
                     h.update(data)
                     len_so_far += len(data)
             if len_so_far == blob_length and h.hexdigest() == blob_hash:
                 return NEWLY_VERIFIED
     return INVALID
Example #25
 def check_blob(blob_hash, blob_length, verified_time):
     file_path = os.path.join(self.blob_dir, blob_hash)
     if os.path.isfile(file_path):
         if verified_time >= os.path.getctime(file_path):
             return ALREADY_VERIFIED
         else:
             h = get_lbry_hash_obj()
             len_so_far = 0
             # read in binary mode and make sure the handle is closed
             with open(file_path, 'rb') as f:
                 while True:
                     data = f.read(2**12)
                     if not data:
                         break
                     h.update(data)
                     len_so_far += len(data)
             if len_so_far == blob_length and h.hexdigest() == blob_hash:
                 return NEWLY_VERIFIED
     return INVALID
Example #26
    def validate(self):
        log.debug("Trying to validate stream descriptor for %s",
                  str(self.raw_info['stream_name']))
        hex_stream_name = self.raw_info['stream_name']
        public_key = self.raw_info['public_key']
        key = self.raw_info['key']
        stream_hash = self.raw_info['stream_hash']
        h = get_lbry_hash_obj()
        h.update(hex_stream_name)
        h.update(public_key)
        h.update(key)
        if h.hexdigest() != stream_hash:
            raise InvalidStreamDescriptorError(
                "Stream hash does not match stream metadata")
        blobs = self.raw_info['blobs']

        def check_blob_signatures():
            for blob in blobs:
                length = blob['length']
                if length != 0:
                    blob_hash = blob['blob_hash']
                else:
                    blob_hash = None
                blob_num = blob['blob_num']
                revision = blob['revision']
                iv = blob['iv']
                signature = blob['signature']
                hashsum = get_lbry_hash_obj()
                hashsum.update(stream_hash)
                if length != 0:
                    hashsum.update(blob_hash)
                hashsum.update(str(blob_num))
                hashsum.update(str(revision))
                hashsum.update(iv)
                hashsum.update(str(length))
                if not verify_signature(hashsum.digest(), signature,
                                        public_key):
                    raise InvalidStreamDescriptorError(
                        "Invalid signature in stream descriptor")

        return threads.deferToThread(check_blob_signatures)
Example #27
 def check_blob_signatures():
     for blob in blobs:
         length = blob['length']
         if length != 0:
             blob_hash = blob['blob_hash']
         else:
             blob_hash = None
         blob_num = blob['blob_num']
         revision = blob['revision']
         iv = blob['iv']
         signature = blob['signature']
         hashsum = get_lbry_hash_obj()
         hashsum.update(stream_hash)
         if length != 0:
             hashsum.update(blob_hash)
         hashsum.update(str(blob_num))
         hashsum.update(str(revision))
         hashsum.update(iv)
         hashsum.update(str(length))
         if not verify_signature(hashsum.digest(), signature, public_key):
             raise InvalidStreamDescriptorError("Invalid signature in stream descriptor")
Example #28
 def _verify_blob(self, blob):
     log.debug("Got an unverified blob to check:")
     log.debug("blob_hash: %s", blob.blob_hash)
     log.debug("blob_num: %s", str(blob.blob_num))
     log.debug("revision: %s", str(blob.revision))
     log.debug("iv: %s", blob.iv)
     log.debug("length: %s", str(blob.length))
     hashsum = get_lbry_hash_obj()
     hashsum.update(self.stream_hash)
     if blob.length != 0:
         hashsum.update(blob.blob_hash)
     hashsum.update(str(blob.blob_num))
     hashsum.update(str(blob.revision))
     hashsum.update(blob.iv)
     hashsum.update(str(blob.length))
     log.debug("hexdigest to be verified: %s", hashsum.hexdigest())
     if verify_signature(hashsum.digest(), blob.signature, self.stream_pub_key):
         log.debug("Blob info is valid")
         return True
     else:
         log.debug("The blob info is invalid")
         return False
Example #29
 def _verify_blob(self, blob):
     log.debug("Got an unverified blob to check:")
     log.debug("blob_hash: %s", blob.blob_hash)
     log.debug("blob_num: %s", str(blob.blob_num))
     log.debug("revision: %s", str(blob.revision))
     log.debug("iv: %s", blob.iv)
     log.debug("length: %s", str(blob.length))
     hashsum = get_lbry_hash_obj()
     hashsum.update(self.stream_hash)
     if blob.length != 0:
         hashsum.update(blob.blob_hash)
     hashsum.update(str(blob.blob_num))
     hashsum.update(str(blob.revision))
     hashsum.update(blob.iv)
     hashsum.update(str(blob.length))
     log.debug("hexdigest to be verified: %s", hashsum.hexdigest())
     if verify_signature(hashsum.digest(), blob.signature,
                         self.stream_pub_key):
         log.debug("Blob info is valid")
         return True
     else:
         log.debug("The blob info is invalid")
         return False
Example #30
    def validate(self):
        log.debug("Trying to validate stream descriptor for %s", str(self.raw_info['stream_name']))
        hex_stream_name = self.raw_info['stream_name']
        public_key = self.raw_info['public_key']
        key = self.raw_info['key']
        stream_hash = self.raw_info['stream_hash']
        h = get_lbry_hash_obj()
        h.update(hex_stream_name)
        h.update(public_key)
        h.update(key)
        if h.hexdigest() != stream_hash:
            raise InvalidStreamDescriptorError("Stream hash does not match stream metadata")
        blobs = self.raw_info['blobs']

        def check_blob_signatures():
            for blob in blobs:
                length = blob['length']
                if length != 0:
                    blob_hash = blob['blob_hash']
                else:
                    blob_hash = None
                blob_num = blob['blob_num']
                revision = blob['revision']
                iv = blob['iv']
                signature = blob['signature']
                hashsum = get_lbry_hash_obj()
                hashsum.update(stream_hash)
                if length != 0:
                    hashsum.update(blob_hash)
                hashsum.update(str(blob_num))
                hashsum.update(str(revision))
                hashsum.update(iv)
                hashsum.update(str(length))
                if not verify_signature(hashsum.digest(), signature, public_key):
                    raise InvalidStreamDescriptorError("Invalid signature in stream descriptor")

        return threads.deferToThread(check_blob_signatures)
Example #31
    def _create_and_add_blob(self, should_announce=False):
        # create and add blob to blob manager
        data_len = random.randint(1, 1000)
        data = ''.join(random.choice(string.lowercase) for _ in range(data_len))

        hashobj = get_lbry_hash_obj()
        hashobj.update(data)
        out = hashobj.hexdigest()
        blob_hash = out

        # create new blob
        yield self.bm.setup()
        blob = yield self.bm.get_blob(blob_hash, len(data))

        writer, finished_d = yield blob.open_for_writing(self.peer)
        yield writer.write(data)
        yield self.bm.blob_completed(blob, should_announce)
        yield self.bm.add_blob_to_upload_history(blob_hash, 'test', len(data))

        # check to see if blob is there
        self.assertTrue(os.path.isfile(os.path.join(self.blob_dir, blob_hash)))
        blobs = yield self.bm.get_all_verified_blobs()
        self.assertTrue(blob_hash in blobs)
        defer.returnValue(blob_hash)
Example #32
#

# Thanks to Paul Cannon for IP-address resolution functions (taken from aspn.activestate.com)


import binascii
import random
import twisted.internet.reactor
from lbrynet.dht.node import Node
from lbrynet.core.cryptoutils import get_lbry_hash_obj

# The Entangled DHT node; instantiated in the main() method
node = None

# The key to use for this example when storing/retrieving data
h = get_lbry_hash_obj()
h.update("key")
KEY = h.digest()
# The value to store
VALUE = random.randint(10000, 20000)

lbryid = KEY


def storeValue(key, value):
    """ Stores the specified value in the DHT using the specified key """
    global node
    print '\nStoring value; Key: %s, Value: %s' % (key, value)
    # Store the value in the DHT. This method returns a Twisted
    # Deferred result, which we then add callbacks to
    deferredResult = node.announceHaveHash(key, value)
Example #33
 def __init__(self, write_handle, length_getter, finished_cb):
     self.write_handle = write_handle
     self.length_getter = length_getter
     self.finished_cb = finished_cb
     self.hashsum = get_lbry_hash_obj()
     self.len_so_far = 0
Example #34
import base64
import datetime
import logging
import random
import socket
import string
import json

import pkg_resources

from lbryschema.claim import ClaimDict
from lbrynet.core.cryptoutils import get_lbry_hash_obj

# digest_size is in bytes, and blob hashes are hex encoded
blobhash_length = get_lbry_hash_obj().digest_size * 2

log = logging.getLogger(__name__)


# defining these time functions here allows for easier overriding in testing
def now():
    return datetime.datetime.now()


def utcnow():
    return datetime.datetime.utcnow()


def isonow():
    """Return utc now in isoformat with timezone"""
    return utcnow().isoformat() + 'Z'
Example #35
 def __init__(self, blob_manager):
     self.blob_manager = blob_manager
     self._hashsum = get_lbry_hash_obj()
     self.len_so_far = 0
     self.blob_hash = None
     self.length = None
Example #36
import base64
import distutils.version
import random

from lbrynet.core.cryptoutils import get_lbry_hash_obj

# digest_size is in bytes, and blob hashes are hex encoded
blobhash_length = get_lbry_hash_obj().digest_size * 2


def generate_id(num=None):
    h = get_lbry_hash_obj()
    if num is not None:
        h.update(str(num))
    else:
        h.update(str(random.getrandbits(512)))
    return h.digest()


def is_valid_blobhash(blobhash):
    """
    @param blobhash: string, the blobhash to check

    @return: Whether the blobhash is the correct length and contains only valid characters (0-9, a-f)
    """
    if len(blobhash) != blobhash_length:
        return False
    for l in blobhash:
        if l not in "0123456789abcdef":
            return False
    return True
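A short usage sketch of is_valid_blobhash (hypothetical data; it assumes get_lbry_hash_obj returns a hashlib-style object, as the examples above do): a freshly computed hex digest is exactly blobhash_length characters long and passes the check, while a truncated string does not.

h = get_lbry_hash_obj()
h.update("some blob data")                    # hypothetical payload
candidate = h.hexdigest()

assert len(candidate) == blobhash_length      # digest_size bytes -> 2 hex chars each
assert is_valid_blobhash(candidate)           # correct length, only 0-9 and a-f
assert not is_valid_blobhash(candidate[:-1])  # wrong length is rejected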
Example #37
import base64
import datetime
import logging
import random
import socket
import string
import json

import pkg_resources

from lbryschema.claim import ClaimDict
from lbrynet.core.cryptoutils import get_lbry_hash_obj

# digest_size is in bytes, and blob hashes are hex encoded
blobhash_length = get_lbry_hash_obj().digest_size * 2

log = logging.getLogger(__name__)


# defining these time functions here allows for easier overriding in testing
def now():
    return datetime.datetime.now()


def utcnow():
    return datetime.datetime.utcnow()


def isonow():
    """Return utc now in isoformat with timezone"""
    return utcnow().isoformat() + 'Z'
Example #38
 def __init__(self):
     self._hashsum = get_lbry_hash_obj()
     self.len_so_far = 0
     self.blob_hash = None
     self.length = None
Example #39
from lbrynet.core.cryptoutils import get_lbry_hash_obj
import random


blobhash_length = get_lbry_hash_obj().digest_size * 2  # digest_size is in bytes, and blob hashes are hex encoded


def generate_id(num=None):
    h = get_lbry_hash_obj()
    if num is not None:
        h.update(str(num))
    else:
        h.update(str(random.getrandbits(512)))
    return h.digest()


def is_valid_blobhash(blobhash):
    """
    @param blobhash: string, the blobhash to check

    @return: Whether the blobhash is the correct length and contains only valid characters (0-9, a-f)
    """
    if len(blobhash) != blobhash_length:
        return False
    for l in blobhash:
        if l not in "0123456789abcdef":
            return False
    return True
Example #40
 def __init__(self, blob_manager):
     self.blob_manager = blob_manager
     self.hashsum = get_lbry_hash_obj()
     self.len_so_far = 0
     self.blob_hash = None
     self.length = None
Example #41
iterativeLookupDelay = rpcTimeout / 2

#: If a k-bucket has not been used for this amount of time, refresh it (in seconds)
refreshTimeout = 3600  # 1 hour
#: The interval at which nodes replicate (republish/refresh) data they are holding
replicateInterval = refreshTimeout
# The time it takes for data to expire in the network; the original publisher of the data
# will also republish the data at this time if it is still valid
dataExpireTimeout = 86400  # 24 hours

tokenSecretChangeInterval = 300  # 5 minutes

peer_request_timeout = 10

######## IMPLEMENTATION-SPECIFIC CONSTANTS ###########

#: The interval in which the node should check its whether any buckets need refreshing,
#: or whether any data needs to be republished (in seconds)
checkRefreshInterval = refreshTimeout / 5

#: Max size of a single UDP datagram, in bytes. If a message is larger than this, it will
#: be spread across several UDP packets.
udpDatagramMaxSize = 8192  # 8 KB

from lbrynet.core.cryptoutils import get_lbry_hash_obj

h = get_lbry_hash_obj()
key_bits = h.digest_size * 8  # 384 bits

rpc_id_length = 20
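Given that key_bits = h.digest_size * 8 works out to 384 bits in the comment above, a hashlib object with a 48-byte digest is implied; a minimal sketch of what get_lbry_hash_obj could look like under that assumption (not confirmed anywhere in these examples) is:

import hashlib

def get_lbry_hash_obj():
    # Hypothetical stand-in: any hashlib object with a 48-byte (384-bit)
    # digest matches the digest_size relied on throughout these examples.
    return hashlib.sha384()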
Example #42
 def __init__(self, write_handle, length_getter, finished_cb):
     self.write_handle = write_handle
     self.length_getter = length_getter
     self.finished_cb = finished_cb
     self.hashsum = get_lbry_hash_obj()
     self.len_so_far = 0