Example 1
    def __init__(self, filename, comment, metadata, python_version, filetype):
        self.filename = filename
        self.basefilename = os.path.basename(filename)
        self.comment = comment
        self.metadata = metadata
        self.python_version = python_version
        self.filetype = filetype
        self.safe_name = pkg_resources.safe_name(metadata.name)
        self.signed_filename = self.filename + '.asc'
        self.signed_basefilename = self.basefilename + '.asc'
        self.gpg_signature = None

        blake2_256_hash = None
        if blake2b is not None:
            blake2_256_hash = blake2b(digest_size=256 // 8)
        # NOTE(sigmavirus24): We may or may not be able to use blake2, so
        # fall back to no-op lambdas when it is unavailable.
        blake_update = getattr(blake2_256_hash, 'update', lambda *args: None)
        blake_hexdigest = getattr(blake2_256_hash, 'hexdigest', lambda: None)
        md5_hash = hashlib.md5()
        sha2_hash = hashlib.sha256()
        with open(filename, "rb") as fp:
            for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b''):
                md5_hash.update(content)
                sha2_hash.update(content)
                blake_update(content)

        self.md5_digest = md5_hash.hexdigest()
        self.sha2_digest = sha2_hash.hexdigest()
        self.blake2_256_digest = blake_hexdigest()
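
A note on the pattern above: the getattr-with-default trick lets the read loop run unchanged whether or not blake2b is available (hashlib only gained it in Python 3.6). The same streaming idea in isolation, as a minimal sketch:

import hashlib
import io

def file_digests(path):
    # Stream the file once, feeding every hasher chunk by chunk.
    hashers = {
        "md5": hashlib.md5(),
        "sha256": hashlib.sha256(),
        "blake2_256": hashlib.blake2b(digest_size=32),
    }
    with open(path, "rb") as fp:
        for chunk in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b""):
            for h in hashers.values():
                h.update(chunk)
    return {name: h.hexdigest() for name, h in hashers.items()}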
Example 2
def hasherFromString(s):
    import hashlib
    if s == 'sha1':
        return hashlib.sha1()
    elif s == 'sha224':
        return hashlib.sha224()
    elif s == 'sha256':
        return hashlib.sha256()
    elif s == 'sha384':
        return hashlib.sha384()
    elif s == 'sha512':
        return hashlib.sha512()
    elif s == 'blake2b':
        return hashlib.blake2b()
    elif s == 'blake2s':
        return hashlib.blake2s()
    elif s == 'md5':
        return hashlib.md5()
    elif s == 'sha3_224':
        return hashlib.sha3_224()
    elif s == 'sha3_256':
        return hashlib.sha3_256()
    elif s == 'sha3_384':
        return hashlib.sha3_384()
    elif s == 'sha3_512':
        return hashlib.sha3_512()
    elif s == 'shake_128':
        return hashlib.shake_128()
    elif s == 'shake_256':
        return hashlib.shake_256()
    else:
        return None
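
The chain above can be collapsed with hashlib.new(), which accepts any name in hashlib.algorithms_available; a compact equivalent, as a sketch:

def hasher_from_string(s):
    # Same dispatch via hashlib.new(); returns None for unrecognized
    # names, matching the final else above.
    import hashlib
    try:
        return hashlib.new(s)
    except ValueError:
        return None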
Example 3
def login(request, redirect_field_name=REDIRECT_FIELD_NAME, _form_class=LoginForm):
    # TODO: Logging in should reset request.user
    # TODO: Configure the login view as the default view for not having
    #       permission to view something.
    if request.authenticated_userid is not None:
        return HTTPSeeOther(request.route_path("manage.projects"))

    user_service = request.find_service(IUserService, context=None)
    breach_service = request.find_service(IPasswordBreachedService, context=None)

    redirect_to = request.POST.get(
        redirect_field_name, request.GET.get(redirect_field_name)
    )

    form = _form_class(
        request.POST,
        request=request,
        user_service=user_service,
        breach_service=breach_service,
        check_password_metrics_tags=["method:auth", "auth_method:login_form"],
    )

    if request.method == "POST":
        if form.validate():
            # Get the user id for the given username.
            username = form.username.data
            userid = user_service.find_userid(username)

            # If the user-originating redirection url is not safe, then
            # redirect to the index instead.
            if not redirect_to or not is_safe_url(url=redirect_to, host=request.host):
                redirect_to = request.route_path("manage.projects")

            # Actually perform the login routine for our user.
            headers = _login_user(request, userid)

            # Now that we're logged in we'll want to redirect the user to
            # either where they were trying to go originally, or to the default
            # view.
            resp = HTTPSeeOther(redirect_to, headers=dict(headers))

            # We'll use this cookie so that client side javascript can
            # determine the actual user ID (not username, user ID). This is
            # *not* a security sensitive context and it *MUST* not be used
            # where security matters.
            #
            # We'll also hash this value just to avoid leaking the actual User
            # IDs here, even though it really shouldn't matter.
            resp.set_cookie(
                USER_ID_INSECURE_COOKIE,
                hashlib.blake2b(str(userid).encode("ascii"), person=b"warehouse.userid")
                .hexdigest()
                .lower(),
            )
            return resp

    return {
        "form": form,
        "redirect": {"field": REDIRECT_FIELD_NAME, "data": redirect_to},
    }
Example 4
 def public_id(self):
     """ 
     public_id is a one-way hash of the id. It's used to provide a stable
     id for analytics tools that won't change even if the user email changes.
     However, it can't be used to look up the user. Email or other field is
     necessary for this.
     """
     h = hashlib.blake2b(digest_size=10, salt=settings.PUBLIC_USERID_SALT.encode())
     h.update(self.id.to_bytes(length=16, byteorder="big"))
     return h.hexdigest()
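
For reference, the same idea as a self-contained sketch; PUBLIC_USERID_SALT below is a hypothetical stand-in for the Django setting used above (note that blake2b accepts salts of at most 16 bytes):

import hashlib

PUBLIC_USERID_SALT = b"example-salt"  # hypothetical; the real value comes from settings

def public_id(user_id):
    h = hashlib.blake2b(digest_size=10, salt=PUBLIC_USERID_SALT)
    h.update(user_id.to_bytes(length=16, byteorder="big"))
    return h.hexdigest()

print(public_id(42))  # stable 20-character hex id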
Example 5
def test_b2(b2_input, b2_output):
    digest = hashlib.blake2b(b2_input, digest_size=32).digest()
    identical = b2_output == digest

    print('Input:     ', b2_input.hex())
    print('Expected:  ', b2_output.hex())
    print('Calculated:', digest.hex())
    print('Identical: ', identical)
    print()
    if not identical:
        sys.exit(1)
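
A trivially passing invocation, with the expected digest computed inline (in practice the vectors would come from a test file):

import hashlib

msg = b"abc"
expected = hashlib.blake2b(msg, digest_size=32).digest()
test_b2(msg, expected)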
Example 6
    def do_test_random_dir(self, hashtype):
        """ Test building random directories with specific SHA hash type. """
        check_hashtype(hashtype)
        depth = 1 + self.rng.next_int16(3)       # so 1 to 3
        width = 1 + self.rng.next_int16(16)      # so 1 to 16

        blk_count = 1 + self.rng.next_int16(3)     # so 1 to 3
        # last block will usually be only partially populated
        max_len = BuildList.BLOCK_SIZE * (blk_count - 1) +\
            self.rng.next_int16(BuildList.BLOCK_SIZE)
        min_len = 1

        # we want the directory name to be unique
        path_to_dir = os.path.join('tmp', self.rng.next_file_name(8))
        while os.path.exists(path_to_dir):
            path_to_dir = os.path.join('tmp', self.rng.next_file_name(8))

        self.rng.next_data_dir(path_to_dir, depth, width, max_len, min_len)

        data = bytearray(max_len)            # that many null bytes
        self.rng.next_bytes(data)            # fill with random data
        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            # pylint:disable=no-member
            sha = hashlib.sha3_256()
        elif hashtype == HashTypes.BLAKE2B:
            sha = hashlib.blake2b(digest_size=32)
        else:
            raise NotImplementedError
        sha.update(data)
        hash_ = sha.hexdigest()
        file_name = self.rng.next_file_name(8)
        path_to_file = os.path.join('tmp', file_name)
        while os.path.exists(path_to_file):
            file_name = self.rng.next_file_name(8)
            path_to_file = os.path.join('tmp', file_name)

        with open(path_to_file, 'wb') as file:
            file.write(data)

        if hashtype == HashTypes.SHA1:
            file_hash = file_sha1hex(path_to_file)
        elif hashtype == HashTypes.SHA2:
            file_hash = file_sha2hex(path_to_file)
        elif hashtype == HashTypes.SHA3:
            file_hash = file_sha3hex(path_to_file)
        elif hashtype == HashTypes.BLAKE2B:
            file_hash = file_blake2b_hex(path_to_file)
        else:
            raise NotImplementedError
        self.assertEqual(hash_, file_hash)
Example 7
    def populate_tree(self, tree, data_path, u_dir, hashtype):
        """
        Generate nnn unique random values and their hashes, where nnn is at least 16.
        """
        nnn = 16 + self.rng.next_int16(16)
        # DEBUG
        # print("nnn = %d" % nnn)
        # END

        values = []
        hashes = []
        for count in range(nnn):
            # generate datum ------------------------------
            datum = self.rng.some_bytes(32 + self.rng.next_int16(32))
            values.append(datum)

            # generate hash = bin_key ----------------------
            if hashtype == HashTypes.SHA1:
                sha = hashlib.sha1()
            elif hashtype == HashTypes.SHA2:
                sha = hashlib.sha256()
            elif hashtype == HashTypes.SHA3:
                sha = hashlib.sha3_256()
            elif hashtype == HashTypes.BLAKE2B:
                sha = hashlib.blake2b(digest_size=32)
            else:
                raise NotImplementedError
            sha.update(datum)
            bin_key = sha.digest()
            hex_key = sha.hexdigest()
            hashes.append(bin_key)

            # write data file -----------------------------
            file_name = 'value%04d' % count
            path_to_file = os.path.join(data_path, file_name)
            with open(path_to_file, 'wb') as file:
                # DEBUG
                # print("writing %s to %s" % (hex_key, path_to_file))
                # END
                file.write(datum)

            # insert leaf into tree -----------------------
            # path_from_top = os.path.join(top_name, file_name)
            leaf = NLHLeaf(file_name, bin_key, hashtype)
            tree.insert(leaf)

            # DEBUG
            # print("  inserting <%s %s>" % (leaf.name, leaf.hex_hash))
            # END

            # write data into uDir ------------------------
            u_dir.put_data(datum, hex_key)
        return values, hashes
Example 8
    def __init__(self, filename):
        """Initialize our manager and hasher objects."""
        self.filename = filename
        try:
            self._md5_hasher = hashlib.md5()
        except ValueError:
            # FIPs mode disables MD5
            self._md5_hasher = None

        self._sha2_hasher = hashlib.sha256()
        self._blake_hasher = None
        if blake2b is not None:
            self._blake_hasher = blake2b(digest_size=256 // 8)
Example 9
    def do_test_with_simple_tree(self, hashtype):
        """ XXX STUB: test simple tree with specific hash. """

        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            # pylint:disable=no-member
            sha = hashlib.sha3_256()
        elif hashtype == HashTypes.BLAKE2B:
            sha = hashlib.blake2b(digest_size=32)
        else:
            raise NotImplementedError

        assert sha          # suppress warning
Example 10
def two_factor(request, _form_class=TwoFactorForm):
    if request.authenticated_userid is not None:
        return HTTPSeeOther(request.route_path("manage.projects"))

    token_service = request.find_service(ITokenService, name="two_factor")

    try:
        two_factor_data = token_service.loads(request.query_string)
    except TokenException:
        request.session.flash("Invalid or expired two factor login.", queue="error")
        return HTTPSeeOther(request.route_path("accounts.login"))

    userid = two_factor_data.get("userid")
    if not userid:
        return HTTPSeeOther(request.route_path("accounts.login"))

    redirect_to = two_factor_data.get("redirect_to")

    user_service = request.find_service(IUserService, context=None)

    form = _form_class(
        request.POST,
        user_id=userid,
        user_service=user_service,
        check_password_metrics_tags=["method:auth", "auth_method:login_form"],
    )

    if request.method == "POST":
        if form.validate():
            # If the user-originating redirection url is not safe, then
            # redirect to the index instead.
            if not redirect_to or not is_safe_url(url=redirect_to, host=request.host):
                redirect_to = request.route_path("manage.projects")

            _login_user(request, userid)

            resp = HTTPSeeOther(redirect_to)
            resp.set_cookie(
                USER_ID_INSECURE_COOKIE,
                hashlib.blake2b(str(userid).encode("ascii"), person=b"warehouse.userid")
                .hexdigest()
                .lower(),
            )

            return resp

    return {"form": form}
Example 11
    def do_test_simple_constructor(self, hashtype):
        """ Test constructor for specific hash. """

        check_hashtype(hashtype)
        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            sha = hashlib.sha3_256()
        elif hashtype == HashTypes.BLAKE2B:
            sha = hashlib.blake2b(digest_size=32)
        else:
            raise NotImplementedError

        name = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        self.rng.next_bytes(nnn)
        sha.update(nnn)
        hash0 = sha.digest()

        leaf0 = NLHLeaf(name, hash0, hashtype)
        self.assertEqual(name, leaf0.name)
        self.assertEqual(hash0, leaf0.bin_hash)

        name2 = name
        while name2 == name:
            name2 = self.rng.next_file_name(8)
        nnn = self.rng.some_bytes(8)
        self.rng.next_bytes(nnn)
        sha.update(nnn)
        hash1 = sha.digest()
        leaf1 = NLHLeaf(name2, hash1, hashtype)
        self.assertEqual(name2, leaf1.name)
        self.assertEqual(hash1, leaf1.bin_hash)

        self.assertEqual(leaf0, leaf0)
        self.assertEqual(leaf1, leaf1)
        self.assertFalse(leaf0 == leaf1)

        leaf0c = leaf0.clone()
        self.assertEqual(leaf0c, leaf0)

        leaf1c = leaf1.clone()
        self.assertEqual(leaf1c, leaf1)
Example 12
    def test_blake2b_against_test_vectors(self):
        with open(BLAKE2_TEST_VECTOR_FILENAME, 'rt') as f:
            test_vectors = json.load(f)
        for test_vector in test_vectors:
            if test_vector['hash'] != 'blake2b':
                continue
            if test_vector['key'] != '':  # todo remove
                continue
            v_in = binascii.unhexlify(test_vector['in'])
            v_key = binascii.unhexlify(test_vector['key'])
            v_out = binascii.unhexlify(test_vector['out'])
            result = monocypher.blake2b(v_in, v_key)
            self.assertEqual(v_out, result)

            b = monocypher.Blake2b(key=v_key)
            b.update(v_in)
            self.assertEqual(v_out, b.finalize())

            result = hashlib.blake2b(v_in).digest()
            self.assertEqual(v_out, result)
Example 13
File: user.py Project: jddixon/dvcz
def make_committer_id(pubkey, hashtype=HashTypes.SHA2):
    """
    Create a unique committer ID derived from the user's RSA public key
    using this SHA type.

    This implementation adds the current time to the hash.

    Returns a 40- or 64-character hex value.
    """

    if hashtype == HashTypes.SHA1:
        sha = hashlib.sha1()
    elif hashtype == HashTypes.SHA2:
        sha = hashlib.sha256()
    elif hashtype == HashTypes.SHA3:
        sha = hashlib.sha3_256()
    elif hashtype == HashTypes.BLAKE2B:
        sha = hashlib.blake2b(digest_size=32)
    else:
        raise NotImplementedError
    sha.update(pubkey.exportKey())  # PEM format
    sha.update(str(time.time()).encode('utf-8'))
    return sha.hexdigest()
Example 14
 def __generate_offer_hash(self, offer):
     string = json.dumps(offer, sort_keys=True)
     h = blake2b(digest_size=30)
     h.update(string.encode('utf-8'))
     return h.hexdigest()
Example 15
def generate_uid(story_data):
    data = "_".join([story_data[key] for key in story_data])
    hasher = blake2b(digest_size=4)
    hasher.update(data.encode('utf-8'))
    return hasher.hexdigest()
Example 16
def fr_cracker():
    hashtype = str(
        input("\033[1;39m CHOOSE THE TYPE OF HASH YOU WANNA CRACK : "))
    print("")
    hashe = input("\033[1;39m Enter/Paste The Hash : ")
    hashed = str(hashe)
    print("")
    wordlist = input("\033[1;39m Enter the Path/Wordlist : ")
    if os.path.isfile(wordlist):
        starttime = datetime.datetime.now()
        print(
            f"\033[1;33m FRY-CRACKER started start-time:=> {starttime.hour}:{starttime.minute}:{starttime.second}:{starttime.microsecond} Date:=> day:{starttime.day}/month:{starttime.month}/year:{starttime.year} ..............\033[1;32m"
        )
        wfile = open(wordlist, "r", encoding='UTF-8')
        words = wfile.read().split()
        wfile.close()
        if hashtype == '1':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.md5(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed} ")
                    print(f"\033[1;36m PASSWORD :=> {passwd} \033[1;37m")
                    print(
                        "<=========================================================================================================================>"
                    )
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass

        elif hashtype == '2':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.sha1(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed}")
                    print(f"\033[1;36m PASSWORD :=> {passwd} \033[1;37m")
                    print(
                        "<=========================================================================================================================>"
                    )
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass

        elif hashtype == '3':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.sha224(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed}")
                    print(f"\033[1;36m PASSWORD :=> {passwd} \033[1;37m")
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass

        elif hashtype == '4':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.sha256(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed}")
                    print(f"\033[1;36m PASSWORD :=> {passwd} \033[1;37m")
                    print(
                        "<=========================================================================================================================>"
                    )
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass

        elif hashtype == '5':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.sha384(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed}")
                    print(f"\033[1;36m PASSWORD :=> {passwd} \033[1;37m")
                    print(
                        "<=========================================================================================================================>"
                    )
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass

        elif hashtype == '6':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.sha512(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed}")
                    print(f"\033[1;36m PASSWORD :=> {passwd} \033[1;37m")
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass

        elif hashtype == '7':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.sha3_224(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed}")
                    print(f"\033[1;36m PASSWORD :=> {passwd} \033[1;37m")
                    print(
                        "<=========================================================================================================================>"
                    )
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass

        elif hashtype == '8':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.sha3_256(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed}")
                    print(f"\033[1;36m PASSWORD :=> {passwd} \033[1;37m")
                    print(
                        "<=========================================================================================================================>"
                    )
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass

        elif hashtype == '9':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.sha3_384(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed}")
                    print(f"\033[1;36m PASSWORD :=> {passwd} \033[1;37m")
                    print(
                        "<=========================================================================================================================>"
                    )
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass

        elif hashtype == '10':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.sha3_512(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed}")
                    print(f"\033[1;36m PASSWORD :=> {passwd} \033[1;37m")
                    print(
                        "<=========================================================================================================================>"
                    )
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass

        elif hashtype == '11':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.blake2b(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed}")
                    print(f"\033[1;36 PASSWORD :=> {passwd}")
                    print(
                        "<=========================================================================================================================>"
                    )
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass

        elif hashtype == '12':
            for passwd in words:
                word = passwd.encode()
                chash = hashlib.blake2s(word)
                duphash = chash.hexdigest()
                if duphash == hashed:
                    print(f"HASH CRACKED : {hashed}")
                    print(f"\033[1;36m PASSWORD :=> {passwd} \033[1;37m")
                    print(
                        "<=========================================================================================================================>"
                    )
                    print('FRY-CRACKER exited, bye bye')
                    exit()
                else:
                    pass
        else:
            print(
                "\033[1;31m No words in this list matched the hash, or the "
                "choice you made may be wrong."
            )
            print("\033[1;37m FRY-CRACKER Execution Finished.......")
Example 17
from passwordfunctions import quit_program, generate_salt
from secrets import compare_digest
from getpass import getpass
from hashlib import blake2b

# Generate master_password
# Stores master_password and salt
# NOTE: the original source was redacted here; the two getpass calls and the
# comparison below are a plausible reconstruction from the surrounding code.
pass1 = getpass('Enter Master Password:')
pass2 = getpass('Confirm Master Password:')
if compare_digest(pass1, pass2):
    print('Success')
    salts = {'master_password': generate_salt(16)}
    passwords = {
        'master_password':
        blake2b(pass1.encode(),
                digest_size=64,
                salt=bytes.fromhex(salts['master_password'])).hexdigest()
    }
    quit_program(passwords, salts)
Example 18
 def _hash_to_color(s: str) -> str:
     h = hashlib.blake2b(s.encode("utf-8"), digest_size=3).digest()
     return "#{:06x}".format(int.from_bytes(h, "big"))
Example 19
    seed = ''
    for _ in itertools.repeat(None, pwLength * randb(1337) + 1):
        j = randb(len(dictList))
        seed += ch(list(dictList[j].values()))
    return seed


# generate seeds to hashing functions
funcSeed1 = generate_seed()
funcSeed2 = generate_seed()
funcSeed3 = generate_seed()
# print('function seed1 = {0}'.format(funcSeed1))
# print('function seed2 = {0}'.format(funcSeed2))
# print('function seed3 = {0}'.format(funcSeed3))

blaked = blake2b()
sha3d = sha3_512()
sha2d = sha512()

blaked.update(bytearray(funcSeed1, 'utf-8'))
sha3d.update(bytearray(funcSeed2, 'utf-8'))
sha2d.update(bytearray(funcSeed3, 'utf-8'))

for i in range(rounds):
    blaked.update(bytearray(blaked.hexdigest(), 'utf-8'))
    sha3d.update(bytearray(sha3d.hexdigest(), 'utf-8'))
    sha2d.update(bytearray(sha2d.hexdigest(), 'utf-8'))

preprefinal = ''
for i in range(pwLength):
    j = randb(3)
Example 20
def blake2b_224(data):
    return hashlib.blake2b(data, digest_size=28)
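
Usage sketch: digest_size=28 gives a 224-bit digest, i.e. 56 hex characters:

print(blake2b_224(b"hello").hexdigest())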
Example 21
def blake2bsum(filename):
    b2bhash = hashlib.blake2b(digest_size=32)
    try:
        return hashsum(b2bhash, filename)
    except FileNotFoundError:
        return ""
Example 22
def testNonce(previous, nonce, coinbase, transactions):
    message = str(previous) + str(nonce) + str(coinbase) + str(transactions)
    hash = blake2b(message.encode(), digest_size=4)
    return hash.hexdigest()
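
Usage sketch: the 4-byte digest makes a toy proof-of-work search feasible (the field values here are made up):

nonce = 0
while not testNonce("prev-hash", nonce, "coinbase-tx", []).startswith("00"):
    nonce += 1
print("found nonce:", nonce)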
Example 23
    def hash(self, string, hashtype='md5'):
        string = string.encode()
        hashtype = hashtype.lower()
        if hashtype == 'blake2b':
            result = hashlib.blake2b(string).hexdigest()
            result = result.upper()
            return result

        elif hashtype == 'blake2s':
            result = hashlib.blake2s(string).hexdigest()
            result = result.upper()
            return result

        elif hashtype == 'sha3_224':
            result = hashlib.sha3_224(string).hexdigest()
            result = result.upper()
            return result

        elif hashtype == 'sha3_256':
            result = hashlib.sha3_256(string).hexdigest()
            result = result.upper()
            return result

        elif hashtype == 'sha3_384':
            result = hashlib.sha3_384(string).hexdigest()
            result = result.upper()
            return result

        elif hashtype == 'sha3_512':
            result = hashlib.sha3_512(string).hexdigest()
            result = result.upper()
            return result

        elif hashtype == 'shake_128':
            # SHAKE is an extendable-output function; hexdigest() requires an
            # explicit length (32 bytes chosen here).
            result = hashlib.shake_128(string).hexdigest(32)
            result = result.upper()
            return result

        elif hashtype == 'shake_256':
            result = hashlib.shake_256(string).hexdigest(64)  # XOF: length required; 64 bytes chosen
            result = result.upper()
            return result

        elif hashtype == 'md5':
            result = hashlib.md5(string).hexdigest()
            result = result.upper()
            return result

        elif hashtype == 'sha1':
            result = hashlib.sha1(string).hexdigest()
            result = result.upper()
            return result

        elif hashtype == 'sha224':
            result = hashlib.sha224(string).hexdigest()
            result = result.upper()
            return result

        elif hashtype == 'sha256':
            result = hashlib.sha256(string).hexdigest()
            result = result.upper()
            return result

        elif hashtype == 'sha384':
            result = hashlib.sha384(string).hexdigest()
            result = result.upper()
            return result

        elif hashtype == 'sha512':
            result = hashlib.sha512(string).hexdigest()
            result = result.upper()
            return result

        else:
            raise UnknownHashTypeError("An unknown hash type is entered...")
Example 24
def test_blake2b(data):
    t0 = time.time()
    res = blake2b(data).hexdigest()
    print(f'test_blake2b: {time.time() - t0:.02f}')
    return res
Example 25
def email_blake2b160_hash(email):
    return hashlib.blake2b(email_normalized(email).encode('utf-8'),
                           digest_size=20).digest()
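
email_normalized isn't defined in this snippet; a minimal hypothetical stand-in so the function runs:

def email_normalized(email):
    # Hypothetical: trim whitespace and lowercase. The real project's
    # normalization may do more.
    return email.strip().lower()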
Example 26
def get_hash(p):
    """ Given a path object, get a hash for it. """
    h = blake2b()
    h.update(str(p).encode("utf-8"))
    return h.hexdigest()
Example 27
def get_apikey(username, password):
    h = blake2b(key=bytes(password.encode("UTF-8")), digest_size=8)
    h.update(username.encode("UTF-8"))
    d = h.hexdigest()
    return d
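
Since the keyed digest acts as a password-derived token, comparisons should be constant-time; a usage sketch:

import hmac

key = get_apikey("alice", "s3cret")
assert hmac.compare_digest(key, get_apikey("alice", "s3cret"))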
Example 28
def blake2b_hash_as_int(b):
    """Compute digest of the bytes `b` using the Blake2 hash function.
    Returns an unsigned 64-bit integer.
    """
    return int.from_bytes(blake2b(b, digest_size=8).digest(), "big")
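
Usage sketch: a deterministic bucket index derived from arbitrary bytes:

print(blake2b_hash_as_int(b"some-key") % 1024)  # stable value in [0, 1024)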
Example 29
def file_upload(request):
    # If we're in read-only mode, let upload clients know
    if request.flags.enabled("read-only"):
        raise _exc_with_message(
            HTTPForbidden, "Read-only mode: Uploads are temporarily disabled"
        )

    # Before we do anything, if there isn't an authenticated user with this
    # request, then we'll go ahead and bomb out.
    if request.authenticated_userid is None:
        raise _exc_with_message(
            HTTPForbidden, "Invalid or non-existent authentication information."
        )

    # Ensure that user has a verified, primary email address. This should both
    # reduce the ease of spam account creation and activity, as well as act as
    # a forcing function for https://github.com/pypa/warehouse/issues/3632.
    # TODO: Once https://github.com/pypa/warehouse/issues/3632 has been solved,
    #       we might consider a different condition, possibly looking at
    #       User.is_active instead.
    if not (request.user.primary_email and request.user.primary_email.verified):
        raise _exc_with_message(
            HTTPBadRequest,
            (
                "User {!r} does not have a verified primary email address. "
                "Please add a verified primary email before attempting to "
                "upload to PyPI. See {project_help} for more information."
                "for more information."
            ).format(
                request.user.username,
                project_help=request.help_url(_anchor="verified-email"),
            ),
        ) from None

    # Do some cleanup of the various form fields
    for key in list(request.POST):
        value = request.POST.get(key)
        if isinstance(value, str):
            # distutils "helpfully" substitutes unknown, but "required" values
            # with the string "UNKNOWN". This is basically never what anyone
            # actually wants so we'll just go ahead and delete anything whose
            # value is UNKNOWN.
            if value.strip() == "UNKNOWN":
                del request.POST[key]

            # Escape NUL characters, which psycopg doesn't like
            if "\x00" in value:
                request.POST[key] = value.replace("\x00", "\\x00")

    # We require protocol_version 1; it's the only supported version, and
    # passing a different version should raise an error.
    if request.POST.get("protocol_version", "1") != "1":
        raise _exc_with_message(HTTPBadRequest, "Unknown protocol version.")

    # Check if any fields were supplied as a tuple and have become a
    # FieldStorage. The 'content' and 'gpg_signature' fields _should_ be a
    # FieldStorage, however.
    # ref: https://github.com/pypa/warehouse/issues/2185
    # ref: https://github.com/pypa/warehouse/issues/2491
    for field in set(request.POST) - {"content", "gpg_signature"}:
        values = request.POST.getall(field)
        if any(isinstance(value, FieldStorage) for value in values):
            raise _exc_with_message(HTTPBadRequest, f"{field}: Should not be a tuple.")

    # Look up all of the valid classifiers
    all_classifiers = request.db.query(Classifier).all()

    # Validate and process the incoming metadata.
    form = MetadataForm(request.POST)

    # Add a validator for deprecated classifiers
    form.classifiers.validators.append(_no_deprecated_classifiers(request))

    form.classifiers.choices = [(c.classifier, c.classifier) for c in all_classifiers]
    if not form.validate():
        for field_name in _error_message_order:
            if field_name in form.errors:
                break
        else:
            field_name = sorted(form.errors.keys())[0]

        if field_name in form:
            field = form[field_name]
            if field.description and isinstance(field, wtforms.StringField):
                error_message = (
                    "{value!r} is an invalid value for {field}. ".format(
                        value=field.data, field=field.description
                    )
                    + "Error: {} ".format(form.errors[field_name][0])
                    + "See "
                    "https://packaging.python.org/specifications/core-metadata"
                )
            else:
                error_message = "Invalid value for {field}. Error: {msgs[0]}".format(
                    field=field_name, msgs=form.errors[field_name]
                )
        else:
            error_message = "Error: {}".format(form.errors[field_name][0])

        raise _exc_with_message(HTTPBadRequest, error_message)

    # Ensure that we have file data in the request.
    if "content" not in request.POST:
        raise _exc_with_message(HTTPBadRequest, "Upload payload does not have a file.")

    # Look up the project first before doing anything else, this is so we can
    # automatically register it if we need to and can check permissions before
    # going any further.
    try:
        project = (
            request.db.query(Project)
            .filter(
                Project.normalized_name == func.normalize_pep426_name(form.name.data)
            )
            .one()
        )
    except NoResultFound:
        # Check for AdminFlag set by a PyPI Administrator disabling new project
        # registration, reasons for this include Spammers, security
        # vulnerabilities, or just wanting to be lazy and not worry ;)
        if request.flags.enabled("disallow-new-project-registration"):
            raise _exc_with_message(
                HTTPForbidden,
                (
                    "New project registration temporarily disabled. "
                    "See {projecthelp} for details"
                ).format(projecthelp=request.help_url(_anchor="admin-intervention")),
            ) from None

        # Before we create the project, we're going to check our blacklist to
        # see if this project is even allowed to be registered. If it is not,
        # then we're going to deny the request to create this project.
        if request.db.query(
            exists().where(
                BlacklistedProject.name == func.normalize_pep426_name(form.name.data)
            )
        ).scalar():
            raise _exc_with_message(
                HTTPBadRequest,
                (
                    "The name {name!r} isn't allowed. "
                    "See {projecthelp} "
                    "for more information."
                ).format(
                    name=form.name.data,
                    projecthelp=request.help_url(_anchor="project-name"),
                ),
            ) from None

        # Also check for collisions with Python Standard Library modules.
        if packaging.utils.canonicalize_name(form.name.data) in STDLIB_PROHIBITTED:
            raise _exc_with_message(
                HTTPBadRequest,
                (
                    "The name {name!r} isn't allowed (conflict with Python "
                    "Standard Library module name). See "
                    "{projecthelp} for more information."
                ).format(
                    name=form.name.data,
                    projecthelp=request.help_url(_anchor="project-name"),
                ),
            ) from None

        # The project doesn't exist in our database, so first we'll check for
        # projects with a similar name
        squattees = (
            request.db.query(Project)
            .filter(
                func.levenshtein(
                    Project.normalized_name, func.normalize_pep426_name(form.name.data)
                )
                <= 2
            )
            .all()
        )

        # Next we'll create the project
        project = Project(name=form.name.data)
        request.db.add(project)

        # Now that the project exists, add any squats which it is the squatter for
        for squattee in squattees:
            request.db.add(Squat(squatter=project, squattee=squattee))

        # Then we'll add a role setting the current user as the "Owner" of the
        # project.
        request.db.add(Role(user=request.user, project=project, role_name="Owner"))
        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=project.name,
                action="create",
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            )
        )
        request.db.add(
            JournalEntry(
                name=project.name,
                action="add Owner {}".format(request.user.username),
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            )
        )

    # Check that the user has permission to do things to this project, if this
    # is a new project this will act as a sanity check for the role we just
    # added above.
    if not request.has_permission("upload", project):
        raise _exc_with_message(
            HTTPForbidden,
            (
                "The user '{0}' isn't allowed to upload to project '{1}'. "
                "See {2} for more information."
            ).format(
                request.user.username,
                project.name,
                request.help_url(_anchor="project-name"),
            ),
        )

    # Uploading should prevent broken rendered descriptions.
    # Temporarily disabled, see
    # https://github.com/pypa/warehouse/issues/4079
    # if form.description.data:
    #     description_content_type = form.description_content_type.data
    #     if not description_content_type:
    #         description_content_type = "text/x-rst"
    #     rendered = readme.render(
    #         form.description.data, description_content_type, use_fallback=False
    #     )

    #     if rendered is None:
    #         if form.description_content_type.data:
    #             message = (
    #                 "The description failed to render "
    #                 "for '{description_content_type}'."
    #             ).format(description_content_type=description_content_type)
    #         else:
    #             message = (
    #                 "The description failed to render "
    #                 "in the default format of reStructuredText."
    #             )
    #         raise _exc_with_message(
    #             HTTPBadRequest,
    #             "{message} See {projecthelp} for more information.".format(
    #                 message=message,
    #                 projecthelp=request.help_url(_anchor="description-content-type"),
    #             ),
    #         ) from None

    try:
        canonical_version = packaging.utils.canonicalize_version(form.version.data)
        release = (
            request.db.query(Release)
            .filter(
                (Release.project == project)
                & (Release.canonical_version == canonical_version)
            )
            .one()
        )
    except MultipleResultsFound:
        # There are multiple releases of this project which have the same
        # canonical version that were uploaded before we checked for
        # canonical version equivalence, so return the exact match instead
        release = (
            request.db.query(Release)
            .filter(
                (Release.project == project) & (Release.version == form.version.data)
            )
            .one()
        )
    except NoResultFound:
        release = Release(
            project=project,
            _classifiers=[
                c for c in all_classifiers if c.classifier in form.classifiers.data
            ],
            dependencies=list(
                _construct_dependencies(
                    form,
                    {
                        "requires": DependencyKind.requires,
                        "provides": DependencyKind.provides,
                        "obsoletes": DependencyKind.obsoletes,
                        "requires_dist": DependencyKind.requires_dist,
                        "provides_dist": DependencyKind.provides_dist,
                        "obsoletes_dist": DependencyKind.obsoletes_dist,
                        "requires_external": DependencyKind.requires_external,
                        "project_urls": DependencyKind.project_url,
                    },
                )
            ),
            canonical_version=canonical_version,
            **{
                k: getattr(form, k).data
                for k in {
                    # This is a list of all the fields in the form that we
                    # should pull off and insert into our new release.
                    "version",
                    "summary",
                    "description",
                    "description_content_type",
                    "license",
                    "author",
                    "author_email",
                    "maintainer",
                    "maintainer_email",
                    "keywords",
                    "platform",
                    "home_page",
                    "download_url",
                    "requires_python",
                }
            },
            uploader=request.user,
            uploaded_via=request.user_agent,
        )
        request.db.add(release)
        # TODO: This should be handled by some sort of database trigger or
        #       a SQLAlchemy hook or the like instead of doing it inline in
        #       this view.
        request.db.add(
            JournalEntry(
                name=release.project.name,
                version=release.version,
                action="new release",
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            )
        )

    # TODO: We need a better solution to this than to just do it inline inside
    #       this method. Ideally the version field would just be sortable, but
    #       at least this should be some sort of hook or trigger.
    releases = (
        request.db.query(Release)
        .filter(Release.project == project)
        .options(orm.load_only(Release._pypi_ordering))
        .all()
    )
    for i, r in enumerate(
        sorted(releases, key=lambda x: packaging.version.parse(x.version))
    ):
        r._pypi_ordering = i

    # Pull the filename out of our POST data.
    filename = request.POST["content"].filename

    # Make sure that the filename does not contain any path separators.
    if "/" in filename or "\\" in filename:
        raise _exc_with_message(
            HTTPBadRequest, "Cannot upload a file with '/' or '\\' in the name."
        )

    # Make sure the filename ends with an allowed extension.
    if _dist_file_regexes[project.allow_legacy_files].search(filename) is None:
        raise _exc_with_message(
            HTTPBadRequest,
            "Invalid file extension: Use .egg, .tar.gz, .whl or .zip "
            "extension. (https://www.python.org/dev/peps/pep-0527)",
        )

    # Make sure that our filename matches the project that it is being uploaded
    # to.
    prefix = pkg_resources.safe_name(project.name).lower()
    if not pkg_resources.safe_name(filename).lower().startswith(prefix):
        raise _exc_with_message(
            HTTPBadRequest,
            "Start filename for {!r} with {!r}.".format(project.name, prefix),
        )

    # Check the content type of what is being uploaded
    if not request.POST["content"].type or request.POST["content"].type.startswith(
        "image/"
    ):
        raise _exc_with_message(HTTPBadRequest, "Invalid distribution file.")

    # Ensure that the package filetype is allowed.
    # TODO: Once PEP 527 is completely implemented we should be able to delete
    #       this and just move it into the form itself.
    if not project.allow_legacy_files and form.filetype.data not in {
        "sdist",
        "bdist_wheel",
        "bdist_egg",
    }:
        raise _exc_with_message(HTTPBadRequest, "Unknown type of file.")

    # The project may or may not have a file size specified on the project, if
    # it does then it may or may not be smaller or larger than our global file
    # size limits.
    file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit]))

    with tempfile.TemporaryDirectory() as tmpdir:
        temporary_filename = os.path.join(tmpdir, filename)

        # Buffer the entire file onto disk, checking the hash of the file as we
        # go along.
        with open(temporary_filename, "wb") as fp:
            file_size = 0
            file_hashes = {
                "md5": hashlib.md5(),
                "sha256": hashlib.sha256(),
                "blake2_256": hashlib.blake2b(digest_size=256 // 8),
            }
            for chunk in iter(lambda: request.POST["content"].file.read(8096), b""):
                file_size += len(chunk)
                if file_size > file_size_limit:
                    raise _exc_with_message(
                        HTTPBadRequest,
                        "File too large. "
                        + "Limit for project {name!r} is {limit} MB. ".format(
                            name=project.name, limit=file_size_limit // (1024 * 1024)
                        )
                        + "See "
                        + request.help_url(_anchor="file-size-limit"),
                    )
                fp.write(chunk)
                for hasher in file_hashes.values():
                    hasher.update(chunk)

        # Take our hash functions and compute the final hashes for them now.
        file_hashes = {k: h.hexdigest().lower() for k, h in file_hashes.items()}

        # Actually verify the digests that we've gotten. We're going to use
        # hmac.compare_digest even though we probably don't actually need to
        # because it's better safe than sorry. In the case of multiple digests
        # we expect them all to be given.
        if not all(
            [
                hmac.compare_digest(
                    getattr(form, "{}_digest".format(digest_name)).data.lower(),
                    digest_value,
                )
                for digest_name, digest_value in file_hashes.items()
                if getattr(form, "{}_digest".format(digest_name)).data
            ]
        ):
            raise _exc_with_message(
                HTTPBadRequest,
                "The digest supplied does not match a digest calculated "
                "from the uploaded file.",
            )

        # Check to see if the file that was uploaded exists already or not.
        is_duplicate = _is_duplicate_file(request.db, filename, file_hashes)
        if is_duplicate:
            return Response()
        elif is_duplicate is not None:
            raise _exc_with_message(
                HTTPBadRequest,
                # Note: Changing this error message to something that doesn't
                # start with "File already exists" will break the
                # --skip-existing functionality in twine
                # ref: https://github.com/pypa/warehouse/issues/3482
                # ref: https://github.com/pypa/twine/issues/332
                "File already exists. See "
                + request.help_url(_anchor="file-name-reuse"),
            )

        # Check to see if the file that was uploaded exists in our filename log
        if request.db.query(
            request.db.query(Filename).filter(Filename.filename == filename).exists()
        ).scalar():
            raise _exc_with_message(
                HTTPBadRequest,
                "This filename has already been used, use a "
                "different version. "
                "See " + request.help_url(_anchor="file-name-reuse"),
            )

        # Check to see if uploading this file would create a duplicate sdist
        # for the current release.
        if (
            form.filetype.data == "sdist"
            and request.db.query(
                request.db.query(File)
                .filter((File.release == release) & (File.packagetype == "sdist"))
                .exists()
            ).scalar()
        ):
            raise _exc_with_message(
                HTTPBadRequest, "Only one sdist may be uploaded per release."
            )

        # Check the file to make sure it is a valid distribution file.
        if not _is_valid_dist_file(temporary_filename, form.filetype.data):
            raise _exc_with_message(HTTPBadRequest, "Invalid distribution file.")

        # Check that if it's a binary wheel, it's on a supported platform
        if filename.endswith(".whl"):
            wheel_info = _wheel_file_re.match(filename)
            plats = wheel_info.group("plat").split(".")
            for plat in plats:
                if not _valid_platform_tag(plat):
                    raise _exc_with_message(
                        HTTPBadRequest,
                        "Binary wheel '{filename}' has an unsupported "
                        "platform tag '{plat}'.".format(filename=filename, plat=plat),
                    )

        # Also buffer the entire signature file to disk.
        if "gpg_signature" in request.POST:
            has_signature = True
            with open(os.path.join(tmpdir, filename + ".asc"), "wb") as fp:
                signature_size = 0
                for chunk in iter(
                    lambda: request.POST["gpg_signature"].file.read(8096), b""
                ):
                    signature_size += len(chunk)
                    if signature_size > MAX_SIGSIZE:
                        raise _exc_with_message(HTTPBadRequest, "Signature too large.")
                    fp.write(chunk)

            # Check whether signature is ASCII armored
            with open(os.path.join(tmpdir, filename + ".asc"), "rb") as fp:
                if not fp.read().startswith(b"-----BEGIN PGP SIGNATURE-----"):
                    raise _exc_with_message(
                        HTTPBadRequest, "PGP signature isn't ASCII armored."
                    )
        else:
            has_signature = False

        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(Filename(filename=filename))

        # Store the information about the file in the database.
        file_ = File(
            release=release,
            filename=filename,
            python_version=form.pyversion.data,
            packagetype=form.filetype.data,
            comment_text=form.comment.data,
            size=file_size,
            has_signature=bool(has_signature),
            md5_digest=file_hashes["md5"],
            sha256_digest=file_hashes["sha256"],
            blake2_256_digest=file_hashes["blake2_256"],
            # Figure out what our filepath is going to be, we're going to use a
            # directory structure based on the hash of the file contents. This
            # will ensure that the contents of the file cannot change without
            # it also changing the path that the file is saved to.
            path="/".join(
                [
                    file_hashes[PATH_HASHER][:2],
                    file_hashes[PATH_HASHER][2:4],
                    file_hashes[PATH_HASHER][4:],
                    filename,
                ]
            ),
            uploaded_via=request.user_agent,
        )
        request.db.add(file_)

        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=release.project.name,
                version=release.version,
                action="add {python_version} file {filename}".format(
                    python_version=file_.python_version, filename=file_.filename
                ),
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            )
        )

        # TODO: We need a better answer about how to make this transactional so
        #       this won't take effect until after a commit has happened, for
        #       now we'll just ignore it and save it before the transaction is
        #       committed.
        storage = request.find_service(IFileStorage)
        storage.store(
            file_.path,
            os.path.join(tmpdir, filename),
            meta={
                "project": file_.release.project.normalized_name,
                "version": file_.release.version,
                "package-type": file_.packagetype,
                "python-version": file_.python_version,
            },
        )
        if has_signature:
            storage.store(
                file_.pgp_path,
                os.path.join(tmpdir, filename + ".asc"),
                meta={
                    "project": file_.release.project.normalized_name,
                    "version": file_.release.version,
                    "package-type": file_.packagetype,
                    "python-version": file_.python_version,
                },
            )

    return Response()
Example 30
# Python Crypt Routine
# crypt — Function to check Unix passwords.
# This module implements an interface to the crypt(3) routine, which is a one-way hash function based upon
# a modified DES algorithm; see the Unix man page for further details.
# Possible uses include storing hashed passwords so you can check passwords without storing the actual password,
# or attempting to crack Unix passwords with a dictionary.
# Notice that the behavior of this module depends on the actual implementation of the crypt(3) routine in the running system.
# Therefore, any extensions available on the current implementation will also be available on this module.
# hashlib — Secure hashes and message digests.
# This module implements a common interface to many different secure hash and message digest algorithms.
# Included are the FIPS secure hash algorithms SHA1, SHA224, SHA256, SHA384, and SHA512 (defined in FIPS 180-2) as well as RSA’s
# MD5 algorithm (defined in Internet RFC 1321). The terms “secure hash” and “message digest” are interchangeable.
# Older algorithms were called message digests. The modern term is secure hash.
# Hash algorithms.
# There is one constructor method named for each type of hash. All return a hash object with the same simple interface.
#
# You can call hash.update() as many times as you need to iteratively update the hash:
# 

from hashlib import blake2b

items = [b'Hello', b' ', b'world']
h = blake2b()

for item in items:
    h.update(item)

h.hexdigest()

#
# OUTPUT:
#
# '6ff843ba685842aa82031d3f53c48b66326df7639a63d128974c5c14f31a0f33343a8c65551134ed1ae0f2b0dd2bb495dc81039e3eeb0aa1bb0388bbeac29183'
#
import cbor

def header_to_id(header, tx_type: int):
    header_data = cbor.dumps([tx_type, header])
    return blake2b(header_data, digest_size=32).hexdigest()
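A hypothetical call of header_to_id: any CBOR-serializable header works, and the 32-byte BLAKE2b digest yields a fixed-length identifier regardless of the header's size:

print(header_to_id({"height": 1, "parent": "00" * 32}, tx_type=0))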
Example no. 32
 def __hash(cls, payload: bytes) -> HexBytes:
     blake = hashlib.blake2b()
     blake.update(payload)
     digest = blake.digest().hex()
     truncated_digest = digest[:cls.__ID_LENGTH]
     return HexBytes(truncated_digest)
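A side note on the truncation above, as a standalone sketch: slicing the hex digest down to __ID_LENGTH characters is not the same as asking BLAKE2b for a short digest, because BLAKE2 mixes the requested digest length into its parameter block:

import hashlib

msg = b"payload"
full = hashlib.blake2b(msg).hexdigest()
short = hashlib.blake2b(msg, digest_size=8).hexdigest()
print(full[:16] == short)  # False: a native short digest is not a prefix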
Example no. 33
from hashlib import blake2b

AUTH_SIZE = 16  # assumed value; the original snippet defines this elsewhere

def sign(code, secret_key="0ec(=9)=s9rrt315r)iky27draepxl42g1hes+%hx$_9fk0*%8"):
    h = blake2b(digest_size=AUTH_SIZE, key=secret_key.encode("utf-8"))
    h.update(code.lower().encode("utf-8"))
    return h.hexdigest()
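A minimal companion verifier for sign() above (a sketch, not part of the original snippet): recompute the keyed MAC and compare in constant time.

import hmac

def verify(code, signature,
           secret_key="0ec(=9)=s9rrt315r)iky27draepxl42g1hes+%hx$_9fk0*%8"):
    return hmac.compare_digest(sign(code, secret_key), signature)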
Example no. 34
from hashlib import blake2b

def blake2bhash_generate(data):
    # new hash
    blake2bhash = blake2b(str(data).encode(), digest_size=20).hexdigest()
    return blake2bhash
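A hypothetical call. Note that the function hashes str(data), i.e. the value's repr, so distinct values with the same string form collide: blake2bhash_generate(12) == blake2bhash_generate("12").

print(blake2bhash_generate({"height": 42}))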
Example no. 35
# Python Crypt Routine
# crypt — Function to check Unix passwords.
# This module implements an interface to the crypt(3) routine, which is a one-way hash function based upon
# a modified DES algorithm; see the Unix man page for further details.
# Possible uses include storing hashed passwords so you can check passwords without storing the actual password,
# or attempting to crack Unix passwords with a dictionary.
# Notice that the behavior of this module depends on the actual implementation of the crypt(3) routine in the running system.
# Therefore, any extensions available on the current implementation will also be available on this module.
# hashlib — Secure hashes and message digests.
# This module implements a common interface to many different secure hash and message digest algorithms.
# Included are the FIPS secure hash algorithms SHA1, SHA224, SHA256, SHA384, and SHA512 (defined in FIPS 180-2) as well as RSA’s
# MD5 algorithm (defined in Internet RFC 1321). The terms “secure hash” and “message digest” are interchangeable.
# Older algorithms were called message digests. The modern term is secure hash.
# Hash algorithms.
# There is one constructor method named for each type of hash. All return a hash object with the same simple interface.
#
# You can pass the first chunk of data to update directly to the constructor as the positional argument:
#

from hashlib import blake2b

blake2b(b'Hello world').hexdigest()

#
# OUTPUT:
#
# '6ff843ba685842aa82031d3f53c48b66326df7639a63d128974c5c14f31a0f33343a8c65551134ed1ae0f2b0dd2bb495dc81039e3eeb0aa1bb0388bbeac29183'
#
Example no. 36
 def get_hash(cls, plaintext):
     import hashlib
     return hashlib.blake2b(
         plaintext.encode('utf8'),
         digest_size=cls._hash_length,
         key=app.config['SECRET_KEY'].encode('utf8'),
     ).hexdigest()
Example no. 37
import hashlib

def blake2b_256(data):
    return hashlib.blake2b(data, digest_size=32)
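Usage sketch: blake2b_256 returns the hash object itself rather than bytes, so the caller chooses the final form:

h = blake2b_256(b"some bytes")
raw = h.digest()        # 32 raw bytes
hexstr = h.hexdigest()  # 64 hex characters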
Example no. 38
import hashlib

import base58
from hashlib import blake2b

def blake2b_384(data):
    return hashlib.blake2b(data, digest_size=48)

def generate_utxo_hash(address):
    data = base58.b58decode(address)
    return blake2b(data, digest_size=32).hexdigest()
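A hypothetical call: any valid base58 string decodes, and the result is a 64-character hex identifier.

print(generate_utxo_hash("1BoatSLRHtKNngkdXEeobR76b53LETtpyT"))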
Example no. 40
        try:
            await aiofiles.os.remove(path)
        except OSError:
            pass
        return not os.path.exists(path)
    return False


async def get_checksum(data=b'', path=None, chunk_size=32768):
    if path:
        async with aiofiles.open(path, 'rb') as f:
            h = hashlib.blake2b()
            while chunk := await f.read(chunk_size):
                h.update(chunk)
    elif isinstance(data, types.AsyncGeneratorType):
        h = hashlib.blake2b()
        async for chunk in data:
            h.update(chunk)
    elif isinstance(data, types.GeneratorType):
        h = hashlib.blake2b()
        for chunk in data:
            h.update(chunk)
    elif isinstance(data, aiofiles.threadpool.binary.AsyncBufferedIOBase):
        h = hashlib.blake2b()
        while chunk := await data.read(chunk_size):
            h.update(chunk)
    elif isinstance(data, io.IOBase):
        h = hashlib.blake2b()
        while chunk := data.read(chunk_size):
            h.update(chunk)
    else:
Example no. 41
import base64
import hashlib

def _hash_address(server_address: str) -> str:
    return base64.urlsafe_b64encode(
        hashlib.blake2b(server_address.encode("utf-8"),
                        digest_size=12).digest()).decode("utf-8")
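A hypothetical call: a 12-byte BLAKE2b digest base64-encodes to exactly 16 URL-safe characters with no '=' padding, which makes it convenient as a file or directory name.

print(_hash_address("example.com:8080"))  # 16-character token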
Example no. 43
from typing import List
from hashlib import blake2b

import ed25519

from e2e.Classes.Consensus.SpamFilter import SpamFilter

# PrivateKey, PublicKey (BLS) and Data come from the project's own modules;
# those imports were not part of this excerpt.

from e2e.Classes.Merit.BlockHeader import BlockHeader
from e2e.Classes.Merit.BlockBody import BlockBody
from e2e.Classes.Merit.Block import Block
from e2e.Classes.Merit.Blockchain import Blockchain

blockchain: Blockchain = Blockchain()

dataFilter: SpamFilter = SpamFilter(5)

edPrivKey: ed25519.SigningKey = ed25519.SigningKey(b'\0' * 32)
edPubKey: ed25519.VerifyingKey = edPrivKey.get_verifying_key()

blsPrivKeys: List[PrivateKey] = [
    PrivateKey(blake2b(b'\0', digest_size=32).digest()),
    PrivateKey(blake2b(b'\1', digest_size=32).digest())
]
blsPubKeys: List[PublicKey] = [key.toPublicKey() for key in blsPrivKeys]

#Create two holders.
for h in range(2):
    block = Block(
        BlockHeader(0, blockchain.last(), bytes(32), 0, bytes(4), bytes(32),
                    blsPubKeys[h].serialize(),
                    blockchain.blocks[-1].header.time + 1200), BlockBody())
    block.mine(blsPrivKeys[h], blockchain.difficulty())
    blockchain.add(block)

#Create a Data and two Signed Verifications.
data: Data = Data(bytes(32), edPubKey.to_bytes())
Example no. 44
 def from_data(cls, data):
     h = blake2b(digest_size=32)
     h.update(data)
     return cls(h.hexdigest(), data)
Example no. 45
def digest_block(node, data, sdef, peer_ip, db_handler):
    """node param for imports"""
    class Transaction:
        def __init__(self):
            self.start_time_tx = 0
            self.q_received_timestamp = 0
            self.received_timestamp = "0.00"
            self.received_address = None
            self.received_recipient = None
            self.received_amount = 0
            self.received_signature_enc = None
            self.received_public_key_hashed = None
            self.received_operation = None
            self.received_openfield = None

    class MinerTransaction:
        def __init__(self):
            self.q_block_timestamp = 0
            self.nonce = None
            self.miner_address = None

    class PreviousBlock:
        def __init__(self):
            db_handler.execute(
                db_handler.c,
                "SELECT block_hash, block_height, timestamp FROM transactions "
                "WHERE reward != 0 ORDER BY block_height DESC LIMIT 1;")
            result = db_handler.c.fetchone()
            self.block_hash = result[0]
            self.block_height = result[1]
            self.q_timestamp_last = quantize_two(result[2])

    class BlockArray:
        def __init__(self):
            self.tx_count = 0
            self.block_height_new = node.last_block + 1  # for logging purposes.
            self.block_hash = 'N/A'
            self.failed_cause = ''
            self.block_count = 0

    def fork_reward_check():
        # fork handling
        if node.is_testnet:
            if node.last_block > fork.POW_FORK_TESTNET:
                if not fork.check_postfork_reward_testnet(db_handler):
                    db_handler.rollback_to(fork.POW_FORK_TESTNET - 1)
                    raise ValueError("Rolling back chain due to old fork data")
        else:
            if node.last_block > fork.POW_FORK:
                if not fork.check_postfork_reward(db_handler):
                    print("Rolling back")
                    db_handler.rollback_to(fork.POW_FORK - 1)
                    raise ValueError("Rolling back chain due to old fork data")
        # fork handling

    def transaction_validate():
        """Validates all transaction elements. Raise a ValueError exception on error."""

        # Begin with costless checks first, so we can early exit. Time of tx
        if tx.start_time_tx < tx.q_received_timestamp:
            raise ValueError(
                f"Future transaction not allowed, timestamp "
                f"{quantize_two((tx.q_received_timestamp - tx.start_time_tx) / 60)} minutes in the future"
            )
        if previous_block.q_timestamp_last - 86400 > tx.q_received_timestamp:
            raise ValueError("Transaction older than 24h not allowed.")
        # Amount
        if float(tx.received_amount) < 0:
            raise ValueError("Negative balance spend attempt")
        # Addresses validity
        if not essentials.address_validate(tx.received_address):
            raise ValueError("Not a valid sender address")
        if not essentials.address_validate(tx.received_recipient):
            raise ValueError("Not a valid recipient address")

        # Now we can process cpu heavier checks, decode and check sig itself
        buffer = str(
            (tx.received_timestamp, tx.received_address, tx.received_recipient,
             tx.received_amount, tx.received_operation,
             tx.received_openfield)).encode("utf-8")
        # Will raise if error - also includes reconstruction of address from pubkey to make sure it matches
        SignerFactory.verify_bis_signature(tx.received_signature_enc,
                                           tx.received_public_key_hashed,
                                           buffer, tx.received_address)
        node.logger.app_log.info(
            f"Valid signature from {tx.received_address} "
            f"to {tx.received_recipient} amount {tx.received_amount}")

    def rewards():
        if int(block_array.block_height_new) % 10 == 0:  # every 10 blocks
            db_handler.dev_reward(node, block_array, miner_tx, mining_reward,
                                  mirror_hash)
            db_handler.hn_reward(node, block_array, miner_tx, mirror_hash)

    def check_signature(block):
        for entry in block:  # sig 4
            block_array.tx_count += 1
            entry_signature = entry[4]
            if entry_signature:  # prevent empty signature database retry hack
                signature_list.append(entry_signature)
                # reject block with transactions which are already in the ledger ram

                db_handler.execute_param(
                    db_handler.h,
                    "SELECT block_height FROM transactions WHERE signature = ?;",
                    (entry_signature, ))
                tx_presence_check = db_handler.h.fetchone()
                if tx_presence_check:
                    # print(node.last_block)
                    raise ValueError(
                        f"That transaction {entry_signature[:10]} is already in our ledger, "
                        f"block_height {tx_presence_check[0]}")

                db_handler.execute_param(
                    db_handler.c,
                    "SELECT block_height FROM transactions WHERE signature = ?;",
                    (entry_signature, ))
                tx_presence_check = db_handler.c.fetchone()
                if tx_presence_check:
                    # print(node.last_block)
                    raise ValueError(
                        f"That transaction {entry_signature[:10]} is already in our RAM ledger, "
                        f"block_height {tx_presence_check[0]}")
            else:
                raise ValueError(f"Empty signature from {peer_ip}")

    if node.peers.is_banned(peer_ip):
        # no need to lose any time with banned peers
        raise ValueError("Cannot accept blocks from a banned peer")
        # since we raise, it will also drop the connection, it's fine since he's banned.

    if not node.db_lock.locked():
        block_array = BlockArray()
        node.db_lock.acquire()
        node.logger.app_log.warning(f"Database lock acquired")

        while mp.MEMPOOL.lock.locked():
            time.sleep(0.1)
            node.logger.app_log.info(
                f"Chain: Waiting for mempool to unlock {peer_ip}")

        node.logger.app_log.warning(f"Chain: Digesting started from {peer_ip}")
        # variables that have been quantized are prefixed by q_ So we can avoid any unnecessary quantize again later.
        # Takes time. Variables that are only used as quantized decimal are quantized once and for all.

        block_size = Decimal(sys.getsizeof(str(data))) / Decimal(1000000)
        node.logger.app_log.warning(f"Chain: Block size: {block_size} MB")

        try:
            block_array_data = data
            # reject block with duplicate transactions
            signature_list = []
            block_transactions = []

            for block in block_array_data:

                block_array.block_count += 1
                # Reworked process: we exit as soon as we find an error, no need to process further tests.
                # Then the exception handler takes place.

                # TODO EGG: benchmark this loop vs a single "WHERE IN" SQL
                # move down, so bad format tx do not require sql query
                check_signature(block)

                block_array.tx_count = len(signature_list)
                if block_array.tx_count != len(set(signature_list)):
                    raise ValueError(
                        "There are duplicate transactions in this block, rejected"
                    )

                del signature_list[:]

                previous_block = PreviousBlock()

                block_array.block_height_new = previous_block.block_height + 1

                db_handler.execute(
                    db_handler.c, "SELECT max(block_height) FROM transactions")
                node.last_block = db_handler.c.fetchone()[0]

                start_time_block = quantize_two(time.time())
                transaction_list_converted = [
                ]  # makes sure all the data are properly converted

                fork_reward_check()

                for tx_index, transaction in enumerate(block):
                    tx = Transaction()

                    tx.start_time_tx = quantize_two(time.time())
                    tx.q_received_timestamp = quantize_two(transaction[0])
                    tx.received_timestamp = '%.2f' % tx.q_received_timestamp
                    tx.received_address = str(transaction[1])[:56]
                    tx.received_recipient = str(transaction[2])[:56]
                    tx.received_amount = '%.8f' % (quantize_eight(
                        transaction[3]))
                    tx.received_signature_enc = str(transaction[4])[:684]
                    tx.received_public_key_hashed = str(transaction[5])[:1068]
                    tx.received_operation = str(transaction[6])[:30]
                    tx.received_openfield = str(transaction[7])[:100000]

                    # if transaction == block[-1]:
                    if tx_index == block_array.tx_count - 1:  # faster than comparing the whole tx
                        if not address_is_rsa(tx.received_recipient):
                            # Compare address rather than sig, as sig could be made up
                            raise ValueError(
                                "Coinbase (Mining) transaction only supports legacy RSA Bismuth addresses"
                            )
                        miner_tx = MinerTransaction()

                        # recognize the last transaction as the mining reward transaction
                        miner_tx.q_block_timestamp = tx.q_received_timestamp
                        miner_tx.nonce = tx.received_openfield[:128]
                        miner_tx.miner_address = tx.received_address

                    transaction_list_converted.append(
                        (tx.received_timestamp, tx.received_address,
                         tx.received_recipient, tx.received_amount,
                         tx.received_signature_enc,
                         tx.received_public_key_hashed, tx.received_operation,
                         tx.received_openfield))
                    transaction_validate()

                # reject blocks older than latest block
                if miner_tx.q_block_timestamp <= previous_block.q_timestamp_last:
                    raise ValueError(
                        "Block is older than the previous one, will be rejected"
                    )

                # calculate current difficulty (is done for each block in block array, not super easy to isolate)
                diff = difficulty(node, db_handler)
                node.difficulty = diff

                node.logger.app_log.warning(
                    f"Time to generate block {previous_block.block_height + 1}: {'%.2f' % diff[2]}"
                )
                node.logger.app_log.warning(f"Current difficulty: {diff[3]}")
                node.logger.app_log.warning(f"Current blocktime: {diff[4]}")
                node.logger.app_log.warning(f"Current hashrate: {diff[5]}")
                node.logger.app_log.warning(
                    f"Difficulty adjustment: {diff[6]}")
                node.logger.app_log.warning(f"Difficulty: {diff[0]} {diff[1]}")

                # node.logger.app_log.info("Transaction list: {}".format(transaction_list_converted))

                block_array.block_hash = hashlib.sha224(
                    (str(transaction_list_converted) +
                     previous_block.block_hash).encode("utf-8")).hexdigest()

                # node.logger.app_log.info("Last block sha_hash: {}".format(block_hash))
                node.logger.app_log.info(
                    f"Calculated block sha_hash: {block_array.block_hash}")
                # node.logger.app_log.info("Nonce: {}".format(nonce))

                # check if we already have the sha_hash
                db_handler.execute_param(
                    db_handler.h,
                    "SELECT block_height FROM transactions WHERE block_hash = ?",
                    (block_array.block_hash, ))
                dummy = db_handler.h.fetchone()
                if dummy:
                    raise ValueError(
                        "Skipping digestion of block {} from {}, because we already have it on block_height {}"
                        .format(block_array.block_hash[:10], peer_ip,
                                dummy[0]))

                # mainnet and testnet use the same check; only regnet differs.
                if node.is_mainnet or node.is_testnet:
                    diff_save = mining_heavy3.check_block(
                        block_array.block_height_new,
                        miner_tx.miner_address,
                        miner_tx.nonce,
                        previous_block.block_hash,
                        diff[0],
                        tx.received_timestamp,
                        tx.q_received_timestamp,
                        previous_block.q_timestamp_last,
                        peer_ip=peer_ip,
                        app_log=node.logger.app_log)
                else:
                    # it's regnet then, will use a specific fake method here.
                    diff_save = mining_heavy3.check_block(
                        block_array.block_height_new,
                        miner_tx.miner_address,
                        miner_tx.nonce,
                        previous_block.block_hash,
                        regnet.REGNET_DIFF,
                        tx.received_timestamp,
                        tx.q_received_timestamp,
                        previous_block.q_timestamp_last,
                        peer_ip=peer_ip,
                        app_log=node.logger.app_log)
                fees_block = []
                mining_reward = 0  # avoid warning

                # Cache for multiple tx from same address
                balances = {}
                for tx_index, transaction in enumerate(block):
                    db_timestamp = '%.2f' % quantize_two(transaction[0])
                    db_address = str(transaction[1])[:56]
                    db_recipient = str(transaction[2])[:56]
                    db_amount = '%.8f' % quantize_eight(transaction[3])
                    db_signature = str(transaction[4])[:684]
                    db_public_key_hashed = str(transaction[5])[:1068]
                    db_operation = str(transaction[6])[:30]
                    db_openfield = str(transaction[7])[:100000]

                    block_debit_address = 0
                    block_fees_address = 0

                    # this also is redundant on many tx per address block
                    for x in block:
                        if x[1] == db_address:  # make calculation relevant to a particular address in the block
                            block_debit_address = quantize_eight(
                                Decimal(block_debit_address) + Decimal(x[3]))

                            if x != block[-1]:
                                block_fees_address = quantize_eight(
                                    Decimal(block_fees_address) + Decimal(
                                        essentials.fee_calculate(
                                            db_openfield, db_operation,
                                            node.last_block))
                                )  # exclude the mining tx from fees

                    # print("block_fees_address", block_fees_address, "for", db_address)
                    # node.logger.app_log.info("Digest: Inbound block credit: " + str(block_credit))
                    # node.logger.app_log.info("Digest: Inbound block debit: " + str(block_debit))
                    # include the new block

                    # if (start_time_tx < q_received_timestamp + 432000) and not quicksync:
                    # balance_pre = quantize_eight(credit_ledger - debit_ledger - fees + rewards)  # without projection
                    balance_pre = ledger_balance3(
                        db_address, balances,
                        db_handler)  # keep this as c (ram hyperblock access)

                    # balance = quantize_eight(credit - debit - fees + rewards)
                    balance = quantize_eight(balance_pre - block_debit_address)
                    # node.logger.app_log.info("Digest: Projected transaction address balance: " + str(balance))
                    # else:
                    #    print("hyp2")

                    fee = essentials.fee_calculate(db_openfield, db_operation,
                                                   node.last_block)

                    fees_block.append(quantize_eight(fee))
                    # node.logger.app_log.info("Fee: " + str(fee))

                    # decide reward
                    if tx_index == block_array.tx_count - 1:
                        db_amount = 0  # prevent spending from another address, because mining txs allow delegation
                        if previous_block.block_height <= 10000000:

                            if node.last_block >= fork.POW_FORK or (
                                    node.is_testnet and
                                    node.last_block >= fork.POW_FORK_TESTNET):
                                mining_reward = 15 - (
                                    quantize_eight(
                                        block_array.block_height_new) /
                                    quantize_eight(
                                        1000000 / 2)) - Decimal("2.4")
                            else:
                                mining_reward = 15 - (
                                    quantize_eight(
                                        block_array.block_height_new) /
                                    quantize_eight(
                                        1000000 / 2)) - Decimal("0.8")

                            if mining_reward < 0:
                                mining_reward = 0
                        else:
                            mining_reward = 0

                        reward = quantize_eight(mining_reward +
                                                sum(fees_block[:-1]))
                        # don't request a fee for mined block so new accounts can mine
                        fee = 0
                    else:
                        reward = 0

                    if quantize_eight(balance_pre) < quantize_eight(db_amount):
                        raise ValueError(
                            f"{db_address} sending more than owned: {db_amount}/{balance_pre}"
                        )

                    if quantize_eight(balance) - quantize_eight(
                            block_fees_address) < 0:
                        # exclude fee check for the mining/header tx
                        raise ValueError(
                            f"{db_address} Cannot afford to pay fees (balance: {balance}, "
                            f"block fees: {block_fees_address})")

                    # append, but do not insert to ledger before whole block is validated,
                    # note that it takes already validated values (decimals, length)
                    node.logger.app_log.info(
                        f"Chain: Appending transaction back to block with "
                        f"{len(block_transactions)} transactions in it")
                    block_transactions.append(
                        (str(block_array.block_height_new), str(db_timestamp),
                         str(db_address), str(db_recipient), str(db_amount),
                         str(db_signature), str(db_public_key_hashed),
                         str(block_array.block_hash), str(fee), str(reward),
                         str(db_operation), str(db_openfield)))
                    try:
                        mp.MEMPOOL.delete_transaction(db_signature)
                        node.logger.app_log.info(
                            f"Chain: Removed processed transaction {db_signature[:56]}"
                            f" from the mempool while digesting")
                    except Exception:
                        # tx was not or is no more in the local mempool
                        pass
                # end for block

                # save current diff (before the new block)

                # quantized vars have to be converted, since Decimal is not json serializable...
                node.plugin_manager.execute_action_hook(
                    'block', {
                        'height': block_array.block_height_new,
                        'diff': diff_save,
                        'sha_hash': block_array.block_hash,
                        'timestamp': float(miner_tx.q_block_timestamp),
                        'miner': miner_tx.miner_address,
                        'ip': peer_ip
                    })

                node.plugin_manager.execute_action_hook(
                    'fullblock', {
                        'height': block_array.block_height_new,
                        'diff': diff_save,
                        'sha_hash': block_array.block_hash,
                        'timestamp': float(miner_tx.q_block_timestamp),
                        'miner': miner_tx.miner_address,
                        'ip': peer_ip,
                        'transactions': block_transactions
                    })

                db_handler.to_db(block_array, diff_save, block_transactions)

                # savings
                if node.is_testnet or block_array.block_height_new >= 843000:
                    # no savings for regnet
                    if int(block_array.block_height_new
                           ) % 10000 == 0:  # every x blocks

                        staking.staking_update(db_handler.conn, db_handler.c,
                                               db_handler.index,
                                               db_handler.index_cursor,
                                               "normal",
                                               block_array.block_height_new,
                                               node.logger.app_log)
                        staking.staking_payout(
                            db_handler.conn, db_handler.c, db_handler.index,
                            db_handler.index_cursor,
                            block_array.block_height_new,
                            float(miner_tx.q_block_timestamp),
                            node.logger.app_log)
                        staking.staking_revalidate(
                            db_handler.conn, db_handler.c, db_handler.index,
                            db_handler.index_cursor,
                            block_array.block_height_new, node.logger.app_log)

                # new sha_hash
                db_handler.execute(
                    db_handler.c, "SELECT * FROM transactions "
                    "WHERE block_height = (SELECT max(block_height) FROM transactions)"
                )
                # Was trying to simplify, but it's the latest mirror sha_hash,
                # not the latest block, nor the mirror of the latest block.
                # c.execute("SELECT * FROM transactions WHERE block_height = ?", (block_array.block_height_new -1,))
                tx_list_to_hash = db_handler.c.fetchall()
                mirror_hash = hashlib.blake2b(str(tx_list_to_hash).encode(),
                                              digest_size=20).hexdigest()
                # /new sha_hash

                rewards()

                # node.logger.app_log.warning("Block: {}: {} valid and saved from {}"
                # .format(block_array.block_height_new, block_hash[:10], peer_ip))
                node.logger.app_log.warning(
                    f"Valid block: {block_array.block_height_new}: "
                    f"{block_array.block_hash[:10]} with {len(block)} txs, "
                    f"digestion from {peer_ip} completed in "
                    f"{str(time.time() - float(start_time_block))[:5]}s.")
                del block_transactions[:]
                node.peers.unban(peer_ip)

                # This new block may change the int(diff). Trigger the hook whether it changed or not.
                diff = difficulty(node, db_handler)
                node.difficulty = diff
                node.plugin_manager.execute_action_hook('diff', diff[0])
                # We could recalc diff after inserting block, and then only trigger the block hook,
                # but I fear this would delay the new block event.

                # /whole block validation
                # NEW: returns new block sha_hash

            checkpoint_set(node, block_array.block_height_new)
            return block_array.block_hash

        except Exception as e:
            node.logger.app_log.warning(f"Chain processing failed: {e}")
            node.logger.app_log.info(f"Received data dump: {data}")
            block_array.failed_cause = str(e)
            # Temp
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            print(exc_type, fname, exc_tb.tb_lineno)

            if node.peers.warning(sdef, peer_ip, "Rejected block", 2):
                raise ValueError(f"{peer_ip} banned")
            raise ValueError("Chain: digestion aborted")

        finally:

            db_handler.db_to_drive(node)

            node.db_lock.release()
            node.logger.app_log.warning(f"Database lock released")

            delta_t = time.time() - float(start_time_block)
            # node.logger.app_log.warning("Block: {}: {} digestion completed in {}s."
            # .format(block_array.block_height_new,  block_hash[:10], delta_t))
            node.plugin_manager.execute_action_hook(
                'digestblock', {
                    'failed': block_array.failed_cause,
                    'ip': peer_ip,
                    'deltat': delta_t,
                    "blocks": block_array.block_count,
                    "txs": block_array.tx_count
                })

    else:
        node.logger.app_log.warning(
            f"Chain: Skipping processing from {peer_ip}, someone delivered data faster"
        )
        node.plugin_manager.execute_action_hook('digestblock', {
            'failed': "skipped",
            'ip': peer_ip
        })
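The hash chaining inside digest_block commits each block to its transaction list plus the previous block's hash. A minimal sketch of just that rule (not Bismuth's full validation):

import hashlib

def chain_hash(transactions, previous_hash):
    return hashlib.sha224(
        (str(transactions) + previous_hash).encode("utf-8")).hexdigest()

h0 = chain_hash([("t0", "sender", "recipient")], "genesis")
h1 = chain_hash([("t1", "sender", "recipient")], h0)  # depends on h0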
Example no. 46
def file_upload(request):
    # Before we do anything, if there isn't an authenticated user with this
    # request, then we'll go ahead and bomb out.
    if request.authenticated_userid is None:
        raise _exc_with_message(
            HTTPForbidden,
            "Invalid or non-existent authentication information.",
        )

    # distutils "helpfully" substitutes unknown, but "required" values with the
    # string "UNKNOWN". This is basically never what anyone actually wants so
    # we'll just go ahead and delete anything whose value is UNKNOWN.
    for key in list(request.POST):
        if request.POST.get(key) == "UNKNOWN":
            del request.POST[key]

    # We require protocol_version 1, it's the only supported version however
    # passing a different version should raise an error.
    if request.POST.get("protocol_version", "1") != "1":
        raise _exc_with_message(HTTPBadRequest, "Unknown protocol version.")

    # Look up all of the valid classifiers
    all_classifiers = request.db.query(Classifier).all()

    # Validate and process the incoming metadata.
    form = MetadataForm(request.POST)

    # Check if the classifiers were supplied as a tuple
    # ref: https://github.com/pypa/warehouse/issues/2185
    classifiers = request.POST.getall('classifiers')
    if any(isinstance(classifier, FieldStorage) for classifier in classifiers):
        raise _exc_with_message(
            HTTPBadRequest,
            "classifiers: Must be a list, not tuple.",
        )

    form.classifiers.choices = [
        (c.classifier, c.classifier) for c in all_classifiers
    ]
    if not form.validate():
        for field_name in _error_message_order:
            if field_name in form.errors:
                break
        else:
            field_name = sorted(form.errors.keys())[0]

        if field_name in form:
            if form[field_name].description:
                error_message = (
                    "{value!r} is an invalid value for {field}. ".format(
                        value=form[field_name].data,
                        field=form[field_name].description) +
                    "Error: {} ".format(form.errors[field_name][0]) +
                    "see "
                    "https://packaging.python.org/specifications/core-metadata"
                )
            else:
                error_message = "{field}: {msgs[0]}".format(
                    field=field_name,
                    msgs=form.errors[field_name],
                )
        else:
            error_message = "Error: {}".format(form.errors[field_name][0])

        raise _exc_with_message(
            HTTPBadRequest,
            error_message,
        )

    # Ensure that we have file data in the request.
    if "content" not in request.POST:
        raise _exc_with_message(
            HTTPBadRequest,
            "Upload payload does not have a file.",
        )

    # Look up the project first before doing anything else, this is so we can
    # automatically register it if we need to and can check permissions before
    # going any further.
    try:
        project = (
            request.db.query(Project)
                      .filter(
                          Project.normalized_name ==
                          func.normalize_pep426_name(form.name.data)).one()
        )
    except NoResultFound:
        # Before we create the project, we're going to check our blacklist to
        # see if this project is even allowed to be registered. If it is not,
        # then we're going to deny the request to create this project.
        if request.db.query(exists().where(
                BlacklistedProject.name ==
                func.normalize_pep426_name(form.name.data))).scalar():
            raise _exc_with_message(
                HTTPBadRequest,
                ("The name {!r} is not allowed. "
                 "See https://pypi.org/help/#project-name "
                 "for more information.")
                .format(form.name.data),
            ) from None

        # Also check for collisions with Python Standard Library modules.
        if (packaging.utils.canonicalize_name(form.name.data) in
                STDLIB_PROHIBITTED):
            raise _exc_with_message(
                HTTPBadRequest,
                ("The name {!r} is not allowed (conflict with Python "
                 "Standard Library module name). See "
                 "https://pypi.org/help/#project-name for more information.")
                .format(form.name.data),
            ) from None

        # The project doesn't exist in our database, so we'll add it along with
        # a role setting the current user as the "Owner" of the project.
        project = Project(name=form.name.data)
        request.db.add(project)
        request.db.add(
            Role(user=request.user, project=project, role_name="Owner")
        )
        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=project.name,
                action="create",
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ),
        )
        request.db.add(
            JournalEntry(
                name=project.name,
                action="add Owner {}".format(request.user.username),
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ),
        )

    # Check that the user has permission to do things to this project, if this
    # is a new project this will act as a sanity check for the role we just
    # added above.
    if not request.has_permission("upload", project):
        raise _exc_with_message(
            HTTPForbidden,
            ("The user '{0}' is not allowed to upload to project '{1}'. "
             "See https://pypi.org/help#project-name for more information.")
            .format(request.user.username, project.name)
        )

    try:
        release = (
            request.db.query(Release)
                      .filter(
                            (Release.project == project) &
                            (Release.version == form.version.data)).one()
        )
    except NoResultFound:
        release = Release(
            project=project,
            _classifiers=[
                c for c in all_classifiers
                if c.classifier in form.classifiers.data
            ],
            _pypi_hidden=False,
            dependencies=list(_construct_dependencies(
                form,
                {
                    "requires": DependencyKind.requires,
                    "provides": DependencyKind.provides,
                    "obsoletes": DependencyKind.obsoletes,
                    "requires_dist": DependencyKind.requires_dist,
                    "provides_dist": DependencyKind.provides_dist,
                    "obsoletes_dist": DependencyKind.obsoletes_dist,
                    "requires_external": DependencyKind.requires_external,
                    "project_urls": DependencyKind.project_url,
                }
            )),
            **{
                k: getattr(form, k).data
                for k in {
                    # This is a list of all the fields in the form that we
                    # should pull off and insert into our new release.
                    "version",
                    "summary", "description", "license",
                    "author", "author_email", "maintainer", "maintainer_email",
                    "keywords", "platform",
                    "home_page", "download_url",
                    "requires_python",
                }
            }
        )
        request.db.add(release)
        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=release.project.name,
                version=release.version,
                action="new release",
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ),
        )

    # TODO: We need a better solution to this than to just do it inline inside
    #       this method. Ideally the version field would just be sortable, but
    #       at least this should be some sort of hook or trigger.
    releases = (
        request.db.query(Release)
                  .filter(Release.project == project)
                  .all()
    )
    for i, r in enumerate(sorted(
            releases, key=lambda x: packaging.version.parse(x.version))):
        r._pypi_ordering = i

    # TODO: Again, we should figure out a better solution to doing this than
    #       just inlining this inside this method.
    if project.autohide:
        for r in releases:
            r._pypi_hidden = bool(not r == release)

    # Pull the filename out of our POST data.
    filename = request.POST["content"].filename

    # Make sure that the filename does not contain any path separators.
    if "/" in filename or "\\" in filename:
        raise _exc_with_message(
            HTTPBadRequest,
            "Cannot upload a file with '/' or '\\' in the name.",
        )

    # Make sure the filename ends with an allowed extension.
    if _dist_file_regexes[project.allow_legacy_files].search(filename) is None:
        raise _exc_with_message(
            HTTPBadRequest,
            "Invalid file extension. PEP 527 requires one of: .egg, .tar.gz, "
            ".whl, .zip (https://www.python.org/dev/peps/pep-0527/)."
        )

    # Make sure that our filename matches the project that it is being uploaded
    # to.
    prefix = pkg_resources.safe_name(project.name).lower()
    if not pkg_resources.safe_name(filename).lower().startswith(prefix):
        raise _exc_with_message(
            HTTPBadRequest,
            "The filename for {!r} must start with {!r}.".format(
                project.name,
                prefix,
            )
        )

    # Check the content type of what is being uploaded
    if (not request.POST["content"].type or
            request.POST["content"].type.startswith("image/")):
        raise _exc_with_message(HTTPBadRequest, "Invalid distribution file.")

    # Ensure that the package filetype is allowed.
    # TODO: Once PEP 527 is completely implemented we should be able to delete
    #       this and just move it into the form itself.
    if (not project.allow_legacy_files and
            form.filetype.data not in {"sdist", "bdist_wheel", "bdist_egg"}):
        raise _exc_with_message(HTTPBadRequest, "Unknown type of file.")

    # The project may or may not have a file size specified on the project, if
    # it does then it may or may not be smaller or larger than our global file
    # size limits.
    file_size_limit = max(filter(None, [MAX_FILESIZE, project.upload_limit]))

    with tempfile.TemporaryDirectory() as tmpdir:
        temporary_filename = os.path.join(tmpdir, filename)

        # Buffer the entire file onto disk, checking the hash of the file as we
        # go along.
        with open(temporary_filename, "wb") as fp:
            file_size = 0
            file_hashes = {
                "md5": hashlib.md5(),
                "sha256": hashlib.sha256(),
                "blake2_256": hashlib.blake2b(digest_size=256 // 8),
            }
            for chunk in iter(
                    lambda: request.POST["content"].file.read(8096), b""):
                file_size += len(chunk)
                if file_size > file_size_limit:
                    raise _exc_with_message(
                        HTTPBadRequest,
                        "File too large. " +
                        "Limit for project {name!r} is {limit}MB".format(
                            name=project.name,
                            limit=file_size_limit // (1024 * 1024),
                        ))
                fp.write(chunk)
                for hasher in file_hashes.values():
                    hasher.update(chunk)

        # Take our hash functions and compute the final hashes for them now.
        file_hashes = {
            k: h.hexdigest().lower()
            for k, h in file_hashes.items()
        }

        # Actually verify the digests that we've gotten. We're going to use
        # hmac.compare_digest even though we probably don't actually need to
        # because it's better safe than sorry. In the case of multiple digests
        # we expect them all to be given.
        if not all([
            hmac.compare_digest(
                getattr(form, "{}_digest".format(digest_name)).data.lower(),
                digest_value,
            )
            for digest_name, digest_value in file_hashes.items()
            if getattr(form, "{}_digest".format(digest_name)).data
        ]):
            raise _exc_with_message(
                HTTPBadRequest,
                "The digest supplied does not match a digest calculated "
                "from the uploaded file."
            )

        # Check to see if the file that was uploaded exists already or not.
        is_duplicate = _is_duplicate_file(request.db, filename, file_hashes)
        if is_duplicate:
            return Response()
        elif is_duplicate is not None:
            raise _exc_with_message(HTTPBadRequest, "File already exists.")

        # Check to see if the file that was uploaded exists in our filename log
        if (request.db.query(
                request.db.query(Filename)
                          .filter(Filename.filename == filename)
                          .exists()).scalar()):
            raise _exc_with_message(
                HTTPBadRequest,
                "This filename has previously been used, you should use a "
                "different version.",
            )

        # Check to see if uploading this file would create a duplicate sdist
        # for the current release.
        if (form.filetype.data == "sdist" and
                request.db.query(
                    request.db.query(File)
                              .filter((File.release == release) &
                                      (File.packagetype == "sdist"))
                              .exists()).scalar()):
            raise _exc_with_message(
                HTTPBadRequest,
                "Only one sdist may be uploaded per release.",
            )

        # Check the file to make sure it is a valid distribution file.
        if not _is_valid_dist_file(temporary_filename, form.filetype.data):
            raise _exc_with_message(
                HTTPBadRequest,
                "Invalid distribution file.",
            )

        # Check that if it's a binary wheel, it's on a supported platform
        if filename.endswith(".whl"):
            wheel_info = _wheel_file_re.match(filename)
            plats = wheel_info.group("plat").split(".")
            for plat in plats:
                if not _valid_platform_tag(plat):
                    raise _exc_with_message(
                        HTTPBadRequest,
                        "Binary wheel '{filename}' has an unsupported "
                        "platform tag '{plat}'."
                        .format(filename=filename, plat=plat)
                    )

        # Also buffer the entire signature file to disk.
        if "gpg_signature" in request.POST:
            has_signature = True
            with open(os.path.join(tmpdir, filename + ".asc"), "wb") as fp:
                signature_size = 0
                for chunk in iter(
                        lambda: request.POST["gpg_signature"].file.read(8096),
                        b""):
                    signature_size += len(chunk)
                    if signature_size > MAX_SIGSIZE:
                        raise _exc_with_message(
                            HTTPBadRequest,
                            "Signature too large.",
                        )
                    fp.write(chunk)

            # Check whether signature is ASCII armored
            with open(os.path.join(tmpdir, filename + ".asc"), "rb") as fp:
                if not fp.read().startswith(b"-----BEGIN PGP SIGNATURE-----"):
                    raise _exc_with_message(
                        HTTPBadRequest,
                        "PGP signature is not ASCII armored.",
                    )
        else:
            has_signature = False

        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(Filename(filename=filename))

        # Store the information about the file in the database.
        file_ = File(
            release=release,
            filename=filename,
            python_version=form.pyversion.data,
            packagetype=form.filetype.data,
            comment_text=form.comment.data,
            size=file_size,
            has_signature=bool(has_signature),
            md5_digest=file_hashes["md5"],
            sha256_digest=file_hashes["sha256"],
            blake2_256_digest=file_hashes["blake2_256"],
            # Figure out what our filepath is going to be, we're going to use a
            # directory structure based on the hash of the file contents. This
            # will ensure that the contents of the file cannot change without
            # it also changing the path that the file is saved to.
            path="/".join([
                file_hashes[PATH_HASHER][:2],
                file_hashes[PATH_HASHER][2:4],
                file_hashes[PATH_HASHER][4:],
                filename,
            ]),
        )
        request.db.add(file_)

        # TODO: This should be handled by some sort of database trigger or a
        #       SQLAlchemy hook or the like instead of doing it inline in this
        #       view.
        request.db.add(
            JournalEntry(
                name=release.project.name,
                version=release.version,
                action="add {python_version} file {filename}".format(
                    python_version=file_.python_version,
                    filename=file_.filename,
                ),
                submitted_by=request.user,
                submitted_from=request.remote_addr,
            ),
        )

        # TODO: We need a better answer about how to make this transactional so
        #       this won't take effect until after a commit has happened, for
        #       now we'll just ignore it and save it before the transaction is
        #       committed.
        storage = request.find_service(IFileStorage)
        storage.store(
            file_.path,
            os.path.join(tmpdir, filename),
            meta={
                "project": file_.release.project.normalized_name,
                "version": file_.release.version,
                "package-type": file_.packagetype,
                "python-version": file_.python_version,
            },
        )
        if has_signature:
            storage.store(
                file_.pgp_path,
                os.path.join(tmpdir, filename + ".asc"),
                meta={
                    "project": file_.release.project.normalized_name,
                    "version": file_.release.version,
                    "package-type": file_.packagetype,
                    "python-version": file_.python_version,
                },
            )

        # TODO: Once we no longer have the legacy code base running PyPI we can
        #       go ahead and delete this tiny bit of shim code, since it only
        #       exists to purge stuff on legacy PyPI when uploaded to Warehouse
        old_domain = request.registry.settings.get("warehouse.legacy_domain")
        if old_domain:
            request.tm.get().addAfterCommitHook(
                _legacy_purge,
                args=["https://{}/pypi".format(old_domain)],
                kws={"data": {":action": "purge", "project": project.name}},
            )

    return Response()
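The digest verification in the middle of file_upload compares each client-supplied digest against the server-side value with hmac.compare_digest. A standalone sketch of that pattern (the values are illustrative):

import hashlib
import hmac

computed = hashlib.sha256(b"uploaded bytes").hexdigest()
claimed = "..."  # hypothetical client-supplied hex digest
ok = hmac.compare_digest(claimed.lower(), computed)  # constant-time compare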
Example no. 47
import hashlib

def H(m):
    #return hashlib.sha512(m).digest()
    return hashlib.blake2b(m).digest()
Example no. 48
 def test_blake2b_against_hashlib(self):
     msg = bytes(range(256))
     self.assertEqual(hashlib.blake2b(msg).digest(), monocypher.blake2b(msg))
Example no. 49
    def post_data(self, file: Path) -> Dict[str, Any]:
        meta = Metadata.from_package(self._package)

        file_type = self._get_type(file)

        if _has_blake2:
            blake2_256_hash = hashlib.blake2b(digest_size=256 // 8)

        md5_hash = hashlib.md5()
        sha256_hash = hashlib.sha256()
        with file.open("rb") as fp:
            for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b""):
                md5_hash.update(content)
                sha256_hash.update(content)

                if _has_blake2:
                    blake2_256_hash.update(content)

        md5_digest = md5_hash.hexdigest()
        sha2_digest = sha256_hash.hexdigest()
        blake2_256_digest: Optional[str] = None
        if _has_blake2:
            blake2_256_digest = blake2_256_hash.hexdigest()

        py_version: Optional[str] = None
        if file_type == "bdist_wheel":
            wheel_info = wheel_file_re.match(file.name)
            if wheel_info is not None:
                py_version = wheel_info.group("pyver")

        data = {
            # identify release
            "name": meta.name,
            "version": meta.version,
            # file content
            "filetype": file_type,
            "pyversion": py_version,
            # additional meta-data
            "metadata_version": meta.metadata_version,
            "summary": meta.summary,
            "home_page": meta.home_page,
            "author": meta.author,
            "author_email": meta.author_email,
            "maintainer": meta.maintainer,
            "maintainer_email": meta.maintainer_email,
            "license": meta.license,
            "description": meta.description,
            "keywords": meta.keywords,
            "platform": meta.platforms,
            "classifiers": meta.classifiers,
            "download_url": meta.download_url,
            "supported_platform": meta.supported_platforms,
            "comment": None,
            "md5_digest": md5_digest,
            "sha256_digest": sha2_digest,
            "blake2_256_digest": blake2_256_digest,
            # PEP 314
            "provides": meta.provides,
            "requires": meta.requires,
            "obsoletes": meta.obsoletes,
            # Metadata 1.2
            "project_urls": meta.project_urls,
            "provides_dist": meta.provides_dist,
            "obsoletes_dist": meta.obsoletes_dist,
            "requires_dist": meta.requires_dist,
            "requires_external": meta.requires_external,
            "requires_python": meta.requires_python,
        }

        # Metadata 2.1
        if meta.description_content_type:
            data["description_content_type"] = meta.description_content_type

        # TODO: Provides extra

        return data
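The _has_blake2 flag used above is not defined in this excerpt; a plausible definition, given that hashlib.blake2b only exists on Python 3.6+, is a one-time feature probe:

import hashlib

_has_blake2 = hasattr(hashlib, "blake2b")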
Example no. 50
    def list_gen(cls, title, data_dir,
                 dvcz_dir='.dvcz',
                 list_file='lastBuildList',
                 key_file=os.path.join(
                     os.environ['DVCZ_PATH_TO_KEYS'], 'skPriv.pem'),
                 excl=['build'],
                 logging=False,
                 u_path='',
                 hashtype=HashTypes.SHA1,     # NOTE default is SHA1
                 using_indir=False):
        """
        Create a BuildList for data_dir with the title indicated.

        Files matching the globs in excl will be skipped.  'build'
        should always be in the list.  That is, the `build/` directory and
        its contents, including any subdirectories, are always excluded.

        If a private key file is specified (via key_file), the BuildList
        will be digitally signed.

        If u_path is specified, the files in data_dir will be posted to uDir.
        By default SHA1 hash will be used for the digital
        signature.

        If there is a title, we try to read the version number from
        the first line of .dvcz/version.  If that exists, we append
        a space and then the version number to the title.
        """
        # DEBUG
        # print("DEBUG: ENTERING list_gen")
        # END
        _ = using_indir     # UNUSED: suppress warning
        version = '0.0.0'
#       path_to_version = os.path.join(dvcz_dir, 'version')
#       if os.path.exists(path_to_version):
#           with open(path_to_version, 'r') as file:
#               version = file.readline().strip()
        path_to_cfg = os.path.join(dvcz_dir, 'projConfig.toml')
        if os.path.exists(path_to_cfg):
            with open(path_to_cfg, 'r') as file:
                pmap = load(file)
                version = pmap['project']['version']
            title = title + ' v' + version
            # DEBUG
            # print("title with version is '%s'" % title)
            # END

        ex_re = make_ex_re(excl)
        signing = key_file != ''
        if signing:
            with open(key_file, 'r') as file:
                sk_priv = RSA.importKey(file.read())
            sk_ = sk_priv.publickey()
        else:
            sk_ = None
        blist = cls.create_from_file_system(
            title, data_dir, sk_, hashtype, ex_re, match_re=None)
        if signing:
            blist.sign(sk_priv)

        new_data = str(blist).encode('utf-8')
        if hashtype == HashTypes.SHA1:
            sha = hashlib.sha1()
        elif hashtype == HashTypes.SHA2:
            sha = hashlib.sha256()
        elif hashtype == HashTypes.SHA3:
            # pylint: disable=maybe-no-member
            sha = hashlib.sha3_256()
        elif hashtype == HashTypes.BLAKE2B:
            sha = hashlib.blake2b(digest_size=32)
        else:
            raise NotImplementedError
        sha.update(new_data)
        new_hash = sha.hexdigest()
        path_to_listing = os.path.join(dvcz_dir, list_file)

        if u_path:

            blist.tree.save_to_u_dir(data_dir, u_path, hashtype)

            # insert this BuildList into U
            # DEBUG
            # print("writing BuildList with hash %s into %s" %
            #       (new_hash, u_path))
            # END
            u_dir = UDir.discover(u_path)
            # DEBUG
            # print("list_gen:")
            # print("  uDir:      %s" % u_path)
            # print("  dirStruc:  %s" % UDir.dir_struc_to_name(uDir.dirStruc))
            # print("  hashtype:  %s" % uDir.hashtype)
            # END
            (_, hash_back) = u_dir.put_data(new_data, new_hash)
            if hash_back != new_hash:
                print("WARNING: wrote %s to %s, but actual hash is %s" % (
                    new_hash, u_path, hash_back))

        # CHANGES TO DATADIR AFTER UPDATING u_path ===================

        # serialize the BuildList, typically to .dvcz/lastBuildList
        with open(path_to_listing, 'wb+') as file:
            file.write(new_data)

        # DEBUG
        # print("hash of buildlist at %s is %s" % (path_to_listing, new_hash))
        # END
        if logging:
            path_to_log = os.path.join(dvcz_dir, 'builds')
            with open(path_to_log, 'a') as file:
                file.write("%s v%s %s\n" %
                           (blist.timestamp, version, new_hash))

        return blist
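The if/elif chain that maps hashtype to a hashlib constructor can also be written as a lookup table. The sketch below is a hedged rewrite, not the library's code; the HashTypes enum here is a stand-in for the one used in the example:

import hashlib
from enum import Enum

class HashTypes(Enum):  # stand-in for the example's HashTypes enum
    SHA1 = 1
    SHA2 = 2
    SHA3 = 3
    BLAKE2B = 4

_HASHER_FACTORIES = {
    HashTypes.SHA1: hashlib.sha1,
    HashTypes.SHA2: hashlib.sha256,
    HashTypes.SHA3: hashlib.sha3_256,
    HashTypes.BLAKE2B: lambda: hashlib.blake2b(digest_size=32),
}

def hasher_for(hashtype: HashTypes):
    """Return a fresh hasher for the given hash type."""
    try:
        return _HASHER_FACTORIES[hashtype]()
    except KeyError:
        raise NotImplementedError(hashtype)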
Example no. 51
import hashlib

def hashToken(t):
    return hashlib.blake2b(t.encode('ascii')).hexdigest()
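hashToken above is unkeyed, so anyone can recompute a token's hash. If hashes must only be verifiable server-side, BLAKE2b's built-in keyed mode (the key parameter of hashlib.blake2b) turns the same call into a MAC. A sketch with a hypothetical key:

import hashlib

SERVER_KEY = b"change-me"  # hypothetical secret; blake2b keys may be up to 64 bytes

def hash_token_keyed(t: str) -> str:
    # Keyed BLAKE2b acts as a MAC: the digest cannot be recomputed
    # without SERVER_KEY.
    return hashlib.blake2b(t.encode('ascii'), key=SERVER_KEY).hexdigest()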
Example no. 52
import hashlib
import sys

# sys.argv[1]: hex-encoded data to hash
# sys.argv[2]: BLAKE2b personalization string (at most 16 bytes)
personal = sys.argv[2]

if personal == 'ZcashSigHash':
    # 'ZcashSigHash' plus the 4-byte consensus branch ID
    personal = b"ZcashSigHash\x19\x1b\xa8\x5b"
else:
    personal = personal.encode()

h = hashlib.blake2b(person=personal, digest_size=32)
h.update(bytearray.fromhex(sys.argv[1]))
print(h.hexdigest())
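The same computation wrapped as an importable function; the default branch-ID bytes are the ones hard-coded in the script above, and the function name is ours:

import hashlib

def zcash_sighash_digest(data_hex: str,
                         branch_id: bytes = b"\x19\x1b\xa8\x5b") -> str:
    # BLAKE2b personalization is limited to 16 bytes:
    # the 12-byte tag "ZcashSigHash" plus the 4-byte branch ID.
    person = b"ZcashSigHash" + branch_id
    h = hashlib.blake2b(person=person, digest_size=32)
    h.update(bytes.fromhex(data_hex))
    return h.hexdigest()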
Example no. 53
    def post_data(self, file):
        meta = Metadata.from_package(self._package)

        file_type = self._get_type(file)

        if _has_blake2:
            blake2_256_hash = hashlib.blake2b(digest_size=256 // 8)

        md5_hash = hashlib.md5()
        sha256_hash = hashlib.sha256()
        with file.open("rb") as fp:
            for content in iter(lambda: fp.read(io.DEFAULT_BUFFER_SIZE), b""):
                md5_hash.update(content)
                sha256_hash.update(content)

                if _has_blake2:
                    blake2_256_hash.update(content)

        md5_digest = md5_hash.hexdigest()
        sha2_digest = sha256_hash.hexdigest()
        if _has_blake2:
            blake2_256_digest = blake2_256_hash.hexdigest()
        else:
            blake2_256_digest = None

        py_version = None
        if file_type == "bdist_wheel":
            wheel_info = wheel_file_re.match(file.name)
            # Guard against filenames that do not match the wheel pattern.
            if wheel_info is not None:
                py_version = wheel_info.group("pyver")

        data = {
            # identify release
            "name": meta.name,
            "version": meta.version,
            # file content
            "filetype": file_type,
            "pyversion": py_version,
            # additional meta-data
            "metadata_version": meta.metadata_version,
            "summary": meta.summary,
            "home_page": meta.home_page,
            "author": meta.author,
            "author_email": meta.author_email,
            "maintainer": meta.maintainer,
            "maintainer_email": meta.maintainer_email,
            "license": meta.license,
            "description": meta.description,
            "keywords": meta.keywords,
            "platform": meta.platforms,
            "classifiers": meta.classifiers,
            "download_url": meta.download_url,
            "supported_platform": meta.supported_platforms,
            "comment": None,
            "md5_digest": md5_digest,
            "sha256_digest": sha2_digest,
            "blake2_256_digest": blake2_256_digest,
            # PEP 314
            "provides": meta.provides,
            "requires": meta.requires,
            "obsoletes": meta.obsoletes,
            # Metadata 1.2
            "project_urls": meta.project_urls,
            "provides_dist": meta.provides_dist,
            "obsoletes_dist": meta.obsoletes_dist,
            "requires_dist": meta.requires_dist,
            "requires_external": meta.requires_external,
            "requires_python": meta.requires_python,
        }

        # Metadata 2.1
        if meta.description_content_type:
            data["description_content_type"] = meta.description_content_type

        # TODO: Provides extra

        return data
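Both post_data examples guard BLAKE2 usage behind a _has_blake2 flag defined elsewhere in their modules. Its definition is not shown, but a plausible feature check (an assumption on our part) is:

import hashlib

# Assumed definition: blake2b entered the standard library in Python 3.6,
# so on older interpreters blake2_256_digest is simply omitted.
_has_blake2 = hasattr(hashlib, "blake2b")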