Example #1
def prove_token(token: str):
    """If the hash is in a correct form: [HASH of PASSWORD + RANDOM][RANDOM] it checks
    whether the hash corresponds to the hash of [PASSWORD][RANDOM]. The lengths are constants in this file"""
    short = False
    if len(token) == (NO_OF_BYTES_HASH + NO_OF_BYTES_RANDOM_STANDARD) * 2:
        short = False
    elif len(token) == (NO_OF_BYTES_HASH_SHORT + NO_OF_BYTES_RANDOM_SHORT) * 2:
        short = True
    else:
        return False

    if short:
        h = token[:(NO_OF_BYTES_HASH_SHORT * 2)]
        rand1 = token[(NO_OF_BYTES_HASH_SHORT * 2):]
        h2 = hasher(
            bytes.fromhex(global_storage.db_token_check_password +
                          rand1)).hexdigest()
        return h2[:(NO_OF_BYTES_HASH_SHORT * 2)] == h
    else:
        h = token[:(NO_OF_BYTES_HASH * 2)]
        rand1 = token[(NO_OF_BYTES_HASH * 2):]
        h2 = hasher(
            bytes.fromhex(global_storage.db_token_check_password +
                          rand1)).hexdigest()
        return h2 == h
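A round trip with Example #16's generator shows the token layout; a hypothetical sketch, assuming the module from Example #33 is imported and a hex secret is configured:

# Hypothetical sketch: prove_token, generate_provable_token and the constants
# from Examples #1/#16/#33 are assumed to be in scope.
global_storage.db_token_check_password = "deadbeef"  # assumed hex secret

token = generate_provable_token()     # sha1(secret + random) hex, then random hex
assert prove_token(token)             # hash part matches
tampered = token[:-1] + ('0' if token[-1] != '0' else '1')
assert not prove_token(tampered)      # altered random part breaks the proof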
Example #2
def print_duplicates(root_directory):
    block_size = 2 ** 20
    file_hashes = {}
    for current_path, folders, files in walk(root_directory):
        for file_name in files:
            if file_name[0] in ['.', '~']:
                continue

            with open(path.join(current_path, file_name), mode='rb') as f:
                my_hasher = hasher()
                block = f.read(block_size)
                while block:
                    my_hasher.update(block)
                    block = f.read(block_size)

                file_hash = my_hasher.hexdigest()
                full_file_name = path.join(current_path, file_name)
                if file_hash not in file_hashes:
                    file_hashes[file_hash] = [full_file_name]
                else:
                    file_hashes[file_hash].append(full_file_name)

    for files_with_hash in file_hashes.values():
        if len(files_with_hash) > 1:
            print(*files_with_hash, sep=':')
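Usage is a single call over a directory tree; a sketch, assuming hasher, walk and path are imported as in Example #25:

# Prints one line per group of byte-identical files, e.g.
#   ./notes.txt:./backup/notes.txt
print_duplicates('.')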
Example #3
    def incore_digest(self):
        """Generate hashed digest of in-code file content.

        Returns hashed digest based upon in-core file content for
        conf file; used in determining whether file has been modified.
        """
        return hasher(self.content).hexdigest()
Example #4
def hash_a_container(obj, truncate=False):
    # type: (dict, bool) -> int
    """
    Hashes a container that is iterable and whose items are hashable.
    Getting this method to produce stable output across Python 2 and Python 3
    is tricky, due to differences in the way encodings are handled.
    Also, Python 2 ints differ from Python 3 ints, so if we just pass
    the hexdigest to int(x, 16), results diverge between py2 and py3. This
    makes doctests complicated. To make doctests match, you have to truncate
    so that the output of hash() lines up in py2 and py3.

    todo: make this better and faster
    :param obj: Container of hashables
    :param truncate:
    :return:
    """
    hashval = hasher()
    for key, val in sorted(obj.items()):
        hashval.update(str(hash(key)).encode())
        hashval.update(str(hash(val)).encode())

    hash_hex = hashval.hexdigest()
    hash_hex = hash_hex[:14] if truncate else hash_hex

    return int(hash_hex, 16)
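One caveat: CPython randomizes str hashes per process, so for string keys or values the result above is only repeatable across runs if PYTHONHASHSEED is pinned; a quick check (hypothetical sketch):

import os
import subprocess
import sys

# hash("key") differs between interpreter runs unless the seed is fixed,
# and hash_a_container inherits that behaviour for str keys/values.
env = {**os.environ, "PYTHONHASHSEED": "0"}
outs = {subprocess.run([sys.executable, "-c", 'print(hash("key"))'],
                       env=env, capture_output=True, text=True).stdout
        for _ in range(2)}
assert len(outs) == 1  # identical output with a pinned seed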
Example #5
def hashing(file_path):
    with open(file_path, mode='rb') as f:
        h = hasher()
        block = f.read(2 ** 15)
        while block:
            h.update(block)
            block = f.read(2 ** 15)
        return h.hexdigest()
Example #6
def get_hash(file):
    with open(file, mode='rb') as f:
        hashe = hasher()
        data = f.read(256)
        while data:
            hashe.update(data)
            data = f.read(256)
        return hashe.hexdigest()
Example #7
def hsh(filename):
    with open(filename, mode='rb') as f:
        h = hasher()
        up = f.read(SZ)
        while up:
            h.update(up)
            up = f.read(SZ)
    return h.hexdigest()
Example #8
    def index_data(self):
        hash_index = {}
        beginning = self.data.tell()
        for seq in self.data:
            hashh = hasher(seq.strip()).hexdigest()[:10]
            hash_index[hashh] = beginning
            beginning = self.data.tell()
        self.data.seek(0)
        return hash_index
Example #9
def hashing(s1):
    h1 = hasher()
    with open(s1, mode='rb') as f:
        while True:
            s2 = f.read(4096)
            if not s2:
                break
            h1.update(s2)
    return h1.digest()
Example #10
    def ondisk_digest(self):
        """Generate hashed digest of on-disk file content.

        Returns hashed digest based upon on-disk file content for
        current file name; used in determining whether file has
        been modified.
        """
        with open(self.rename_phase_src, mode='rb') as f:
            return hasher(f.read()).hexdigest()
Example #11
def hash_file(filename):
    h = hasher()
    with open(filename, mode="rb") as f:
        while True:
            chunk = f.read(2 ** 15)
            if not chunk:
                break
            h.update(chunk)
    return h.hexdigest()
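The same read loop can be written with the two-argument form of iter(), as Example #22 does; a minimal equivalent sketch:

from hashlib import sha1 as hasher  # assumed alias, as elsewhere in these examples

def hash_file_iter(filename, chunk_size=2 ** 15):
    h = hasher()
    with open(filename, mode='rb') as f:
        # iter(callable, sentinel) keeps calling f.read until it returns b''
        for chunk in iter(lambda: f.read(chunk_size), b''):
            h.update(chunk)
    return h.hexdigest()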
Example #12
def index(req):
    global inited
    global root
    db_name = "cross_browser"
    sub_number = 0
    post_data = str(req.form.list)
    json_data = post_data[8:-7]
    one_test = json.loads(json_data)
    ip = req.connection.remote_ip
    db = MySQLdb.connect("localhost", "somewebsite", "somewebsite", db_name)
    cursor = db.cursor()
    cursor.execute("SELECT COUNT(*) FROM {} WHERE id='{}'".format(
        'uid', one_test['user_id']))
    if not cursor.fetchone()[0]:
        return "user_id error"

    agent = req.headers_in['User-Agent']
    agent = agent.replace(',', ' ')
    accept = req.headers_in['Accept']
    encoding = req.headers_in['Accept-Encoding']
    language = req.headers_in['Accept-Language']
    keys = str(req.headers_in.keys())
    keys = keys.replace(',', ' ')
    keys = keys.replace('\'', ' ')
    keys = keys.replace('[', '')
    keys = keys.replace(']', '')

    table_name = "new_data"
    time = str(datetime.datetime.now())
    image_id = insert_into_db(db, table_name, ip, one_test, time, agent,
                              accept, encoding, language, keys)

    pixels = one_test['pixels'].split(" ")
    for pi in pixels:
        saveImg(padb64(pi), "{}_{}".format(image_id, sub_number))
        sub_number += 1

    h = hasher()
    string = ''
    for i in range(0, len(pixels) - 10):
        string += pixels[i]
    h.update(string)
    hash_code = encode(h.digest()).replace('=', '')
    cursor.execute("UPDATE {} SET simple_hash='{}' WHERE image_id='{}'".format(
        table_name, hash_code, image_id))
    db.commit()

    cursor.execute("SELECT COUNT(*) FROM {} WHERE user_id='{}'".format(
        table_name, one_test['user_id']))
    row = cursor.fetchone()[0]
    db.close()
    if row >= 3:
        return str(row) + ',' + str(one_test['user_id'])
    else:
        return str(row) + ',not finished'
Example #13
File: hw.py Project: mahkons/MIT
def get_hash(filename, SZ_READ=2 ** 16):
    # SZ_READ is big enough that reading is not too slow,
    # and not so big that it requires a lot of memory
    with open(filename, mode='rb') as f:
        h = hasher()
        s = f.read(SZ_READ)
        while s:
            h.update(s)
            s = f.read(SZ_READ)
    return h.hexdigest()
Example #14
def find_block(data):
    '''Try to find a block (based on data) containing the target bytes.'''
    for nonce in range(MAX_ITERATIONS):
        # create a payload we can create a new block from
        payload = ''.join((str(data), str(nonce))).encode('utf-8')
        block = hasher(payload).hexdigest()

        # see if the new block is a valid one
        if block[:TARGET_BYTES_LENGTH] == TARGET_BYTES:
            return block
    return None  # no valid block found within MAX_ITERATIONS
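A runnable sketch with small assumed values for the module-level names (the real MAX_ITERATIONS, TARGET_BYTES and hasher are defined elsewhere):

from hashlib import sha256 as hasher  # assumed digest

MAX_ITERATIONS = 10 ** 6  # assumed search bound
TARGET_BYTES = '00'       # assumed difficulty: two leading hex zeros
TARGET_BYTES_LENGTH = len(TARGET_BYTES)

# Roughly 1 nonce in 256 satisfies a two-hex-zero target.
print(find_block('some data'))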
Example #15
def connect(username, password, ip):
    print(f"{ip}: connecting...")
    user = getUser(username=username)
    if not user:
        print(f"User '{username}' not found")
        return 2,  # User not found
    if hasher(password.encode()).hexdigest() != user["password"]:
        print("Invalid password")
        return 3,  # Invalid password
    print("Client connected")
    return 1, *generateToken(user["id"], ip)  # Return token
Example #16
def generate_provable_token(short: bool = False):
    """Returns a new randomly generated token.
    The token is in form [HASH of PASSWORD + RANDOM][RANDOM]"""
    if global_storage.db_token_check_password:
        rand1 = (generate_random_short_hex()
                 if short else generate_random_standard_hex())
        h = hasher(
            bytes.fromhex(global_storage.db_token_check_password +
                          rand1)).hexdigest()
        return (h[:(NO_OF_BYTES_HASH_SHORT * 2)] if short else h) + rand1
    else:
        return None
Example #17
def verify_passwd(pwd, old_hash):
    """
    utility func to verify that given passwd is correct
    :param pwd:
    :param old_hash:
    :return: Bool
    """
    from hashlib import pbkdf2_hmac as hasher
    salt = old_hash[:64]
    key_old = old_hash[64:]

    key_new = hasher('sha256', pwd.encode('utf-8'), salt, 100000)
    return key_new == key_old
Example #18
def hash_passwd(pwd):
    """
    utility func to hash a given passwd str for first time
    :param pwd:
    :return: bString
    """
    from os import urandom as salter
    from hashlib import pbkdf2_hmac as hasher

    salt = salter(64)
    key = hasher('sha256', pwd.encode('utf-8'), salt, 100000)

    return salt + key
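The two helpers round-trip, since the first 64 bytes of the stored value are the salt (sketch):

stored = hash_passwd("correct horse")          # 64-byte salt + 32-byte PBKDF2 key
assert verify_passwd("correct horse", stored)
assert not verify_passwd("wrong horse", stored)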
Example #19
async def get_or_create(tx, bstore, blob):
    digest = hasher(blob).digest()
    key = found.pack((bstore.prefix_hash, digest))
    maybe_uid = await found.get(tx, key)
    if maybe_uid is not None:
        return UUID(bytes=maybe_uid)
    # Otherwise create the hash entry and store the blob with a new uid
    # TODO: Use a counter and implement a garbage collector, and implement
    # bstore.delete
    uid = uuid4()
    found.set(tx, key, uid.bytes)
    for index, chunk in enumerate(sliced(blob, found.MAX_SIZE_VALUE)):
        found.set(tx, found.pack((bstore.prefix_blob, uid, index)),
                  bytes(chunk))
    return uid
Example #20
def is_hash_name(FileName):
    '''
    This function will return True if FileName could be a hashname.
    '''
    if not isinstance(FileName, str): return False
    if not os.path.exists(FileName): return False
    if not os.path.isfile(FileName): return False
    basename = os.path.split(FileName)[1].split('.')[0]
    _hash = hasher()
    if len(basename) != len(_hash.hexdigest()): return False
    try:
        int(basename, 16)
    except ValueError:
        return False
    else:
        return True
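The length comparison works because the hexdigest length is fixed per algorithm; for reference:

import hashlib

for name in ('md5', 'sha1', 'sha256'):
    print(name, len(hashlib.new(name).hexdigest()))  # 32, 40, 64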
Example #21
def main():
    s = sys.argv[1]
    way = []
    for path, _, files in walk(s):
        for f in files:
            full = os.path.join(path, f)
            if f[0] != '.' and f[0] != '~' and not os.path.islink(full):
                way.append(full)
    d = {}
    for p in way:
        k = hashing(p)
        if k not in d:
            d[k] = [p]
        else:
            d[k].append(p)
    for hsh in d.values():
        if len(hsh) > 1:
            print(":".join(hsh))
Example #22
def file_contents_hash(FileName):
    '''
    This function returns the md5 (hex) digest (in string format) of the file contents of FileName.
    If the given file is compressed with one of the supported_compressions, then the hash is
    created from the uncompressed contents!
    If something goes wrong, an empty string is returned.
    '''
    retval = ''
    if os.path.exists(FileName) and os.path.isfile(FileName) and is_STDF(
            FileName):
        _hash = hasher()
        if is_compressed_file(FileName, supported_compressions):
            compression_lookup = dict(
                (v, k) for k, v in supported_compressions.items())
            ext = extension_from_magic_number_in_file(FileName)
            if len(ext) != 1:
                raise Exception("WTF!")
            compression = compression_lookup[ext]
            if compression == 'lzma':
                with lzma.open(FileName, 'rb') as fd:
                    for chunk in iter(lambda: fd.read(_hash.block_size), b''):
                        _hash.update(chunk)
                retval = _hash.hexdigest()
            elif compression == 'bz2':
                with bz2.open(FileName, 'rb') as fd:
                    for chunk in iter(lambda: fd.read(_hash.block_size), b''):
                        _hash.update(chunk)
                retval = _hash.hexdigest()
            elif compression == 'gzip':
                with gzip.open(FileName, 'rb') as fd:
                    for chunk in iter(lambda: fd.read(_hash.block_size), b''):
                        _hash.update(chunk)
                retval = _hash.hexdigest()
            else:
                raise Exception(
                    "Supported but un-implemented compression '%s'" %
                    compression)
        else:
            with open(FileName, 'rb') as fd:
                for chunk in iter(lambda: fd.read(_hash.block_size), b''):
                    _hash.update(chunk)
            retval = _hash.hexdigest()
    return retval
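Note that _hash.block_size is the digest's internal block size (64 bytes for md5/sha1), so the loops above read in very small chunks; a larger multiple is a common choice (sketch):

import hashlib

def file_digest(filename, algo='md5'):
    h = hashlib.new(algo)
    chunk_size = h.block_size * 1024  # 64 KiB for md5/sha1 instead of 64 bytes
    with open(filename, 'rb') as fd:
        for chunk in iter(lambda: fd.read(chunk_size), b''):
            h.update(chunk)
    return h.hexdigest()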
Example #23
def createUser(username: str, password: str):
    print(f"Creating '{username}' user...")
    with open("users.json", "r") as f:
        data = load(f)
    for u in data["users"]:
        if username == u["username"]:
            print("User already exist")
            return 2,
    user = {
        "id": data["lastId"],
        "username": username,
        "password": hasher(password.encode()).hexdigest(),
        "lastMsg": -1
    }
    data["users"].append(user)
    data["lastId"] += 1
    with open("users.json", "w") as f:
        dump(data, f, indent=4)
    print("User created")
    return 1,
Example #24
def fave(user, upload_path):
    upload_data = urlopen(upload_path).read()
    image_id = hasher(upload_data).hexdigest()
    image_extension = upload_path.rsplit('.', 1)[1]
    preference_data = {'user_id':user,'image_id':image_id}
    if not Preference.find_one(**preference_data):
        Preference(**preference_data).save()
    
    try:
        # Note: this can stream from a file.  We should do that.
        fs.put(upload_data, _id=image_id, extension=image_extension)
    except FileExists:
        pass

    #if not Image.find_one({'_id':image_id}):
    #    Image(_id=image_id, extension=image_extension).save()

    #output = open("foo.png","wb")
    #output.write(upload_data)
    #app.logger.debug("Hit with %s" % data)
    return ""
Example #25
# dir(<module_name>) -- lists the "names" defined by a module

import math  # import entire module
from os import walk  # import single "name" (function/submodule)
from os import path
from math import pi as PI
from hashlib import sha1 as hasher  # import name with synonym

print("Sqrt: ", math.sqrt(9))
print("Traversal:")
for _, _, files in walk(".."):
    print("\t", files)

hasher = hasher()
hasher.update(b"chunk1")
hasher.update(b"chunk2")
hasher.update("chunk3".encode('utf-8'))
print(hasher.hexdigest())

# to get the encoding of a file, use the encoding attribute
# with open(filename) as f:
#     print(f.encoding)
# to open file with binary mode, use mode = 'rb'
# with open(filename, mode='rb') as f:
#     hasher = hasher()
#     hasher.update(f.read(1024))
#     return hasher.digest() # digest of first min(1024, filesize) bytes of
# file
Example #26
    def get_id(self):
        """Return an identifier for the frame, used for binding the frame stack."""
        return hasher(self.get_filename() + str(self.get_lineno())).hexdigest()
Example #27
template = '''
<html>
<head><title>test</title></head>
<body>
%s
</body>
</html>
'''

body_template = '''
<p style="color: #%(color)s; background-color: #%(bg)s">
A test of %(name)s as %(color)s with a %(bg)s background</p>
'''

names = 'joe tom bill sue john'.split()

from hashlib import md5 as hasher

body = []
for name in names:
    h = hasher(name.encode()).hexdigest()
    color = h[:6]
    bg = h[-6:]
    body.append(body_template % {'color': color, 'name':name, 'bg':bg})
body = ''.join(body)
html = template % body
print(html)
Example #28
def index(req):
    global inited
    global root
    db_name = "cross_browser"
    post_data = str(req.form.list)
    json_data = post_data[8:-7]
    one_test = json.loads(json_data)
    ip = req.connection.remote_ip
    db = MySQLdb.connect("localhost", "erik", "erik", db_name)
    cursor = db.cursor()
    cursor.execute("SELECT COUNT(*) FROM {} WHERE id='{}'".format('uid', one_test['user_id']))
    if not cursor.fetchone()[0]:
        return "user_id error"

    agent = req.headers_in[ 'User-Agent' ]
    agent = agent.replace(',', ' ')
    accept = 'NULL'
    encoding = 'NULL'
    language = 'NULL'
    try:
        accept = req.headers_in['Accept']
        encoding = req.headers_in['Accept-Encoding']
        language = req.headers_in['Accept-Language']
    except KeyError:
        pass

    DNT = 'NULL'
    try:
        DNT = req.headers_in['DNT']
    except KeyError:
        DNT = 'Not Defined'

    keys = "_".join(req.headers_in.keys())

    table_name = "new_data"
    time = str(datetime.datetime.now())
    image_id = insert_into_db(db, table_name, ip, one_test, time, agent, accept, encoding, language, keys, DNT)

    gpu_imgs = one_test['gpuImgs']
    for i, img in enumerate(gpu_imgs):
        saveImg(img, "{}_{}".format(image_id, i))

    for i, img in enumerate(one_test['langsDetected']):
        saveImg(img, "{}_{}_lang".format(image_id, i))

    h = hasher()
    string = ''
    for i in range(len(gpu_imgs) - 6):
        string += gpu_imgs[i]['pixels']
    h.update(string)
    hash_code = encode(h.digest()).replace('=', '')
    cursor.execute("UPDATE {} SET simple_hash='{}' WHERE image_id='{}'".format(table_name, hash_code, image_id))
    db.commit()

    cursor.execute("SELECT COUNT(*) FROM {} WHERE user_id='{}'".format(table_name, one_test['user_id']))
    row = cursor.fetchone()[0]
    db.close()
    if row == 3:
        return str(row) + ',' + getEncrypt(str(one_test['user_id']) + '_3')
    elif row == 2:
        return str(row) + ',' + getEncrypt(str(one_test['user_id']))
    else:
        return str(row) + ',not finished'
Example #29
    def hash(self):
        return hasher(self.msg).hexdigest()
Example #30
def hashed(password):
    return hasher(password + PASSWORD_SALT).hexdigest()
Example #31
def hashTweets(tweets):
    tweets = [t[:2] for t in tweets]
    tweets.sort(key=lambda tup: tup[1] + str(tup[0]), reverse=True)
    myString = str(tweets)
    return hasher(myString.encode()).hexdigest()
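Because the pairs are sorted before hashing, the digest is independent of input order; a sketch with made-up tweets, assuming hasher is a hashlib digest:

a = [(1, '2020-01-01', 'extra'), (2, '2020-01-02', 'extra')]
b = list(reversed(a))
assert hashTweets(a) == hashTweets(b)  # same pairs, same digest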
Example #33
from cryptography.hazmat.primitives import serialization as crypto_serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.backends import default_backend as crypto_default_backend
from helpers import global_storage
import os
import string

from hashlib import sha1 as hasher

NO_OF_BYTES_HASH = hasher().digest_size
# Number of bytes for the standard random hex
NO_OF_BYTES_RANDOM_STANDARD = 25
NO_OF_BYTES_RANDOM_SHORT = 2
NO_OF_BYTES_HASH_SHORT = 1


async def generate_keypair():
    key = rsa.generate_private_key(backend=crypto_default_backend(),
                                   public_exponent=65537,
                                   key_size=2048)
    private_key = key.private_bytes(crypto_serialization.Encoding.PEM,
                                    crypto_serialization.PrivateFormat.PKCS8,
                                    crypto_serialization.NoEncryption())
    public_key = key.public_key().public_bytes(
        crypto_serialization.Encoding.OpenSSH,
        crypto_serialization.PublicFormat.OpenSSH)

    return private_key, str(public_key, encoding="utf-8")


def generate_random_short_hex():
    # Body truncated in the source; presumably returns NO_OF_BYTES_RANDOM_SHORT
    # random bytes as a hex string (assumed reconstruction):
    return os.urandom(NO_OF_BYTES_RANDOM_SHORT).hex()
Example #34
def convert_pubmlst(new_package_name,
                    profile_file,
                    loci_files=(),
                    description=""):
    "Convert an MLST scheme in PubMLST 'format' into a Torch file and register it in IPFS"
    log = logg.getChild('convert_mlst')
    tor = TorchModel()
    tor.reference = Reference()
    allele_hash_cache = defaultdict(dict)
    # Open allele files, scan and hash alleles, add them to the Torch metadata
    #
    # --JSP
    for loci_file in loci_files:
        log.info(f"Opening {loci_file}...")
        locus_name = basename(loci_file).split('.')[0]
        log.debug(locus_name)
        locus = Locus()
        locus.Name = locus_name
        with open(loci_file, 'r') as loci_file_f:
            for defline, seq, _ in readfq(loci_file_f):
                loc, name = defline.split('_')
                hash_ = hasher(seq.encode('utf-8')).hexdigest()[:10]
                log.info(f"{loc} {name}: {hash_}")

                allele_hash_cache[loc][name] = hash_

                a = Allele()
                a.Name = name
                a.Hash = hash_
                locus.alleles.append(a)
        tor.reference.loci.append(locus)
    # Open the profile definition TXT and link allele names to allele hashes,
    # then add the profile definitions to the Torch struct.
    # --JSP
    profile_cache = defaultdict(dict)
    log.info(f"Opening profile definitions in {profile_file}...")
    with open(profile_file, 'r') as profile_f:
        rdr = csv.reader(profile_f, delimiter='\t')
        header = next(rdr)
        log.debug(header)
        for row in rdr:
            st = row[0]
            log.debug(f"ST {st}")
            for loc, allele in zip(header[1:], row[1:]):
                ha = allele_hash_cache[loc][allele]
                profile_cache[st][loc] = ha
                #log.debug(f"{loc}_{allele}/{ha}")
    for k, v in profile_cache.items():
        log.debug(f"{k}:{v}")
    # Create types and add to the Types array
    # --JSP
    for type_name, allelic_profile in profile_cache.items():
        t = Types()
        t.Name = type_name
        for locus, hashh in allelic_profile.items():
            p = ProfileElement()
            p.LocusName = locus
            p.AlleleName = hashh
            t.profile.append(p)
        tor.types.append(t)
    # We need to serialize the alleles to a file in the next step, so create
    # and return a generator that yields only the allele sequence data, with
    # line delimiters.
    # --JSP
    def allele_generator():
        for loci_file in loci_files:
            with open(loci_file, 'r') as loci:
                for _, seq, _ in readfq(loci):
                    yield seq
                    yield '\n'

    # Complete the Torch, build the files, register the Torch
    # --JSP
    tor.Name = new_package_name
    tor.Version = '1.0.0'
    tor.Description = description
    return register_torch(new_package_name,
                          build_torch(tor, allele_generator()))
Example #35
def salted_hash(val):
    hash = hasher(settings.CRYPTO_SECRET)
    hash.update(unicode(val, 'utf-8') if isinstance(val, str) else unicode(val))
    return hash.hexdigest()
Example #37
def make_pw_hash(name, pw, salt=None):
    if not salt:
        salt = make_salt()
    h = hasher((name + pw + salt).encode()).hexdigest()
    return (h, salt)