def genhash(cls, secret, config):
    """Hash ``secret`` using the argon2 settings parsed from ``config``."""
    # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9.
    uh.validate_secret(secret)
    secret = to_bytes(secret, "utf-8")
    hasher = cls.from_string(config)
    # pick the low-level type constant matching the parsed config string
    if hasher.type_d:
        backend_type = _argon2_cffi.low_level.Type.D
    else:
        backend_type = _argon2_cffi.low_level.Type.I
    # XXX: doesn't seem to be a way to make this honor max_threads
    try:
        digest = bascii_to_str(_argon2_cffi.low_level.hash_secret(
            type=backend_type,
            memory_cost=hasher.memory_cost,
            time_cost=hasher.rounds,
            parallelism=hasher.parallelism,
            salt=to_bytes(hasher.salt),
            hash_len=hasher.checksum_size,
            secret=secret,
            version=hasher.version,
        ))
    except _argon2_cffi.exceptions.HashingError as err:
        raise cls._adapt_backend_error(err, hash=config)
    if hasher.version == 0x10:
        # workaround: argon2 0x13 always returns "v=" segment, even for 0x10 hashes
        digest = digest.replace("$v=16$", "$")
    return digest
def _calc_checksum(self, secret):
    """Compute raw argon2 digest of ``secret`` via the pure-python backend."""
    # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9.
    uh.validate_secret(secret)
    secret = to_bytes(secret, "utf-8")
    params = dict(
        password=secret,
        salt=self.salt,
        time_cost=self.rounds,
        memory_cost=self.memory_cost,
        parallelism=self.parallelism,
        tag_length=self.checksum_size,
        type_code=self._get_backend_type(self.type),
        version=self.version,
    )
    # optional knobs only forwarded when actually set
    if self.max_threads > 0:
        params['threads'] = self.max_threads
    if self.pure_use_threads:
        params['use_threads'] = True
    if self.data:
        params['associated_data'] = self.data
    # NOTE: should return raw bytes
    # NOTE: this may raise _argon2pure.Argon2ParameterError,
    #       but if it does, there's a bug in our own parameter checking code.
    try:
        return _argon2pure.argon2(**params)
    except _argon2pure.Argon2Error as err:
        raise self._adapt_backend_error(err, self=self)
def _calc_checksum(self, secret):
    """Compute raw argon2 digest of ``secret`` (pure-python backend, D/I variants)."""
    # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9.
    uh.validate_secret(secret)
    secret = to_bytes(secret, "utf-8")
    # select the type code for argon2d vs argon2i
    type_code = _argon2pure.ARGON2D if self.type_d else _argon2pure.ARGON2I
    kwds = dict(
        password=secret,
        salt=self.salt,
        time_cost=self.rounds,
        memory_cost=self.memory_cost,
        parallelism=self.parallelism,
        tag_length=self.checksum_size,
        type_code=type_code,
        version=self.version,
    )
    if self.max_threads > 0:
        kwds['threads'] = self.max_threads
    if self.pure_use_threads:
        kwds['use_threads'] = True
    if self.data:
        kwds['associated_data'] = self.data
    # NOTE: should return raw bytes
    # NOTE: this may raise _argon2pure.Argon2ParameterError,
    #       but if it does, there's a bug in our own parameter checking code.
    try:
        return _argon2pure.argon2(**kwds)
    except _argon2pure.Argon2Error as err:
        raise self._adapt_backend_error(err, self=self)
def genhash(cls, secret, config):
    """Produce an argon2 hash string for ``secret`` from the ``config`` settings."""
    # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9.
    uh.validate_secret(secret)
    raw_secret = to_bytes(secret, "utf-8")
    parsed = cls.from_string(config)
    variant = (_argon2_cffi.low_level.Type.D if parsed.type_d
               else _argon2_cffi.low_level.Type.I)
    # XXX: doesn't seem to be a way to make this honor max_threads
    try:
        out = bascii_to_str(
            _argon2_cffi.low_level.hash_secret(
                type=variant,
                memory_cost=parsed.memory_cost,
                time_cost=parsed.rounds,
                parallelism=parsed.parallelism,
                salt=to_bytes(parsed.salt),
                hash_len=parsed.checksum_size,
                secret=raw_secret,
                version=parsed.version,
            ))
    except _argon2_cffi.exceptions.HashingError as err:
        raise cls._adapt_backend_error(err, hash=config)
    # workaround: argon2 0x13 always returns "v=" segment, even for 0x10 hashes
    if parsed.version == 0x10:
        out = out.replace("$v=16$", "$")
    return out
def hash(cls, secret, **kwds):
    """Return the default disabled-hash marker for ``secret``.

    Extra keyword settings are deprecated here; they are routed through
    ``cls.using()`` for backward compatibility.
    """
    if kwds:
        # legacy path: settings passed directly to hash() -- warn & delegate
        uh.warn_hash_settings_deprecation(cls, kwds)
        return cls.using(**kwds).hash(secret)
    uh.validate_secret(secret)
    marker = cls.default_marker
    # the configured default marker should always identify as one of ours
    assert marker and cls.identify(marker)
    return to_native_str(marker, param="marker")
def verify(cls, secret, hash, enable_wildcard=False):
    """Check ``secret`` against ``hash``; empty hashes match only with wildcard."""
    uh.validate_secret(secret)
    if not isinstance(hash, unicode_or_bytes_types):
        raise uh.exc.ExpectedStringError(hash, "hash")
    if hash:
        # any non-empty hash string never verifies
        return False
    # empty hash: match only if wildcard behavior was requested
    return enable_wildcard
def genhash(cls, secret, config):
    """Re-hash ``secret`` using settings from ``config`` (checksum preserved)."""
    # override default to preserve checksum
    if config is None:
        return cls.encrypt(secret)
    uh.validate_secret(secret)
    self = cls.from_string(config)
    self.checksum = self._calc_checksum(secret)
    return self.to_string()
def genhash(cls, secret, config, marker=None):
    """Return ``config`` unchanged when set, else hash with an optional ``marker``."""
    if not cls.identify(config):
        raise uh.exc.InvalidHashError(cls)
    if config:
        # preserve the existing str, since it might contain a disabled
        # password hash ("!" + hash)
        uh.validate_secret(secret)
        return to_native_str(config, param="config")
    # empty config: fall back to hashing with the (possibly overridden) marker
    if marker is not None:
        cls = cls.using(marker=marker)
    return cls.hash(secret)
def hash(cls, secret, user, realm, encoding=None):
    """Return the htdigest-style MD5 hex digest of ``user:realm:secret``."""
    # NOTE: this was deliberately written so that raw bytes are passed through
    # unchanged, the encoding kwd is only used to handle unicode values.
    if not encoding:
        encoding = cls.default_encoding
    uh.validate_secret(secret)
    if isinstance(secret, unicode):
        secret = secret.encode(encoding)
    user = to_bytes(user, encoding, "user")
    realm = to_bytes(realm, encoding, "realm")
    payload = render_bytes("%s:%s:%s", user, realm, secret)
    return hashlib.md5(payload).hexdigest()
def encrypt(cls, secret, user, realm, encoding=None):
    """Legacy alias behavior: MD5 hex digest of ``user:realm:secret``."""
    # NOTE: this was deliberately written so that raw bytes are passed through
    # unchanged, the encoding kwd is only used to handle unicode values.
    encoding = encoding or cls.default_encoding
    uh.validate_secret(secret)
    if isinstance(secret, unicode):
        secret = secret.encode(encoding)
    digest_input = render_bytes(
        "%s:%s:%s",
        to_bytes(user, encoding, "user"),
        to_bytes(realm, encoding, "realm"),
        secret,
    )
    return hashlib.md5(digest_input).hexdigest()
def verify(cls, secret, hash):
    """Verify ``secret`` against an mssql-style ``hash``.

    Only the case-insensitive (upper-cased) digest is compared.
    """
    # NOTE: we only compare against the upper-case hash
    # XXX: add 'full' just to verify both checksums?
    uh.validate_secret(secret)
    self = cls.from_string(hash)
    stored = self.checksum
    if stored is None:
        raise uh.exc.MissingDigestError(cls)
    if isinstance(secret, bytes):
        secret = secret.decode("utf-8")
    # upper-case digest occupies the tail of the stored checksum
    candidate = _raw_mssql(secret.upper(), self.salt)
    return consteq(candidate, stored[20:])
def verify(cls, secret, hash, **context):
    """Verify ``secret`` against a sha1-then-bcrypt ``hash``.

    The secret is first reduced to its sha1 hex digest, then checked
    with the bcrypt backend's verify().
    """
    # NOTE: classes with multiple checksum encodings should either
    # override this method, or ensure that from_string() / _norm_checksum()
    # ensures .checksum always uses a single canonical representation.
    uh.validate_secret(secret)
    self = cls.from_string(hash, **context)
    chk = self.checksum
    if chk is None:
        raise exc.MissingDigestError(cls)
    # BUGFIX: hashlib.sha1() requires bytes on python 3, but
    # uh.validate_secret() accepts unicode too -- encode text secrets
    # before digesting so unicode input no longer raises TypeError.
    if not isinstance(secret, bytes):
        secret = secret.encode("utf-8")
    # Actually use the verify from passlib_bcrypt after hashing the secret
    # with sha1
    secret = hashlib.sha1(secret).hexdigest()
    return passlib_bcrypt.verify(secret, chk)
def hash_to_dep_biplist(cls, secret, config=None, **kwds):
    """Render a SALTED-SHA512-PBKDF2 binary plist string for ``secret``."""
    validate_secret(secret)
    self = cls(use_defaults=True, **kwds)
    self.checksum = self._calc_checksum(secret)
    # assemble the plist structure expected by macOS shadow-hash consumers
    payload = {
        'SALTED-SHA512-PBKDF2': {
            'entropy': biplist.Data(self.checksum),
            'salt': biplist.Data(self.salt),
            'iterations': int(self.rounds),
        },
    }
    return biplist.writePlistToString(payload)
def genhash(cls, secret, config, marker=None):
    """Return ``config`` unchanged when set; else return a disabled-hash marker."""
    uh.validate_secret(secret)
    if config is not None and not cls.identify(config):  # handles typecheck
        raise uh.exc.InvalidHashError(cls)
    if config:
        # we want to preserve the existing str,
        # since it might contain a disabled password hash ("!" + hash)
        return to_native_str(config, param="config")
    # if None or empty string, replace with marker
    if marker:
        if not cls.identify(marker):
            raise ValueError("invalid marker: %r" % marker)
    else:
        marker = cls.default_marker
        assert marker and cls.identify(marker)
    return to_native_str(marker, param="marker")
def hash(cls, secret):
    """Hash ``secret`` with argon2i using the class's default settings."""
    # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9.
    uh.validate_secret(secret)
    raw = to_bytes(secret, "utf-8")
    # XXX: doesn't seem to be a way to make this honor max_threads
    try:
        encoded = _argon2_cffi.low_level.hash_secret(
            type=_argon2_cffi.low_level.Type.I,
            memory_cost=cls.memory_cost,
            time_cost=cls.default_rounds,
            parallelism=cls.parallelism,
            salt=to_bytes(cls._generate_salt()),
            hash_len=cls.checksum_size,
            secret=raw,
        )
        return bascii_to_str(encoded)
    except _argon2_cffi.exceptions.HashingError as err:
        raise cls._adapt_backend_error(err)
def hash(cls, secret):
    """Hash ``secret`` using the class's configured argon2 type and defaults."""
    # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9.
    uh.validate_secret(secret)
    secret = to_bytes(secret, "utf-8")
    # XXX: doesn't seem to be a way to make this honor max_threads
    try:
        return bascii_to_str(_argon2_cffi.low_level.hash_secret(
            type=cls._get_backend_type(cls.type),
            memory_cost=cls.memory_cost,
            time_cost=cls.default_rounds,
            parallelism=cls.parallelism,
            salt=to_bytes(cls._generate_salt()),
            hash_len=cls.checksum_size,
            secret=secret,
        ))
    except _argon2_cffi.exceptions.HashingError as err:
        raise cls._adapt_backend_error(err)
def verify(cls, secret, hash):
    """Verify ``secret`` against an argon2d/argon2i ``hash`` string."""
    # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9.
    uh.validate_secret(secret)
    secret = to_bytes(secret, "utf-8")
    hash = to_bytes(hash, "ascii")
    # choose argon2d vs argon2i based on the hash's identifier prefix
    variant = (_argon2_cffi.low_level.Type.D if hash.startswith(b"$argon2d$")
               else _argon2_cffi.low_level.Type.I)
    # XXX: doesn't seem to be a way to make this honor max_threads
    try:
        ok = _argon2_cffi.low_level.verify_secret(hash, secret, variant)
        assert ok is True
        return True
    except _argon2_cffi.exceptions.VerifyMismatchError:
        return False
    except _argon2_cffi.exceptions.VerificationError as err:
        raise cls._adapt_backend_error(err, hash=hash)
def verify(cls, secret, hash, full=False):
    """Verify ``secret`` against a scram hash.

    By default only one digest is checked; pass ``full=True`` to verify
    every stored digest and detect internal inconsistency.
    """
    uh.validate_secret(secret)
    self = cls.from_string(hash)
    chkmap = self.checksum
    if not chkmap:
        raise ValueError("expected %s hash, got %s config string instead" %
                         (cls.name, cls.name))
    # NOTE: to make the verify method efficient, we just calculate hash
    # of shortest digest by default. apps can pass in "full=True" to
    # check entire hash for consistency.
    if full:
        correct = failed = False
        for alg, digest in iteritems(chkmap):
            other = self._calc_checksum(secret, alg)
            # NOTE: could do this length check in norm_algs(),
            # but don't need to be that strict, and want to be able
            # to parse hashes containing algs not supported by platform.
            # it's fine if we fail here though.
            if len(digest) != len(other):
                raise ValueError("mis-sized %s digest in scram hash: %r != %r"
                                 % (alg, len(digest), len(other)))
            if consteq(other, digest):
                correct = True
            else:
                failed = True
        if correct and failed:
            raise ValueError("scram hash verified inconsistently, "
                             "may be corrupted")
        return correct
    # XXX: should this just always use sha1 hash? would be faster.
    # otherwise only verify against one hash, pick one w/ best security.
    for alg in self._verify_algs:
        if alg in chkmap:
            other = self._calc_checksum(secret, alg)
            return consteq(other, chkmap[alg])
    # there should always be sha-1 at the very least,
    # or something went wrong inside _norm_algs()
    raise AssertionError("sha-1 digest not found!")
def verify(cls, secret, hash, full=False):
    """Verify ``secret`` against a scram hash (optionally checking all digests)."""
    uh.validate_secret(secret)
    self = cls.from_string(hash)
    chkmap = self.checksum
    if not chkmap:
        raise ValueError("expected %s hash, got %s config string instead" %
                         (cls.name, cls.name))
    # NOTE: to make the verify method efficient, we just calculate hash
    # of shortest digest by default. apps can pass in "full=True" to
    # check entire hash for consistency.
    if full:
        any_match = any_fail = False
        for alg, digest in iteritems(chkmap):
            computed = self._calc_checksum(secret, alg)
            # NOTE: could do this length check in norm_algs(),
            # but don't need to be that strict, and want to be able
            # to parse hashes containing algs not supported by platform.
            # it's fine if we fail here though.
            if len(digest) != len(computed):
                raise ValueError("mis-sized %s digest in scram hash: %r != %r"
                                 % (alg, len(digest), len(computed)))
            if consteq(computed, digest):
                any_match = True
            else:
                any_fail = True
        if any_match and any_fail:
            raise ValueError("scram hash verified inconsistently, "
                             "may be corrupted")
        return any_match
    # XXX: should this just always use sha1 hash? would be faster.
    # otherwise only verify against one hash, pick one w/ best security.
    for alg in self._verify_algs:
        if alg in chkmap:
            computed = self._calc_checksum(secret, alg)
            return consteq(computed, chkmap[alg])
    # there should always be sha-1 at the very least,
    # or something went wrong inside _norm_algs()
    raise AssertionError("sha-1 digest not found!")
def verify(cls, secret, hash):
    """Verify ``secret`` against ``hash``, sniffing the argon2 type from its prefix."""
    # TODO: add in 'encoding' support once that's finalized in 1.8 / 1.9.
    uh.validate_secret(secret)
    secret = to_bytes(secret, "utf-8")
    hash = to_bytes(hash, "ascii")
    # read type from start of hash
    # NOTE: don't care about malformed strings, lowlevel will throw error for us
    ident = hash[:1 + hash.find(b"$", 1)]
    type = cls._byte_ident_map.get(ident, TYPE_I)
    type_code = cls._get_backend_type(type)
    # XXX: doesn't seem to be a way to make this honor max_threads
    try:
        ok = _argon2_cffi.low_level.verify_secret(hash, secret, type_code)
        assert ok is True
        return True
    except _argon2_cffi.exceptions.VerifyMismatchError:
        return False
    except _argon2_cffi.exceptions.VerificationError as err:
        raise cls._adapt_backend_error(err, hash=hash)
def encrypt(cls, secret, encoding=None):
    """Return ``secret`` unchanged as a native string (plaintext "hash")."""
    uh.validate_secret(secret)
    # fall back to the handler's default encoding for byte conversion
    if not encoding:
        encoding = cls.default_encoding
    return to_native_str(secret, encoding, "secret")
def verify(cls, secret, hash):
    """Always fail verification (disabled hashes never match any secret)."""
    uh.validate_secret(secret)
    # still reject non-hash input loudly rather than silently returning False
    if not cls.identify(hash):  # handles typecheck
        raise uh.exc.InvalidHashError(cls)
    return False
def _norm_digest_args(cls, secret, ident, new=False):
    """Normalize a (secret, ident) pair before handing it to a bcrypt backend.

    Returns a possibly-rewritten ``(secret, ident)`` tuple; raises for
    unsupported ident variants.
    """
    # make sure secret is bytes (utf-8 encode any unicode input)
    if isinstance(secret, unicode):
        secret = secret.encode("utf-8")
    # check max secret size
    uh.validate_secret(secret)
    # check for truncation (during .hash() calls only)
    if new:
        cls._check_truncate_policy(secret)
    # NOTE: especially important to forbid NULLs for bcrypt, since many
    # backends (bcryptor, bcrypt) happily accept them, and then
    # silently truncate the password at first NULL they encounter!
    if _BNULL in secret:
        raise uh.exc.NullPasswordError(cls)
    # TODO: figure out way to skip these tests when not needed...
    # protect from wraparound bug by truncating secret before handing it to the backend.
    # bcrypt only uses first 72 bytes anyways.
    # NOTE: not needed for 2y/2b, but might use 2a as fallback for them.
    if cls._has_2a_wraparound_bug and len(secret) >= 255:
        secret = secret[:72]
    # special case handling for variants (ordered most common first)
    if ident == IDENT_2A:
        # nothing needs to be done.
        pass
    elif ident == IDENT_2B:
        if cls._lacks_2b_support:
            # handle $2b$ hash format even if backend is too old.
            # have it generate a 2A/2Y digest, then return it as a 2B hash.
            # 2a-only backend could potentially exhibit wraparound bug --
            # but we work around that issue above.
            ident = cls._fallback_ident
    elif ident == IDENT_2Y:
        if cls._lacks_2y_support:
            # handle $2y$ hash format (not supported by BSDs, being phased out on others)
            # have it generate a 2A/2B digest, then return it as a 2Y hash.
            ident = cls._fallback_ident
    elif ident == IDENT_2:
        if cls._lacks_20_support:
            # handle legacy $2$ format (not supported by most backends except BSD os_crypt)
            # we can fake $2$ behavior using the 2A/2Y/2B algorithm
            # by repeating the password until it's at least 72 chars in length.
            if secret:
                secret = repeat_string(secret, 72)
            ident = cls._fallback_ident
    elif ident == IDENT_2X:
        # NOTE: shouldn't get here.
        # XXX: could check if backend does actually offer 'support'
        raise RuntimeError("$2x$ hashes not currently supported by passlib")
    else:
        raise AssertionError("unexpected ident value: %r" % ident)
    return secret, ident
def _norm_digest_args(cls, secret, ident, new=False):
    """Prepare ``secret`` and ``ident`` for the active bcrypt backend.

    Encodes/validates the secret, applies backend workarounds, and maps
    unsupported ident variants onto a fallback; returns ``(secret, ident)``.
    """
    # encode any unicode secret to utf-8 bytes before further checks
    if isinstance(secret, unicode):
        secret = secret.encode("utf-8")
    # check max secret size
    uh.validate_secret(secret)
    # check for truncation (during .hash() calls only)
    if new:
        cls._check_truncate_policy(secret)
    # NOTE: especially important to forbid NULLs for bcrypt, since many
    # backends (bcryptor, bcrypt) happily accept them, and then
    # silently truncate the password at first NULL they encounter!
    if _BNULL in secret:
        raise uh.exc.NullPasswordError(cls)
    # TODO: figure out way to skip these tests when not needed...
    # protect from wraparound bug by truncating secret before handing it to the backend.
    # bcrypt only uses first 72 bytes anyways.
    # NOTE: not needed for 2y/2b, but might use 2a as fallback for them.
    if cls._has_2a_wraparound_bug and len(secret) >= 255:
        secret = secret[:72]
    # special case handling for variants (ordered most common first)
    if ident == IDENT_2A:
        pass  # canonical variant -- no adjustment needed
    elif ident == IDENT_2B:
        # handle $2b$ hash format even if backend is too old:
        # have it generate a 2A/2Y digest, then return it as a 2B hash.
        # 2a-only backend could potentially exhibit wraparound bug --
        # but we work around that issue above.
        if cls._lacks_2b_support:
            ident = cls._fallback_ident
    elif ident == IDENT_2Y:
        # handle $2y$ hash format (not supported by BSDs, being phased out
        # on others): generate a 2A/2B digest, then return it as a 2Y hash.
        if cls._lacks_2y_support:
            ident = cls._fallback_ident
    elif ident == IDENT_2:
        # handle legacy $2$ format (not supported by most backends except
        # BSD os_crypt): fake $2$ behavior using the 2A/2Y/2B algorithm by
        # repeating the password until it's at least 72 chars in length.
        if cls._lacks_20_support:
            if secret:
                secret = repeat_string(secret, 72)
            ident = cls._fallback_ident
    elif ident == IDENT_2X:
        # NOTE: shouldn't get here.
        # XXX: could check if backend does actually offer 'support'
        raise RuntimeError("$2x$ hashes not currently supported by passlib")
    else:
        raise AssertionError("unexpected ident value: %r" % ident)
    return secret, ident