def _already_exists(file_name, sha1_sum, quiet): """ Checks if file with same name and checksum already exists. Parameters ---------- file_name : str: File name to verify. sha1_sum : str SHA1 sum Hex digest to verify. Returns ------- not_exists: bool True if file with same version already exist. """ if _isfile(file_name): checksum = _sha1() with open(file_name, "rb") as file: checksum.update(file.read()) if checksum.hexdigest() == sha1_sum: if not quiet: print(f'"{_basename(file_name)}" is up to date.') return True return False
def _verify_and_save(out_file, data, sha1_sum, quiet): """ Verify checksum and save file locally. Parameters ---------- out_file : str File destination. data : bytes File content sha1_sum : str SHA1 sum Hex digest to verify. """ checksum = _sha1() checksum.update(data) if checksum.hexdigest() != sha1_sum + "s": raise RuntimeError(f'"{_basename(out_file)}" checksum does not match.') updated = _isfile(out_file) with open(out_file, "wb") as file: file.write(data) if not quiet: print( f'"{_basename(out_file)}" has been {"updated" if updated else "installed"}.' )
def get_experiment_secure_hash():
    """Return the first six places of the secure hash (sha1) of the main file
    of the current experiment.

    Returns
    -------
    hash : str or None
        First six places of the experiment secure hash (None if no main
        file can be found).

    Notes
    -----
    Secure hashes for experiments help to ensure that the correct version
    is running in the lab. Hash codes are written to all output files and
    printed in the command line output. If you want to check post hoc the
    version of your experiment, create the secure hash (sha1) of your
    expyriment .py-file and compare the first six places with the code in
    the output file.
    """
    global _secure_hash
    # "" means "not computed yet"; any other value (including None) is cached.
    if _secure_hash != "":
        return _secure_hash
    try:
        # BUG FIX: read as bytes — sha1() requires bytes, and the original
        # text-mode read produced str and failed under Python 3.
        with open(_os.path.split(_sys.argv[0])[1], "rb") as f:
            _secure_hash = _sha1(f.read()).hexdigest()[:6]
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed SystemExit /
        # KeyboardInterrupt); no readable main file -> no hash.
        _secure_hash = None
    # Return the cached value directly instead of recursing.
    return _secure_hash
def sha1_hexdigest(file: Path) -> str:
    """Return the SHA1 hex digest of *file*'s contents.

    Parameters
    ----------
    file : Path
        Path of the file to hash.

    Raises
    ------
    FileNotFoundError
        If *file* does not exist.
    FileExistsError
        If *file* exists but is not a regular file. (Type kept for
        backward compatibility with existing callers, though a ValueError
        would arguably fit better.)
    """
    if not file.exists():
        raise FileNotFoundError(str(file))
    if not file.is_file():
        # BUG FIX: the original passed two args `((str(file)), "is not file")`,
        # producing an awkward tuple-style message; build one clear message.
        raise FileExistsError(f"{file} is not a file")
    return _sha1(file.read_bytes()).hexdigest()
def _fastsha1(s): # This function will import sha1 from hashlib or sha (whichever is # available) and overwrite itself with it on the first call. # Subsequent calls will go directly to the imported function. if sys.version_info >= (2, 5): from hashlib import sha1 as _sha1 else: from sha import sha as _sha1 global _fastsha1, sha1 _fastsha1 = sha1 = _sha1 return _sha1(s)
def _fastsha1(s): # This function will import sha1 from hashlib or sha (whichever is # available) and overwrite itself with it on the first call. # Subsequent calls will go directly to the imported function. try: from hashlib import sha1 as _sha1 except ImportError: from sha import sha as _sha1 global _fastsha1, sha1 _fastsha1 = sha1 = _sha1 return _sha1(s)
def test_custom_function(self):
    """Check that the DB-side sha1() SQL function matches Python's sha1."""
    def sha_hex(raw):
        # Hex digest as computed locally, for comparison against the DB.
        return _sha1(raw).hexdigest()
    u1 = User.create(username='******', password=sha_hex('p1'))
    u2 = User.create(username='******', password=sha_hex('p2'))
    query = User.select().where(password=R('sha1(%s)', 'p2'))
    self.assertEqual(query.get(), u2)
    query = User.select().where(password=R('sha1(%s)', 'p1'))
    self.assertEqual(query.get(), u1)
    query = User.select().where(password=R('sha1(%s)', 'p3'))
    self.assertEqual(query.count(), 0)
def create_minimal(cls, uris, configloc, schemas, config_password,
                   pidfile=None):
    """Create a minimally configured slapd service.

    Parameters
    ----------
    uris: URIs the service should listen on.
    configloc: path of the config directory (created if missing).
    schemas: schema files to include in the generated config.
    config_password: password (or already-hashed '{SCHEME}...' value) for
        the config database root.
    pidfile: optional pidfile path; one is reserved automatically if omitted.

    Returns the service created via ``cls.create_from_configfile``.
    """
    # Ensure the config directory exists and is writable.
    if _os.path.isdir(configloc) and _os.access(configloc, _os.W_OK):
        pass
    elif not _os.path.exists(configloc):
        try:
            # BUG FIX: 0o700 replaces the Python-2-only octal literal 0700,
            # which is a syntax error on Python 3; 0o700 is valid from
            # Python 2.6 onward and has the same permission bits.
            _os.mkdir(configloc, 0o700)
        except OSError as exc:
            raise RuntimeError('cannot create config dir {!r}: {}'
                               .format(configloc, str(exc)))
    else:
        raise ValueError('invalid config dir {!r}; expecting a writable'
                         ' directory path'
                         .format(configloc))
    # Accept an already-hashed password verbatim; otherwise derive a salted
    # SSHA config value from the cleartext.
    # NOTE(review): the str + os.urandom(4) concatenation below is a
    # Python-2 idiom (str + bytes raises TypeError on Python 3) — confirm
    # the target interpreter before porting further.
    if cls._PASSWORD_WITH_SCHEME_RE.match(config_password):
        config_password_configvalue = config_password
    else:
        config_password_salt = _os.urandom(4)
        config_password_configvalue = \
            '{SSHA}' + _b64encode(_sha1(config_password
                                        + config_password_salt)
                                  .digest()
                                  + config_password_salt)
    if not pidfile:
        if _os.path.isdir(cls._PIDFILE_STD_DIR) \
                and _os.access(cls._PIDFILE_STD_DIR, _os.W_OK):
            pidfile_dir = cls._PIDFILE_STD_DIR
        else:
            pidfile_dir = None
        # Create and immediately close a named temp file purely to reserve
        # a unique pidfile path for slapd.
        pidfile_tmp = _NamedTemporaryFile(dir=pidfile_dir, prefix='slapd-',
                                          suffix='.pid')
        with pidfile_tmp:
            pass
        pidfile = pidfile_tmp.name
    # Write a minimal slapd config file; delete=False because slapd must
    # be able to read it after this block closes the handle.
    configfile = _NamedTemporaryFile(delete=False)
    with configfile:
        for schema in schemas:
            configfile.write('include {}\n'.format(schema))
        configfile.write('pidfile {}\n'.format(pidfile))
        configfile.write('database config\n')
        configfile.write('rootpw {}\n'.format(config_password_configvalue))
    service = cls.create_from_configfile(uris=uris,
                                         configfile=configfile.name,
                                         configdir=configloc)
    # The config has been consumed by the service; remove the temp file.
    _os.remove(configfile.name)
    return service
def auth_signature(auth_token, args, encoding="utf-8"):
    # type: (str, dict) -> bool
    """Verify a signature built from token + timestamp + nonce.

    The token, ``args['timestamp']`` and ``args['nonce']`` are sorted,
    concatenated and SHA1-hashed; the result is compared against
    ``args['signature']``. Returns True when they match, False on a
    mismatch or on malformed *args*.
    """
    import hmac  # local import keeps the module's import surface unchanged
    try:
        sign_parm = [auth_token, args['timestamp'], args['nonce']]
        sign_parm.sort()
        client_sign = _unhexs(args['signature'])
        server_sign = _sha1(''.join(sign_parm).encode(encoding)).digest()
        # SECURITY FIX: constant-time comparison avoids leaking the match
        # position via timing, unlike the original `==` on digests.
        return hmac.compare_digest(client_sign, server_sign)
    except (KeyError, TypeError, AttributeError) as e:
        traceback.print_exc()
        # FIX: logging.warn is a deprecated alias of logging.warning.
        logging.warning("验证失败!参数格式不符合要求:[{args}]:::{error}".format(args=args, error=e))
        return False
def authSignature(AUTH_TOKEN, args):
    u'''timestamp, nonce[, echostr] — returns True when valid; False on error/failure.'''
    try:
        # Sort token + request parameters, concatenate and SHA1-hash them,
        # then compare against the signature sent by the client.
        signParm = [AUTH_TOKEN, args.get('timestamp'), args.get('nonce')]
        signParm.sort()
        reqSign = _unhexs(args.get('signature'))
        resSign = _sha1(''.join(signParm)).digest()
        if reqSign == resSign:
            log.info(u'请求签名正确,验证通过.')
            return True
        log.debug(u"验证失败!计算的请求参数是:%s" % str(signParm))
        log.debug(u'验证失败!ReqSign:[%s],CalcSign:[%s]'
                  % (_hexs(reqSign).upper(), _hexs(resSign).upper()))
        log.warn(u'验证失败!请求签名校验不一致!')
        return False
    except Exception:
        # Missing/None parameters or undecodable signature end up here.
        log.warn(u'验证失败!参数格式不符合要求:[%s]' % str(args))
        return False
import datetime
import random
from hashlib import md5 as _md5
from hashlib import sha1 as _sha1

from django.conf import settings
from django.utils.encoding import force_bytes
from django.utils.http import urlencode
from django.utils.six import text_type

from .compat import SiteProfileNotAvailable, get_model
from .models import Settings
from .models import UserProfile as Profile

# Hashing helpers: coerce the input to bytes first (replacing undecodable
# characters) so both str and bytes inputs hash without errors.
md5 = lambda x: _md5(force_bytes(x, errors='replace'))
sha1 = lambda x: _sha1(force_bytes(x, errors='replace'))

# ``truncate_words`` was removed from newer Django; rebuild an equivalent
# from ``Truncator`` so the rest of the module can keep using one name.
try:
    from django.utils.text import truncate_words
except ImportError:
    from django.utils.text import Truncator
    from django.utils.functional import allow_lazy

    def truncate_words(s, num, end_text='...'):
        # Prepend a space to the suffix only when a suffix was requested.
        truncate = end_text and ' %s' % end_text or ''
        return Truncator(s).words(num, truncate=truncate)
    truncate_words = allow_lazy(truncate_words, text_type)


def upload_to_mugshot(instance, filename):
def sha1(string):
    """Return the SHA1 hex digest of *string* (UTF-8 encoded on Python 3)."""
    data = string.encode('UTF8') if is_py3k else string
    return _sha1(data).hexdigest()
def sha1(string):
    """SHA1-hash *string* and return the hex digest.

    On Python 3 the text is first encoded as UTF-8, since hashlib
    operates on bytes.
    """
    if is_py3k:
        payload = string.encode("UTF8")
    else:
        payload = string
    digest = _sha1(payload)
    return digest.hexdigest()
def _parse_mbdb_entry(mbdb, pos):
    ''' parse a single entry in the mbdb file

    Reads one record starting at byte offset *pos* of the raw *mbdb*
    buffer and returns a ``(_file_entry, new_pos)`` tuple, where
    *new_pos* is the offset of the next record.
    '''
    # NOTE(review): `offset` is assigned but never used afterwards —
    # confirm whether it was intended for error reporting.
    offset = pos
    # The record starts with a run of length-prefixed strings.
    domain, pos = _mbdb_string(mbdb, pos)
    filename, pos = _mbdb_string(mbdb, pos)
    linktarget, pos = _mbdb_string(mbdb, pos)
    # a simple test (N=1, scientific is it not?) shows that what is commonly
    # called 'datahash' in the scripts I used as basis for this, is stored in a
    # value that is called 'digest' in the newer (iOS10+) backups. So, we call
    # this 'digest' here as well.
    digest, pos = _mbdb_string(mbdb, pos)
    # this is commonly called enckey in the scripts that I used as source, but
    # in my backups it is consistently an empty string. So assume that it is,
    # break if it isn't.
    unknown, pos = _mbdb_string(mbdb, pos)
    if unknown != '':
        raise MbdbParseError(
            'assumption broken on empty string in unknown field')
    # Fixed-width big-endian integer fields follow the string fields.
    mode = int.from_bytes(mbdb[pos:pos + 2], 'big')
    pos += 2
    inode = int.from_bytes(mbdb[pos:pos + 8], 'big')
    pos += 8
    uid = int.from_bytes(mbdb[pos:pos + 4], 'big')
    pos += 4
    gid = int.from_bytes(mbdb[pos:pos + 4], 'big')
    pos += 4
    # some sources that I based this function on had a different
    # order for these timestamps and in addition instead of a
    # btime assumed an atime, which I think is incorrect based on some simple
    # experiments (comparing timestamps on a rooted phone with backup
    # timestamps).
    mtime = _datetime(
        *list(_gmtime(int.from_bytes(mbdb[pos:pos + 4], 'big'))[0:7]) + [_UTC])
    pos += 4
    ctime = _datetime(
        *list(_gmtime(int.from_bytes(mbdb[pos:pos + 4], 'big'))[0:7]) + [_UTC])
    pos += 4
    btime = _datetime(
        *list(_gmtime(int.from_bytes(mbdb[pos:pos + 4], 'big'))[0:7]) + [_UTC])
    pos += 4
    size = int.from_bytes(mbdb[pos:pos + 8], 'big')
    pos += 8
    # Based on the different values I've encountered in the field that is
    # commonly called 'flags' in the scripts that I've used as source it would
    # seem that this is what is called 'protection' in the newer backups.
    # Perhaps these values represent some enum value of the protection level.
    # So, I've called this field 'protection' in contrast to the other scripts
    # out there.
    protection = int.from_bytes(mbdb[pos:pos + 1], 'big')
    pos += 1
    numprops = int.from_bytes(mbdb[pos:pos + 1], 'big')
    pos += 1
    # determine filetype and permissions based on mode
    filetype = FileType(mode & 0xE000)
    permissions = oct(mode & 0x1FFF)
    # Each property is a (name, value) pair of length-prefixed strings.
    extended_attributes = _OD()
    for ii in range(numprops):
        pname, pos = _mbdb_string(mbdb, pos)
        pval, pos = _mbdb_string(mbdb, pos)
        extended_attributes[pname] = pval
    # the fileID was originally stored in a separate mbdx file, but we can also
    # determine this by combining the domain and filepath and calculating sha1
    # hash over it
    fileID = _sha1('{:s}-{:s}'.format(domain, filename).encode('utf8')).hexdigest()
    return _file_entry(fileID, domain, filename, uid, gid, mtime, ctime, btime,
                       inode, mode, filetype, permissions, size, protection,
                       extended_attributes, linktarget, digest), pos
""" sentry.utils.hashlib ~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2015 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import from hashlib import md5 as _md5 from hashlib import sha1 as _sha1 from django.utils.encoding import force_bytes md5_text = lambda x: _md5(force_bytes(x, errors='replace')) sha1_text = lambda x: _sha1(force_bytes(x, errors='replace'))
def sha1_text(*args):
    """Fold every argument into a single SHA1 hash object and return it.

    Each argument is coerced to bytes (undecodable characters replaced)
    before being fed to the hash.
    """
    hasher = _sha1()
    for value in args:
        hasher.update(force_bytes(value, errors='replace'))
    return hasher
def sha1_text(x):
    """Return a SHA1 hash object over *x* coerced to bytes (errors replaced)."""
    payload = force_bytes(x, errors='replace')
    return _sha1(payload)
def sha1_text(*args):
    """Accumulate all *args* into one SHA1 hash object and return it."""
    digest = _sha1()
    for item in args:
        chunk = force_bytes(item, errors="replace")
        digest.update(chunk)
    return digest
def create_basic(cls, uris, configloc, schemas, modules, config_password,
                 dbtype, dbdir, suffix, root_dn, root_password,
                 authz_map=None, access=(), index=(), pidfile=None):
    # Create a minimal slapd service, then configure a primary backend
    # database under *suffix* with the given root credentials, and seed the
    # suffix's organization / organizational-unit entries.
    #
    # Parameters (as used below):
    #   uris/configloc/schemas/config_password/pidfile: forwarded to
    #       cls.create_minimal.
    #   modules: value for olcModuleLoad of the module list entry.
    #   dbtype/dbdir: backend type (e.g. 'hdb') and its on-disk directory.
    #   suffix/root_dn/root_password: primary database suffix and root creds.
    #   authz_map: mapping (or pair iterable) of authz regexp -> replacement.
    #   access/index: olcAccess / olcDbIndex values for the backend.
    suffix_rdns = suffix.split(',')
    if len(suffix_rdns) > 1:
        # The last two RDNs form the organization DN; anything before them
        # is treated as nested organizational units.
        suffix_org_dn = ','.join(suffix_rdns[-2:])
        suffix_orgunits_rdns = suffix_rdns[:-2]
        suffix_org_dn_match = cls._SUFFIX_ORG_DN_RE.match(suffix_org_dn)
        if not suffix_org_dn_match:
            raise ValueError('invalid suffix DN {!r}; expected one in'
                             ' which the last two components match {!r}'
                             .format(suffix, cls._SUFFIX_ORG_DN_RE))
        suffix_org = suffix_org_dn_match.group('org')
    # Accept an already-hashed '{SCHEME}...' password verbatim; otherwise
    # derive a salted SSHA config value from the cleartext.
    if cls._PASSWORD_WITH_SCHEME_RE.match(root_password):
        root_password_configvalue = root_password
    else:
        root_password_salt = _os.urandom(4)
        root_password_configvalue = \
            '{SSHA}' + _b64encode(_sha1(root_password + root_password_salt)
                                  .digest() + root_password_salt)
    authz_map = authz_map or {}
    # Allow either a mapping or an iterable of (match, replacement) pairs.
    try:
        authz_map_items = authz_map.items()
    except AttributeError:
        authz_map_items = authz_map
    service = cls.create_minimal(uris=uris, configloc=configloc,
                                 schemas=schemas,
                                 config_password=config_password,
                                 pidfile=pidfile)
    svc_pid = service.start(fork=True)
    if svc_pid == 0:
        # Child process of the fork: nothing more to do here.
        _sys.exit()
    try:
        config_client = service.client()
        config_client.simple_bind_s('cn=config', config_password)
        # configure SASL authentication
        config_client\
            .modify_s('cn=config',
                      ((_ldap.MOD_REPLACE, 'olcPasswordHash', '{CLEARTEXT}'),
                       (_ldap.MOD_REPLACE, 'olcAuthzRegexp',
                        tuple('{} {}'.format(match, replacement)
                              for match, replacement in authz_map_items)),
                       ))
        # configure primary backend database
        config_client\
            .add_s('cn=Module{0},cn=config',
                   (('objectClass', 'olcModuleList'),
                    ('olcModuleLoad', modules)))
        config_client\
            .add_s('olcDatabase={},cn=config'.format(dbtype),
                   (('objectClass', 'olc{}Config'.format(dbtype.capitalize())),
                    ('olcDatabase', dbtype),
                    ('olcDbDirectory', dbdir),
                    ('olcSuffix', suffix),
                    ('olcRootDN', root_dn),
                    ('olcRootPW', root_password_configvalue),
                    ))
        # NOTE(review): the DN below hard-codes '{1}hdb' while the database
        # was added with the dynamic dbtype above — confirm these agree.
        config_client\
            .modify_s('olcDatabase={1}hdb,cn=config',
                      ((_ldap.MOD_ADD, 'olcDbIndex', index),
                       (_ldap.MOD_REPLACE, 'olcAccess', access)))
        config_client.unbind_s()
        # initialize suffix
        suffix_rdns = suffix.split(',')
        if len(suffix_rdns) > 1:
            root_client = service.client()
            root_client.simple_bind_s(root_dn, root_password)
            root_client\
                .add_s(suffix_org_dn,
                       (('objectClass', ('dcObject', 'organization')),
                        ('dc', suffix_org),
                        ('o', suffix_org)))
            orgunit_dn = suffix_org_dn
            for orgunit_rdn in suffix_orgunits_rdns:
                # NOTE(review): str.join takes a single iterable — this
                # two-argument call raises TypeError at runtime; likely
                # meant ','.join((orgunit_rdn, orgunit_dn)).
                orgunit_dn = ','.join(orgunit_rdn, orgunit_dn)
                # NOTE(review): orgunit_dn contains several '=' characters,
                # so this 2-way unpack will fail; presumably the RDN's value
                # (orgunit_rdn.split('=')[1]) was intended.
                _, orgunit = orgunit_dn.split('=')
                # NOTE(review): adding to suffix_org_dn on every iteration
                # looks wrong — each new orgunit_dn was presumably intended
                # here; confirm against the slapd schema.
                root_client\
                    .add_s(suffix_org_dn,
                           (('objectClass', 'dcObject'),
                            ('objectClass', 'organizationalUnit'),
                            ('dc', orgunit),
                            ('ou', orgunit)))
            root_client.unbind_s()
    finally:
        # Always stop the forked service, even if configuration failed.
        if service.status == _services.ServiceStatus('running'):
            service.stop()
    return service
def sha1(obj):
    """Return the SHA1 hex digest of *obj*'s serialized form (UTF-8)."""
    serialized = dumps(obj, ensure_ascii=False)
    return _sha1(serialized.encode('utf8')).hexdigest()