def stretch_passphrase(self, passphrase, salt):
    keys = {}
    sys.stdout.write(strings._('deriving_keys'))
    sys.stdout.flush()

    # start with the passphrase
    key = passphrase

    # derive a key for each cipher
    for cipher in common.ciphers:
        sys.stdout.write(' {0}'.format(cipher))
        sys.stdout.flush()

        # rounds of pbkdf2 and scrypt
        key = str(PBKDF2.crypt(key, salt.encode('hex'), 100000))
        key = scrypt.hash(key, salt, N=2**14, r=8, p=1)

        if cipher == 'aes256':
            # AES256 needs a 256-bit (32-byte) key
            keys[cipher] = key[:32]
        elif cipher == 'blowfish':
            # Blowfish keys vary from 32-448 bits, but we'll use a 256-bit (32-byte) key
            keys[cipher] = key[:32]
        elif cipher == 'cast5':
            # CAST5 needs a 128-bit (16-byte) key
            keys[cipher] = key[:16]

    sys.stdout.write('\n')
    return keys
def validate_password(self, attempt):
    ''' Check the password against existing credentials '''
    if self._password is not None:
        return self.password == PBKDF2.crypt(
            attempt,
            self.password,
        )
    else:
        return False
def post(self, *args, **kwargs):
    ''' Checks submitted username and password '''
    user = User.by_handle(self.get_argument('account', ''))
    password_attempt = self.get_argument('password', '')
    if user is not None:
        if user.validate_password(password_attempt):
            if not user.locked:
                self.successful_login(user)
                if self.config.story_mode and user.logins == 1 and not user.is_admin():
                    self.redirect('/user/missions/firstlogin')
                else:
                    self.redirect('/user')
            else:
                self.render('public/login.html',
                            info=None,
                            errors=["Your account has been locked"])
        else:
            self.failed_login()
    else:
        if password_attempt is not None:
            PBKDF2.crypt(password_attempt, "BurnTheHashTime")
        self.failed_login()
def register(self):
    """
    Gets authentication info from a DPT-RP1. You can call this BEFORE
    DigitalPaper.authenticate()

    Returns (ca, priv_key, client_id):
        - ca: a PEM-encoded X.509 server certificate, issued by the CA on the device
        - priv_key: a PEM-encoded 2048-bit RSA private key
        - client_id: the client id
    """
    reg_url = "http://{addr}:8080".format(addr=self.addr)
    register_pin_url = "{base_url}/register/pin".format(base_url=reg_url)
    register_hash_url = "{base_url}/register/hash".format(base_url=reg_url)
    register_ca_url = "{base_url}/register/ca".format(base_url=reg_url)
    register_url = "{base_url}/register".format(base_url=reg_url)
    register_cleanup_url = "{base_url}/register/cleanup".format(base_url=reg_url)

    print("Cleaning up...")
    r = self.session.put(register_cleanup_url)
    print(r)

    print("Requesting PIN...")
    r = self.session.post(register_pin_url)
    m1 = r.json()

    n1 = base64.b64decode(m1["a"])
    mac = base64.b64decode(m1["b"])
    yb = base64.b64decode(m1["c"])
    yb = int.from_bytes(yb, "big")
    n2 = os.urandom(16)  # random nonce

    dh = DiffieHellman()
    ya = dh.gen_public_key()
    ya = b"\x00" + ya.to_bytes(256, "big")

    zz = dh.gen_shared_key(yb)
    zz = zz.to_bytes(256, "big")
    yb = yb.to_bytes(256, "big")

    derivedKey = PBKDF2(passphrase=zz,
                        salt=n1 + mac + n2,
                        iterations=10000,
                        digestmodule=SHA256).read(48)
    authKey = derivedKey[:32]
    keyWrapKey = derivedKey[32:]

    hmac = HMAC(authKey, digestmod=SHA256)
    hmac.update(n1 + mac + yb + n1 + n2 + mac + ya)
    m2hmac = hmac.digest()

    m2 = dict(
        a=base64.b64encode(n1).decode("utf-8"),
        b=base64.b64encode(n2).decode("utf-8"),
        c=base64.b64encode(mac).decode("utf-8"),
        d=base64.b64encode(ya).decode("utf-8"),
        e=base64.b64encode(m2hmac).decode("utf-8"),
    )

    print("Encoding nonce...")
    r = self.session.post(register_hash_url, json=m2)
    m3 = r.json()

    if base64.b64decode(m3["a"]) != n2:
        print("Nonce N2 doesn't match")
        return

    eHash = base64.b64decode(m3["b"])
    m3hmac = base64.b64decode(m3["e"])
    hmac = HMAC(authKey, digestmod=SHA256)
    hmac.update(n1 + n2 + mac + ya + m2hmac + n2 + eHash)
    if m3hmac != hmac.digest():
        print("M3 HMAC doesn't match")
        return

    pin = input("Please enter the PIN shown on the DPT-RP1: ")

    hmac = HMAC(authKey, digestmod=SHA256)
    hmac.update(pin.encode())
    psk = hmac.digest()

    rs = os.urandom(16)  # random nonce
    hmac = HMAC(authKey, digestmod=SHA256)
    hmac.update(rs + psk + yb + ya)
    rHash = hmac.digest()

    wrappedRs = wrap(rs, authKey, keyWrapKey)

    hmac = HMAC(authKey, digestmod=SHA256)
    hmac.update(n2 + eHash + m3hmac + n1 + rHash + wrappedRs)
    m4hmac = hmac.digest()

    m4 = dict(
        a=base64.b64encode(n1).decode("utf-8"),
        b=base64.b64encode(rHash).decode("utf-8"),
        d=base64.b64encode(wrappedRs).decode("utf-8"),
        e=base64.b64encode(m4hmac).decode("utf-8"),
    )

    print("Getting certificate from device CA...")
    r = self.session.post(register_ca_url, json=m4)
    print(r)
    m5 = r.json()

    if base64.b64decode(m5["a"]) != n2:
        print("Nonce N2 doesn't match")
        return

    wrappedEsCert = base64.b64decode(m5["d"])
    m5hmac = base64.b64decode(m5["e"])

    hmac = HMAC(authKey, digestmod=SHA256)
    hmac.update(n1 + rHash + wrappedRs + m4hmac + n2 + wrappedEsCert)
    if hmac.digest() != m5hmac:
        print("HMAC doesn't match!")
        return

    esCert = unwrap(wrappedEsCert, authKey, keyWrapKey)
    es = esCert[:16]
    cert = esCert[16:]

    hmac = HMAC(authKey, digestmod=SHA256)
    hmac.update(es + psk + yb + ya)
    if hmac.digest() != eHash:
        print("eHash does not match!")
        return

    # print("Certificate: ")
    # print(cert)

    print("Generating RSA2048 keys")
    new_key = RSA.generate(2048, e=65537)

    # with open("key.pem", 'wb') as f:
    #     f.write(new_key.exportKey("PEM"))

    keyPubC = new_key.publickey().exportKey("PEM")

    selfDeviceId = str(uuid.uuid4())
    print("Device ID: " + selfDeviceId)
    selfDeviceId = selfDeviceId.encode()

    # with open("client_id.txt", 'wb') as f:
    #     f.write(selfDeviceId)

    wrappedDIDKPUBC = wrap(selfDeviceId + keyPubC, authKey, keyWrapKey)

    hmac = HMAC(authKey, digestmod=SHA256)
    hmac.update(n2 + wrappedEsCert + m5hmac + n1 + wrappedDIDKPUBC)
    m6hmac = hmac.digest()

    m6 = dict(
        a=base64.b64encode(n1).decode("utf-8"),
        d=base64.b64encode(wrappedDIDKPUBC).decode("utf-8"),
        e=base64.b64encode(m6hmac).decode("utf-8"),
    )

    print("Registering device...")
    r = self.session.post(register_url, json=m6)
    print(r)

    print("Cleaning up...")
    r = self.session.put(register_cleanup_url)
    print(r)

    return (
        cert.decode("utf-8"),
        new_key.exportKey("PEM").decode("utf-8"),
        selfDeviceId.decode("utf-8"),
    )
def _hash_password(cls, password):
    return PBKDF2.crypt(password, iterations=ITERATE)
def keyFromText(text, salt):
    optimalSize = max(AES.key_size)
    return PBKDF2(text, salt).read(optimalSize)
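# A minimal usage sketch for keyFromText above, assuming PyCryptodome (Crypto.Cipher.AES)
# and the pbkdf2 package are available; the passphrase, salt, and message below are
# made-up example values, not part of the original snippet.
from Crypto.Cipher import AES
from pbkdf2 import PBKDF2

key = keyFromText("correct horse battery staple", b"example-salt")  # 32 bytes, max(AES.key_size)
cipher = AES.new(key, AES.MODE_EAX)
ciphertext, tag = cipher.encrypt_and_digest(b"attack at dawn")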
def main(global_config, **settings):
    config = Configurator(
        autocommit=True,
        settings=settings,
        authentication_policy=AuthenticationPolicy(settings['auth.file'], __name__),
        authorization_policy=AuthorizationPolicy(),
        route_prefix=route_prefix(settings),
    )
    config.include('pyramid_exclog')
    config.include("cornice")
    config.add_forbidden_view(forbidden)
    config.add_request_method(request_params, 'params', reify=True)
    config.add_request_method(authenticated_role, reify=True)
    config.add_request_method(extract_tender, 'tender', reify=True)
    config.add_request_method(check_accreditation)
    config.add_renderer('prettyjson', JSON(indent=4))
    config.add_renderer('jsonp', JSONP(param_name='opt_jsonp'))
    config.add_renderer('prettyjsonp', JSONP(indent=4, param_name='opt_jsonp'))
    config.add_subscriber(add_logging_context, NewRequest)
    config.add_subscriber(set_logging_context, ContextFound)
    config.add_subscriber(set_renderer, NewRequest)
    config.add_subscriber(beforerender, BeforeRender)
    config.scan("openprocurement.edge.views.spore")
    config.scan("openprocurement.edge.views.health")
    config.scan("openprocurement.edge.views.tenders")
    if auctions_core:
        config.add_request_method(extract_auction, 'auction', reify=True)
        config.scan("openprocurement.edge.views.auctions")
        add_auction_design()
    if contracting:
        config.add_request_method(extract_contract, 'contract', reify=True)
        config.scan("openprocurement.edge.views.contracts")
        add_contract_design()
    if planning:
        config.add_request_method(extract_plan, 'plan', reify=True)
        config.scan("openprocurement.edge.views.plans")
        add_plan_design()

    # CouchDB connection
    db_name = os.environ.get('DB_NAME', settings['couchdb.db_name'])
    server = Server(settings.get('couchdb.url'),
                    session=Session(retry_delays=range(10)))
    if 'couchdb.admin_url' not in settings and server.resource.credentials:
        try:
            server.version()
        except Unauthorized:
            server = Server(extract_credentials(settings.get('couchdb.url'))[0])
    config.registry.couchdb_server = server
    if 'couchdb.admin_url' in settings and server.resource.credentials:
        aserver = Server(settings.get('couchdb.admin_url'),
                         session=Session(retry_delays=range(10)))
        config.registry.admin_couchdb_server = aserver
        users_db = aserver['_users']
        if SECURITY != users_db.security:
            LOGGER.info("Updating users db security",
                        extra={'MESSAGE_ID': 'update_users_security'})
            users_db.security = SECURITY
        username, password = server.resource.credentials
        user_doc = users_db.get(
            'org.couchdb.user:{}'.format(username),
            {'_id': 'org.couchdb.user:{}'.format(username)})
        if not user_doc.get('derived_key', '') or PBKDF2(
                password, user_doc.get('salt', ''),
                user_doc.get('iterations', 10)).hexread(
                    int(len(user_doc.get('derived_key', '')) / 2)) != user_doc.get('derived_key', ''):
            user_doc.update({
                "name": username,
                "roles": [],
                "type": "user",
                "password": password
            })
            LOGGER.info("Updating edge db main user",
                        extra={'MESSAGE_ID': 'update_edge_main_user'})
            users_db.save(user_doc)
        security_users = [username, ]
        if 'couchdb.reader_username' in settings and 'couchdb.reader_password' in settings:
            reader_username = settings.get('couchdb.reader_username')
            reader = users_db.get(
                'org.couchdb.user:{}'.format(reader_username),
                {'_id': 'org.couchdb.user:{}'.format(reader_username)})
            if not reader.get('derived_key', '') or PBKDF2(
                    settings.get('couchdb.reader_password'),
                    reader.get('salt', ''),
                    reader.get('iterations', 10)).hexread(
                        int(len(reader.get('derived_key', '')) / 2)) != reader.get('derived_key', ''):
                reader.update({
                    "name": reader_username,
                    "roles": ['reader'],
                    "type": "user",
                    "password": settings.get('couchdb.reader_password')
                })
                LOGGER.info("Updating edge db reader user",
                            extra={'MESSAGE_ID': 'update_edge_reader_user'})
                users_db.save(reader)
                security_users.append(reader_username)
        if db_name not in aserver:
            aserver.create(db_name)
        db = aserver[db_name]
        SECURITY[u'members'][u'names'] = security_users
        if SECURITY != db.security:
            LOGGER.info("Updating edge db security",
                        extra={'MESSAGE_ID': 'update_edge_security'})
            db.security = SECURITY
        auth_doc = db.get(VALIDATE_DOC_ID, {'_id': VALIDATE_DOC_ID})
        if auth_doc.get('validate_doc_update') != VALIDATE_DOC_UPDATE % username:
            auth_doc['validate_doc_update'] = VALIDATE_DOC_UPDATE % username
            LOGGER.info("Updating edge db validate doc",
                        extra={'MESSAGE_ID': 'update_edge_validate_doc'})
            db.save(auth_doc)
        # sync couchdb views
        sync_design(db)
        db = server[db_name]
    else:
        if db_name not in server:
            server.create(db_name)
        db = server[db_name]
        # sync couchdb views
        sync_design(db)
    config.registry.db = db
    config.registry.server_id = settings.get('id', '')
    config.registry.health_threshold = float(settings.get('health_threshold', 99))
    config.registry.update_after = asbool(settings.get('update_after', True))
    return config.make_wsgi_app()
def _hash_password(self, raw_pwd):
    return PBKDF2.crypt(raw_pwd, iterations=0x2537)
def __init__(self, cookie_file=None, domain_name="", key_file=None): self.salt = b"saltysalt" self.iv = b" " * 16 self.length = 16 # below is domain name to filter cookies by self.domain_name = domain_name if sys.platform == "darwin": # Chrome on OSX my_pass = keyring.get_password("Chrome Safe Storage", "Chrome").encode("utf8") iterations = 1003 # count of pbkdf2 iterations for mac os self.key = PBKDF2(my_pass, self.salt, iterations=iterations).read(self.length) cookie_file = cookie_file or os.path.expanduser( "~/Library/Application Support/Google/Chrome/Default/Cookies") elif sys.platform.startswith("linux"): # Chrome on Linux # encrypted with key peanuts my_pass = get_linux_pass().encode("utf8") iterations = 1 self.key = PBKDF2(my_pass, self.salt, iterations=iterations).read(self.length) paths = map( os.path.expanduser, [ "~/.config/google-chrome/Default/Cookies", "~/.config/chromium/Default/Cookies", "~/.config/google-chrome-beta/Default/Cookies", ], ) cookie_file = cookie_file or next(filter(os.path.exists, paths), None) elif sys.platform == "win32": # Read key from file key_file = (key_file or glob.glob( os.path.join( os.getenv("APPDATA", ""), "..\Local\\Google\\Chrome\\User Data\\Local State", )) or glob.glob( os.path.join( os.getenv("LOCALAPPDATA", ""), "Google\\Chrome\\User Data\\Local State", )) or glob.glob( os.path.join( os.getenv("APPDATA", ""), "Google\\Chrome\\User Data\\Local State", ))) if isinstance(key_file, list): if key_file: key_file = key_file[0] if key_file: f = open(key_file, "rb") key_file_json = json.load(f) key64 = key_file_json["os_crypt"]["encrypted_key"].encode( "utf-8") # unprotect data keydpapi = base64.standard_b64decode(key64)[5:] _, self.key = crypt_unprotect_data(keydpapi, is_key=True) # cookie extraction from APPDATA # \\ is required to prevent unicode error in windows cookie_file = (cookie_file or windows_group_policy_path( ) or glob.glob( os.path.join( os.getenv("APPDATA", ""), "..\Local\\Google\\Chrome\\User Data\\Default\\Cookies", )) or glob.glob( os.path.join( os.getenv("LOCALAPPDATA", ""), "Google\\Chrome\\User Data\\Default\\Cookies", )) or glob.glob( os.path.join( os.getenv("APPDATA", ""), "Google\\Chrome\\User Data\\Default\\Cookies", ))) else: raise BrowserCookieError("OS not supported") if isinstance(cookie_file, list): if not cookie_file: raise BrowserCookieError("Failed to fetch Chrome cookie") cookie_file = cookie_file[0] self.tmp_cookie_file = create_local_copy(cookie_file)
def main(global_config, **settings):
    config = Configurator(
        autocommit=True,
        settings=settings,
        authentication_policy=AuthenticationPolicy(settings['auth.file'], __name__),
        authorization_policy=AuthorizationPolicy(),
        route_prefix=route_prefix(settings),
    )
    config.include('pyramid_exclog')
    config.include("cornice")
    config.add_forbidden_view(forbidden)
    config.add_request_method(request_params, 'params', reify=True)
    config.add_request_method(authenticated_role, reify=True)
    config.add_request_method(extract_tender, 'tender', reify=True)
    config.add_request_method(check_accreditation)
    config.add_renderer('prettyjson', JSON(indent=4))
    config.add_renderer('jsonp', JSONP(param_name='opt_jsonp'))
    config.add_renderer('prettyjsonp', JSONP(indent=4, param_name='opt_jsonp'))
    config.add_subscriber(add_logging_context, NewRequest)
    config.add_subscriber(set_logging_context, ContextFound)
    config.add_subscriber(set_renderer, NewRequest)
    config.add_subscriber(beforerender, BeforeRender)
    config.scan("openprocurement.api.views.spore")
    config.scan("openprocurement.api.views.health")

    # tender procurementMethodType plugins support
    config.add_route_predicate('procurementMethodType', isTender)
    config.registry.tender_procurementMethodTypes = {}
    config.add_request_method(tender_from_data)
    config.add_directive('add_tender_procurementMethodType',
                         register_tender_procurementMethodType)

    # search for plugins
    plugins = settings.get('plugins') and settings['plugins'].split(',')
    for entry_point in iter_entry_points('openprocurement.api.plugins'):
        if not plugins or entry_point.name in plugins:
            plugin = entry_point.load()
            plugin(config)

    # CouchDB connection
    db_name = os.environ.get('DB_NAME', settings['couchdb.db_name'])
    server = Server(settings.get('couchdb.url'),
                    session=Session(retry_delays=range(10)))
    if 'couchdb.admin_url' not in settings and server.resource.credentials:
        try:
            server.version()
        except Unauthorized:
            server = Server(extract_credentials(settings.get('couchdb.url'))[0])
    config.registry.couchdb_server = server
    if 'couchdb.admin_url' in settings and server.resource.credentials:
        aserver = Server(settings.get('couchdb.admin_url'),
                         session=Session(retry_delays=range(10)))
        config.registry.admin_couchdb_server = aserver
        users_db = aserver['_users']
        if SECURITY != users_db.security:
            LOGGER.info("Updating users db security",
                        extra={'MESSAGE_ID': 'update_users_security'})
            users_db.security = SECURITY
        username, password = server.resource.credentials
        user_doc = users_db.get(
            'org.couchdb.user:{}'.format(username),
            {'_id': 'org.couchdb.user:{}'.format(username)})
        if not user_doc.get('derived_key', '') or PBKDF2(
                password, user_doc.get('salt', ''),
                user_doc.get('iterations', 10)).hexread(
                    int(len(user_doc.get('derived_key', '')) / 2)) != user_doc.get('derived_key', ''):
            user_doc.update({
                "name": username,
                "roles": [],
                "type": "user",
                "password": password
            })
            LOGGER.info("Updating api db main user",
                        extra={'MESSAGE_ID': 'update_api_main_user'})
            users_db.save(user_doc)
        security_users = [username, ]
        if 'couchdb.reader_username' in settings and 'couchdb.reader_password' in settings:
            reader_username = settings.get('couchdb.reader_username')
            reader = users_db.get(
                'org.couchdb.user:{}'.format(reader_username),
                {'_id': 'org.couchdb.user:{}'.format(reader_username)})
            if not reader.get('derived_key', '') or PBKDF2(
                    settings.get('couchdb.reader_password'),
                    reader.get('salt', ''),
                    reader.get('iterations', 10)).hexread(
                        int(len(reader.get('derived_key', '')) / 2)) != reader.get('derived_key', ''):
                reader.update({
                    "name": reader_username,
                    "roles": ['reader'],
                    "type": "user",
                    "password": settings.get('couchdb.reader_password')
                })
                LOGGER.info("Updating api db reader user",
                            extra={'MESSAGE_ID': 'update_api_reader_user'})
                users_db.save(reader)
                security_users.append(reader_username)
        if db_name not in aserver:
            aserver.create(db_name)
        db = aserver[db_name]
        SECURITY[u'members'][u'names'] = security_users
        if SECURITY != db.security:
            LOGGER.info("Updating api db security",
                        extra={'MESSAGE_ID': 'update_api_security'})
            db.security = SECURITY
        auth_doc = db.get(VALIDATE_DOC_ID, {'_id': VALIDATE_DOC_ID})
        if auth_doc.get('validate_doc_update') != VALIDATE_DOC_UPDATE % username:
            auth_doc['validate_doc_update'] = VALIDATE_DOC_UPDATE % username
            LOGGER.info("Updating api db validate doc",
                        extra={'MESSAGE_ID': 'update_api_validate_doc'})
            db.save(auth_doc)
        # sync couchdb views
        sync_design(db)
        db = server[db_name]
    else:
        if db_name not in server:
            server.create(db_name)
        db = server[db_name]
        # sync couchdb views
        sync_design(db)
    config.registry.db = db

    # migrate data
    if not os.environ.get('MIGRATION_SKIP'):
        migrate_data(config.registry.db)

    # S3 connection
    if 'aws.access_key' in settings and 'aws.secret_key' in settings and 'aws.s3_bucket' in settings:
        connection = S3Connection(settings['aws.access_key'],
                                  settings['aws.secret_key'])
        config.registry.s3_connection = connection
        bucket_name = settings['aws.s3_bucket']
        if bucket_name not in [b.name for b in connection.get_all_buckets()]:
            connection.create_bucket(bucket_name, location=Location.EU)
        config.registry.bucket_name = bucket_name
    config.registry.server_id = settings.get('id', '')
    config.registry.health_threshold = float(settings.get('health_threshold', 99))
    config.registry.update_after = asbool(settings.get('update_after', True))
    return config.make_wsgi_app()
def _hash_password(cls, password):
    return PBKDF2.crypt(password + STATIC_SALT, iterations=ITERATE)
def auth_password(self, other_password):
    if self._password is not None:
        return self.password == PBKDF2.crypt(other_password, self.password)
    else:
        return False
def main(global_config, **settings):
    config = Configurator(
        settings=settings,
        root_factory=factory,
        authentication_policy=AuthenticationPolicy(settings['auth.file'], __name__),
        authorization_policy=AuthorizationPolicy(),
        route_prefix=ROUTE_PREFIX,
    )
    config.add_forbidden_view(forbidden)
    config.add_request_method(authenticated_role, reify=True)
    config.add_renderer('prettyjson', JSON(indent=4))
    config.add_renderer('jsonp', JSONP(param_name='opt_jsonp'))
    config.add_renderer('prettyjsonp', JSONP(indent=4, param_name='opt_jsonp'))
    if JournalHandler:
        config.add_subscriber(set_journal_handler, NewRequest)
        config.add_subscriber(update_journal_handler_role, ContextFound)
        config.add_subscriber(cleanup_journal_handler, BeforeRender)
    config.add_subscriber(set_renderer, NewRequest)
    config.add_subscriber(beforerender, BeforeRender)
    config.include('pyramid_exclog')
    config.include("cornice")
    config.scan("openprocurement.api.views")

    # CouchDB connection
    db_name = os.environ.get('DB_NAME', settings['couchdb.db_name'])
    server = Server(settings.get('couchdb.url'),
                    session=Session(retry_delays=range(10)))
    if 'couchdb.admin_url' not in settings and server.resource.credentials:
        try:
            server.version()
        except Unauthorized:
            server = Server(extract_credentials(settings.get('couchdb.url'))[0])
    config.registry.couchdb_server = server
    if 'couchdb.admin_url' in settings and server.resource.credentials:
        aserver = Server(settings.get('couchdb.admin_url'),
                         session=Session(retry_delays=range(10)))
        users_db = aserver['_users']
        if SECURITY != users_db.security:
            LOGGER.info("Updating users db security",
                        extra={'MESSAGE_ID': 'update_users_security'})
            users_db.security = SECURITY
        username, password = server.resource.credentials
        user_doc = users_db.get(
            'org.couchdb.user:{}'.format(username),
            {'_id': 'org.couchdb.user:{}'.format(username)})
        if not user_doc.get('derived_key', '') or PBKDF2(
                password, user_doc.get('salt', ''),
                user_doc.get('iterations', 10)).hexread(
                    int(len(user_doc.get('derived_key', '')) / 2)) != user_doc.get('derived_key', ''):
            user_doc.update({
                "name": username,
                "roles": [],
                "type": "user",
                "password": password
            })
            LOGGER.info("Updating api db main user",
                        extra={'MESSAGE_ID': 'update_api_main_user'})
            users_db.save(user_doc)
        security_users = [username, ]
        if 'couchdb.reader_username' in settings and 'couchdb.reader_password' in settings:
            reader_username = settings.get('couchdb.reader_username')
            reader = users_db.get(
                'org.couchdb.user:{}'.format(reader_username),
                {'_id': 'org.couchdb.user:{}'.format(reader_username)})
            if not reader.get('derived_key', '') or PBKDF2(
                    settings.get('couchdb.reader_password'),
                    reader.get('salt', ''),
                    reader.get('iterations', 10)).hexread(
                        int(len(reader.get('derived_key', '')) / 2)) != reader.get('derived_key', ''):
                reader.update({
                    "name": reader_username,
                    "roles": ['reader'],
                    "type": "user",
                    "password": settings.get('couchdb.reader_password')
                })
                LOGGER.info("Updating api db reader user",
                            extra={'MESSAGE_ID': 'update_api_reader_user'})
                users_db.save(reader)
                security_users.append(reader_username)
        if db_name not in aserver:
            aserver.create(db_name)
        db = aserver[db_name]
        SECURITY[u'members'][u'names'] = security_users
        if SECURITY != db.security:
            LOGGER.info("Updating api db security",
                        extra={'MESSAGE_ID': 'update_api_security'})
            db.security = SECURITY
        auth_doc = db.get(VALIDATE_DOC_ID, {'_id': VALIDATE_DOC_ID})
        if auth_doc.get('validate_doc_update') != VALIDATE_DOC_UPDATE % username:
            auth_doc['validate_doc_update'] = VALIDATE_DOC_UPDATE % username
            LOGGER.info("Updating api db validate doc",
                        extra={'MESSAGE_ID': 'update_api_validate_doc'})
            db.save(auth_doc)
        # sync couchdb views
        sync_design(db)
        db = server[db_name]
    else:
        if db_name not in server:
            server.create(db_name)
        db = server[db_name]
        # sync couchdb views
        sync_design(db)
    config.registry.db = db

    # migrate data
    migrate_data(config.registry.db)

    # S3 connection
    if 'aws.access_key' in settings and 'aws.secret_key' in settings and 'aws.s3_bucket' in settings:
        connection = S3Connection(settings['aws.access_key'],
                                  settings['aws.secret_key'])
        config.registry.s3_connection = connection
        bucket_name = settings['aws.s3_bucket']
        if bucket_name not in [b.name for b in connection.get_all_buckets()]:
            connection.create_bucket(bucket_name, location=Location.EU)
        config.registry.bucket_name = bucket_name
    return config.make_wsgi_app()
def _hash_password(self, password):
    return PBKDF2.crypt(password, iterations=0x2537)
def validate_password(self, attempt):
    """ Check the password against existing credentials """
    if self._password is not None:
        return self.password == PBKDF2.crypt(attempt, self.password)
    else:
        return False
def check_password(self, raw_pwd):
    if not raw_pwd:
        return None
    return self.password == PBKDF2.crypt(raw_pwd, self.password)
def get_salt(self, key):
    # Salt is generated as the hash of the key, with its own salt acting like a seed value
    return PBKDF2(key, Storage.salt_seed).read(self.sal_size)
import pytest
from pbkdf2 import PBKDF2
import random
import string
import tempfile

from two1.bitcoin.crypto import HDKey, HDPrivateKey
from two1.bitcoin.utils import bytes_to_str
from two1.bitcoin.utils import rand_bytes
from two1.blockchain.mock_provider import MockProvider
from two1.wallet import exceptions
from two1.wallet.two1_wallet import Two1Wallet

enc_key_salt = b'\xaa\xbb\xcc\xdd'
passphrase = "test_wallet"
passphrase_hash = PBKDF2.crypt(passphrase)

master_key = "xprv9s21ZrQH143K2dUcTctuNw8oV8e7gi4ZbHFGAnyGJtWwmKbKTbLGtx48DQGzioGDdhVn8zFhJe8hbDdfDnK19ykxjwXLzd6EpxnTqi4zQGN"  # nopep8
master_seed = "tuna object element cancel hard nose faculty noble swear net subway offer"

mkey_enc, mseed_enc = Two1Wallet.encrypt(master_key=master_key,
                                         master_seed=master_seed,
                                         passphrase=passphrase,
                                         key_salt=enc_key_salt)

config = {'master_key': mkey_enc,
          'master_seed': mseed_enc,
          'locked': True,
          'passphrase_hash': passphrase_hash,
          'key_salt': bytes_to_str(enc_key_salt),
          'account_type': "BIP44BitcoinMainnet",
def decrypt(key, salt, text):
    key = PBKDF2(key, salt).read(32)
    text = base64.b64decode(text)
    aes = pyaes.AESModeOfOperationCTR(key)
    decrypted = aes.decrypt(text)
    return decrypted.decode('utf-8')
def __init__(self, cookie_file=None, domain_name=""): self.salt = b'saltysalt' self.iv = b' ' * 16 self.length = 16 # domain name to filter cookies by self.domain_name = domain_name if sys.platform == 'darwin': # running Chrome on OSX my_pass = keyring.get_password('Chrome Safe Storage', 'Chrome').encode( 'utf8') # get key from keyring iterations = 1003 # number of pbkdf2 iterations on mac self.key = PBKDF2(my_pass, self.salt, iterations=iterations).read(self.length) cookie_file = cookie_file \ or os.path.expanduser('~/Library/Application Support/Google/Chrome/Default/Cookies') elif sys.platform.startswith('linux'): # running Chrome on Linux # chrome linux is encrypted with the key peanuts my_pass = get_linux_pass().encode('utf8') iterations = 1 self.key = PBKDF2(my_pass, self.salt, iterations=iterations).read(self.length) paths = map(os.path.expanduser, [ '~/.config/google-chrome/Default/Cookies', '~/.config/chromium/Default/Cookies', '~/.config/google-chrome-beta/Default/Cookies' ]) cookie_file = cookie_file or next(filter(os.path.exists, paths), None) elif sys.platform == "win32": # Read key from file key_file = glob.glob( os.path.join(os.getenv('APPDATA', ''), '..\Local\\Google\\Chrome\\User Data\\Local State')) \ or glob.glob( os.path.join(os.getenv('LOCALAPPDATA', ''), 'Google\\Chrome\\User Data\\Local State')) \ or glob.glob(os.path.join(os.getenv('APPDATA', ''), 'Google\\Chrome\\User Data\\Local State')) if isinstance(key_file, list): if key_file: key_file = key_file[0] if key_file: f = open(key_file, 'rb') key_file_json = json.load(f) key64 = key_file_json['os_crypt']['encrypted_key'].encode( 'utf-8') # Decode Key, get rid of DPAPI prefix, unprotect data keydpapi = base64.standard_b64decode(key64)[5:] _, self.key = crypt_unprotect_data(keydpapi, is_key=True) # get cookie file from APPDATA # Note: in windows the \\ is required before a u to stop unicode errors cookie_file = cookie_file or windows_group_policy_path() \ or glob.glob( os.path.join(os.getenv('APPDATA', ''), '..\Local\\Google\\Chrome\\User Data\\Default\\Cookies')) \ or glob.glob( os.path.join(os.getenv('LOCALAPPDATA', ''), 'Google\\Chrome\\User Data\\Default\\Cookies')) \ or glob.glob( os.path.join(os.getenv('APPDATA', ''), 'Google\\Chrome\\User Data\\Default\\Cookies')) else: raise BrowserCookieError( "OS not recognized. Works on Chrome for OSX, Windows, and Linux." ) # if the type of cookie_file is list, use the first element in the list if isinstance(cookie_file, list): if not cookie_file: raise BrowserCookieError('Failed to find Chrome cookie') cookie_file = cookie_file[0] self.tmp_cookie_file = create_local_copy(cookie_file)
def getKey(passwd, length, salt=None):
    # -------------------------------------------------------------------------------
    if salt is None:
        salt = mkSalt(8)
    return PBKDF2(passwd, salt).read(length)
def to_seed(cls, mnemonic, passphrase=''):
    mnemonic = cls.normalize_string(mnemonic)
    passphrase = cls.normalize_string(passphrase)
    return PBKDF2(mnemonic, u'mnemonic' + passphrase,
                  iterations=PBKDF2_ROUNDS,
                  macmodule=hmac,
                  digestmodule=hashlib.sha512).read(64)
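# A standalone sketch of the BIP-39 seed derivation performed by to_seed above, assuming
# the pbkdf2 package and that PBKDF2_ROUNDS carries the BIP-39 value of 2048; the mnemonic
# and the "TREZOR" passphrase are example inputs, not values from the original snippet.
import hashlib
import hmac

from pbkdf2 import PBKDF2

mnemonic = "legal winner thank year wave sausage worth useful legal winner thank yellow"
seed = PBKDF2(mnemonic, u'mnemonic' + 'TREZOR', iterations=2048,
              macmodule=hmac, digestmodule=hashlib.sha512).read(64)  # 64-byte wallet seed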
def encrypt(key, text):
    salt = ''.join(random.choices(string.ascii_letters + string.digits, k=12))
    key = PBKDF2(key, salt).read(32)
    aes = pyaes.AESModeOfOperationCTR(key)
    ciphertext = aes.encrypt(text.encode('utf-8'))
    return (salt, base64.b64encode(ciphertext).decode())
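# A round-trip usage sketch for the encrypt helper above together with the decrypt helper
# defined earlier in this collection, assuming both are in scope with their imports
# (base64, random, string, pyaes, pbkdf2); the passphrase and message are made-up examples.
salt, ciphertext = encrypt("my passphrase", "hello world")
assert decrypt("my passphrase", salt, ciphertext) == "hello world"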