def _validate_assumptions(self):
    """Confirm that the dictionary, secrets.py, and charsets.py are unchanged.

    Generated passwords are a function of the contents of these files;
    if any of them changed since the master password file was written,
    previously generated passwords would silently change.  Compare the
    current hash of each file against the recorded one and warn the user
    on any mismatch.
    """
    # Check that dictionary has not changed.
    # If the master password file exists, then self.data['dict_hash'] will
    # exist, and we will compare the current hash for the dictionary
    # against that stored in the master password file, otherwise we will
    # compare against the one present when the program was configured.
    self.dictionary.validate(self.data.get('dict_hash', DICTIONARY_SHA1))

    # Check that secrets.py and charsets.py have not changed
    for each, sha1 in [
        ('secrets', SECRETS_SHA1),
        ('charsets', CHARSETS_SHA1)
    ]:
        path = make_path(get_head(__file__), each + '.py')
        try:
            with open(path) as f:
                contents = f.read()
        except IOError:
            # Not alongside this file; fall back to the parent directory.
            path = make_path(get_head(__file__), '..', each + '.py')
            try:
                with open(path) as f:
                    contents = f.read()
            except IOError as err:
                # NOTE(review): logger.error() is presumed to terminate
                # the program; if it returns, 'contents' is unbound below
                # -- confirm against the logger implementation.
                self.logger.error('%s: %s.' % (err.filename, err.strerror))
        # 'digest' rather than 'hash' so the builtin is not shadowed.
        digest = hashlib.sha1(contents.encode('utf-8')).hexdigest()

        # Check that file has not changed.
        # If the master password file exists, then self.data['%s_hash']
        # will exist, and we will compare the current hash for the file
        # against that stored in the master password file, otherwise we
        # will compare against the one present when the program was
        # configured.
        if digest != self.data.get('%s_hash' % each, sha1):
            self.logger.display("Warning: '%s' has changed." % path)
            self.logger.display("    " + "\n    ".join(wrap(' '.join([
                "This could result in passwords that are inconsistent",
                "with those created in the past.",
                'Update the corresponding hash in %s/%s to "%s".' % (
                    DEFAULT_SETTINGS_DIR, MASTER_PASSWORD_FILENAME,
                    digest),
                "Then use 'abraxas --changed' to assure that nothing has",
                "changed."
            ]))))
def _find_dictionary(self, filename, settings_dir):
    """Find the dictionary file.

    Finds the file that contains the dictionary of words used to
    construct pass phrases.  Initially looks in the settings directory;
    if not there, looks in the install directory (the directory above
    the one containing this file).

    Arguments:
    filename -- name of the dictionary file.
    settings_dir -- user settings directory to search first.

    Returns the path to the dictionary (logs an error if it is not
    readable).
    """
    path = make_path(settings_dir, filename)
    if not exists(path):
        # Fall back to the install directory.
        path = make_path(get_head(__file__), '..', filename)
    if not file_is_readable(path):
        self.logger.error("%s: cannot open dictionary." % path)
    return path
def avendesora_archive(self):
    """Avendesora Archive

    Save all account information to Avendesora account files, one
    generated file per source Abraxas accounts file, under
    <settings_dir>/avendesora.  Accounts listed in the user's
    'do-not-export' file are skipped.  Generated files are encrypted
    with the default GPG id.
    """
    from binascii import b2a_base64
    self.logger.log("Archive secrets.")
    source_files = set()
    dest_files = {}
    gpg_ids = {}
    avendesora_dir = make_path(self.settings_dir, 'avendesora')
    mkdir(avendesora_dir)
    # Header written at the top of every generated file (%s is the
    # source file path).
    header = dedent('''\
        # Translated Abraxas Accounts file (%s)
        # vim: filetype=python sw=4 sts=4 et ai ff=unix fileencoding='utf8' :
        #
        # It is recommended that you not modify this file directly. Instead,
        # if you wish to modify an account, copy it to an account file not
        # associated with Abraxas and modify it there. Then, to avoid
        # conflicts, add the account name to ~/.config/abraxas/do-not-export
        # and re-export the accounts using 'abraxas --export'.

        from avendesora import Account, Hidden, Question, RecognizeURL, RecognizeTitle

    ''')

    # read do-not-export file; missing file means nothing is excluded
    try:
        with open(make_path(self.settings_dir, 'do-not-export')) as f:
            do_not_export = set(f.read().split())
    except IOError:
        do_not_export = set([])

    def make_camel_case(text):
        # Convert an account ID into a valid Python class name.
        text = text.translate(maketrans('@.-', '   '))
        text = ''.join([e.title() for e in text.split()])
        if text[0] in '0123456789':
            text = '_' + text
        return text

    def make_identifier(text):
        # Convert an arbitrary key into a valid Python identifier.
        text = text.translate(maketrans('@.- ', '____'))
        if text[0] in '0123456789':
            text = '_' + text
        return text

    # Loop through accounts saving passwords and questions
    for account_id in self.all_accounts():
        account = self.get_account(account_id, quiet=True)
        data = account.__dict__['data']
        ID = account.__dict__['ID']
        if ID in do_not_export:
            print('skipping', ID)
            continue
        class_name = make_camel_case(ID)
        # '{''{''{1' assembles a vim fold marker without creating a fold
        # in this file itself.
        output = [
            'class %s(Account): # %s' % (class_name, '{''{''{1')
        ]
        self.logger.debug("    Saving %s account." % ID)

        try:
            source_filepath = data['_source_file_']
            dest_filepath = make_path(
                avendesora_dir, rel_path(source_filepath, self.settings_dir)
            )
            if source_filepath not in source_files:
                source_files.add(source_filepath)

                # get recipient ids from existing file
                if get_extension(source_filepath) in ['gpg', 'asc']:
                    try:
                        gpg = Execute(
                            ['gpg', '--list-packets', source_filepath],
                            stdout=True, wait=True
                        )
                        gpg_ids[dest_filepath] = []
                        for line in gpg.stdout.split('\n'):
                            if line.startswith(':pubkey enc packet:'):
                                words = line.split()
                                assert words[7] == 'keyid'
                                gpg_ids[dest_filepath].append(words[8])
                    except ExecuteError as err:
                        print(str(err))
                else:
                    gpg_ids[dest_filepath] = None
                dest_files[dest_filepath] = {None: header % source_filepath}
        except KeyError:
            raise AssertionError('%s: SOURCE FILE MISSING.' % ID)
        except IOError as err:
            self.logger.error('%s: %s.' % (err.filename, err.strerror))

        output.append("    NAME = %r" % ID)
        # Secrets are base64 encoded and wrapped in Hidden() so they are
        # not readable at a glance.
        password = self.generate_password(account)
        output.append("    passcode = Hidden(%r)" % b2a_base64(
            password.encode('ascii')).strip().decode('ascii')
        )
        questions = []
        for question in account.get_security_questions():
            # convert the result to a list rather than leaving it a tuple
            # because tuples are formatted oddly in yaml
            questions += [list(self.generate_answer(question, account))]
            self.logger.debug(
                "    Saving question (%s) and its answer." % question)
        if questions:
            output.append("    questions = [")
            for question, answer in questions:
                output.append("        Question(%r, answer=Hidden(%r))," % (
                    question,
                    b2a_base64(answer.encode('ascii')).strip().decode('ascii')
                ))
            output.append("    ]")

        # Translate Abraxas autotype to an Avendesora script.
        if 'autotype' in data:
            autotype = data['autotype'].replace('{password}', '{passcode}')
        else:
            if 'username' in data:
                autotype = '{username}{tab}{passcode}{return}'
            else:
                autotype = '{email}{tab}{passcode}{return}'

        # Account discovery from window titles and URLs.
        discovery = []
        if 'url' in data:
            urls = [data['url']] if type(data['url']) == str else data['url']
            discovery.append('RecognizeURL(%s, script=%r)' % (
                ', '.join([repr(e) for e in urls]), autotype
            ))
        if 'window' in data:
            windows = [data['window']] if type(data['window']) == str else data['window']
            discovery.append('RecognizeTitle(%s, script=%r)' % (
                ', '.join([repr(e) for e in windows]), autotype
            ))
        if discovery:
            output.append("    discovery = [")
            for each in discovery:
                output.append("        %s," % each)
            output.append("    ]")

        # Copy remaining fields verbatim, skipping those already handled
        # or meaningless to Avendesora.
        for k, v in data.items():
            if k in [
                'password', 'security questions', '_source_file_',
                'password-type', 'master', 'num-words', 'num-chars',
                'alphabet', 'template', 'url', 'version', 'autotype',
                'window',
            ]:
                continue
            key = make_identifier(k)
            if type(v) == str and '\n' in v:
                output.append('    %s = """' % key)
                for line in dedent(v.strip('\n')).split('\n'):
                    if line:
                        output.append('        %s' % line.rstrip())
                    else:
                        output.append('')
                output.append('    """')
            else:
                output.append("    %s = %r" % (key, v))
        output.append('')
        output.append('')
        dest_files[dest_filepath][ID] = '\n'.join(output)

    # This version uses default gpg id to encrypt files.
    # Could also take gpg ids from actual files.
    # The gpg ids are gathered from files above, but code to use them is
    # currently unused.
    for filepath, accounts in dest_files.items():
        try:
            header = accounts.pop(None)
            contents = '\n'.join(
                [header] + [accounts[k] for k in sorted(accounts)]
            )
            mkdir(get_head(filepath))
            os.chmod(get_head(filepath), 0o700)
            print('%s: writing.' % filepath)
            # encrypt all files with default gpg ID
            if True:
                if get_extension(filepath) not in ['gpg', 'asc']:
                    filepath += '.gpg'
                gpg_id = self.accounts.get_gpg_id()
                encrypted = self.gpg.encrypt(
                    contents, gpg_id, always_trust=True, armor=True
                )
                if not encrypted.ok:
                    # BUG FIX: this previously referenced the undefined
                    # name 'filename' (NameError on encryption failure).
                    self.logger.error(
                        "%s: unable to encrypt.\n%s" % (
                            filepath, encrypted.stderr))
                contents = str(encrypted)
            with open(filepath, 'w') as f:
                f.write(contents)
            os.chmod(filepath, 0o600)
        except IOError as err:
            self.logger.error('%s: %s.' % (err.filename, err.strerror))
def _read_accounts_file(self):
    """Read the accounts file, returning the accounts it contains.

    Locates the accounts file (falling back to .gpg/.asc encrypted
    variants), decrypts it if necessary, executes it as Python, then
    merges in any files named in 'additional_accounts'.  Tags every
    account with '_source_file_' so it can be traced back.  Saves the
    raw exec namespace in self.data and returns the accounts dict.
    """
    if not self.path:
        # There is no accounts file
        self.data = {}
        return self.data
    if not exists(self.path):
        # If file does not exist, look for encrypted versions
        for ext in ['gpg', 'asc']:
            new_path = '.'.join([self.path, ext])
            if exists(new_path):
                self.path = new_path
                break

    logger = self.logger
    accounts_data = {}
    try:
        if get_extension(self.path) in ['gpg', 'asc']:
            # Accounts file is GPG encrypted, decrypt it before loading
            with open(self.path, 'rb') as f:
                decrypted = self.gpg.decrypt_file(f)
                if not decrypted.ok:
                    logger.error("%s\n%s" % (
                        "%s: unable to decrypt." % (self.path),
                        decrypted.stderr))
                code = compile(decrypted.data, self.path, 'exec')
                exec(code, accounts_data)
        else:
            # Accounts file is not encrypted
            with open(self.path) as f:
                code = compile(f.read(), self.path, 'exec')
                exec(code, accounts_data)
        if 'accounts' not in accounts_data:
            logger.error(
                "%s: defective accounts file, 'accounts' not found."
                % self.path
            )
        for account in accounts_data['accounts'].values():
            account['_source_file_'] = self.path

        # Load additional accounts files
        additional_accounts = accounts_data.get('additional_accounts', [])
        if type(additional_accounts) == str:
            additional_accounts = [additional_accounts]
        for each in additional_accounts:
            more_accounts = {}
            path = make_path(get_head(self.path), each)
            try:
                if get_extension(path) in ['gpg', 'asc']:
                    # Accounts file is GPG encrypted, decrypt it
                    with open(path, 'rb') as f:
                        decrypted = self.gpg.decrypt_file(f)
                        if not decrypted.ok:
                            logger.error("%s\n%s" % (
                                "%s: unable to decrypt." % (path),
                                decrypted.stderr))
                            continue
                        code = compile(decrypted.data, path, 'exec')
                        exec(code, more_accounts)
                else:
                    # Accounts file is not encrypted
                    with open(path) as f:
                        code = compile(f.read(), path, 'exec')
                        exec(code, more_accounts)
            except IOError as err:
                # Additional files are best-effort; skip on failure.
                logger.display('%s: %s. Ignored' % (
                    err.filename, err.strerror
                ))
                continue
            # Warn about accounts that are being overridden.
            existing_names = set(accounts_data['accounts'].keys())
            new_accounts = more_accounts.get('accounts', {})
            new_names = set(new_accounts.keys())
            names_in_common = sorted(
                existing_names.intersection(new_names))
            # BUG FIX: was '> 2', which caused exactly two overridden
            # accounts to be reported as only one (the elif prints just
            # names_in_common[0]).
            if len(names_in_common) > 1:
                logger.display(
                    "%s: overrides existing accounts:\n %s" % (
                        path, ',\n '.join(names_in_common)))
            elif names_in_common:
                logger.display("%s: overrides existing account: %s" % (
                    path, names_in_common[0]))
            for account in new_accounts.values():
                account['_source_file_'] = path
            accounts_data['accounts'].update(new_accounts)
    except IOError as err:
        logger.error('%s: %s.' % (err.filename, err.strerror))
    except SyntaxError as err:
        # A malformed accounts file is fatal; show where it went wrong.
        traceback.print_exc(0)
        sys.exit()

    self.data = accounts_data
    return accounts_data['accounts']
def _read_master_password_file(self):
    """Read the master password file.

    Decrypts the GPG-encrypted master password file and executes it as
    Python to populate the defaults below, then merges in any files
    named in 'additional_master_password_files'.  In stateless mode the
    file is not read and the defaults are returned as-is.

    Returns the resulting data dict (keys: 'accounts', 'passwords',
    'default_password', 'password_overrides',
    'additional_master_password_files').
    """
    # Defaults used when stateless or when the file cannot be read.
    data = {
        'accounts': None,
        'passwords': {},
        'default_password': None,
        'password_overrides': {},
        'additional_master_password_files': [],
    }
    if not self.stateless:
        try:
            with open(self.path, 'rb') as f:
                decrypted = self.gpg.decrypt_file(f)
                if not decrypted.ok:
                    # NOTE(review): logger.error() presumably terminates;
                    # otherwise the compile below runs on bad data.
                    self.logger.error("%s" %
                        "%s: unable to decrypt." % (self.path),
                    )
                code = compile(decrypted.data, self.path, 'exec')
                exec(code, data)
        except IOError as err:
            # A missing master password file is not fatal; warn and
            # fall through with the defaults.
            self.logger.display(
                'Warning: could not read master password file %s: %s.' % (
                    err.filename, err.strerror))
        except SyntaxError as err:
            traceback.print_exc(0)
            sys.exit()

    # assure that the keys on the master passwords are strings
    for ID in data.get('passwords', {}):
        if type(ID) != str:
            self.logger.error(
                '%s: master password ID must be a string.' % ID)

    # Open additional master password files
    additional_password_files = data.get(
        'additional_master_password_files', [])
    if type(additional_password_files) == str:
        # Allow a single filename to be given as a bare string.
        additional_password_files = [additional_password_files]
    for each in additional_password_files:
        more_data = {}
        path = make_path(get_head(self.path), each)
        if get_extension(path) in ['gpg', 'asc']:
            # File is GPG encrypted, decrypt it
            try:
                with open(path, 'rb') as f:
                    decrypted = self.gpg.decrypt_file(f)
                    if not decrypted.ok:
                        self.logger.error("%s" %
                            "%s: unable to decrypt." % (path),
                        )
                        continue
                    code = compile(decrypted.data, path, 'exec')
                    exec(code, more_data)
            except IOError as err:
                # Additional files are best-effort; skip on failure.
                self.logger.display('%s: %s. Ignored.' % (
                    err.filename, err.strerror
                ))
                continue
        else:
            # NOTE(review): no continue here -- after the error the merge
            # below still runs with an empty more_data (a no-op);
            # presumably logger.error() exits, making this moot.
            self.logger.error(
                "%s: must have .gpg or .asc extension" % (path))

        # Check for duplicate master passwords
        existing_names = set(data.get('passwords', {}).keys())
        new_passwords = more_data.get('passwords', {})
        new_names = set(new_passwords.keys())
        names_in_common = sorted(
            existing_names.intersection(new_names))
        if names_in_common:
            self.logger.display(
                "%s: overrides existing password:\n %s" % (
                    path, ',\n '.join(sorted(names_in_common))))
        data['passwords'].update(new_passwords)

        # Check for duplicate passwords overrides
        existing_names = set(data['password_overrides'].keys())
        new_overrides = more_data.get('password_overrides', {})
        new_names = set(new_overrides.keys())
        names_in_common = sorted(
            existing_names.intersection(new_names))
        if names_in_common:
            self.logger.display(
                "%s: overrides existing password overrides:\n %s" % (
                    path, ',\n '.join(sorted(names_in_common))))
        data['password_overrides'].update(new_overrides)
    return data