def run(cls, command, args, settings, options):
    """Execute the prune command.

    Parses the command line, verifies that at least one keep_* prune
    setting is configured, then invokes "borg prune" on the repository.
    """
    # parse command line
    cmdline = docopt(cls.USAGE, argv=[command] + args)
    include_external = cmdline["--include-external"]
    display_stats = cmdline["--stats"] or settings.show_stats

    # verify that at least one prune interval setting is configured
    interval_names = "within last minutely hourly daily weekly monthly yearly".split()
    keep_settings = ["keep_" + name for name in interval_names]
    if not any(settings.value(name) for name in keep_settings):
        choices = conjoin(keep_settings, ", or ")
        raise Error(
            "No prune settings available",
            codicil=f"At least one of {choices} must be specified.",
        )

    # run borg
    borg = settings.run_borg(
        cmd="prune",
        args=[settings.destination()],
        emborg_opts=options,
        strip_prefix=include_external,
        show_borg_output=display_stats,
    )
    if borg.stdout:
        output(borg.stdout.rstrip())
def initialize(cls, gpg_path=None, gpg_home=None, armor=None):
    """Initialize the GPG interface.

    Resolves the GPG executable path, home directory, and armor setting
    from the arguments or the saved settings, records them as setting
    overrides, and instantiates the shared gnupg.GPG object.
    """
    from .config import get_setting, override_setting

    # resolve and record the path to the gpg executable
    cls.gpg_path = to_path(gpg_path if gpg_path else get_setting('gpg_executable'))
    override_setting('gpg_executable', cls.gpg_path)

    # resolve and record the gpg home directory
    cls.gpg_home = to_path(gpg_home if gpg_home else get_setting('gpg_home'))
    override_setting('gpg_home', cls.gpg_home)

    # resolve the armor setting; an invalid value is reported and dropped
    if armor is None:
        armor = get_setting('gpg_armor')
    if armor not in ARMOR_CHOICES:
        warn(
            "'%s' is not valid, choose from %s." % (armor, conjoin(ARMOR_CHOICES)),
            culprit=(get_setting('config_file'), 'gpg_armor')
        )
        armor = None
    cls.armor = armor
    override_setting('gpg_armor', armor)

    # create the gnupg interface with whatever options were resolved
    gpg_args = {}
    if cls.gpg_path:
        gpg_args['gpgbinary'] = str(cls.gpg_path)
    if cls.gpg_home:
        gpg_args['gnupghome'] = str(cls.gpg_home)
    cls.gpg = gnupg.GPG(**gpg_args)
def initialize(cls, gpg_path=None, gpg_home=None, armor=None):
    """Initialize the GPG interface.

    Resolves the GPG executable path, home directory, and armor setting,
    records them as setting overrides, and creates the shared gnupg.GPG
    object, terminating on failure.
    """
    # resolve and record the gpg executable and home directory
    cls.gpg_path = to_path(gpg_path if gpg_path else get_setting('gpg_executable'))
    override_setting('gpg_executable', cls.gpg_path)
    cls.gpg_home = to_path(gpg_home if gpg_home else get_setting('gpg_home'))
    override_setting('gpg_home', cls.gpg_home)

    # resolve the armor setting; an invalid value falls back to 'extension'
    if armor is None:
        armor = get_setting('gpg_armor')
    if armor not in ARMOR_CHOICES:
        warn(
            "'%s' is not valid, choose from %s." % (armor, conjoin(ARMOR_CHOICES)),
            culprit=setting_path('gpg_armor')
        )
        armor = 'extension'
    cls.armor = armor
    override_setting('gpg_armor', armor)

    # create the gnupg interface; gnupg raises ValueError on a bad setup
    gpg_args = {}
    if cls.gpg_path:
        gpg_args['gpgbinary'] = str(cls.gpg_path)
    if cls.gpg_home:
        gpg_args['gnupghome'] = str(cls.gpg_home)
    try:
        cls.gpg = gnupg.GPG(**gpg_args)
    except ValueError as e:
        fatal(e)
def get_account(name):
    """Look up an account by name.

    Args:
        name (str): The user-provided account name or alias.

    Returns:
        The matching account.

    Raises:
        PasswordError: If no account matches; the message suggests up to
            three close matches (with their aliases) when available.
    """
    canonical = canonicalize(name)
    try:
        return Account._accounts[canonical]
    except KeyError:
        # account not found, assemble message giving suggested account names
        names = Account._accounts.keys()
        candidates = get_close_matches(canonical, names, 9, 0.6)

        # do not want to give multiple options all of which are aliases for
        # the same accounts, so look for up to three unique accounts
        close_matches = []
        for candidate in candidates:
            account = Account._accounts[candidate]
            if account not in close_matches:
                close_matches.append(account)
            if len(close_matches) >= 3:
                break

        # add the aliases to the base account names if available
        offer = []
        for account in close_matches:
            aliases = getattr(account, 'aliases', None)
            if aliases:
                offer.append('{} ({})'.format(account.get_name(), ', '.join(aliases)))
            else:
                offer.append(account.get_name())

        # generate the message handling 0, 1, 2 or 3 candidates gracefully
        msg = 'account not found'
        if offer:
            msg = '{}, did you mean:\n {}?'.format(
                msg, conjoin(offer, sep=',\n ', conj=', or\n '))
        raise PasswordError(full_stop(msg), culprit=name)
def check(self):
    """Validate the settings and add convenience placeholders.

    Adds home_dir/config_dir/log_dir placeholders, collects the
    string-valued settings for later resolution, verifies the required
    settings are given, and checks that working_dir exists and is an
    absolute path.

    Raises:
        Error: If a required setting is missing or working_dir is invalid.
    """
    # add some possibly useful placeholders into settings
    home_dir = os.environ.get("HOME")
    if home_dir and "home_dir" not in self.settings:
        self.settings["home_dir"] = home_dir
    self.settings["config_dir"] = CONFIG_DIR
    self.settings["log_dir"] = DATA_DIR
    self.do_not_expand = Collection(self.settings.get("do_not_expand", ""))

    # gather the string valued settings together (can be used by resolve)
    self.str_settings = {k: v for k, v in self.settings.items() if is_str(v)}

    # complain about required settings that are missing
    missing = []
    required_settings = "repository".split()
    for each in required_settings:
        if not self.settings.get(each):
            missing.append(each)
    if missing:
        m = conjoin(missing)
        raise Error(f"{m}: no value given for {plural(m):setting}.")

    self.working_dir = to_path(self.settings.get("working_dir", "/"))
    if not self.working_dir.exists():
        # BUG FIX: this message was a plain string (missing the f-prefix),
        # so the working_dir path was never interpolated into the error.
        raise Error(f"{self.working_dir!s} not found.", culprit="working_dir")
    if not self.working_dir.is_absolute():
        raise Error("must be an absolute path.", culprit="working_dir")
def read_confs(self):
    """Read the .conf files in the config directory (except hosts.conf)
    and load their values into the instance settings."""
    for name in ("ssh", "networks", "locations", "proxies"):
        conf_file = to_path(CONFIG_DIR, name + ".conf")
        if not conf_file.exists():
            continue
        settings = PythonFile(conf_file).run()
        # warn about settings that collide with previously loaded ones
        overlap = settings.keys() & self.settings.keys()
        overlap -= sshconfig_names
        overlap = [k for k in overlap if not k.startswith("_")]
        if overlap:
            warn("conflicting settings:", conjoin(overlap), culprit=conf_file)
        self.settings.update(settings)

    # the SSH config file path must be absolute
    self.ssh_config_file = to_path(
        self.settings.get("CONFIG_FILE", SSH_CONFIG_FILE))
    if not self.ssh_config_file.is_absolute():
        raise Error(
            "path to SSH config file should be absolute.",
            culprit=self.ssh_config_file,
        )

    # cache commonly used settings as attributes
    get = self.settings.get
    self.ssh_defaults = get("DEFAULTS", "")
    self.ssh_overrides = get("OVERRIDES", "")
    self.preferred_networks = get("PREFERRED_NETWORKS", [])
    self.locations = get("LOCATIONS", {})
    self.proxies = get("PROXIES", {})
    self.available_ciphers = get("AVAILABLE_CIPHERS")
    self.available_macs = get("AVAILABLE_MACS")
    self.available_host_key_algorithms = get("AVAILABLE_HOST_KEY_ALGORITHMS")
    self.available_kex_algorithms = get("AVAILABLE_KEX_ALGORITHMS")
def challenge_response(self, name, challenge):
    """Generate a response to a challenge.

    Given the name of a master seed (actually the basename of the file
    that contains the master seed), returns an identifying response to a
    challenge.  If no challenge is provided, one is generated based on
    the time and date.

    Args:
        name (str): The name of the master seed.
        challenge (str): The challenge (may be empty).

    Returns:
        tuple: The challenge used and the expected response, both as
        strings.

    Raises:
        PasswordError: If name is not one of the known shared secrets.
    """
    try:
        if not challenge:
            # no challenge given; generate one from the current UTC time
            from arrow import utcnow
            now = str(utcnow())
            c = Passphrase()
            c.set_seeds([now])
            challenge = str(c)
        r = Passphrase()
        shared_secrets = self.account_files.shared_secrets
        # shared_secrets[name] raises KeyError when name is unknown
        r.set_seeds([shared_secrets[name], challenge])
        response = str(r)
        return challenge, response
    except KeyError:
        # name is not a known shared secret; list the valid choices
        choices = conjoin(sorted(shared_secrets.keys()))
        raise PasswordError('Unknown partner. Choose from %s.' % choices,
            culprit=name)
def gather_public_keys(self):
    """Distribute this key's public half to its configured servers.

    Reads the public key file and its fingerprint, then contributes a
    commented and possibly restricted entry to the AuthKeys object of
    each server listed in the key's data (honoring the update and skip
    filters).  Warns if no servers are configured.
    """
    comment(' gathering public keys')
    keyname = self.keyname
    data = self.data
    # NOTE(review): clients is presumably interpolated into the purpose
    # string by fmt() from the local scope — confirm fmt's semantics.
    clients = conjoin(self.data.get('clients', []))
    default_purpose = fmt('This key allows access from {clients}.')
    purpose = self.data.get('purpose', default_purpose)
    servers = self.data.get('servers', [])
    # NOTE(review): prov appears unused in this function — verify.
    prov = '.provisional' if self.trial_run else ''

    # read contents of public key
    try:
        pubkey = to_path(keyname + '.pub')
        key = pubkey.read_text().strip()
    except OSError as err:
        narrate('%s, skipping.' % os_error(err))
        return

    # get fingerprint of public key
    try:
        keygen = Run(['ssh-keygen', '-l', '-f', pubkey], modes='wOeW')
        fields = keygen.stdout.strip().split()
        fingerprint = ' '.join([fields[0], fields[1], fields[-1]])
    except OSError as err:
        error(os_error(err))
        return

    # contribute commented and restricted public key to the authorized_key
    # file for each server
    for server in servers:
        if self.update and server not in self.update:
            continue
        if server in self.skip:
            continue
        server_data = servers[server]
        description = server_data.get('description', None)
        restrictions = server_data.get('restrictions', [])
        # build the comment lines, dropping any empty entries
        remarks = [
            '# %s' % t
            for t in cull([purpose, description, self.warning, fingerprint])
            if t
        ]
        # server-specific include file overrides the key-wide default
        include_file = server_data.get(
            'remote-include-filename', data['remote-include-filename']
        )
        bypass = server_data.get('bypass')
        authkeys = AuthKeys(server, include_file, bypass, self.trial_run)
        authkeys.add_public_key(keyname, key, remarks, restrictions)
    if not servers:
        warn(
            'no servers specified, you must update them manually.',
            culprit=keyname
        )
def set_location(self, given=None):
    """Record the current location, defaulting to the network's location.

    Warns about configured locations that are not recognized and raises
    an Error when the active location is not among LOCATIONS.
    """
    if not given:
        given = self.network.location
    locations.set_location(given)

    # report configured locations that the locations module does not know
    unrecognized = locations.unknown_locations(self.locations)
    if unrecognized:
        warn("the following locations are unknown (add them to LOCATIONS):")
        codicil(*sorted(unrecognized), sep="\n")

    self.location = self.locations.get(locations.my_location)
    if locations.my_location and not self.location:
        raise Error("unknown location, choose from:", conjoin(self.locations))
def get_summary(self):
    """Return a one-sentence summary of the active network, location,
    available ports, and proxy."""
    parts = ["Network is", self.network.Name()]
    description = self.network.description
    if description:
        parts.append("({})".format(description))
    if self.location:
        parts.append("located near {}".format(self.location))
    available = ports.available_ports
    if available:
        port_list = conjoin([str(p) for p in available], " or ")
        parts.append("using port {}".format(port_list))
    if self.proxy:
        parts.append("proxying through {}".format(self.proxy))
    return full_stop(" ".join(parts))
def run(cls, command, args, settings, options):
    """Execute the restore command.

    Resolves the requested paths against the configured source
    directories, selects an archive (nearest to a date, named, or the
    latest), and extracts the paths with "borg extract".
    """
    # parse command line
    cmdline = docopt(cls.USAGE, argv=[command] + args)
    requested = cmdline['<path>']
    archive = cmdline['--archive']
    date = cmdline['--date']

    # make sure source directories are given as absolute paths
    src_dirs = settings.src_dirs
    for src_dir in src_dirs:
        if not src_dir.is_absolute():
            raise Error('restore command cannot be used', 'with relative source directories', culprit=src_dir)

    # convert the requested paths to absolute resolved paths
    resolved = [to_path(p).resolve() for p in requested]

    # warn about any path that does not fall within a source directory
    found_unknown = False
    for path in resolved:
        if not any(str(path).startswith(str(sd)) for sd in src_dirs):
            found_unknown = True
            warn('unknown path.', culprit=path)
    if found_unknown:
        codicil('Paths should start with:', conjoin(src_dirs, conj=', or '))

    # borg expects paths without the leading /
    paths = [str(p).lstrip('/') for p in resolved]

    # choose the archive: nearest to the given date, else the latest
    if date and not archive:
        archive = get_name_of_nearest_archive(settings, date)
    if not archive:
        archive = get_name_of_latest_archive(settings)
    output('Archive:', archive)

    # run borg from the filesystem root so extraction lands in place
    cd('/')
    borg = settings.run_borg(
        cmd='extract',
        args=[settings.destination(archive)] + paths,
        emborg_opts=options,
    )
    if borg.stdout:
        output(borg.stdout.rstrip())
def check(self):
    """Validate the settings.

    Collects the string-valued settings (used later by resolve) and
    complains about required settings that were not given.

    Raises:
        Error: If a required setting is missing.
    """
    # gather the string valued settings together (can be used by resolve)
    self.str_settings = {
        name: value for name, value in self.settings.items() if is_str(value)
    }

    # complain about required settings that are missing
    required = 'repository'.split()
    missing = [name for name in required if not self.settings.get(name)]
    if missing:
        missing = conjoin(missing)
        raise Error(f'{missing}: no value given for setting.')
def run(cls, command, args, settings, options):
    """Execute the restore command.

    Normalizes the requested paths, checks them against the source
    directories, selects an archive (nearest to a date, named, or the
    most recent), and extracts the paths with "borg extract".
    """
    # parse command line
    cmdline = docopt(cls.USAGE, argv=[command] + args)
    paths = cmdline['<path>']
    archive = cmdline['--archive']
    date = cmdline['--date']

    # strip leading / from source dirs and requested paths
    src_dirs = [str(p).lstrip('/') for p in settings.src_dirs]
    stripped = [p.lstrip('/') for p in paths]
    if paths != stripped:
        for path in paths:
            if path.startswith('/'):
                warn('removing initial /.', culprit=path)
        paths = stripped

    # warn about any path that does not fall within a source directory
    found_unknown = False
    for path in paths:
        if not any(path.startswith(src_dir) for src_dir in src_dirs):
            found_unknown = True
            warn('unknown path.', culprit=path)
    if found_unknown:
        codicil('Paths should start with:', conjoin(src_dirs))

    # choose the archive: nearest to the given date, named, or most recent
    if date and not archive:
        archive = get_nearest_archive(settings, date)
        if not archive:
            raise Error('archive not available.', culprit=date)
    if not archive:
        archives = get_available_archives(settings)
        if not archives:
            raise Error('no archives are available.')
        archive = archives[-1]['name']
    output('Archive:', archive)

    # run borg
    borg = settings.run_borg(
        cmd='extract',
        args=[settings.destination(archive)] + paths,
        emborg_opts=options,
    )
    if borg.stdout:
        output(borg.stdout.rstrip())
def write(genRST=False):
    """Generate the program man page.

    Args:
        genRST (bool): Also write the intermediate reStructuredText file,
            which is only useful for debugging.
    """
    for page in [PROGRAM_MANPAGE]:
        basename = '%s.%s' % (page['name'], page['sect'])
        rst = dedent(page['contents'][1:-1]).format(
            released=__released__,
            version=__version__,
            extensions=conjoin(media_file_extensions, conj=" and ", sep=", ")
        )

        # generate reStructuredText file (only used for debugging)
        if genRST:
            print("generating %s.rst" % basename)
            with open(basename + '.rst', 'w') as f:
                f.write(rst)

        # generate man page
        print("generating %s" % basename)
        with open(basename, 'w') as f:
            f.write(publish_string(rst, writer=manpage.Writer()).decode())
def __init__(
    self,
    recipe,
    *,
    def_alphabet = ALPHANUMERIC,
    master = None,
    version = None,
    shift_sort = False,
    is_secret = True,
):
    """Parse a password recipe and save the generation parameters.

    Args:
        recipe (str): A space-separated specification.  The first term
            is the total length; each remaining term is matched by
            self.PATTERN into a count, an alphabet kind, and an alphabet,
            with known kinds replaced from self.ALPHABETS.
        def_alphabet: Alphabet used where no requirement applies.
        master: Stored for later use.
        version: Stored for later use.
        shift_sort (bool): Stored for later use.
        is_secret (bool): Stored for later use.

    Raises:
        PasswordError: If recipe is not a string, or if a term is not an
            integer length or a valid requirement.
    """
    requirements = []
    try:
        parts = recipe.split()
    except (ValueError, AttributeError):
        # recipe has no usable split(); it is not a string
        raise PasswordError(
            'recipe must be a string, found %s.' % recipe,
            culprit=error_source()
        )
    try:
        # first term is the total length
        each = parts[0]
        length = int(each)
        # remaining terms each add an (alphabet, count) requirement;
        # `each` is left holding the offending term if parsing fails
        for each in parts[1:]:
            num, kind, alphabet = self.PATTERN.match(each).groups()
            if self.ALPHABETS[kind]:
                alphabet = self.ALPHABETS[kind]
            requirements += [(alphabet, int('0' + num))]
    except (ValueError, AttributeError):
        raise PasswordError(
            each, recipe, conjoin(self.ALPHABETS.keys(), conj=' or '),
            template="{0}: invalid term in recipe '{1}'. Choose from {2}.",
            culprit=error_source()
        )
    self.length = length
    self.def_alphabet = def_alphabet
    self.requirements = requirements
    self.master = master
    self.version = version
    self.shift_sort = shift_sort
    self.is_secret = is_secret
def check(self):
    """Validate the settings.

    Collects the string-valued settings, verifies the required settings
    are present, defaults working_dir when unspecified, validates
    ssh_backend_method, and adds the working directory to the excludes.
    """
    # gather the string valued settings together (can be used by resolve)
    self.str_settings = {
        name: value for name, value in self.settings.items() if is_str(value)
    }

    # complain about required settings that are missing
    required = ['dest_server', 'dest_dir', 'src_dir', 'ssh_backend_method']
    missing = [name for name in required if not self.settings.get(name)]
    if missing:
        missing = conjoin(missing)
        self.fail(f'{missing}: no value given.')

    # default the working_dir if it was not specified
    working_dir = self.settings.get('working_dir')
    if not working_dir:
        working_dir = self.resolve(DEFAULT_WORKING_DIR)
    self.settings['working_dir'] = working_dir
    self.str_settings['working_dir'] = working_dir

    # check the ssh_backend_method
    if self.ssh_backend_method not in ['option', 'protocol']:
        self.fail(
            f'{self.ssh_backend_method}:',
            'invalid value given for ssh_backend_method.',
        )

    # add the working directory to excludes
    excludes = self.settings.get('excludes', [])
    excludes.append(self.working_dir)
    self.settings['excludes'] = excludes
def run(cls, command, args, settings, options):
    """Execute the prune command.

    Verifies that at least one keep_* prune setting is configured, then
    invokes "borg prune" on the repository.
    """
    # parse the command line (nothing beyond the usage check is needed)
    docopt(cls.USAGE, argv=[command] + args)

    # verify that at least one prune interval setting is configured
    interval_names = 'within last minutely hourly daily weekly monthly yearly'.split()
    keep_settings = ['keep_' + name for name in interval_names]
    if not any(settings.value(name) for name in keep_settings):
        choices = conjoin(keep_settings, ', or ')
        raise Error(
            'No prune settings available',
            codicil=f'At least one of {choices} must be specified.')

    # run borg
    borg = settings.run_borg(
        cmd='prune',
        args=[settings.destination()],
        emborg_opts=options,
    )
    if borg.stdout:
        output(borg.stdout.rstrip())
def open_browser(cls, name, key=None):
    """Open one of the account's URLs in a browser.

    Args:
        name (str): Browser name used when the account does not specify
            its own browser.
        key: Which URL to open when several are available; defaults to
            the account's default_url attribute.

    Raises:
        Error: If the key is unknown, or keys are given but not
            supported by this account's urls.
    """
    browser = cls.get_field("browser", default=None)
    if browser is None or is_str(browser):
        browser = StandardBrowser(name)

    # get the urls from the urls attribute
    if not key:
        key = getattr(cls, "default_url", None)
    urls = getattr(cls, "urls", [])
    # IDIOM FIX: use isinstance rather than comparing type() to dict;
    # normalize a plain string or list of urls into a keyless dict
    if not isinstance(urls, dict):
        if is_str(urls):
            urls = urls.split()
        urls = {None: urls}

    # get the urls from the url recognizers
    # currently urls from recognizers dominate over those from attributes
    discovery = getattr(cls, "discovery", ())
    for each in Collection(discovery):
        urls.update(each.all_urls())

    # select the urls
    try:
        urls = urls[key]
    except TypeError:
        if key:
            raise Error("keys are not supported with urls on this account.", culprit=key)
    except KeyError:
        keys = cull(urls.keys())
        if keys:
            raise Error("unknown key, choose from %s." % conjoin(keys), culprit=key)
        else:
            raise Error("keys are not supported with urls on this account.", culprit=key)

    url = list(Collection(urls))[0]  # use the first url specified

    # open the url
    browser.run(url)
def open_browser(cls, key=None, browser_name=None, list_urls=False):
    """Open one of the account's URLs in a browser, or list the URLs.

    Args:
        key: Which URL to open when several are available; defaults to
            the account's default_url, or the only available key.
        browser_name: Name of the browser to use; defaults to the
            account's browser field.
        list_urls (bool): Print the available URLs rather than opening one.

    Raises:
        PasswordError: If the key is unknown or required, or no url is
            available.
    """
    if not browser_name:
        browser_name = cls.get_scalar('browser', default=None)
    browser = StandardBrowser(browser_name)

    # get the urls from the urls attribute
    # this must be second so it overrides those from recognizers.
    primary_urls = getattr(cls, 'urls', [])
    # IDIOM FIX: use isinstance rather than comparing type() to dict;
    # normalize a plain string or list of urls into a keyless dict
    if not isinstance(primary_urls, dict):
        if is_str(primary_urls):
            primary_urls = primary_urls.split()
        primary_urls = {None: primary_urls} if primary_urls else {}

    # get the urls from the url recognizers
    discovery = getattr(cls, 'discovery', ())
    urls = {}
    for each in Collection(discovery):
        urls.update(each.all_urls())

    # combine, primary_urls must be added to urls, so they dominate
    urls.update(primary_urls)

    if list_urls:
        default = getattr(cls, 'default_url', None)
        for name, url in urls.items():
            if is_collection(url):
                url = list(Collection(url))[0]
            if name == default:
                # the default entry is always shown, even if unnamed
                url += HighlightColor(' [default]')
                if not name:
                    name = ''
            elif not name:
                # skip unnamed entries that are not the default
                continue
            output(LabelColor('{:>24s}:'.format(name)), url)
        return

    # select the urls
    keys = cull(list(urls.keys()))
    if not key:
        key = getattr(cls, 'default_url', None)
    if not key and keys and len(keys) == 1:
        key = keys[0]
    try:
        urls = urls[key]
    except KeyError:
        if keys:
            if key:
                msg = 'unknown key, choose from {}.'
            else:
                msg = 'key required, choose from {}.'
            raise PasswordError(msg.format(conjoin(repr(k) for k in keys)), culprit=key)
        else:
            if key:
                raise PasswordError(
                    'keys are not supported with urls on this account.',
                    culprit=key)
            else:
                raise PasswordError('no url available.')

    # open the url
    urls = Collection(urls)
    url = list(urls)[0]  # use the first url specified
    browser.run(url)
def run(cls, command, args):
    """Execute the add command: create a new account from a template.

    Opens the chosen account template in the user's editor, obscures any
    <<...>> marked values, and appends the result to the selected
    accounts file after saving a backup of the original.
    """
    # read command line
    cmdline = docopt(cls.USAGE, argv=[command] + args)
    try:
        # get the specified template
        templates = get_setting('account_templates')
        if cmdline['<template>']:
            template_name = cmdline['<template>']
        else:
            template_name = get_setting('default_account_template')
        template = dedent(templates[template_name]).strip() + '\n'

        # save template to tmp file and open it in the editor
        from tempfile import mktemp
        tmpfile = GnuPG(mktemp(suffix='_avendesora.gpg'))
        tmpfile.save(template, get_setting('gpg_ids'))
        GenericEditor.open_and_search(tmpfile.path)

        # read the tmp file and determine if it has changed
        new = tmpfile.read()
        tmpfile.remove()
        if new == template:
            return output('Unchanged, and so ignored.')

        # hide the values that should be hidden
        def hide(match):
            return 'Hidden(%r)' % Obscure.hide(match.group(1))
        new = re.sub("<<(.*?)>>", hide, new)

        # determine the accounts file
        prefix = cmdline['--file']
        if prefix:
            candidates = [
                p for p in get_setting('accounts_files')
                if p.startswith(prefix)
            ]
            if not candidates:
                # BUG FIX: keyword was misspelled 'cuplrit', so the
                # culprit was silently dropped from the error
                raise Error('not found.', culprit=cmdline['--file'])
            if len(candidates) > 1:
                # BUG FIX: same 'cuplrit' misspelling
                raise Error(
                    'ambiguous, matches %s.' % conjoin(candidates),
                    culprit=prefix
                )
            filename = candidates[0]
        else:
            filename = get_setting('accounts_files')[0]
        path = to_path(get_setting('settings_dir'), filename)

        # get original contents of accounts file
        orig_accounts_file = PythonFile(path)
        accounts = orig_accounts_file.run()
        gpg_ids = accounts.get('gpg_ids')

        # add new account to the contents
        accounts = orig_accounts_file.code + new + '\n'

        # rename the original file and then save the new version
        orig_accounts_file.rename('.saved')
        new_accounts_file = GnuPG(path)
        new_accounts_file.save(accounts, gpg_ids)
    except OSError as err:
        error(os_error(err))
    except KeyError as err:
        error(
            'unknown account template, choose from %s.' % conjoin(
                sorted(templates.keys())
            ),
            culprit=template_name
        )