def execute(cls, name, args):
    command = cls.find(name)
    if command:
        command.run(name, args if args else [])
    else:
        error('unknown command.', culprit=name)
        codicil("Use 'avendesora help' for list of available commands.")
def publish_private_key(self):
    keyname = self.keyname
    data = self.data
    clients = self.data.get('clients', [])
    prov = '.provisional' if self.trial_run else ''

    # copy key pair to remote client
    for client in sorted(clients):
        if self.update and client not in self.update:
            continue
        if client in self.skip:
            continue
        narrate(' publishing key pair to', client)
        client_data = clients[client]

        # delete any pre-existing provisional files
        # the goal here is to leave a clean directory when not trial-run
        try:
            run_sftp(client, [
                fmt('rm .ssh/{keyname}.provisional'),
                fmt('rm .ssh/{keyname}.pub.provisional'),
            ])
        except OSError as err:
            pass

        # now upload the new files
        try:
            run_sftp(client, [
                fmt('put -p {keyname} .ssh/{keyname}{prov}'),
                fmt('put -p {keyname}.pub .ssh/{keyname}.pub{prov}'),
            ])
        except OSError as err:
            error(os_error(err))
def __new__(cls, server, include_file, bypass, trial_run):
    if server in AuthKeys.known:
        self = AuthKeys.known[server]
        if include_file != self.include_file:
            warn(
                'inconsistent remote include file:',
                fmt('{include_file} != {self.include_file} in {server}.')
            )
        return self
    self = super(AuthKeys, cls).__new__(cls)
    AuthKeys.known[server] = self
    self.server = server
    self.bypass = bypass
    self.trial_run = trial_run
    self.keys = {}
    self.comment = {}
    self.restrictions = {}
    self.include_file = include_file
    self.include = None

    # get remote include file if it exists
    if include_file and not bypass:
        narrate(fmt(' retrieving remote include file from {server}.'))
        try:
            try:
                run_sftp(self.server, [
                    fmt('get .ssh/{inc} {inc}.{server}', inc=include_file)
                ])
                self.include = to_path(include_file + '.' + server).read_text()
            except OSError as err:
                comment(fmt(' sftp {server}: {include_file} not found.'))
        except OSError as err:
            error(os_error(err))
    return self
def test_fabricate():
    with messenger(hanging_indent=False) as (msg, stdout, stderr, logfile):
        error('hey now!')
        codicil('baby', 'bird', sep='\n')
        error('uh-huh\nuh-huh', culprit='yep yep yep yep yep yep yep yep yep yep yep'.split())
        expected = dedent('''
            error: hey now!
                baby
                bird
            error: yep, yep, yep, yep, yep, yep, yep, yep, yep, yep, yep:
                uh-huh
                uh-huh
        ''').strip()
        assert msg.errors_accrued() == 2
        assert errors_accrued(True) == 2
        assert msg.errors_accrued() == 0
        assert strip(stdout) == expected
        assert strip(stderr) == ''
        assert log_strip(logfile) == dedent('''
            ack: invoked as: <exe>
            ack: log opened on <date>
            {expected}
        ''').strip().format(expected=expected)
def _autotype(self, text, ms_per_char=None):
    if not text:
        return

    # Split the text into individual key strokes and convert the special
    # characters to their xkeysym names
    keysyms = []
    for char in text:
        if char in string.ascii_letters + string.digits:
            keysym = char
        else:
            keysym = KEYSYMS.get(char)
        if not keysym:
            error('cannot map to keysym, unknown.', culprit=char)
        else:
            keysyms.append(keysym)
    xdotool = get_setting('xdotool_executable')
    try:
        cmd = [xdotool, 'key', '--clearmodifiers']
        if not ms_per_char:
            ms_per_char = get_setting('ms_per_char')
        if ms_per_char:
            cmd += ['--delay', str(ms_per_char)]
        cmd += keysyms
        Run(cmd, 'soEW', log=False)
            # it is important that log be False; it prevents the password
            # from ending up in the logfile.
    except Error as e:
        e.reraise(culprit=xdotool)
def read_defaults(self):
    settings = {}
    try:
        from appdirs import user_config_dir
        config_file = to_path(user_config_dir('vdiff'), 'config')
        try:
            code = config_file.read_text()
            try:
                compiled = compile(code, str(config_file), 'exec')
                exec(compiled, settings)
            except Exception as e:
                error(e, culprit=config_file)
        except FileNotFoundError:
            pass
        except OSError as e:
            warn(os_error(e))
        if self.useGUI is not None:
            settings['gui'] = self.useGUI
    except ImportError:
        pass
    if settings.get('gui', DEFAULT_GUI):
        if 'DISPLAY' not in os.environ:
            warn('$DISPLAY not set, ignoring request for gvim.')
        else:
            self.cmd = settings.get('gvimdiff', DEFAULT_GVIM)
            return
    self.cmd = settings.get('vimdiff', DEFAULT_VIM)
def main(cls):
    app = cls.from_params()
    app.load(DocoptConfig)
    app.load(MakerConfig)
    try:
        app.protocol.print()
    except StepwiseMolBioError as err:
        error(err)
def clean(host):
    try:
        narrate(fmt('Cleaning {host}.'))
        run_sftp(host, ['rm .ssh/*.provisional'])
    except OSError as err:
        if 'no such file or directory' in str(err).lower():
            comment(os_error(err))
        else:
            error('cannot connect.', culprit=host)
def gather_public_keys(self):
    comment(' gathering public keys')
    keyname = self.keyname
    data = self.data
    clients = conjoin(self.data.get('clients', []))
    default_purpose = fmt('This key allows access from {clients}.')
    purpose = self.data.get('purpose', default_purpose)
    servers = self.data.get('servers', [])
    prov = '.provisional' if self.trial_run else ''

    # read contents of public key
    try:
        pubkey = to_path(keyname + '.pub')
        key = pubkey.read_text().strip()
    except OSError as err:
        narrate('%s, skipping.' % os_error(err))
        return

    # get fingerprint of public key
    try:
        keygen = Run(['ssh-keygen', '-l', '-f', pubkey], modes='wOeW')
        fields = keygen.stdout.strip().split()
        fingerprint = ' '.join([fields[0], fields[1], fields[-1]])
    except OSError as err:
        error(os_error(err))
        return

    # contribute commented and restricted public key to the authorized_key
    # file for each server
    for server in servers:
        if self.update and server not in self.update:
            continue
        if server in self.skip:
            continue
        server_data = servers[server]
        description = server_data.get('description', None)
        restrictions = server_data.get('restrictions', [])
        remarks = [
            '# %s' % t
            for t in cull([purpose, description, self.warning, fingerprint])
            if t
        ]
        include_file = server_data.get(
            'remote-include-filename', data['remote-include-filename']
        )
        bypass = server_data.get('bypass')
        authkeys = AuthKeys(server, include_file, bypass, self.trial_run)
        authkeys.add_public_key(keyname, key, remarks, restrictions)
    if not servers:
        warn(
            'no servers specified, you must update them manually.',
            culprit=keyname
        )
def show(cls, name=None):
    if name:
        command = Command.find(name)
        if command:
            return pager(command.help())
        for topic in cls.topics():
            if name == topic.get_name():
                return pager(topic.help())
        error('topic not found.', culprit=name)
    else:
        cls.help()
def test_showing():
    with messenger() as (msg, stdout, stderr, logfile):
        error('aaa bbb ccc', codicil=('000 111 222', '!!! @@@ ###'))
        assert msg.errors_accrued() == 1
        assert errors_accrued(True) == 1
        assert strip(stdout) == dedent('''
            error: aaa bbb ccc
                000 111 222
                !!! @@@ ###
        ''').strip()
        assert strip(stderr) == ''
def show(cls, name=None, desc=None):
    if name:
        command, _ = Command.find(name)
        if command:
            return pager(command.help())
        for topic in cls.topics():
            if name == topic.get_name():
                return pager(topic.help())
        error('topic not found.', culprit=name)
    else:
        cls.help(desc)
def cleanup(self):
    if self.vim:
        self.vim.kill()
    for each in cull([self.file1, self.file2, self.file3, self.file4]):
        path = to_path(each)
        dn = path.parent
        fn = path.name
        swpfile = to_path(dn, '.' + fn + '.swp')
        try:
            rm(swpfile)
        except OSError as e:
            error(os_error(e))
def publish_archive(config, workspace):
    results = []
    for plugin in select_plugins(config, 'publish'):
        subconfigs = config['publish'].get(plugin.name, [])
        results += run_plugin(
            plugin, config, subconfigs, workspace,
        )
    if not results:
        error(f"No automated publishing rules found.")
    return bool(results)
def publish(self):
    narrate('publishing authorized_keys to', self.server)
    prov = '.provisional' if self.trial_run else ''
    entries = [
        fmt("# This file was generated by sshdeploy on {date}.")
    ]
    if self.include:
        entries += [
            '\n'.join([
                fmt('# Contents of {self.include_file}:'),
                self.include
            ])
        ]
    for name in sorted(self.keys.keys()):
        key = self.keys[name]
        comment = self.comment[name]
        comment = [comment] if is_str(comment) else comment
        restrictions = self.restrictions[name]
        if not is_str(restrictions):
            restrictions = ','.join(restrictions)
        restricted_key = ' '.join(cull([restrictions, key]))
        entries.append('\n'.join(comment + [restricted_key]))

    # delete any pre-existing provisional files
    # the goal here is to leave a clean directory when not trial-run
    try:
        run_sftp(self.server, [
            fmt('rm .ssh/authorized_keys.provisional')
        ])
    except OSError as err:
        pass

    # now upload the new authorized_keys file
    try:
        authkey = to_path('authorized_keys.%s' % self.server)
        with authkey.open('w') as f:
            f.write('\n\n'.join(entries) + '\n')
        authkey.chmod(0o600)
        if self.bypass:
            warn(
                'You must manually upload',
                fmt('<keydir>/authorized_keys.{self.server}.'),
                culprit=self.server
            )
        else:
            run_sftp(self.server, [
                fmt('put -p {authkey} .ssh/authorized_keys{prov}')
            ])
    except OSError as err:
        error(os_error(err))
def update_params(self, **params):
    prev_params = self.metadata['parameters']
    curr_params = params
    if prev_params and prev_params != curr_params:
        error(f"{self.repr} parameters differ from those used previously!")
        for key in params:
            prev = prev_params.get(key, '')
            curr = curr_params.get(key, '')
            if prev != curr:
                codicil(f" {key!r} was {prev!r}, now {curr!r}")
        fatal("Use the -f flag to overwrite. Aborting.")
    self.metadata['parameters'] = curr_params
def autotype(text):
    # Split the text into individual key strokes and convert the special
    # characters to their xkeysym names
    keysyms = []
    for char in text:
        if char in string.ascii_letters + string.digits:
            keysym = char
        else:
            keysym = KEYSYMS.get(char)
        if not keysym:
            error('cannot map to keysym, unknown', culprit=char)
        else:
            keysyms.append(keysym)
    run_xdotool('key --clearmodifiers'.split() + keysyms)
def display_field(self, account, field):
    # get string to display
    value, is_secret, name, desc = tuple(account.get_value(field))
    label = '%s (%s)' % (name, desc) if desc else name
    value = dedent(str(value)).strip()
    label_color = get_setting('_label_color')

    # indent multiline outputs
    sep = ' '
    if '\n' in value:
        if is_secret:
            warn('secret contains newlines, will not be fully concealed.')
        value = indent(value, get_setting('indent')).strip('\n')
        sep = '\n'

    if label:
        if label[0] == '_':
            # hidden field
            label = '!' + label[1:]
        text = label_color(label.replace('_', ' ') + ':') + sep + value
    else:
        text = value
        label = field
    log('Writing to TTY:', label)

    if is_secret:
        if Color.isTTY():
            # Write only if output is a TTY. This is a security feature.
            # The idea is that when the TTY writer is called it is because
            # the user is expecting the output to go to the tty. This
            # eliminates the chance that the output can be intercepted and
            # recorded by replacing Avendesora with an alias or shell
            # script. If the user really wants the output to go to something
            # other than the TTY, the user should use the --stdout option.
            try:
                cursor.write(text)
                cursor.conceal()
                sleep(get_setting('display_time'))
            except KeyboardInterrupt:
                pass
            cursor.reveal()
            cursor.clear()
        else:
            error('output is not a TTY.')
            codicil(
                'Use --stdout option if you want to send secret',
                'to a file or a pipe.'
            )
    else:
        output(text)
def check_excludes(patterns, roots, src, expand_tilde=True):
    if not roots:
        error("no roots available.", culprit=src)
        return
    paths = []
    for pattern in patterns:
        pattern = str(pattern).strip()
        if not pattern or pattern[0] == "#":
            continue  # is comment
        try:
            paths.append(check_pattern(pattern, "fm", roots, expand_tilde))
        except Error as e:
            e.report(culprit=src, codicil=repr(pattern))
    return paths
def __init__(self, path):
    # find the dictionary, initially look in the settings directory
    if not path.exists():
        # if not there look in install directory
        from pkg_resources import resource_filename
        path = to_path(resource_filename(__name__, 'words'))

    # open the dictionary
    try:
        contents = path.read_text()
    except OSError as err:
        error(os_error(err))
        contents = ''
    self.hash = hashlib.md5(contents.encode('utf-8')).hexdigest()
    self.words = contents.split()
def auth_getpass(config):
    """
    Prompt for a passcode to encrypt the archive with.
    """
    from getpass import getpass
    try:
        while True:
            passcode = getpass("Please enter a password to encrypt your spare keys: ")
            verify = getpass("Enter the same password again to check for typos: ")
            if passcode == verify:
                return passcode
            else:
                error("The passwords you entered did not match.\nTry again or type Ctrl-C to exit:\n")
    except EOFError:
        print()
        raise SkipPlugin("Received EOF")
def show(cls, name=None):
    if name:
        # search commands
        try:
            command, _ = Command.find(name)
            if command:
                return pager(command.help())
        except Error:
            pass

        # search topics
        for topic in cls.topics():
            if name == topic.get_name():
                return pager(topic.help())

        error('topic not found.', culprit=name)
    else:
        from .main import synopsis
        cls.help(synopsis)
def publish_mount(config, workspace):
    """
    Copy the archive to one or more mounted/mountable drives.
    """
    drives = require_one_or_more(config, 'drive')
    remote_dir = config.get('remote_dir', 'backup/sparekeys')
    remote_dir = remote_dir.format(**PARAMS)
    for drive in drives:
        narrate(f"copying archive to '{drive}'.")
        try:
            with mount(drive):
                dest = to_path(drive, remote_dir)
                rm(dest)
                mkdir(dest)
                cp(workspace, dest)
        except Error as e:
            error(e, culprit=drive, codicil='Skipping.')
        else:
            display(f"Archive copied to '{drive}'.")
def check_patterns(patterns, roots, working_dir, src, expand_tilde=True):
    paths = []
    default_style = "sh"
    for pattern in patterns:
        pattern = pattern.strip()
        culprit = (str(src), repr(pattern))
        kind = pattern[0:1]
        arg = pattern[1:].lstrip()
        if kind in ["", "#"]:
            continue  # is comment
        if kind not in known_kinds:
            error("unknown type", culprit=culprit)
            continue
        if kind == "R":
            try:
                if arg[0] == "~" and not expand_tilde:
                    error(
                        "borg does not interpret leading tilde as user.",
                        culprit=culprit,
                    )
                root = expand_user(arg)
                check_root(root, working_dir)
                roots.append(root)
            except AttributeError:
                error("can no longer add roots.", culprit=culprit)
            paths.append(kind + " " + root)
        elif kind == "P":
            if arg in known_styles:
                default_style = arg
            else:
                error("unknown pattern style.", culprit=culprit)
            paths.append(kind + " " + arg)
        elif not roots:
            error("no roots available.", culprit=culprit)
            return []
        else:
            try:
                paths.append(
                    kind + " " + check_pattern(arg, default_style, roots, expand_tilde))
            except Error as e:
                e.report(culprit=culprit)
    return paths
def test_access(host):
    try:
        narrate(fmt('Testing connection to {host}.'))
        payload = fmt('test payload for {host}')
        ref = to_path('.ref')
        test = to_path('.test')
        ref.write_text(payload)
        rm(test)
        run_sftp(host, [
            fmt('put {ref}'),
            fmt('get {ref} {test}'),
            fmt('rm {ref}')
        ])
        if test.read_text() == payload:
            comment('connection successful.', culprit=host)
        else:
            error('cannot connect.', culprit=host)
    except OSError as err:
        error('cannot connect.', culprit=host)
    rm(ref, test)
def main():
    import docopt
    args = docopt.docopt(__doc__)

    # Create the workspace directory:
    work = SharedWorkspace(args['<workspace>'])
    if args['--force']:
        work.rmdir()
    if work.exists():
        error(f"Workspace '{args['<workspace>']}' already exists.")
        fatal("Use the -f flag to overwrite. Aborting.")
    work.mkdir()

    # Fill in the workspace:
    copyfile(args['<fasta>'], work.target_fasta)
    copyfile(args['<pdb>'], work.get_target_pdb(args['<pdb>']))
    work.loophash_db.symlink_to(
        os.path.relpath(
            Path(args['<loophash_db>']).resolve(),
            work.root,
        ),
        target_is_directory=True,
    )
    work.params['target'] = {
        'pdb_chain': args['<pdb_chain>'],
    }
    work.params['user'] = {
        'email': args['--user-email'],
    }
    work.write_params()
    display("Workspace successfully initialized.")
def test_birthmark():
    with messenger() as (msg, stdout, stderr, logfile):
        expected = dedent('''
            error: Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed
                do eiusmod tempor incididunt ut labore et dolore magna aliqua.
                Ut enim ad minim veniam, quis nostrud exercitation ullamco
                laboris nisi ut aliquip ex ea commodo consequat. Duis aute
                irure dolor in reprehenderit in voluptate velit esse cillum
                dolore eu fugiat nulla pariatur. Excepteur sint occaecat
                cupidatat non proident, sunt in culpa qui officia deserunt
                mollit anim id est laborum.
        ''').strip()
        error(lorum_ipsum, wrap=True)
        assert msg.errors_accrued() == 1
        assert errors_accrued() == 1
        assert strip(stdout) == expected
        assert strip(stderr) == ''
        assert log_strip(logfile) == dedent('''
            ack: invoked as: <exe>
            ack: invoked on: <date>
            {expected}
        ''').strip().format(expected=expected)
def run(self, url, name=None):
    name = name if name else self.name
    if url:
        if '://' not in url:
            url = 'https://' + url
        try:
            cmd = self.cmd if self.cmd else get_setting('browsers')[name]
            try:
                cmd = cmd.format(url=url)
            except TypeError:
                pass
            log("running '%s'" % cmd)
            Run(cmd, 'sOew')
        except KeyError:
            error(
                'unknown browser, choose from %s.' % ', '.join(get_setting('browsers')),
                culprit=name
            )
        except OSError as err:
            error(os_error(err))
    else:
        error('url not available.')
try:
    with Schematic(filename='mfed.svg', line_width=2, background='none'):
        vin = Source(name='Vin', value='1 V', kind='sine')
        Ground(C=vin.n)
        rs = Resistor(name='Rs', value=Rref, n=vin.p, xoff=25)
        Wire([vin.p, rs.n])
        c1 = Capacitor(name='C1', value=C1, p=rs.p, xoff=25)
        Ground(C=c1.n)
        l2 = Inductor(name='L2', value=L2, n=c1.p, xoff=25)
        Wire([rs.p, l2.n])
        c3 = Capacitor(name='C3', value=C3, p=l2.p, xoff=25)
        Ground(C=c3.n)
        l4 = Inductor(name='L4', value=L4, n=c3.p, xoff=25)
        Wire([l2.p, l4.n])
        c5 = Capacitor(name='C5', value=C5, p=l4.p, xoff=25)
        Ground(C=c5.n)
        rl = Resistor(name='Rl', value=Rref, p=c5.p, xoff=100, orient='v')
        Ground(C=rl.n)
        out = Pin(name='out', C=rl.p, xoff=50, w=2)
        Wire([l4.p, out.t])
        Label(S=c3.N, yoff=-50, name=f'{Fo} LPF', loc='s')
        Dot(C=c1.p)
        Dot(C=c3.p)
        Dot(C=c5.p)
        Dot(C=rl.p)
except Error as e:
    e.report()
except OSError as e:
    error(os_error(e))
print "This must be python 2" from inform import display, warn, error display( 'Display is like print' 'except that it supports logging and can be disabled.', sep=', ') # Display is like print, except that it supports logging and can be disabled. warn('warnings get a header that is printed in yellow.') # warning: warnings get a header that is printed in yellow. error('errors get a header that is printed in red.') # error: errors get a header that is printed in red.
def main(): version = f"{__version__} ({__released__})" cmdline = docopt(__doc__, version=version) quiet = cmdline["--quiet"] problem = False use_color = Color.isTTY() and not cmdline["--no-color"] passes = Color("green", enable=use_color) fails = Color("red", enable=use_color) if cmdline["--verbose"]: overdue_message = verbose_overdue_message else: overdue_message = terse_overdue_message # prepare to create logfile log = to_path(DATA_DIR, OVERDUE_LOG_FILE) if OVERDUE_LOG_FILE else False if log: data_dir = to_path(DATA_DIR) if not data_dir.exists(): try: # data dir does not exist, create it data_dir.mkdir(mode=0o700, parents=True, exist_ok=True) except OSError as e: warn(os_error(e)) log = False with Inform(flush=True, quiet=quiet, logfile=log, version=version): # read the settings file try: settings_file = PythonFile(CONFIG_DIR, OVERDUE_FILE) settings = settings_file.run() except Error as e: e.terminate() # gather needed settings default_maintainer = settings.get("default_maintainer") default_max_age = settings.get("default_max_age", 28) dumper = settings.get("dumper", f"{username}@{hostname}") repositories = settings.get("repositories") root = settings.get("root") # process repositories table backups = [] if is_str(repositories): for line in repositories.split("\n"): line = line.split("#")[0].strip() # discard comments if not line: continue backups.append([c.strip() for c in line.split("|")]) else: for each in repositories: backups.append([ each.get("host"), each.get("path"), each.get("maintainer"), each.get("max_age"), ]) def send_mail(recipient, subject, message): if cmdline["--mail"]: if cmdline['--verbose']: display(f"Reporting to {recipient}.\n") mail_cmd = ["mailx", "-r", dumper, "-s", subject, recipient] Run(mail_cmd, stdin=message, modes="soeW0") # check age of repositories for host, path, maintainer, max_age in backups: maintainer = default_maintainer if not maintainer else maintainer max_age = float(max_age) if max_age else default_max_age try: path = to_path(root, path) if path.is_dir(): paths = list(path.glob("index.*")) if not paths: raise Error("no sentinel file found.", culprit=path) if len(paths) > 1: raise Error("too many sentinel files.", *paths, sep="\n ") path = paths[0] mtime = arrow.get(path.stat().st_mtime) delta = now - mtime age = 24 * delta.days + delta.seconds / 3600 report = age > max_age overdue = ' -- overdue' if report else '' color = fails if report else passes if report or not cmdline["--no-passes"]: display(color(fmt(overdue_message))) if report: problem = True subject = f"backup of {host} is overdue" msg = fmt(mail_overdue_message) send_mail(maintainer, subject, msg) except OSError as e: problem = True msg = os_error(e) error(msg) if maintainer: send_mail( maintainer, f"{get_prog_name()} error", error_message.format(msg), ) except Error as e: problem = True e.report() if maintainer: send_mail( maintainer, f"{get_prog_name()} error", error_message.format(str(e)), ) terminate(problem)
def main():
    with Inform(
        error_status=2,
        flush=True,
        logfile=LoggingCache(),
        prog_name='emborg',
        version=version,
    ) as inform:
        # read command line
        cmdline = docopt(expanded_synopsis, options_first=True, version=version)
        config = cmdline["--config"]
        command = cmdline["<command>"]
        args = cmdline["<args>"]
        if cmdline["--mute"]:
            inform.mute = True
        if cmdline["--quiet"]:
            inform.quiet = True
        if cmdline["--relocated"]:
            os.environ['BORG_RELOCATED_REPO_ACCESS_IS_OK'] = 'YES'
        emborg_opts = cull([
            "verbose" if cmdline["--verbose"] else "",
            "narrate" if cmdline["--narrate"] else "",
            "dry-run" if cmdline["--dry-run"] else "",
            "no-log" if cmdline["--no-log"] else "",
        ])
        if cmdline["--narrate"]:
            inform.narrate = True

        Hooks.provision_hooks()
        worst_exit_status = 0

        try:
            # find the command
            cmd, cmd_name = Command.find(command)

            # execute the command initialization
            exit_status = cmd.execute_early(cmd_name, args, None, emborg_opts)
            if exit_status is not None:
                terminate(exit_status)

            queue = ConfigQueue(cmd)
            while queue:
                with Settings(config, emborg_opts, queue) as settings:
                    try:
                        exit_status = cmd.execute(cmd_name, args, settings, emborg_opts)
                    except Error as e:
                        exit_status = 2
                        settings.fail(e, cmd=' '.join(sys.argv))
                        e.terminate()
                if exit_status and exit_status > worst_exit_status:
                    worst_exit_status = exit_status

            # execute the command termination
            exit_status = cmd.execute_late(cmd_name, args, None, emborg_opts)
            if exit_status and exit_status > worst_exit_status:
                worst_exit_status = exit_status
        except KeyboardInterrupt:
            display("Terminated by user.")
        except Error as e:
            e.report()
            exit_status = 2
        except OSError as e:
            exit_status = 2
            error(os_error(e))
        if exit_status and exit_status > worst_exit_status:
            worst_exit_status = exit_status
        terminate(worst_exit_status)
def run(cls, command, args):
    # read command line
    cmdline = docopt(cls.USAGE, argv=[command] + args)

    try:
        # get the specified template
        templates = get_setting('account_templates')
        if cmdline['<template>']:
            template_name = cmdline['<template>']
        else:
            template_name = get_setting('default_account_template')
        template = dedent(templates[template_name]).strip() + '\n'

        # save template to tmp file and open it in the editor
        from tempfile import mktemp
        tmpfile = GnuPG(mktemp(suffix='_avendesora.gpg'))
        tmpfile.save(template, get_setting('gpg_ids'))
        GenericEditor.open_and_search(tmpfile.path)

        # read the tmp file and determine if it has changed
        new = tmpfile.read()
        tmpfile.remove()
        if new == template:
            return output('Unchanged, and so ignored.')

        # hide the values that should be hidden
        def hide(match):
            return 'Hidden(%r)' % Obscure.hide(match.group(1))
        new = re.sub("<<(.*?)>>", hide, new)

        # determine the accounts file
        prefix = cmdline['--file']
        if prefix:
            candidates = [
                p
                for p in get_setting('accounts_files')
                if p.startswith(prefix)
            ]
            if not candidates:
                raise Error('not found.', culprit=cmdline['--file'])
            if len(candidates) > 1:
                raise Error(
                    'ambiguous, matches %s.' % conjoin(candidates),
                    culprit=prefix
                )
            filename = candidates[0]
        else:
            filename = get_setting('accounts_files')[0]
        path = to_path(get_setting('settings_dir'), filename)

        # get original contents of accounts file
        orig_accounts_file = PythonFile(path)
        accounts = orig_accounts_file.run()
        gpg_ids = accounts.get('gpg_ids')

        # add new account to the contents
        accounts = orig_accounts_file.code + new + '\n'

        # rename the original file and then save the new version
        orig_accounts_file.rename('.saved')
        new_accounts_file = GnuPG(path)
        new_accounts_file.save(accounts, gpg_ids)
    except OSError as err:
        error(os_error(err))
    except KeyError as err:
        error(
            'unknown account template, choose from %s.' % conjoin(
                sorted(templates.keys())
            ),
            culprit=template_name
        )
def main():
    try:
        # Read command line {{{1
        cmdline = docopt(__doc__)
        keys = cmdline["--keys"].split(",") if cmdline["--keys"] else []
        update = cmdline["--update"].split(",") if cmdline["--update"] else []
        skip = cmdline["--skip"].split(",") if cmdline["--skip"] else []
        Inform(
            narrate=cmdline["--narrate"] or cmdline["--verbose"],
            verbose=cmdline["--verbose"],
            logfile=".sshdeploy.log",
            prog_name=False,
            flush=True,
            version=__version__,
        )
        if keys and not cmdline["--trial-run"]:
            fatal(
                "Using the --keys option results in incomplete authorized_keys files.",
                "It may only be used for testing purposes.",
                "As such, --trial-run must also be specified when using --keys.",
                sep="\n",
            )

        # Generated detailed help {{{1
        if cmdline["manual"]:
            from pkg_resources import resource_string
            try:
                Run(
                    cmd=["less"],
                    modes="soeW0",
                    stdin=resource_string("src", "manual.rst").decode("utf8"),
                )
            except OSError as err:
                error(os_error(err))
            terminate()

        # Read config file {{{1
        try:
            config_file = cmdline.get("--config-file")
            config_file = config_file if config_file else "sshdeploy.conf"
            contents = to_path(config_file).read_text()
        except OSError as err:
            fatal(os_error(err))
        code = compile(contents, config_file, "exec")
        config = {}
        try:
            exec(code, config)
        except Exception as err:
            fatal(err)

        # Move into keydir {{{1
        keydir = cmdline["--keydir"]
        keydir = to_path(keydir if keydir else "keys-" + date)
        if cmdline["generate"]:
            comment("creating key directory:", keydir)
            rm(keydir)
            mkdir(keydir)
            cd(keydir)
        elif cmdline["distribute"]:
            cd(keydir)

        # determine default values for key options
        defaults = {}
        for name, default in [
            ("keygen-options", DefaultKeygenOpts),
            ("abraxas-account", DefaultAbraxasAccount),
            ("remote-include-filename", DefaultRemoteIncludeFilename),
        ]:
            defaults[name] = config.get(name, default)

        # Generate keys {{{1
        if cmdline["generate"]:
            for keyname in sorted(config["keys"].keys()):
                data = config["keys"][keyname]
                if keys and keyname not in keys:
                    # user did not request this key
                    continue

                # get default values for missing key options
                for option in defaults:
                    data[option] = data.get(option, defaults[option])

                # generate the key
                key = Key(keyname, data, update, skip, cmdline["--trial-run"])
                key.generate()

        # Publish keys {{{1
        elif cmdline["distribute"]:
            for keyname in sorted(config["keys"].keys()):
                data = config["keys"][keyname]
                if keys and keyname not in keys:
                    continue  # user did not request this key

                # get default values for missing key options
                for option in defaults:
                    data[option] = data.get(option, defaults[option])

                # publish the key pair to clients
                key = Key(keyname, data, update, skip, cmdline["--trial-run"])
                key.publish_private_key()
                key.gather_public_keys()

        # publish authorized_keys files to servers {{{1
        if cmdline["distribute"]:
            for each in sorted(AuthKeys.known):
                authkey = AuthKeys.known[each]
                authkey.publish()
                authkey.verify()

        # Process hosts {{{1
        elif cmdline["test"] or cmdline["clean"] or cmdline["hosts"]:
            hosts = set()
            for keyname, data in config["keys"].items():
                if keys and keyname not in keys:
                    continue  # user did not request this key

                # add servers to list of hosts
                for server, options in data["servers"].items():
                    if update and server not in update or server in skip:
                        continue
                    if "bypass" not in options:
                        hosts.add(server)

                # add clients to list of hosts
                for client in data["clients"].keys():
                    if update and client not in update or client in skip:
                        continue
                    hosts.add(client)

            # process the hosts
            if cmdline["test"]:
                # test host
                for host in sorted(hosts):
                    test_access(host)
            elif cmdline["clean"]:
                # clean host
                for host in sorted(hosts):
                    clean(host)
            else:
                # list hosts
                for host in sorted(hosts):
                    display(host)
    except OSError as err:
        error(os_error(err))
    except KeyboardInterrupt:
        display("Killed by user")
    done()
def run(cls, command, args):
    # read command line
    cmdline = docopt(cls.USAGE, argv=[command] + args)

    # read archive file
    archive_path = get_setting('archive_file')
    f = PythonFile(archive_path)
    archive = f.run()
    import arrow
    created = archive.get('CREATED')
    if created:
        created = arrow.get(created).format('YYYY-MM-DD hh:mm:ss A ZZ')
        output('archive created: %s' % created)
    archive_accounts = archive.get('ACCOUNTS')
    if not archive_accounts:
        raise Error(
            'corrupt archive, ACCOUNTS missing.', culprit=archive_path
        )

    # run the generator
    generator = PasswordGenerator()

    # determine the account and open the URL
    current_accounts = {}
    for account in generator.all_accounts:
        entry = account.archive()
        if entry:
            current_accounts[account.get_name()] = entry

    # report any new or missing accounts
    new = current_accounts.keys() - archive_accounts.keys()
    missing = archive_accounts.keys() - current_accounts.keys()
    for each in sorted(new):
        output('new account:', each)
    for each in sorted(missing):
        output('missing account:', each)

    # for the common accounts, report any differences in the fields
    common = archive_accounts.keys() & current_accounts.keys()
    for account_name in sorted(common):
        archive_account = archive_accounts[account_name]
        current_account = current_accounts[account_name]

        # report any new or missing fields
        new = current_account.keys() - archive_account.keys()
        missing = archive_account.keys() - current_account.keys()
        for each in sorted(new):
            output(account_name, 'new field', each, sep=': ')
        for each in sorted(missing):
            output(account_name, 'missing field', each, sep=': ')

        # for the common fields, report any differences in the values
        shared = archive_account.keys() & current_account.keys()
        for field_name in sorted(shared):
            try:
                archive_value = archive_account[field_name]
                current_value = current_account[field_name]
                if is_collection(current_value) != is_collection(archive_value):
                    output(account_name, 'field dimension differs', field_name, sep=': ')
                elif is_collection(current_value):
                    archive_items = Collection(archive_account[field_name]).items()
                    current_items = Collection(current_account[field_name]).items()
                    archive_keys = set(k for k, v in archive_items)
                    current_keys = set(k for k, v in current_items)
                    new = current_keys - archive_keys
                    missing = archive_keys - current_keys
                    for each in sorted(new):
                        output(account_name, field_name, 'new member', each, sep=': ')
                    for each in sorted(missing):
                        output(account_name, field_name, 'missing member', each, sep=': ')
                    for k in sorted(archive_keys & current_keys):
                        if str(archive_value[k]) != str(current_value[k]):
                            output(account_name, 'member differs', '%s[%s]' % (field_name, k), sep=': ')
                else:
                    if dedent(str(archive_value)) != dedent(str(current_value)):
                        output(account_name, 'field differs', field_name, sep=': ')
            except Exception:
                error(
                    'unanticipated situation.',
                    culprit=(account_name, field_name)
                )
                raise
def main():
    version = f'{__version__} ({__released__})'
    cmdline = docopt(__doc__, version=version)
    quiet = cmdline['--quiet']
    problem = False

    with Inform(flush=True, quiet=quiet, version=version) as inform:
        # read the settings file
        settings_file = PythonFile(CONFIG_DIR, OVERDUE_FILE)
        settings_filename = settings_file.path
        settings = settings_file.run()

        # gather needed settings
        default_maintainer = settings.get('default_maintainer')
        default_max_age = settings.get('default_max_age', 28)
        dumper = settings.get('dumper', f'{getusername()}@{gethostname()}')
        repositories = settings.get('repositories')
        root = settings.get('root')

        # process repositories table
        backups = []
        if is_str(repositories):
            for line in repositories.split('\n'):
                line = line.split('#')[0].strip()  # discard comments
                if not line:
                    continue
                backups.append([c.strip() for c in line.split('|')])
        else:
            for each in repositories:
                backups.append([
                    each.get('host'),
                    each.get('path'),
                    each.get('maintainer'),
                    each.get('max_age')
                ])

        def send_mail(recipient, subject, message):
            if cmdline['--mail']:
                display(f'Reporting to {recipient}.\n')
                mail_cmd = ['mailx', '-r', dumper, '-s', subject, recipient]
                Run(mail_cmd, stdin=message, modes='soeW0')

        # check age of repositories
        now = arrow.now()
        display(f'current time = {now}')
        for host, path, maintainer, max_age in backups:
            maintainer = default_maintainer if not maintainer else maintainer
            max_age = int(max_age) if max_age else default_max_age
            try:
                path = to_path(root, path)
                if not path.is_dir():
                    raise Error('does not exist or is not a directory.', culprit=path)
                paths = list(path.glob('index.*'))
                if not paths:
                    raise Error('no sentinel file found.', culprit=path)
                if len(paths) > 1:
                    raise Error('too many sentinel files.', *paths, sep='\n ')
                path = paths[0]
                mtime = arrow.get(path.stat().st_mtime)
                delta = now - mtime
                age = 24 * delta.days + delta.seconds / 3600
                report = age > max_age
                display(
                    dedent(f"""
                        HOST: {host}
                            sentinel file: {path!s}
                            last modified: {mtime}
                            since last change: {age:0.1f} hours
                            maximum age: {max_age} hours
                            overdue: {report}
                    """))
                if report:
                    problem = True
                    subject = f"backup of {host} is overdue"
                    msg = overdue_message.format(host=host, path=path, age=age)
                    send_mail(maintainer, subject, msg)
            except OSError as e:
                problem = True
                msg = os_error(e)
                error(msg)
                if maintainer:
                    send_mail(maintainer, f'{get_prog_name()} error', error_message.format(msg))
            except Error as e:
                problem = True
                e.report()
                if maintainer:
                    send_mail(maintainer, f'{get_prog_name()} error', error_message.format(str(e)))
    terminate(problem)
def main():
    version = f'{__version__} ({__released__})'
    cmdline = docopt(__doc__, version=version)
    quiet = cmdline['--quiet']
    problem = False
    use_color = Color.isTTY() and not cmdline['--no-color']
    passes = Color('green', enable=use_color)
    fails = Color('red', enable=use_color)

    # prepare to create logfile
    log = to_path(DATA_DIR, OVERDUE_LOG_FILE) if OVERDUE_LOG_FILE else False
    if log:
        data_dir = to_path(DATA_DIR)
        if not data_dir.exists():
            try:
                # data dir does not exist, create it
                data_dir.mkdir(mode=0o700, parents=True, exist_ok=True)
            except OSError as e:
                warn(os_error(e))
                log = False

    with Inform(flush=True, quiet=quiet, logfile=log, version=version):
        # read the settings file
        try:
            settings_file = PythonFile(CONFIG_DIR, OVERDUE_FILE)
            settings = settings_file.run()
        except Error as e:
            e.terminate()

        # gather needed settings
        default_maintainer = settings.get('default_maintainer')
        default_max_age = settings.get('default_max_age', 28)
        dumper = settings.get('dumper', f'{username}@{hostname}')
        repositories = settings.get('repositories')
        root = settings.get('root')

        # process repositories table
        backups = []
        if is_str(repositories):
            for line in repositories.split('\n'):
                line = line.split('#')[0].strip()  # discard comments
                if not line:
                    continue
                backups.append([c.strip() for c in line.split('|')])
        else:
            for each in repositories:
                backups.append([
                    each.get('host'),
                    each.get('path'),
                    each.get('maintainer'),
                    each.get('max_age')
                ])

        def send_mail(recipient, subject, message):
            if cmdline['--mail']:
                display(f'Reporting to {recipient}.\n')
                mail_cmd = ['mailx', '-r', dumper, '-s', subject, recipient]
                Run(mail_cmd, stdin=message, modes='soeW0')

        # check age of repositories
        for host, path, maintainer, max_age in backups:
            maintainer = default_maintainer if not maintainer else maintainer
            max_age = float(max_age) if max_age else default_max_age
            try:
                path = to_path(root, path)
                if path.is_dir():
                    paths = list(path.glob('index.*'))
                    if not paths:
                        raise Error('no sentinel file found.', culprit=path)
                    if len(paths) > 1:
                        raise Error('too many sentinel files.', *paths, sep='\n ')
                    path = paths[0]
                mtime = arrow.get(path.stat().st_mtime)
                delta = now - mtime
                age = 24 * delta.days + delta.seconds / 3600
                report = age > max_age
                color = fails if report else passes
                if report or not cmdline['--no-passes']:
                    display(
                        color(
                            dedent(f"""
                                HOST: {host}
                                    sentinel file: {path!s}
                                    last modified: {mtime}
                                    since last change: {age:0.1f} hours
                                    maximum age: {max_age} hours
                                    overdue: {report}
                            """).lstrip()))
                if report:
                    problem = True
                    subject = f"backup of {host} is overdue"
                    msg = overdue_message.format(host=host, path=path, age=age)
                    send_mail(maintainer, subject, msg)
            except OSError as e:
                problem = True
                msg = os_error(e)
                error(msg)
                if maintainer:
                    send_mail(maintainer, f'{get_prog_name()} error', error_message.format(msg))
            except Error as e:
                problem = True
                e.report()
                if maintainer:
                    send_mail(maintainer, f'{get_prog_name()} error', error_message.format(str(e)))
    terminate(problem)