示例#1
0
def build(ctx):
    """Configure the preferred text editor for shells and related tools."""
    emacsclient = ctx.env.EMACSCLIENT
    if emacsclient:
        # One-element argv list for a blocking invocation; see EDITOR below.
        blocking_editor = emacsclient
        # Non-blocking variant; may carry multiple arguments.
        nonblocking_editor = emacsclient + ['--no-wait']

        if ctx.env.E_SINK:
            ctx.install_subst_script(
                'e',
                EMACSCLIENT=shquote(emacsclient[0]),
                E_SINK=shquote(ctx.env.E_SINK[0]))
        else:
            ctx.env.SHELL_ALIASES['e'] = ctx.shquote_cmd(nonblocking_editor)

        # Python packages used by the Emacs tooling.
        # Rope is not Python 3-compatible yet.
        ctx.env.PYENV_VIRTUALENV_DEFAULT_PACKAGES += [
            'elpy==1.6.0',
            'jedi==0.8.1',
            'flake8==2.3.0',
            'mccabe==0.3',
            'pep8==1.5.7',
            'pyflakes==0.8.1',
        ]
    else:
        # Nano has no non-blocking mode, so both variants are identical.
        blocking_editor = nonblocking_editor = ctx.env.NANO

    # Most EDITOR consumers expect the editor to block. The programs I use
    # that consult EDITOR are:
    #
    # git config -e; git commit
    # hg commit
    # crontab -e
    #
    # They interpret the value differently (git applies shell rules while
    # crontab only accepts a single argument), so stick to one argument; if
    # more are ever needed, wrap them in a script.
    ctx.env.SHELL_ENV['EDITOR'] = shquote(blocking_editor[0])

    # VISUAL is deliberately left unset: most programs consult it before
    # EDITOR, adding a layer of complexity that's currently unnecessary.

    # Homebrew also honors VISUAL/EDITOR but does not need its editor to
    # block, so override it. Homebrew evaluates the value with shell rules,
    # hence no wrapper script is required.
    if ctx.env.BREW:
        ctx.env.SHELL_ENV['HOMEBREW_EDITOR'] = shquote(ctx.shquote_cmd(
            nonblocking_editor))
示例#2
0
 def _make_bash_keys(tsk):
     """Write one Bash `bind` command per configured shell keybinding."""
     # Each emitted line looks like:
     # bind '"\C-j": " 2>&1 | less\C-m"'
     bindings = tsk.env.SHELL_KEYBINDINGS
     with open(tsk.outputs[0].abspath(), 'w') as out_file:
         for key, binding in bindings.items():
             quoted = shquote('"{}": "{}"'.format(key, binding))
             out_file.write('bind ' + quoted + '\n')
示例#3
0
    def _make_zsh(tsk):
        """Generate the Zsh snippet that starts and binds Powerline."""
        lines = []
        # No need to start the daemon ourselves when launchd manages it.
        if not tsk.env.POWERLINE_DAEMON_LAUNCHD:
            lines.append(
                "{} --quiet".format(ctx.shquote_cmd(tsk.env.POWERLINE_DAEMON)))

        # TODO zpython is disabled until we can figure out how to
        # install/use it properly. The Homebrew package is outdated: it
        # still builds the zpython branch of ZyX's zsh repo
        # (<https://bitbucket.org/ZyX_I/zsh>) even though new development
        # happens in <https://bitbucket.org/ZyX_I/zpython>, and compiling
        # it ourselves has not worked. Since zpython links against both
        # Zsh and Python, re-enabling it would also require a check that
        # its Python is the same interpreter Powerline is installed under
        # (mismatches have bitten us before).
        lines.append("POWERLINE_NO_ZSH_ZPYTHON=1")

        # For use in the shell version segment.
        # TODO: Make Powerline PR.
        lines.append(
            "POWERLINE_COMMAND_ARGS=--renderer-arg="
            'shell_version="$ZSH_NAME-$ZSH_VERSION"')

        binding = ctx.get_powerline_path(
            join("bindings", "zsh", "powerline.zsh"))
        lines.append("source {}".format(shquote(binding)))
        tsk.outputs[0].write("\n".join(lines) + "\n")
示例#4
0
 def _extract(self):
     """Extract the package payload into a fresh temp dir under self.dirname.

     Returns the result of getstatusoutput() for the extraction pipeline,
     or None when self.dirname is not an accessible directory.
     """
     if not os.path.isdir(self.dirname):
         warn('Unable to access dir %s' % self.dirname)
         return None
     # Replace self.dirname with a unique scratch directory created inside
     # the original one.
     self.dirname = tempfile.mkdtemp(
         prefix='rpmlint.%s.' % os.path.basename(self.filename),
         dir=self.dirname)
     # TODO: sequence based command invocation
     # TODO: warn some way if this fails (e.g. rpm2cpio not installed)
     # shquote() protects the shell pipeline from metacharacters in paths.
     quoted = {'f': shquote(self.filename), 'd': shquote(self.dirname)}
     command_str = (
         'rpm2cpio %(f)s | (cd %(d)s; cpio -id); chmod -R +rX %(d)s'
         % quoted)
     result = getstatusoutput(command_str, shell=True)
     self.extracted = True
     return result
示例#5
0
文件: Pkg.py 项目: Fak3/rpmlint
 def _extract(self):
     """Extract the package payload into a fresh temp dir under self.dirname.

     Returns the result of getstatusoutput() for the extraction pipeline
     (presumably a (status, output) pair -- confirm against the helper),
     or None when self.dirname is not an accessible directory.
     """
     if not os.path.isdir(self.dirname):
         warn('Unable to access dir %s' % self.dirname)
         return None
     else:
         # Replace self.dirname with a unique scratch directory created
         # inside the original one.
         self.dirname = tempfile.mkdtemp(
             prefix='rpmlint.%s.' % os.path.basename(self.filename),
             dir=self.dirname)
         # TODO: sequence based command invocation
         # TODO: warn some way if this fails (e.g. rpm2cpio not installed)
         # shquote() protects the shell pipeline from metacharacters in
         # the file and directory names.
         command_str = \
             'rpm2cpio %(f)s | (cd %(d)s; cpio -id); chmod -R +rX %(d)s' % \
             {'f': shquote(self.filename), 'd': shquote(self.dirname)}
         cmd = getstatusoutput(command_str, shell=True)
         self.extracted = True
         return cmd
示例#6
0
 def simple_bind_s(self, binddn, password):
     """Record equivalent ldapsearch options, log, then delegate the bind."""
     # Build the option string piecewise; '-x' selects a simple bind.
     opts = [' -x']
     if binddn:
         opts.append(' -D %s' % (shquote(binddn), ))
     if password:
         # '-W' means "prompt for password" -- the password itself is
         # never logged.
         opts.append(' -W')
     self.connect_opts = ''.join(opts)
     self.log_connect()
     return self.wrapped.simple_bind_s(binddn, password)
示例#7
0
def build(ctx):
    """Define shell aliases for SelectStarter and the Architect tools."""
    if ctx.env.SELECTSTARTER:
        starter = join(
            ctx.env.SELECTSTARTER, 'Contents', 'MacOS', 'SelectStarter')
        ctx.env.SHELL_ALIASES['ss'] = shquote(starter)
    # Alias each Architect tool that the configure step actually found.
    for alias, exe in ARCHITECT_ALIASES.items():
        argv = ctx.env[exe.upper()]
        if argv:
            ctx.env.SHELL_ALIASES[alias] = ctx.shquote_cmd(argv)
示例#8
0
 def sasl_interactive_bind_s(self, who, auth, *a, **kw):
     """Record equivalent ldapsearch SASL options, log, then delegate."""
     # '-Y' picks the SASL mechanism.
     opts = [' -Y %s' % (auth.mech.decode('ascii'), )]
     if sasl.CB_AUTHNAME in auth.cb_value_dict:
         opts.append(' -U %s' % (shquote(
             auth.cb_value_dict[sasl.CB_AUTHNAME]), ))
     if sasl.CB_PASS in auth.cb_value_dict:
         # '-W' means "prompt for password"; the value is never logged.
         opts.append(' -W')
     self.connect_opts = ''.join(opts)
     self.log_connect()
     return self.wrapped.sasl_interactive_bind_s(who, auth, *a, **kw)
示例#9
0
 def _make_aliases(tsk):
     """Emit shell alias definitions, then append the static aliases file."""
     with open(tsk.outputs[0].abspath(), 'w') as out_file:
         out_file.write('\n# Tool aliases\n\n')
         for alias, command in tsk.env.SHELL_ALIASES.items():
             out_file.write(
                 'alias {}={}\n'.format(alias, shquote(command)))
         # Copy the hand-written aliases through verbatim.
         with open(tsk.inputs[0].abspath()) as in_file:
             out_file.writelines(in_file)
示例#10
0
 def search_s(self, base, scope, filter, attributes):
     """Log an ldapsearch equivalent of the query, then run it timed."""
     attrs_str = ' '.join(attributes or [])
     # Lazy %-style logger args; only formatted if debug is enabled.
     logger.debug(
         "Doing: ldapsearch%s -b %s -s %s %s %s",
         self.connect_opts, base, SCOPES_STR[scope], shquote(filter),
         attrs_str)
     with self.timer:
         return self.wrapped.search_s(base, scope, filter, attributes)
示例#11
0
def padlock_unlock(encrypted_path):
    """
    Unlock the padlock (this is: mount the EncFS directory).

    Returns True if this call mounted the directory, otherwise False
    (key/config files missing, or the directory was already mounted).
    """
    # Location of GPG-encrypted keyfile to use
    keyfile = os.path.join(encrypted_path, ENCFS_KEYFILE)
    configfile = os.path.join(encrypted_path, ENCFS_CONFIGFILE)
    crypted_configfile = configfile + '.asc'

    # NOTE(review): crypted_configfile was already joined with
    # encrypted_path above, so joining it again here looks redundant or
    # wrong -- confirm the intended path.
    if (not os.path.exists(keyfile) or not os.path.exists(
            os.path.join(encrypted_path, crypted_configfile))):
        return False

    # Cut the EncFS directory prefix to get the decrypted directory name
    decrypted_path = ''.join(encrypted_path.rsplit(ENCFS_PREFIX, 1))

    # Check if encrypted directory is already mounted
    # NB: This only tests if encfs_decrypted is a mount-point at all,
    # no matter if it is fuse.encfs or not.
    if os.path.ismount(decrypted_path):
        # if it is already mounted, do nothing
        return False

    # Make sure the mount directory exists
    if not os.path.isdir(decrypted_path):
        os.makedirs(decrypted_path)

    # Make sure the named pipe for the configfile exists
    if not os.path.exists(configfile):
        os.mkfifo(configfile)
    elif not stat.S_ISFIFO(os.stat(configfile).st_mode):
        # errno 17 == EEXIST
        raise IOError(17, configfile + ' exists but is not a fifo')

    # Start encfs. It will wait for input on the `configfile` named
    # pipe.
    encfs = subprocess.Popen([
        'encfs', encrypted_path, decrypted_path, '--extpass',
        'gpg --no-mdc-warning --output - %s' % shquote(keyfile)
    ])
    # now decrypt the config and write it into the named pipe
    with open(configfile, 'w') as fh:
        # NB: gpg must write to stdout to avoid it is asking whether
        # the file should be overwritten
        subprocess.Popen(
            ['gpg', '--no-mdc-warning', '--output', '-', crypted_configfile],
            stdout=fh).wait()
    encfs.wait()
    # The fifo was only needed to hand the config to encfs; clean it up.
    os.remove(configfile)
    return True
示例#12
0
    def _encrypt_secrets_encfs(self):
        """Create a GPG-protected EncFS volume for the Ansible secrets.

        Raises ValueError when no GPG recipients were supplied via the
        'keys' keyword argument.
        """
        encfs_keyfile = os.path.join(self.encfs_path, '.encfs6.keyfile')
        encfs_configfile = os.path.join(self.encfs_path, '.encfs6.xml')
        # Random volume password; it is only ever stored GPG-encrypted.
        encfs_password = self._get_random_string()

        try:
            gpg_keys = list(self.kwargs.get('keys', None).split(','))
            # Build ['-r', key1, '-r', key2, ...] for the gpg command line.
            gpg_recipients = list(
                itertools.chain.from_iterable(['-r', r] for r in gpg_keys))
        except AttributeError:
            # .split() raised because 'keys' was absent (None).
            raise ValueError('List of GPG recipients not specified')

        if not os.path.exists(self.encfs_path):
            print('Encrypting Ansible secrets using EncFS...')
            os.makedirs(self.encfs_path)
            # Encrypt the volume password to the recipients.
            gpg_cmd = subprocess.Popen([
                self._commands['gpg'], '--encrypt', '--armor', '--output',
                encfs_keyfile
            ] + gpg_recipients,
                                       stdin=subprocess.PIPE)
            gpg_cmd.communicate(encfs_password.encode('utf-8'))
            # Busy-wait until gpg has finished writing the keyfile.
            while not os.path.exists(encfs_keyfile):
                time.sleep(1)

            # Create the volume; '--extpass' decrypts the keyfile on demand.
            # The leading 'p\n' answers the encfs setup prompt (presumably
            # selecting paranoia mode -- confirm against encfs docs).
            encfs_cmd = subprocess.Popen([
                self._commands['encfs'], self.encfs_path, self.secret_path,
                '--extpass', self._commands['gpg'] +
                ' --decrypt --no-mdc-warning --output - ' +
                shquote(encfs_keyfile)
            ],
                                         stdin=subprocess.PIPE)
            encfs_cmd.communicate(('p\n' + encfs_password).encode('utf-8'))

            while not os.path.exists(encfs_configfile):
                time.sleep(1)

            # Set the inventory state to correctly lock the secrets
            self.encrypted = True
            self.crypt_method = 'encfs'
            if os.path.ismount(self.secret_path):
                self.encfs_mounted = True
            self.lock()

            # Store the EncFS config GPG-encrypted and delete the plaintext.
            gpg_cmd = subprocess.Popen([
                self._commands['gpg'], '--encrypt', '--armor', '--output',
                encfs_configfile + '.asc'
            ] + gpg_recipients + [encfs_configfile])
            while not os.path.exists(encfs_configfile + '.asc'):
                time.sleep(1)
            os.remove(encfs_configfile)
示例#13
0
def padlock_unlock(encrypted_path):
    """
    Unlock the padlock (this is: mount the EncFS directory).

    Returns True if this call mounted the directory, otherwise False
    (key/config files missing, or the directory was already mounted).
    """
    # Location of GPG-encrypted keyfile to use
    keyfile = os.path.join(encrypted_path, ENCFS_KEYFILE)
    configfile = os.path.join(encrypted_path, ENCFS_CONFIGFILE)
    crypted_configfile = configfile+'.asc'

    # NOTE(review): crypted_configfile was already joined with
    # encrypted_path above, so joining it again here looks redundant or
    # wrong -- confirm the intended path.
    if (not os.path.exists(keyfile) or
        not os.path.exists(os.path.join(encrypted_path, crypted_configfile))):
        return False

    # Cut the EncFS directory prefix to get the decrypted directory name
    decrypted_path = ''.join(encrypted_path.rsplit(ENCFS_PREFIX, 1))

    # Check if encrypted directory is already mounted
    # NB: This only tests if encfs_decrypted is a mount-point at all,
    # no matter if it is fuse.encfs or not.
    if os.path.ismount(decrypted_path):
        # if it is already mounted, do nothing
        return False

    # Make sure the mount directory exists
    if not os.path.isdir(decrypted_path):
        os.makedirs(decrypted_path)

    # Make sure the named pipe for the configfile exists
    if not os.path.exists(configfile):
        os.mkfifo(configfile)
    elif not stat.S_ISFIFO(os.stat(configfile).st_mode):
        # errno 17 == EEXIST
        raise IOError(17, configfile+' exists but is not a fifo')

    # Start encfs. It will wait for input on the `configfile` named
    # pipe.
    encfs = subprocess.Popen([
        'encfs', encrypted_path, decrypted_path, 
        '--extpass', 'gpg --no-mdc-warning --output - %s' % shquote(keyfile)])
    # now decrypt the config and write it into the named pipe
    with open(configfile, 'w') as fh:
        # NB: gpg must write to stdout to avoid it is asking whether
        # the file should be overwritten
        subprocess.Popen(['gpg', '--no-mdc-warning', '--output', '-',
                          crypted_configfile], stdout=fh).wait()
    encfs.wait()
    # The fifo was only needed to hand the config to encfs; clean it up.
    os.remove(configfile)
    return True
示例#14
0
 def _make_bash(tsk):
     """Generate the Bash snippet that starts and binds Powerline."""
     lines = []
     # No need to start the daemon ourselves when launchd manages it.
     if not tsk.env.POWERLINE_DAEMON_LAUNCHD:
         lines.append(
             "{} --quiet".format(ctx.shquote_cmd(tsk.env.POWERLINE_DAEMON)))
     # For use in the shell version segment.
     # TODO: Make Powerline PR.
     # Version-string credit: http://stackoverflow.com/a/9429887
     lines.append(
         "POWERLINE_COMMAND_ARGS=--renderer-arg="
         'shell_version=bash-"$(IFS=.; echo "${BASH_VERSINFO[*]:0:3}")"')
     # Powerline bindings
     lines.append("POWERLINE_BASH_CONTINUATION=1")
     lines.append("POWERLINE_BASH_SELECT=1")
     binding = ctx.get_powerline_path(
         join("bindings", "bash", "powerline.sh"))
     lines.append("source {}".format(shquote(binding)))
     tsk.outputs[0].write("\n".join(lines) + "\n")
示例#15
0
    def unlock(self):
        """Mount the EncFS-encrypted secret directory.

        Returns True when this call mounted the volume, False when it was
        already mounted or nothing is encrypted.

        NOTE(review): when self.encrypted is set but self.crypt_method is
        not 'encfs', the method falls through and implicitly returns
        None -- confirm whether that is intended.
        """
        if self.encrypted:
            if self.crypt_method == 'encfs':
                keyfile = os.path.join(self.encfs_path, self.encfs_keyfile)
                configfile = os.path.join(self.encfs_path,
                                          self.encfs_configfile)
                crypted_configfile = os.path.join(
                    self.encfs_path, self.encfs_configfile + '.asc')

                if os.path.ismount(self.secret_path):
                    # Already mounted; just record the fact.
                    self.encfs_mounted = True
                    return False
                else:
                    if not os.path.isdir(self.secret_path):
                        os.makedirs(self.secret_path)

                    # The EncFS config is handed over through a named pipe
                    # so the plaintext never touches disk.
                    if not os.path.exists(configfile):
                        os.mkfifo(configfile)
                    elif not stat.S_ISFIFO(os.stat(configfile).st_mode):
                        # errno 17 == EEXIST
                        raise IOError(17,
                                      configfile + ' exists but is not a fifo')

                    # Force the C locale for predictable tool output.
                    subprocess_env = {'LC_ALL': 'C'}
                    # encfs blocks reading the config from the fifo while
                    # '--extpass' decrypts the keyfile via gpg.
                    encfs = subprocess.Popen([
                        self._commands['encfs'], self.encfs_path,
                        self.secret_path, '--extpass',
                        '{} --decrypt --no-mdc-warning --output - {}'.format(
                            self._commands['gpg'], shquote(keyfile))
                    ],
                                             env=subprocess_env)
                    # Decrypt the config into the fifo for encfs to read.
                    with open(configfile, 'w') as fh:
                        gpg = subprocess.Popen([
                            self._commands['gpg'], '--decrypt',
                            '--no-mdc-warning', '--output', '-',
                            crypted_configfile
                        ],
                                               stdout=fh,
                                               env=subprocess_env)
                    gpg.communicate()
                    encfs.communicate()
                    os.remove(configfile)
                    self.encfs_mounted = True
                    return True
        else:
            return False
示例#16
0
def _execute_in_clean_bash(ctx, command):
    """Execute a command in the system Bash in a "clean" environment.

    Only /etc/profile is sourced, which should (hopefully) provide the
    base paths. Returns the command's output with trailing newlines
    stripped.
    """
    profile_path = '/etc/profile'
    if not os.path.isfile(profile_path):
        ctx.fatal('Could not find profile file: ' + profile_path)
    shell_command = 'source {profile_path}; {command}'.format(
        profile_path=shquote(profile_path),
        command=command,
    )
    output = ctx.cmd_and_log(
        ['/bin/bash', '--norc', '--noprofile', '-c', shell_command],
        # Clean environment (roughly equivalent to 'env -i command')
        env={},
    )
    return output.rstrip('\n')
示例#17
0
def build(ctx):
    """Define 'my<prog>' aliases that restrict process tools to my user.

    Every program in PROCESS_PROGRAMS accepts a -u argument naming the
    user: for ps, pgrep, and pkill this is the effective uid; for htop
    and lsof it is unspecified (euid and ruid usually match anyway).
    Both common pstree implementations -- freecode's (Mac OS X) and
    psmisc's (most GNU/Linux) -- also support -u.

    `id -un' is used because `whoami' is obsolete and not POSIX.
    """
    for prog in ctx.env.PROCESS_PROGRAMS:
        argv = ctx.env[prog.upper()]
        alias_value = '{} -u "$(id -un)"'.format(shquote(argv[0]))
        ctx.env.SHELL_ALIASES['my' + prog] = alias_value
示例#18
0
def build(ctx):
    """Set up Powerline: daemon launch, env vars, shell rc snippets, and
    the JSON config/theme/colorscheme files, plus log rotation."""
    if not ctx.env.HAS_POWERLINE:
        return

    if ctx.env.POWERLINE_DAEMON_LAUNCHD:
        # I made up 'net.powerline'; that's not a real domain.
        label = 'net.powerline'
        plist_node = ctx.path.find_or_declare(label + '.plist')
        @ctx.rule(target=plist_node, vars=['POWERLINE_DAEMON'])
        def _make_launch_agent(tsk):
            # Generate the launchd plist that keeps the daemon running.
            ctx.plist_dump_node(
                OrderedDict([
                    ('Label', label),
                    ('ProgramArguments',
                     ctx.env.POWERLINE_DAEMON + ['--foreground']),
                    ('RunAtLoad', True),
                    ('EnvironmentVariables', {
                        # Set LANG to ensure proper output encoding.
                        'LANG': ctx.env.POWERLINE_LANG,
                    }),
                ]),
                tsk.outputs[0],
            )

        ctx.install_launch_agent(plist_node)

    # These are overrides for where Powerline executables should be found.
    # These are used in case virtualenvs have Powerline installed (most will).
    # We want the Powerline executables from the default Python to be used.

    # Both these environment variables specify full paths to the executables,
    # so according to Powerline docs they should not be quoted.
    # http://powerline.readthedocs.org/en/2.0/configuration/local.html#prompt-command
    ctx.env.SHELL_ENV['POWERLINE_CONFIG_COMMAND'] = ctx.env.POWERLINE_CONFIG[0]
    if ctx.env.POWERLINE_CLIENT:
        # Used for shell and tmux.
        ctx.env.SHELL_ENV['POWERLINE_COMMAND'] = ctx.env.POWERLINE_CLIENT[0]

    # While overriding some executables with absolute paths in environment
    # variables is possible, the powerline client (see 'scripts/powerline.c')
    # uses execvp() to run 'powerline-render' if it can't connect to the daemon
    # (with the shell and Python clients doing similar things). This means that
    # 'powerline-render' needs to be on the PATH in some way, shape, or form.
    # We override this by symlinking it into the scripts directory, which
    # admittedly is kind of a hack.
    ctx.symlink_as(join(ctx.env.SCRIPTS_DIR, 'powerline-render'),
                   ctx.env.POWERLINE_RENDER[0])

    ctx.env.PYENV_VIRTUALENV_DEFAULT_PACKAGES.append('powerline-status==2.3')

    def _make_bash(tsk):
        # Generate the Bash rc snippet that starts and binds Powerline.
        lines = []
        if not tsk.env.POWERLINE_DAEMON_LAUNCHD:
            lines.append(
                '{} --quiet'.format(ctx.shquote_cmd(tsk.env.POWERLINE_DAEMON)))
        lines += [
            # For use in the shell version segment.
            # TODO: Make Powerline PR.
            'POWERLINE_COMMAND_ARGS=--renderer-arg='
            # Credit: http://stackoverflow.com/a/9429887
            'shell_version=bash-"$(IFS=.; echo "${BASH_VERSINFO[*]:0:3}")"',
            # Powerline bindings
            'POWERLINE_BASH_CONTINUATION=1',
            'POWERLINE_BASH_SELECT=1',
            'source {}'.format(shquote(ctx.get_powerline_path(join(
                'bindings', 'bash', 'powerline.sh')))),
        ]
        tsk.outputs[0].write('\n'.join(lines) + '\n')

    def _make_zsh(tsk):
        # Generate the Zsh rc snippet that starts and binds Powerline.
        lines = []
        if not tsk.env.POWERLINE_DAEMON_LAUNCHD:
            lines.append(
                '{} --quiet'.format(ctx.shquote_cmd(tsk.env.POWERLINE_DAEMON)))

        # TODO zpython is disabled until we can figure out how to install/use
        # it properly.
        #
        # The Homebrew package for zpython is old. It still uses the zpython
        # branch of ZyX's zsh repo, <https://bitbucket.org/ZyX_I/zsh>. However,
        # it seems that ZyX is doing all new development in
        # <https://bitbucket.org/ZyX_I/zpython>. We're not really comfortable
        # installing from Homebrew until this is fixed. And we've not had luck
        # compiling zpython in its current state. Furthermore, since zpython
        # depends on both Zsh and Python, it needs to be compiled with both.
        # This is particularly important for Python, as in the past we've had
        # zpython use the system Python with an old version of Powerline while
        # we were using another Python with a newer Powerline. If zpython is
        # ever re-enabled, we need to institute a check to make sure that
        # zpython's Python and the default Python under which Powerline is
        # installed are the same interpreter.
        lines.append('POWERLINE_NO_ZSH_ZPYTHON=1')

        # For use in the shell version segment.
        # TODO: Make Powerline PR.
        lines.append(
            'POWERLINE_COMMAND_ARGS=--renderer-arg='
            'shell_version="$ZSH_NAME-$ZSH_VERSION"')

        lines.append('source {}'.format(shquote(ctx.get_powerline_path(join(
            'bindings', 'zsh', 'powerline.zsh')))))
        tsk.outputs[0].write('\n'.join(lines) + '\n')

    # Declare one rc snippet per available shell, dispatching to the
    # matching _make_* generator above by name.
    for shell in ctx.env.AVAILABLE_SHELLS:
        out_node = ctx.path.find_or_declare('powerline.' + shell)
        ctx.add_shell_rc_node(out_node, shell)
        rule = locals()['_make_{}'.format(shell)]
        ctx(rule=rule, target=out_node,
            vars=['POWERLINE_DAEMON', 'POWERLINE_DAEMON_LAUNCHD'])

    def _declare(base_path):
        """Declare a JSON node under the Powerline dotfiles config tree."""
        return ctx.path.find_or_declare(join(
            'dotfiles', 'config', 'powerline', join(*base_path) + '.json'))

    config_node = _declare(['config'])

    @ctx.rule(target=config_node,
              vars=['POWERLINE_SEGMENTS_PATH', 'POWERLINE_LOG_PATH'])
    def _make_config(tsk):
        # Top-level Powerline config: segment search path, logging, themes.
        _json_dump_node(
            {
                'common': {
                    'paths': [tsk.env.POWERLINE_SEGMENTS_PATH],
                    'log_file': tsk.env.POWERLINE_LOG_PATH,
                    'log_level': 'INFO',
                },
                'ext': {
                    'shell': {'theme': 'sean'},
                    'tmux': {'theme': 'sean'},
                },
            },
            tsk.outputs[0],
        )

    shell_theme_node = _declare(['themes', 'shell', 'sean'])

    @ctx.rule(target=shell_theme_node, vars=ctx.env.RBENV_TOOLS)
    def _make_shell_theme(tsk):
        # TODO: Consider moving this back to a JSON file which gets read and
        # merged.
        top_left = [
            {
                'function': 'powerline.segments.common.net.hostname',
                'priority': 50,
                'args': {
                    # Always include the hostname.
                    'only_if_ssh': False,
                    'exclude_domain': True,
                },
            },
            {
                'function': 'powerline.segments.common.env.user',
                'priority': 40,
                'before': '👤  ', # Unicode BUSTS IN SILHOUETTE
            },
            {
                'function': 'powerline_sean_segments.shell_version',
                'priority': 10,
                'before': '🐚  ', # Unicode SPIRAL SHELL
            },
            {
                'function': 'powerline.segments.shell.cwd',
                'args': {
                    # Don't split the cwd into multiple
                    # Powerline segments.
                    'use_path_separator': True,
                    # Don't ever shorten the cwd.
                    'dir_limit_depth': None,
                },
                'before': '📂  ', # Unicode OPEN FILE FOLDER
            },
            {
                'function': (
                    'powerline.segments.common.vcs.branch'),
                'priority': 30,
                'args': {
                    # Show whether the branch is dirty.
                    'status_colors': True,
                }
            }
        ]
        # Add a version segment for each *env tool found at configure time.
        for tool, icon in [
                # Unicode SNAKE
                ('pyenv', '🐍'),
                # Unicode GEM STONE
                ('rbenv', '💎'),
                # Unicode DROMEDARY CAMEL
                ('plenv', '🐪'),
                # Unicode HOT BEVERAGE
                ('jenv', '☕'),
                # Unicode WHITE HEXAGON
                ('nodenv', '⬡'),
        ]:
            if tsk.env[tool.upper()]:
                top_left.append(dict(
                    function='powerline_sean_segments.' + tool,
                    priority=2,
                    before=icon + '  ',
                ))
        theme = {
            'segments': {
                # The 'above' key allows us to have a multiline prompt.
                # https://github.com/Lokaltog/powerline/issues/462#issuecomment-46806521
                'above': [
                    {
                        'left': top_left,
                    }
                ],
                'left': [
                    {
                        'type': 'string',
                        'contents': '$',
                        'highlight_groups': ['cwd'],
                    }
                ],
                'right': [
                    {
                        # last_pipe_status is way cooler than the normal
                        # last_status. If any of the pipe commands fail, it
                        # will show the exit status for each of them. For
                        # example, try running:
                        #
                        #     true | false | true
                        #
                        'function': (
                            'powerline.segments.shell.last_pipe_status'),
                        'priority': 20,
                    }
                ]
            }
        }

        _json_dump_node(theme, tsk.outputs[0])

    # We name this file 'colorschemes/shell/default.json' so that it overrides
    # the Powerline 'colorschemes/shell/default.json', but still inherits from
    # 'colorschemes/default.json'.
    shell_colorscheme_node = _declare(['colorschemes', 'shell', 'default'])

    @ctx.rule(target=shell_colorscheme_node, vars=ctx.env.RBENV_TOOLS)
    def _make_shell_colorscheme(tsk):
        # Color each *env segment; only include tools that were found.
        groups = dict(
            shell_version=dict(
                fg='gray70',
                bg='darkestpurple',
            )
        )
        for tool, group in dict(
                pyenv=dict(
                    fg='brightyellow',
                    bg='mediumgreen',
                ),
                rbenv=dict(
                    fg='brightestorange',
                    bg='darkestred',
                ),
                plenv=dict(
                    fg='gray9',
                    bg='darkestblue',
                ),
                jenv=dict(
                    fg='gray10',
                    bg='darkestpurple',
                ),
                nodenv=dict(
                    fg='gray10',
                    bg='darkgreen',
                )
        ).items():
            if tsk.env[tool.upper()]:
                groups[tool] = group
        _json_dump_node(
            {
                'name': "Sean's color scheme for shell prompts",
                'groups': groups,
            },
            tsk.outputs[0],
        )

    tmux_theme_node = _declare(['themes', 'tmux', 'sean'])
    # Use an ordered dict else it will trigger unnecessary rebuilds.
    mail_vars = OrderedDict(
        (base, 'POWERLINE_MAIL_' + base.upper())
        for base in ['server', 'port', 'username', 'password'])

    @ctx.rule(target=tmux_theme_node, vars=list(mail_vars.values()))
    def _make_tmux_theme(tsk):
        # TODO: Consider moving this back to a JSON file which gets read and
        # merged.
        segments_right = [
            {
                'function': 'powerline.segments.common.sys.cpu_load_percent',
                'priority': 15,
            },
            {
                'function': 'powerline.segments.common.wthr.weather',
                'args': {
                    'unit': 'F',
                    'location_query': 'Jenison, Michigan',
                },
                'priority': 20,
            },
        ]
        # Will be set to an empty list if keyring.get_password() returns None.
        if tsk.env.POWERLINE_MAIL_PASSWORD != []:
            segments_right.append({
                'function': 'powerline.segments.common.mail.email_imap_alert',
                'args': dict(
                    (base, tsk.env[var]) for base, var in mail_vars.items()),
                'priority': 10,
            })

        segments_right += [
            {
                'function': 'powerline.segments.common.net.internal_ip',
                'before': 'I ',
                'args': {
                    'interface': 'default_gateway',
                },
                'priority': 10,
            },
            {
                'function': 'powerline.segments.common.net.external_ip',
                'before': 'E ',
                'args': {
                    'query_url': 'http://ipv4.icanhazip.com/',
                },
                'priority': 5,
            },
        ]

        _json_dump_node(
            {
                # Set the dividers to make the layout more compact. This is
                # copied from 'themes/powerline.json' and space has been
                # removed. It doesn't look the greatest to the left of the
                # current window index, but it allows for more status line real
                # estate.
                #
                # Note on the dividers issue: The real problem here is that the
                # classic Powerline dividers are used as characters in the
                # actual window status field of tmux and not as tmux window
                # status separators. This is presumably due to the technical
                # reason that tmux only allows a global separator string. This
                # means that the classic Powerline symbols can't truly invert
                # when the current window changes, as each window status has
                # its own fixed width which the Powerline dividers inhabit. If
                # this is not done, then the window statuses shift when the
                # current window changes. The end result of this is that each
                # divider takes up twice the amount of space it actually needs,
                # to reserve space for it when it's not present. Ugh.
                'dividers': {
                    'left': {
                        'hard': '',
                    },
                },
                'segments': {
                    'right': segments_right,
                },
            },
            tsk.outputs[0],
        )

    ctx(source=[
        config_node,
        shell_theme_node,
        shell_colorscheme_node,
        tmux_theme_node,
        # These files don't need any configuration.
        join('dotfiles', 'config', 'powerline', 'colorschemes', 'tmux',
             'default.cjson'),
    ])

    # Install segments file.
    ctx.install_dotfile(ctx.path.find_resource([
        'dotfiles', 'config', 'powerline', 'powerline_sean_segments.py']))

    # Rotate the log using logrotate (if available)
    logrotate_conf_in_node = ctx.path.find_resource([
        'dotfiles', 'config', 'powerline', 'logrotate.conf.in'])
    logrotate_conf_node = logrotate_conf_in_node.change_ext('')
    ctx(features='subst',
        source=logrotate_conf_in_node,
        target=logrotate_conf_node,
        # logrotate paths follow shell quoting rules.
        LOG_PATH=shquote(ctx.env.POWERLINE_LOG_PATH))
    ctx.env.LOGROTATE_NODES.append(logrotate_conf_node)
示例#19
0
文件: tmux.py 项目: seanfisk/dotfiles
def build(ctx):
    """Generate and install the tmux configuration and related shell helpers.

    Produces tmux.conf (via waf's subst feature), a 'mytmux' alias for
    listing tmux sockets, and an attach-or-new shell snippet.
    """
    if not ctx.env.TMUX_:
        return

    # Prefer Zsh, falling back to Bash. Deliberately no -l: a login
    # shell is not wanted inside tmux.
    preferred_shell = ctx.env.ZSH or ctx.env.BASH
    wrapper = ctx.env.REATTACH_TO_USER_NAMESPACE
    shell_cmd = (wrapper + preferred_shell) if wrapper else preferred_shell
    # Not exactly sure about the quoting rules in this config file...
    default_command = shquote(ctx.shquote_cmd(shell_cmd))

    conf_in = ctx.path.find_resource(["dotfiles", "tmux.conf.in"])
    conf_out = conf_in.change_ext(ext=".conf", ext_in=".conf.in")

    extra_commands = []

    if ctx.env.HAS_POWERLINE:
        powerline_conf = ctx.get_powerline_path(join("bindings", "tmux", "powerline.conf"))
        if not ctx.env.POWERLINE_DAEMON_LAUNCHD:
            # Not exactly sure about the quoting rules in this config
            # file...
            daemon_cmd = ctx.shquote_cmd(ctx.env.POWERLINE_DAEMON + ["--quiet"])
            extra_commands.append("run-shell " + shquote(daemon_cmd))
        extra_commands.append("source-file " + shquote(powerline_conf))

    # When copying in copy mode in tmux, send the copied text to the system
    # clipboard. See:
    # https://robots.thoughtbot.com/tmux-copy-paste-on-os-x-a-better-future
    if ctx.env.COPY_COMMAND:
        # XXX: These keys are hard-coded
        copy_pipe_cmd = ctx.shquote_cmd(ctx.env.COPY_COMMAND)
        for key in ["M-w", "C-w"]:
            extra_commands.append(
                "bind-key -t emacs-copy {} copy-pipe {}".format(key, copy_pipe_cmd)
            )

    ctx(
        features="subst",
        source=conf_in,
        target=conf_out,
        DEFAULT_COMMAND=default_command,
        MORE_COMMANDS="\n".join(extra_commands),
    )
    ctx.install_dotfile(conf_out)

    if ctx.env.LSOF:
        # List my tmux sockets
        ctx.env.SHELL_ALIASES["mytmux"] = shquote(ctx.env.LSOF[0]) + " -u \"$(id -un)\" -a -U | grep '^tmux'"

    # Attach or new
    attach_in = ctx.path.find_resource(["shell", "tmux.sh.in"])
    attach_out = attach_in.change_ext(ext=".sh", ext_in=".sh.in")
    if ctx.env.TMUXIFIER_DEFAULT_SESSION:
        new_session_cmd = ctx.env.TMUXIFIER_ + ["load-session", ctx.env.TMUXIFIER_DEFAULT_SESSION]
    else:
        new_session_cmd = ctx.env.TMUX_ + ["new-session"]

    ctx(
        features="subst",
        source=attach_in,
        target=attach_out,
        TMUX=shquote(ctx.env.TMUX_[0]),
        TMUX_NEW_SESSION=ctx.shquote_cmd(new_session_cmd),
    )
    ctx.add_shell_rc_node(attach_out)
示例#20
0
文件: daemon.py 项目: rfinnie/dsari
    def process_scheduled_run(self, run):
        """Try to launch a scheduled run, or reschedule a wakeup.

        The run is started only if it is due, the job allows concurrent
        runs (or none is in flight), and a concurrency-group slot is
        free. Launching forks a child executor; the parent records the
        run as running and, for respawning scheduled jobs, queues the
        next occurrence.
        """
        now = datetime.datetime.now()
        job = run.job
        # Not due yet: wake up again at its schedule time.
        if run.schedule_time > now:
            self.wakeups.append(run.schedule_time)
            return
        # Job forbids concurrent runs and one is already in flight;
        # back off and retry later.
        if (not job.concurrent_runs) and (job in [x.job for x in self.running_runs]):
            self.wakeups.append(now + backoff(run.schedule_time, now))
            return
        run.concurrency_group = None
        if len(job.concurrency_groups) > 0:
            # Shuffle so that no single group is persistently favored
            # when several groups have free slots.
            job_concurrency_groups = copy.copy(job.concurrency_groups)
            random.shuffle(job_concurrency_groups)
            for concurrency_group in job_concurrency_groups:
                if concurrency_group not in self.running_groups:
                    self.running_groups[concurrency_group] = []
                # Claim the first group still below its max.
                if len(self.running_groups[concurrency_group]) < concurrency_group.max:
                    run.concurrency_group = concurrency_group
                    break
            if not run.concurrency_group:
                # Every group is full: log the limits and back off.
                # NOTE(review): backoff() is presumably a timedelta —
                # confirm against its definition.
                backoff_time = backoff(run.schedule_time, now)
                self.logger.debug('[{} {}] Cannot run due to concurrency limits ({}), will try again within {}'.format(
                    job.name,
                    run.id,
                    ', '.join([
                        '{}={}'.format(
                            concurrency_group.name,
                            concurrency_group.max,
                        ) for concurrency_group in sorted(job_concurrency_groups)
                    ]),
                    backoff_time,
                ))
                self.wakeups.append(now + backoff_time)
                return

        (run.previous_run, run.previous_good_run, run.previous_bad_run) = self.db.get_previous_runs(job)

        # Ensure the per-run data directory exists before forking.
        if not os.path.exists(os.path.join(self.config.data_dir, 'runs', job.name, run.id)):
            os.makedirs(os.path.join(self.config.data_dir, 'runs', job.name, run.id))

        run.start_time = now
        run.term_sent = False
        run.kill_sent = False

        self.db.insert_running_run(run)

        # Fork: the child executes the run and must never return here;
        # raising makes any such bug loud instead of silently running
        # scheduler code in two processes.
        child_pid = os.fork()
        if child_pid == 0:
            self.run_child_executor(run)
            raise OSError('run_child_executor returned, when it should not have')
        run.pid = child_pid
        self.logger.info('[{} {}] Running PID {}: {}'.format(
            job.name,
            run.id,
            run.pid,
            ' '.join([shquote(x) for x in job.command]),
        ))
        # Move the run from the scheduled to the running bookkeeping.
        self.scheduled_runs.remove(run)
        self.running_runs.append(run)
        if run.concurrency_group:
            self.running_groups[run.concurrency_group].append(run)
        # For respawning jobs with a schedule, queue the next occurrence
        # (rebinding 'run' to the freshly created future run).
        if run.respawn and job.schedule:
            t = get_next_schedule_time(job.schedule, job.name, start_time=now)
            if t is None:
                self.logger.debug('[{}] Schedule {} does not produce a future run, skipping'.format(
                    job.name, job.schedule
                ))
                return
            run = dsari.Run(job)
            run.respawn = True
            run.trigger_type = 'schedule'
            run.schedule_time = t
            self.scheduled_runs.append(run)
            self.logger.debug(
                '[{} {}] Next scheduled run: {} ({})'.format(
                    job.name,
                    run.id,
                    t,
                    (t - now)
                )
            )
示例#21
0
def build(ctx):
    """Set up Powerline: daemon launch (launchd where available), shell
    bindings for Bash/Zsh, theme/colorscheme/config JSON files, and log
    rotation via logrotate.
    """
    if not ctx.env.HAS_POWERLINE:
        return

    if ctx.env.POWERLINE_DAEMON_LAUNCHD:
        # I made up 'net.powerline'; that's not a real domain.
        label = "net.powerline"
        plist_node = ctx.path.find_or_declare(label + ".plist")

        @ctx.rule(target=plist_node, vars=["POWERLINE_DAEMON"])
        def _make_launch_agent(tsk):
            ctx.plist_dump_node(
                OrderedDict(
                    [
                        ("Label", label),
                        ("ProgramArguments", ctx.env.POWERLINE_DAEMON + ["--foreground"]),
                        ("RunAtLoad", True),
                        (
                            "EnvironmentVariables",
                            {
                                # Set LANG to ensure proper output encoding.
                                "LANG": ctx.env.POWERLINE_LANG
                            },
                        ),
                    ]
                ),
                tsk.outputs[0],
            )

        ctx.install_launch_agent(plist_node)

    # These are overrides for where Powerline executables should be found.
    # These are used in case virtualenvs have Powerline installed (most will).
    # We want the Powerline executables from the default Python to be used.

    # Both these environment variables specify full paths to the executables,
    # so according to Powerline docs they should not be quoted.
    # http://powerline.readthedocs.org/en/2.0/configuration/local.html#prompt-command
    ctx.env.SHELL_ENV["POWERLINE_CONFIG_COMMAND"] = ctx.env.POWERLINE_CONFIG[0]
    if ctx.env.POWERLINE_CLIENT:
        # Used for shell and tmux.
        ctx.env.SHELL_ENV["POWERLINE_COMMAND"] = ctx.env.POWERLINE_CLIENT[0]

    # While overriding some executables with absolute paths in environment
    # variables is possible, the powerline client (see 'scripts/powerline.c')
    # uses execvp() to run 'powerline-render' if it can't connect to the daemon
    # (with the shell and Python clients doing similar things). This means that
    # 'powerline-render' needs to be on the PATH in some way, shape, or form.
    # We override this by symlinking it into the scripts directory, which
    # admittedly is kind of a hack.
    ctx.symlink_as(join(ctx.env.SCRIPTS_DIR, "powerline-render"), ctx.env.POWERLINE_RENDER[0])

    ctx.env.PYENV_VIRTUALENV_DEFAULT_PACKAGES.append("powerline-status==2.3")

    def _make_bash(tsk):
        # Generate the Bash rc snippet: start the daemon (when not
        # launchd-managed) and source the Powerline Bash bindings.
        lines = []
        if not tsk.env.POWERLINE_DAEMON_LAUNCHD:
            lines.append("{} --quiet".format(ctx.shquote_cmd(tsk.env.POWERLINE_DAEMON)))
        lines += [
            # For use in the shell version segment.
            # TODO: Make Powerline PR.
            "POWERLINE_COMMAND_ARGS=--renderer-arg="
            # Credit: http://stackoverflow.com/a/9429887
            'shell_version=bash-"$(IFS=.; echo "${BASH_VERSINFO[*]:0:3}")"',
            # Powerline bindings
            "POWERLINE_BASH_CONTINUATION=1",
            "POWERLINE_BASH_SELECT=1",
            "source {}".format(shquote(ctx.get_powerline_path(join("bindings", "bash", "powerline.sh")))),
        ]
        tsk.outputs[0].write("\n".join(lines) + "\n")

    def _make_zsh(tsk):
        # Generate the Zsh rc snippet, analogous to _make_bash.
        lines = []
        if not tsk.env.POWERLINE_DAEMON_LAUNCHD:
            lines.append("{} --quiet".format(ctx.shquote_cmd(tsk.env.POWERLINE_DAEMON)))

        # TODO zpython is disabled until we can figure out how to install/use
        # it properly.
        #
        # The Homebrew package for zpython is old. It still uses the zpython
        # branch of ZyX's zsh repo, <https://bitbucket.org/ZyX_I/zsh>. However,
        # it seems that ZyX is doing all new development in
        # <https://bitbucket.org/ZyX_I/zpython>. We're not really comfortable
        # installing from Homebrew until this is fixed. And we've not had luck
        # compiling zpython in its current state. Furthermore, since zpython
        # depends on both Zsh and Python, it needs to be compiled with both.
        # This is particularly important for Python, as in the past we've had
        # zpython use the system Python with an old version of Powerline while
        # we were using another Python with a newer Powerline. If zpython is
        # ever re-enabled, we need to institute a check to make sure that
        # zpython's Python and the default Python under which Powerline is
        # installed are the same interpreter.
        lines.append("POWERLINE_NO_ZSH_ZPYTHON=1")

        # For use in the shell version segment.
        # TODO: Make Powerline PR.
        lines.append("POWERLINE_COMMAND_ARGS=--renderer-arg=" 'shell_version="$ZSH_NAME-$ZSH_VERSION"')

        lines.append("source {}".format(shquote(ctx.get_powerline_path(join("bindings", "zsh", "powerline.zsh")))))
        tsk.outputs[0].write("\n".join(lines) + "\n")

    # Map each shell to its rc generator explicitly. The previous
    # locals()["_make_{}".format(shell)] lookup was fragile: it addressed
    # the functions by name-as-string, which silently breaks under
    # renames and is opaque to static analysis. An unknown shell still
    # fails loudly (KeyError), as before.
    rc_generators = {"bash": _make_bash, "zsh": _make_zsh}
    for shell in ctx.env.AVAILABLE_SHELLS:
        out_node = ctx.path.find_or_declare("powerline." + shell)
        ctx.add_shell_rc_node(out_node, shell)
        ctx(rule=rc_generators[shell], target=out_node, vars=["POWERLINE_DAEMON", "POWERLINE_DAEMON_LAUNCHD"])

    def _declare(base_path):
        # Declare a Powerline JSON config node under the dotfiles tree.
        return ctx.path.find_or_declare(join("dotfiles", "config", "powerline", join(*base_path) + ".json"))

    config_node = _declare(["config"])

    @ctx.rule(target=config_node, vars=["POWERLINE_SEGMENTS_PATH", "POWERLINE_LOG_PATH"])
    def _make_config(tsk):
        _json_dump_node(
            {
                "common": {
                    "paths": [tsk.env.POWERLINE_SEGMENTS_PATH],
                    "log_file": tsk.env.POWERLINE_LOG_PATH,
                    "log_level": "INFO",
                },
                "ext": {"shell": {"theme": "sean"}, "tmux": {"theme": "sean"}},
            },
            tsk.outputs[0],
        )

    shell_theme_node = _declare(["themes", "shell", "sean"])

    @ctx.rule(target=shell_theme_node, vars=ctx.env.RBENV_TOOLS)
    def _make_shell_theme(tsk):
        # TODO: Consider moving this back to a JSON file which gets read and
        # merged.
        top_left = [
            {
                "function": "powerline.segments.common.net.hostname",
                "priority": 30,
                "args": {
                    # Always include the hostname.
                    "only_if_ssh": False,
                    "exclude_domain": True,
                },
            },
            {
                "function": "powerline.segments.common.env.user",
                "priority": 30,
                "before": "👤  ",  # Unicode BUSTS IN SILHOUETTE
            },
            {
                "function": "powerline_sean_segments.shell_version",
                "priority": 40,
                "before": "🐚  ",  # Unicode SPIRAL SHELL
            },
            {
                "function": "powerline.segments.shell.cwd",
                "args": {
                    # Don't split the cwd into multiple
                    # Powerline segments.
                    "use_path_separator": True,
                    # Don't ever shorten the cwd.
                    "dir_limit_depth": None,
                },
                "before": "📂  ",  # Unicode OPEN FILE FOLDER
            },
            {
                "function": ("powerline.segments.common.vcs.branch"),
                "priority": 10,
                "args": {
                    # Show whether the branch is dirty.
                    "status_colors": True
                },
            },
        ]
        # Append a version segment for each *env tool that is installed.
        for tool, icon in [
            # Unicode SNAKE
            ("pyenv", "🐍"),
            # Unicode GEM STONE
            ("rbenv", "💎"),
            # Unicode DROMEDARY CAMEL
            ("plenv", "🐪"),
            # Unicode HOT BEVERAGE
            ("jenv", "☕"),
            # Unicode WHITE HEXAGON
            ("nodenv", "⬡"),
        ]:
            if tsk.env[tool.upper()]:
                top_left.append(dict(function="powerline_sean_segments." + tool, before=icon + "  "))
        theme = {
            "segments": {
                # The 'above' key allows us to have a multiline prompt.
                # https://github.com/Lokaltog/powerline/issues/462#issuecomment-46806521
                "above": [{"left": top_left}],
                "left": [{"type": "string", "contents": "$", "highlight_groups": ["cwd"]}],
                "right": [
                    {
                        # last_pipe_status is way cooler than the normal
                        # last_status. If any of the pipe commands fail, it
                        # will show the exit status for each of them. For
                        # example, try running:
                        #
                        #     true | false | true
                        #
                        "function": ("powerline.segments.shell.last_pipe_status"),
                        "priority": 10,
                    }
                ],
            }
        }

        _json_dump_node(theme, tsk.outputs[0])

    # We name this file 'colorschemes/shell/default.json' so that it overrides
    # the Powerline 'colorschemes/shell/default.json', but still inherits from
    # 'colorschemes/default.json'.
    shell_colorscheme_node = _declare(["colorschemes", "shell", "default"])

    @ctx.rule(target=shell_colorscheme_node, vars=ctx.env.RBENV_TOOLS)
    def _make_shell_colorscheme(tsk):
        groups = dict(shell_version=dict(fg="gray70", bg="darkestpurple"))
        # Only color groups for tools that are actually installed.
        for tool, group in dict(
            pyenv=dict(fg="brightyellow", bg="mediumgreen"),
            rbenv=dict(fg="brightestorange", bg="darkestred"),
            plenv=dict(fg="gray9", bg="darkestblue"),
            jenv=dict(fg="gray10", bg="darkestpurple"),
            nodenv=dict(fg="gray10", bg="darkgreen"),
        ).items():
            if tsk.env[tool.upper()]:
                groups[tool] = group
        _json_dump_node({"name": "Sean's color scheme for shell prompts", "groups": groups}, tsk.outputs[0])

    tmux_theme_node = _declare(["themes", "tmux", "sean"])
    # Use an ordered dict else it will trigger unnecessary rebuilds.
    mail_vars = OrderedDict(
        (base, "POWERLINE_MAIL_" + base.upper()) for base in ["server", "port", "username", "password"]
    )

    @ctx.rule(target=tmux_theme_node, vars=list(mail_vars.values()))
    def _make_tmux_theme(tsk):
        # TODO: Consider moving this back to a JSON file which gets read and
        # merged.
        segments_right = [
            {"function": "powerline.segments.common.sys.cpu_load_percent", "priority": 15},
            {
                "function": "powerline.segments.common.wthr.weather",
                "args": {"unit": "F", "location_query": "Jenison, Michigan"},
                "priority": 20,
            },
        ]
        # Will be set to an empty list if keyring.get_password() returns None.
        if tsk.env.POWERLINE_MAIL_PASSWORD != []:
            segments_right.append(
                {
                    "function": "powerline.segments.common.mail.email_imap_alert",
                    "args": dict((base, tsk.env[var]) for base, var in mail_vars.items()),
                    "priority": 10,
                }
            )

        segments_right += [
            {
                "function": "powerline.segments.common.net.internal_ip",
                "before": "I ",
                "args": {"interface": "default_gateway"},
                "priority": 10,
            },
            {
                "function": "powerline.segments.common.net.external_ip",
                "before": "E ",
                "args": {"query_url": "http://ipv4.icanhazip.com/"},
                "priority": 5,
            },
        ]

        _json_dump_node(
            {
                # Set the dividers to make the layout more compact. This is
                # copied from 'themes/powerline.json' and space has been
                # removed. It doesn't look the greatest to the left of the
                # current window index, but it allows for more status line real
                # estate.
                #
                # Note on the dividers issue: The real problem here is that the
                # classic Powerline dividers are used as characters in the
                # actual window status field of tmux and not as tmux window
                # status separators. This is presumably due to the technical
                # reason that tmux only allows a global separator string. This
                # means that the classic Powerline symbols can't truly invert
                # when the current window changes, as each window status has
                # its own fixed width which the Powerline dividers inhabit. If
                # this is not done, then the window statuses shift when the
                # current window changes. The end result of this is that each
                # divider takes up twice the amount of space it actually needs,
                # to reserve space for it when it's not present. Ugh.
                "dividers": {"left": {"hard": ""}},
                "segments": {"right": segments_right},
            },
            tsk.outputs[0],
        )

    ctx(
        source=[
            config_node,
            shell_theme_node,
            shell_colorscheme_node,
            tmux_theme_node,
            # These files don't need any configuration.
            join("dotfiles", "config", "powerline", "colorschemes", "tmux", "default.cjson"),
        ]
    )

    # Install segments file.
    ctx.install_dotfile(ctx.path.find_resource(["dotfiles", "config", "powerline", "powerline_sean_segments.py"]))

    # Rotate the log using logrotate (if available)
    logrotate_conf_in_node = ctx.path.find_resource(["dotfiles", "config", "powerline", "logrotate.conf.in"])
    logrotate_conf_node = logrotate_conf_in_node.change_ext("")
    ctx(
        features="subst",
        source=logrotate_conf_in_node,
        target=logrotate_conf_node,
        # logrotate paths follow shell quoting rules.
        LOG_PATH=shquote(ctx.env.POWERLINE_LOG_PATH),
    )
    ctx.env.LOGROTATE_NODES.append(logrotate_conf_node)
示例#22
0
def setup_shell_defaults(self):
    """Set up shell defaults. Call this function before opening up the shell
    setups to other tools.

    Initializes SHELL_ENV, SHELL_ALIASES, per-shell rc/profile node
    lists, shell key bindings, base profile/rc nodes, PATH-style
    variables, and the (non-Powerline) prompt configuration.
    """
    # Set variables to be used in the build process.

    # A mapping of environment variable name to value. The values are raw,
    # i.e., they will not be shell-quoted. If that is desired, it needs to be
    # done before insertion to the data structure. The order here is important,
    # as variables may use earlier variables in their values.
    self.env.SHELL_ENV = OrderedDict()
    # A mapping of alias name to command. The command is a [Python] string
    # which will be shell-quoted when written out. Don't forget to
    # pre-shell-quote any paths from the configure step. This is ordered just
    # to keep related aliases together in the generated file -- the order
    # should not really matter, though.
    self.env.SHELL_ALIASES = OrderedDict()
    # Use the list of configurable shells so that other tasks can add to
    # specific shells without getting errors.
    for shell in self.env.CONFIGURABLE_SHELLS:
        shell_up = shell.upper()
        # A mapping of shell to shell file nodes to include in the compiled rc
        # files.
        self.env[shell_up + '_RC_NODES'] = []
        # A mapping of shell to shell file nodes to include in the compiled
        # profile files.
        self.env[shell_up + '_PROFILE_NODES'] = []

    # Add some aliases for shells without all the customizations.
    # Bash is assumed to always be configured; Zsh is optional.
    self.env.SHELL_ALIASES['bash-basic'] = self.shquote_cmd(
        self.env.BASH + ['--noprofile', '--norc'])
    if self.env.ZSH:
        self.env.SHELL_ALIASES['zsh-basic'] = self.shquote_cmd(
            self.env.ZSH + ['--no-rcs'])

    # Key bindings
    #
    # There is apparently no way portable between Bash and Zsh to declare
    # subscripts to an associative array which have backslashes. In the past,
    # we used an intermediate `key' variable workaround to get consistent
    # quoting. However, now that we have the superb data structures of Python,
    # and the ability to change what goes in to them via configuration options,
    # we opted for the Python data structure.
    #
    # We use an OrderedDict to guarantee a stable order for the build.
    self.env.SHELL_KEYBINDINGS = OrderedDict([
        # Paging
        # Note: `|&' is Bash 4 and Zsh only.
        # Note: We used to bind this to C-j, but that interferes with
        # tmuxifier's `run_cmd "clear"' line in `lib/layout-helpers.sh' for
        # some reason. It causes the command strings to be piped to less, which
        # brings the terminal into less and doesn't execute the commands. Not
        # sure about the cause.
        (r'\C-x\C-l', r' |& less\C-m'),
        # Executing last command.
        # This is equivalent to pressing C-p or the up arrow, then Enter.
        (r'\C-xp', r'\C-p\C-m'),
        # Up a directory, aliased to `u' for me. Note: `\ej' means `ESC+' then
        # `j' as opposed to `\M-j', which means `Meta' then `j'. I have both
        # Option keys on my Mac configured to send `ESC+' in iTerm2. Actually
        # sending Meta is apparently a relic of the past, and ESC+ should be
        # used now.
        (r'\ej', r'u\C-m'),
        # Lolcat keybindings. This would be nice to make part of the
        # configuration, but lolcat is an rbenv-managed gem, and that makes it
        # hard to do.
        (r'\C-xl', r' |& lolcat\C-m'),
        (r'\C-xL', r' |& lolcat --force |& less -R\C-m'),
        (r'\C-xa', r' |& lolcat --animate\C-m'),
    ])

    # Include base profile nodes.
    self.add_shell_profile_node(self.path.find_resource([
        'shell', 'profile-base.sh']))

    # Prepare path variables.
    for var in self.env.PATH_VARS:
        # The backslashes make it a little more readable in the file
        # (at the cost of being readable here).
        self.env.SHELL_ENV[var] = '\\\n{}\n'.format((os.pathsep + '\\\n').join(
            map(shquote, self.env[var])))

    # This file comes first in the rc list. We don't want Bash or Zsh scripts
    # to load our entire configuration just to run. That would make them very
    # slow.
    exit_if_nonint_node = self.path.find_resource(
        ['shell', 'exit-if-noninteractive.sh'])
    self.add_shell_rc_node(exit_if_nonint_node)

    # Include zpython for zsh, if available.
    if self.env.ZPYTHON_MODULE_PATH:
        in_node = self.path.find_resource(['shell', 'zpython.zsh.in'])
        out_node = self.path.find_or_declare('zpython.zsh')
        self.env.ZSH_RC_NODES.append(out_node)
        self(features='subst',
             target=out_node,
             source=in_node,
             ZPYTHON_MODULE_PATH=shquote(self.env.ZPYTHON_MODULE_PATH))

    if not self.env.POWERLINE:
        # No powerline; enable basic prompt.

        # Include the prompt file for Bash.
        self.env.BASH_RC_NODES.append(self.path.find_resource([
            'shell', 'prompt.bash']))

        # Turn on our Oh My Zsh theme for Zsh.
        zsh_theme = 'bira-simple'
    else:
        # No Oh My Zsh theme needed if we are using Powerline.
        zsh_theme = ''

    # Include default rc nodes.
    self.env.BASH_RC_NODES.append(
        self.path.find_resource(['shell', 'rc-base.bash']))

    if self.env.ZSH:
        in_node = self.path.find_resource(['shell', 'rc-base.zsh.in'])
        out_node = self.path.find_or_declare('rc-base.zsh')
        self.env.ZSH_RC_NODES.append(out_node)
        self(features='subst',
             target=out_node,
             source=in_node,
             ZSH_THEME=shquote(zsh_theme))
示例#23
0
 def _make_zsh_keys(tsk):
     with open(tsk.outputs[0].abspath(), 'w') as out_file:
         for key, binding in tsk.env.SHELL_KEYBINDINGS.items():
             print(
                 'bindkey -s {} {}'.format(shquote(key), shquote(binding)),
                 file=out_file)
示例#24
0
文件: common.py 项目: kernt/ploy
def shjoin(args):
    """Join *args* into one command string, shell-quoting each element."""
    return ' '.join(map(shquote, args))
示例#25
0
def padlock_unlock(encrypted_path):
    """
    Unlock the padlock (this is: mount the directory).

    Depending on what is present in `encrypted_path`, mounting is done
    either via encfs (GPG-encrypted keyfile + config present) or via a
    custom executable unlock command.

    Returns True if the padlock originally was locked, otherwise False.
    """
    # Location of GPG-encrypted keyfile to use
    keyfile = os.path.join(encrypted_path, ENCFS_KEYFILE)
    configfile = os.path.join(encrypted_path, ENCFS_CONFIGFILE)
    crypted_configfile = configfile+'.asc'

    # Location of an alternative executable that will mount the decrypted dir
    unlock_cmd = os.path.join(encrypted_path, PADLOCK_CMD)

    # Cut the EncFS directory prefix to get the decrypted directory name
    decrypted_path = ''.join(encrypted_path.rsplit(ENCFS_PREFIX, 1))

    # Decide the unlock strategy; unlock_cmd is set to None when the
    # encfs path is taken.
    if os.path.exists(keyfile) and os.path.exists(crypted_configfile):
        print("Mounting '{}' using encfs..".format(encrypted_path))
        unlock_cmd = None
    elif os.access(unlock_cmd, os.X_OK):
        print(
            "Mounting '{}' using '{}'..".format(encrypted_path, unlock_cmd))
    else:
        # Nothing usable found; treat as "not locked".
        return False

    # Check if encrypted directory is already mounted
    # NB: This only tests whether the decrypted path is a mount-point at
    # all, no matter if it is fuse.encfs or not.
    if os.path.ismount(decrypted_path):
        # if it is already mounted, do nothing
        return False

    # Make sure the mount directory exists
    if not os.path.isdir(decrypted_path):
        os.makedirs(decrypted_path)

    # Custom unlock command path: run it and report we unlocked.
    if unlock_cmd is not None:
        subprocess.check_output([unlock_cmd, encrypted_path, decrypted_path],
                                stderr=subprocess.STDOUT)
        return True

    # Make sure the named pipe for the configfile exists
    if not os.path.exists(configfile):
        os.mkfifo(configfile)
    elif not stat.S_ISFIFO(os.stat(configfile).st_mode):
        # errno 17 == EEXIST
        raise IOError(17, configfile+' exists but is not a fifo')

    # Start encfs. It will wait for input on the `configfile` named
    # pipe. The --extpass command is a single shell string, hence the
    # explicit shell quoting of the keyfile path.
    encfs = subprocess.Popen([
        ENCFS, encrypted_path, decrypted_path,
        '--extpass',
        GPG + ' --decrypt --no-mdc-warning --output - %s' % shquote(keyfile)])
    # now decrypt the config and write it into the named pipe
    with open(configfile, 'w') as fh:
        # NB: gpg must write to stdout to avoid it asking whether
        # the file should be overwritten
        subprocess.Popen([GPG,
                          '--decrypt', '--no-mdc-warning', '--output', '-',
                          crypted_configfile], stdout=fh).wait()
    encfs.wait()
    os.remove(configfile)
    return True
示例#26
0
def build(ctx):
    """Wire up rbenv/pyenv shell integration and pyenv-virtualenv extras."""
    for shell in ctx.env.AVAILABLE_SHELLS:
        # Per-shell loader snippets for every *env tool detected at
        # configure time.
        for tool in ctx.env.RBENV_TOOLS:
            env_key = tool.upper()
            if not ctx.env[env_key]:
                continue
            targets = []
            for kind in ['profile', 'rc']:
                node = ctx.path.find_or_declare(
                    '{}-{}.{}'.format(tool, kind, shell))
                targets.append(node)
                ctx.add_shell_node(node, kind, shell)
            ctx(rule=_make_tool_files, target=targets,
                vars=[env_key])

        if ctx.env.PYENV_VIRTUALENV:
            # pyenv-virtualenv gets its own loader files as well.
            targets = []
            for kind in ['profile', 'rc']:
                node = ctx.path.find_or_declare(
                    'pyenv-virtualenv-{}.{}'.format(kind, shell))
                targets.append(node)
                ctx.add_shell_node(node, kind, shell)

            ctx(rule=_make_pyenv_virtualenv_files, target=targets,
                vars=['PYENV', 'PYENV_VIRTUALENV'])

    # Requirements file listing packages for the default Python.
    default_py_reqs_node = ctx.path.find_resource([
        'dotfiles', 'pyenv', 'pyenv.d', 'requirements-default-python.txt'])
    ctx.install_dotfile(default_py_reqs_node)

    if ctx.env.PYENV_VIRTUALENV:
        # Keep pyenv-virtualenv from rewriting the prompt; see
        # https://github.com/yyuu/pyenv-virtualenv/issues/135
        ctx.env.SHELL_ENV['PYENV_VIRTUALENV_DISABLE_PROMPT'] = '1'

        # Generated requirements file naming the default packages that
        # every new virtualenv should receive.
        venv_reqs_basename = 'requirements-default-virtualenv.txt'
        venv_reqs_node = ctx.path.find_or_declare([
            'dotfiles', 'pyenv', 'pyenv.d', venv_reqs_basename])

        @ctx.rule(target=venv_reqs_node,
                  vars=['PYENV_VIRTUALENV_DEFAULT_PACKAGES'])
        def _make_default_venv_reqs_file(tsk):
            with open(tsk.outputs[0].abspath(), 'w') as reqs_file:
                reqs_file.writelines(
                    pkg + '\n'
                    for pkg in ctx.env.PYENV_VIRTUALENV_DEFAULT_PACKAGES)

        pyenv_build_dir = ctx.bldnode.find_dir(['dotfiles', 'pyenv'])
        venv_reqs_install_path = join(
            ctx.env.PYENV_ROOT,
            venv_reqs_node.path_from(pyenv_build_dir))

        # Hook that installs the default packages into each new virtualenv.
        # Hook locations are documented at:
        # https://github.com/yyuu/pyenv/wiki/Authoring-plugins
        hook_src = ctx.path.find_resource([
            'dotfiles', 'pyenv', 'pyenv.d',
            'virtualenv', 'install-default-packages.bash.in',
        ])
        hook_dst = hook_src.change_ext(ext='.bash', ext_in='.bash.in')
        ctx(features='subst',
            target=hook_dst,
            source=hook_src,
            REQUIREMENTS_PATH=shquote(venv_reqs_install_path))

        # Install under the root that pyenv itself reports rather than
        # assuming the usual dotfile install location.
        ctx.install_as(
            join(ctx.env.PYENV_ROOT,
                 hook_dst.path_from(pyenv_build_dir)),
            hook_dst)
        ctx.install_as(
            join(ctx.env.PYENV_ROOT,
                 venv_reqs_node.path_from(pyenv_build_dir)),
            venv_reqs_node)

        # Convenience wrapper scripts.
        ctx.install_subst_script('venv', PYENV=shquote(ctx.env.PYENV[0]))
        ctx.install_subst_script(
            'pyup',
            PYTHON=ctx.env.DEFAULT_PYTHON,
            PYENV=repr(ctx.env.PYENV[0]),
            REQUIREMENTS_DEFAULT_PYTHON_PATH=repr(ctx.dotfile_install_path(
                default_py_reqs_node)),
            REQUIREMENTS_DEFAULT_VENV_PATH=repr(venv_reqs_install_path))
示例#27
0
    def write(self, job: CommandLineJob):
        """Generate the launcher shell script for *job* and return its path.

        The script is written to ``<jobpath>/<jobname>.sh`` and made
        executable through the job's connector. It changes into the job
        directory, flock-locks ``self.lockfiles``, exports the launcher's
        environment, runs ``self.command`` in a background subshell, and
        installs a ``cleanup`` exit trap that removes the PID file and
        named redirection files, optionally notifies the notification URL,
        and kills remaining child processes. A non-zero exit code is
        recorded in ``job.failedpath``; on success ``job.donepath`` is
        touched.

        Arguments:
            job {CommandLineJob} -- the job to generate a script for

        Returns:
            the path of the generated, executable script
        """
        assert isinstance(job, CommandLineJob)
        assert self.command is not None
        assert job.workspace

        directory = job.jobpath
        connector = job.launcher.connector
        # Job directory path as resolved by the connector (the form the
        # generated script will `cd` into).
        directorypath = connector.resolve(directory)
        ws = job.workspace
        context = ShCommandContext(ws, job.launcher.connector, directory,
                                   job.name, job.config)

        # Shell-quoted path expressed relative to the job directory.
        relpath = lambda path: shquote(context.relpath(path))

        scriptpath = job.jobpath / ("%s.sh" % job.name)
        pidpath = relpath(job.pidpath)

        logger.info("Writing script %s", scriptpath)
        with scriptpath.open("wt") as out:
            out.write("#!{}\n".format(self.shpath))
            out.write("# Experimaestro generated task\n")

            # --- Checks locks right away

            # Change into the job directory before anything else.
            out.write(f"""cd {shquote(directorypath)}\n""")

            # Lock all the needed files; lock file descriptors start at 9
            # so the standard descriptors remain untouched.
            FIRST_FD = 9
            for i, path in enumerate(self.lockfiles):
                out.write(SH_FLOCK.format(i + FIRST_FD, relpath(path)))

            # Use pipefail for fine grained analysis of errors in commands
            out.write("set -o pipefail\n\n")

            # Export the launcher's environment variables, shell-quoted.
            for name, value in job.launcher.environ.items():
                out.write("""export {}={}\n""".format(name, shquote(value)))

            # Emit any preprocessing commands before the task itself.
            if self.preprocessCommands:
                self.preprocessCommands.output(context, out)

            # --- CLEANUP

            out.write("cleanup() {\n")

            # Announce the cleanup on stderr.
            out.write(" echo Cleaning up 1>&2\n")

            # Remove traps so cleanup does not re-trigger itself on exit.
            out.write(" trap - 0\n")

            # Remove PID file
            # NOTE(review): no leading space, unlike the other cleanup body
            # lines -- confirm the asymmetric indentation is intentional.
            out.write("""rm %s\n""" % pidpath)

            # Remove temporary files

            def cleanup(c: CommandPart):
                # Drop the named redirection files attached to this part.
                namedRedirections = context.getNamedRedirections(c, False)
                for file in namedRedirections.redirections():
                    out.write(" rm -f {}\n".format(relpath(file)))

            self.command.forEach(cleanup)

            # Best-effort end-of-job notification (short timeouts, one try).
            if self.notificationURL:
                out.write(
                    " wget --tries=1 --connect-timeout=1 --read-timeout=1 --quiet -O "
                )
                out.write('/dev/null "$XPM_NOTIFICATION_URL?status=eoj"\n')

            # Kill any processes still parented to the task's PID.
            out.write(' test ! -z "$PID" && pkill -KILL -P $PID')
            out.write("\n")

            out.write("}\n")

            # --- END CLEANUP

            out.write("# Set trap to cleanup when exiting")
            out.write("\n")
            out.write("trap cleanup 0")
            out.write("\n")

            out.write("\n")

            # checkerror: exit with the first non-zero status among its
            # arguments; status 141 (SIGPIPE) is tolerated.
            out.write("""checkerror()  { 
    local e; for e in \"$@\"; do [[ \"$e\" != 0 ]] && [[ "$e" != 141 ]] && exit $e; done; 
    return 0; }\n\n""")

            # Run the full command in a background subshell so its PID can
            # be captured and waited on.
            out.write("(\n")
            self.command.output(context, out)
            out.write(") & \n")

            # Retrieve PID
            out.write("PID=$!\n")
            out.write("wait $PID\n")
            out.write("code=$?\n")
            # Record a failure marker with the exit code, then propagate it.
            out.write("if test $code -ne 0; then\n")
            out.write(" echo $code > {}\n".format(relpath(job.failedpath)))
            out.write(" exit $code\n")
            out.write("fi\n")
            out.write("\n")
            # Success: touch the "done" marker file.
            out.write("touch {}\n".format(relpath(job.donepath)))

        # Set the file as executable
        connector.setExecutable(scriptpath, True)
        return scriptpath
示例#28
0
def shjoin(args):
    """Shell-quote each element of *args* and join them with spaces."""
    return ' '.join(map(shquote, args))