Example #1
    def on_click_dispatcher(self, module_name, event, command):
        """
        Dispatch on_click config parameters to either:
            - Our own methods for special py3status commands (listed below)
            - The i3-msg program which is part of i3wm
        """
        if command is None:
            return
        elif command == 'refresh_all':
            self.py3_wrapper.refresh_modules()
        elif command == 'refresh':
            self.py3_wrapper.refresh_modules(module_name)
        else:
            # In commands we are able to use substitutions for the text output
            # of a module
            if '$OUTPUT' in command or '$OUTPUT_PART' in command:
                full_text, partial_text = self.get_module_text(
                    module_name, event)
                command = command.replace('$OUTPUT_PART',
                                          shell_quote(partial_text))
                command = command.replace('$OUTPUT', shell_quote(full_text))

            # this is an i3 message
            self.i3_msg(module_name, command)
            # to make the bar more responsive to users we ask for a refresh
            # of the module or of i3status if the module is an i3status one
            self.py3_wrapper.refresh_modules(module_name)
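
In this and most of the snippets below, shell_quote is simply an alias for shlex.quote (pipes.quote on Python 2). Assuming that binding, a minimal, self-contained sketch of the $OUTPUT substitution above (the module output string is made up for illustration):

from shlex import quote as shell_quote

full_text = "load: 0.42; disk 80%"   # hypothetical module output
command = "notify-send $OUTPUT"
command = command.replace("$OUTPUT", shell_quote(full_text))
print(command)  # notify-send 'load: 0.42; disk 80%'
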
Example #2
    def on_click_dispatcher(self, module_name, event, command):
        """
        Dispatch on_click config parameters to either:
            - Our own methods for special py3status commands (listed below)
            - The i3-msg program which is part of i3wm
        """
        if command is None:
            return
        elif command == 'refresh_all':
            self.py3_wrapper.refresh_modules()
        elif command == 'refresh':
            self.py3_wrapper.refresh_modules(module_name)
        else:
            # In commands we are able to use substitutions for the text output
            # of a module
            if '$OUTPUT' in command or '$OUTPUT_PART' in command:
                full_text, partial_text = self.get_module_text(module_name,
                                                               event)
                command = command.replace('$OUTPUT_PART',
                                          shell_quote(partial_text))
                command = command.replace('$OUTPUT', shell_quote(full_text))

            # this is an i3 message
            self.i3_msg(module_name, command)
            # to make the bar more responsive to users we ask for a refresh
            # of the module or of i3status if the module is an i3status one
            self.py3_wrapper.refresh_modules(module_name)
Example #3
    def upload(self, release_id):
        from fabric.contrib import files
        import posixpath

        upload_storage_path = self._abs_path(self.upload_storage_path)
        self._ensure_path_exists(upload_storage_path)
        upload_release_path = self._get_upload_release_path(release_id)
        self._ensure_path_exists(upload_release_path)

        upload_storage_current_link = self._path_join(upload_storage_path, self.upload_storage_current_link)

        for local_path, remote_path in self.upload_paths:
            if files.exists(remote_path) and not files.is_link(remote_path):
                raise RuntimeError('Remote path already exists, but is no symlink (%s)' % remote_path)

            remote_upload_path = self._get_remote_upload_path(release_id, remote_path)

            if self._exists(upload_storage_current_link) and not self._exists(remote_upload_path):
                remote_upload_pathname = self._get_remote_upload_pathname(release_id, remote_path)
                active_remote_upload_path = self._path_join(upload_storage_current_link, remote_upload_pathname)
                cp_active_remote_upload_path = active_remote_upload_path.rstrip(posixpath.sep) + posixpath.sep
                cp_remote_upload_path = remote_upload_path.rstrip(posixpath.sep) + posixpath.sep
                self._run('{cp_bin} {cp_args} {from_path} {to_path}'.format(
                    cp_bin=self.cp_bin(),
                    cp_args=self.cp_args,
                    from_path=shell_quote(cp_active_remote_upload_path),
                    to_path=shell_quote(cp_remote_upload_path),
                ))

            self._rsync_upload(local_path, remote_upload_path)
Example #4
    def run_test_cmd(self, cmd, log):
        cmd = ". bin/activate {0}; {1}".format(shell_quote(self.env_dir), cmd)
        cmd = "bash -c {0}".format(shell_quote(cmd))

        self.print("$ cd cache/env; " + cmd, level=1)

        subprocess.call(cmd, stdout=log, stderr=subprocess.STDOUT, shell=True,
                        cwd=self.env_dir)
Example #5
    def run_test_cmd(self, cmd, log):
        cmd = ". bin/activate {0}; {1}".format(shell_quote(self.env_dir), cmd)
        cmd = "bash -c {0}".format(shell_quote(cmd))

        self.print("$ cd cache/env; " + cmd, level=1)

        subprocess.call(cmd, stdout=log, stderr=log, shell=True,
                        cwd=self.env_dir)
Example #6
 def handle(self, event, container_id):
     cmd="%s %s %s %s" % (
         self.resolved_script,
         shell_quote(self._ini_file),
         shell_quote(event),
         shell_quote(container_id),
     )
     logger.debug('running %r', cmd)
     status=os.system(cmd)
     logger.debug('script of %r returned %r', self._base_name, status)
Example #7
def convert_epub_to_html(input_path_storage=None, logger=None):
    """Convert epub to html.

    Parameters
    ----------
    input_path_storage : None, optional
        Path to where .epub files are stored.
    logger : LogSystem
        The logger.
    """
    input_path = file_utils.expand_path(input_path_storage) if input_path_storage else \
        os.path.join(PATHS["convertions"], "epub_to_html")

    for dirname, dirnames, filenames in os.walk(input_path, topdown=False):
        for filename in filenames:
            if filename.endswith(".epub"):
                f_path = os.path.join(dirname, filename)
                f_name = os.path.basename(f_path)
                dst_name = os.path.splitext(f_name)[0]
                dst_path = os.path.join(os.path.dirname(f_path), dst_name)
                dst_file = os.path.join(dst_path, "index.html")

                if file_utils.is_real_file(dst_file):
                    continue

                logger.info(shell_utils.get_cli_separator("-"), date=False)
                logger.info("**Converting:**")
                logger.info(f_path, date=False)

                os.makedirs(dst_path, mode=0o777, exist_ok=True)

                cmd = [
                    "pandoc", "--standalone",
                    shell_quote(f_path), "--output",
                    shell_quote(dst_file),
                    "--css=/assets/css/bootstrap.min.css",
                    "--css=/assets/css/bootstrap.tweaks.css",
                    "--extract-media=assets",
                    "--template=%s" %
                    shell_quote(PATHS["pandoc_html_template"]), "--wrap=none",
                    "--no-highlight", "--table-of-contents", "--to=html5"
                ]

                try:
                    logger.info(" ".join(cmd), date=False)

                    cmd_utils.run_cmd(" ".join(cmd),
                                      stdout=None,
                                      stderr=None,
                                      cwd=dst_path,
                                      shell=True,
                                      check=True)
                except CalledProcessError as err:
                    logger.error(err)
Example #8
    def run(self,
            args,
            stdin_string=None,
            env_extend=None,
            binary_output=False):
        # Allow overriding default settings. If a piece of code really wants to
        # set own PATH or CIB_file, we must allow it. I.e. it wants to run
        # a pacemaker tool on a CIB in a file but cannot afford the risk of
        # changing the CIB in the file specified by the user.
        env_vars = self._env_vars.copy()
        env_vars.update(dict(env_extend) if env_extend else dict())

        log_args = " ".join([shell_quote(x) for x in args])
        self._logger.debug(
            "Running: {args}\nEnvironment:{env_vars}{stdin_string}".format(
                args=log_args,
                stdin_string=("" if not stdin_string else (
                    "\n--Debug Input Start--\n{0}\n--Debug Input End--".format(
                        stdin_string))),
                env_vars=("" if not env_vars else ("\n" + "\n".join([
                    "  {0}={1}".format(key, val)
                    for key, val in sorted(env_vars.items())
                ])))))
        self._reporter.process(
            reports.run_external_process_started(log_args, stdin_string,
                                                 env_vars))

        try:
            process = subprocess.Popen(
                args,
                # Some commands react differently if they get anything via stdin
                stdin=(subprocess.PIPE
                       if stdin_string is not None else subprocess.DEVNULL),
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                preexec_fn=(
                    lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL)),
                close_fds=True,
                shell=False,
                env=env_vars,
                # decodes newlines and in python3 also converts bytes to str
                universal_newlines=(not binary_output))
            out_std, out_err = process.communicate(stdin_string)
            retval = process.returncode
        except OSError as e:
            raise LibraryError(
                reports.run_external_process_error(log_args, e.strerror))

        self._logger.debug(
            ("Finished running: {args}\nReturn value: {retval}" +
             "\n--Debug Stdout Start--\n{out_std}\n--Debug Stdout End--" +
             "\n--Debug Stderr Start--\n{out_err}\n--Debug Stderr End--"
             ).format(args=log_args,
                      retval=retval,
                      out_std=out_std,
                      out_err=out_err))
        self._reporter.process(
            reports.run_external_process_finished(log_args, retval, out_std,
                                                  out_err))
        return out_std, out_err, retval
Example #9
 def as_shell(self):
     cmd = ' '.join(shell_quote(i) for i in self.args)
     if self.pre_pipe:
         cmd = '{} | {}'.format(self.pre_pipe, cmd)
     if self.post_pipe:
         cmd = '{} | {}'.format(cmd, self.post_pipe)
     return cmd
Example #10
    def gen_command_line_args(self, parser_arguments):
        """Generate a gen.py argument list to be embedded in a Ninja file."""
        result = []
        for args, kwargs in self._arguments:
            if len(args) == 2:
                long_option = args[1]
            else:
                long_option = args[0]
            dest = kwargs.get('dest', None)
            if dest is None:
                assert long_option.startswith('--')
                dest = long_option[2:].replace('-', '_')

            if getattr(parser_arguments, dest, None) is None:
                # This was not set on the command-line so skip it.
                continue

            action = kwargs.get('action', None)
            if action == 'store_true':
                if getattr(parser_arguments, dest):
                    result.append(long_option)
            elif action == 'store' or action is None:
                result.append('%s=%s' %
                              (long_option, getattr(parser_arguments, dest)))
            elif action == 'append':
                for item in getattr(parser_arguments, dest):
                    result.append('%s=%s' % (long_option, item))
            else:
                assert action is None, "Unsupported action " + action
        return ' '.join(shell_quote(item) for item in result)
Example #11
 def switch_release(self, release_id):
     git_bin = self.git_bin()
     for local_path, remote_path in self.upload_paths:
         with self._cd(remote_path):
             self._run('{git_bin} checkout {sha}'.format(
                 git_bin=git_bin,
                 sha=shell_quote(self._release_tag(release_id)),
             ))
Example #12
 def switch_release(self, release_id):
     git_bin = self.git_bin()
     for local_path, remote_path in self.upload_paths:
         with self._cd(remote_path):
             self._run('{git_bin} checkout {sha}'.format(
                 git_bin=git_bin,
                 sha=shell_quote(self._release_tag(release_id)),
             ))
Example #13
def value_str(s):
	if isinstance(s, bool):
		return 'true' if s else 'false'
	if isinstance(s, int):
		return str(s)
	if not s:
		return s
	return shell_quote(str(s))
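
A quick check of what each branch returns, assuming the value_str above is run with shell_quote bound to shlex.quote (the usual binding in these snippets):

import shlex
shell_quote = shlex.quote

print(value_str(True))   # true
print(value_str(42))     # 42
print(value_str(""))     # empty string: falsy values pass through untouched
print(value_str("a b"))  # 'a b'  (everything else is shell-quoted)
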
Example #14
    def upload(self, release_id, commit_message=None):
        import datetime

        warnings.warn(
            'The GitUploader has no way to follow the normal upload schema where changes to the files are '
            'only done when calling switch_release. This means the upload already applies the changes and '
            'then reverts back to the original state. THERE MIGHT BE DRAGONS!')

        if commit_message is None:
            commit_message = datetime.datetime.now().isoformat()

        # TODO: Make sure no changes are done to the file system while uploading, which is bad design ;-)
        for local_path, remote_path in self.upload_paths:
            self._ensure_path_exists(remote_path)
            git_bin = self.git_bin()

            with self._cd(remote_path):
                current_release_sha = None
                if not self._exists(self._path_join(remote_path, '.git')):
                    self._run('{git_bin} init .'.format(git_bin=git_bin))
                else:
                    current_release_sha = self._run(
                        '{git_bin} rev-parse HEAD'.format(git_bin=git_bin))
                self._run('{git_bin} checkout master'.format(git_bin=git_bin))
                self._rsync_upload(local_path, remote_path, exclude=('.git', ))
                self._run('{git_bin} add -A'.format(git_bin=git_bin))
                self._run(
                    '{git_bin} ls-files --deleted -z | xargs -r -0 git rm'.
                    format(git_bin=git_bin))
                with fab.settings(warn_only=True):
                    self._run(
                        '{git_bin} commit --allow-empty -m {commit_message}'.
                        format(
                            git_bin=git_bin,
                            commit_message=shell_quote(commit_message),
                        ))
                self._run('{git_bin} tag {release_id}'.format(
                    git_bin=git_bin,
                    release_id=shell_quote(self._release_tag(release_id)),
                ))
                if current_release_sha:  # rollback to previous release
                    self._run('{git_bin} checkout {sha}'.format(
                        git_bin=git_bin,
                        sha=shell_quote(current_release_sha),
                    ))
Example #15
 def get_compilation_commands(self,
                              source_filenames, executable_filename,
                              for_evaluation=True):
     """See Language.get_compilation_commands."""
     compile_command = ["/usr/bin/javac"] + source_filenames
     # We need to let the shell expand *.class as javac creates
     # a class file for each inner class.
     if JavaJDK.USE_JAR:
         jar_command = ["/bin/sh", "-c",
                        " ".join(["jar", "cf",
                                  shell_quote(executable_filename),
                                  "*.class"])]
         return [compile_command, jar_command]
     else:
         zip_command = ["/bin/sh", "-c",
                        " ".join(["zip", "-r", "-", "*.class", ">",
                                  shell_quote(executable_filename)])]
         return [compile_command, zip_command]
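
Only the jar name is quoted inside the "/bin/sh -c" string so that a filename with spaces survives as a single argument while "*.class" is still expanded by the shell. A small illustration, assuming shell_quote is shlex.quote and using a made-up filename:

from shlex import quote as shell_quote

executable_filename = "my contest.jar"   # hypothetical name containing a space
print(" ".join(["jar", "cf", shell_quote(executable_filename), "*.class"]))
# jar cf 'my contest.jar' *.class
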
Example #16
def parse_cflags(raw_cflags):
    raw_args = shlex.split(raw_cflags.strip())
    args, unknown = _cflag_parser.parse_known_args(raw_args)
    config = {k: v or [] for k, v in args.__dict__.items()}
    for i, x in enumerate(config['define_macros']):
        parts = x.split('=', 1)
        value = parts[1] or None if len(parts) == 2 else None
        config['define_macros'][i] = (parts[0], value)
    return config, ' '.join(shell_quote(x) for x in unknown)
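
The _cflag_parser object is not part of the snippet; a minimal stand-in built with argparse is enough to exercise the function. The handled options (-I, -D, -L) are an assumption here, not the project's actual definition:

import argparse
import shlex

shell_quote = shlex.quote   # assumed binding

_cflag_parser = argparse.ArgumentParser(add_help=False)
_cflag_parser.add_argument('-I', dest='include_dirs', action='append')
_cflag_parser.add_argument('-D', dest='define_macros', action='append')
_cflag_parser.add_argument('-L', dest='library_dirs', action='append')

# With the parse_cflags defined above:
#   config, extra = parse_cflags('-DFOO=1 -Ideps/include -pthread')
#   config['define_macros'] -> [('FOO', '1')]
#   config['include_dirs']  -> ['deps/include']
#   extra                   -> '-pthread'
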
Example #17
 def remote_notify(self, msg):
     """Display a message remotely."""
     try:
         notify_cmd = vim.vars['skuld_notify_cmd']
     except KeyError:
         notify_cmd = None
     if notify_cmd is not None and len(notify_cmd) > 0:
         os.system(notify_cmd + ' ' + shell_quote(msg))
     else:
         remote_cmd = ("<c-\\><c-n>"
                       + ":echohl WarningMsg | echo ''{0}''"
                       + " | echohl None "
                       + " | call foreground() "
                       + " | SkuldBufUpdate<cr>").format(msg)
         vim_cmd = "call remote_send('{0}', '{1}')".format(
             self._vim_server_name, remote_cmd)
         sys_cmd = "gvim --cmd {0} --cmd qa".format(shell_quote(vim_cmd))
         os.system(sys_cmd)
Example #18
def parse_cflags(raw_cflags):
    raw_args = shlex.split(raw_cflags.strip())
    args, unknown = _cflag_parser.parse_known_args(raw_args)
    config = {k: v or [] for k, v in args.__dict__.items()}
    for i, x in enumerate(config['define_macros']):
        parts = x.split('=', 1)
        value = parts[1] or None if len(parts) == 2 else None
        config['define_macros'][i] = (parts[0], value)
    return config, ' '.join(shell_quote(x) for x in unknown)
Example #19
 def get_compilation_commands(self,
                              source_filenames, executable_filename,
                              for_evaluation=True):
     """See Language.get_compilation_commands."""
     compile_command = ["/usr/bin/javac"] + source_filenames
     # We need to let the shell expand *.class as javac creates
     # a class file for each inner class.
     if JavaJDK.USE_JAR:
         jar_command = ["/bin/sh", "-c",
                        " ".join(["jar", "cf",
                                  shell_quote(executable_filename),
                                  "*.class"])]
         return [compile_command, jar_command]
     else:
         zip_command = ["/bin/sh", "-c",
                        " ".join(["zip", "-r", "-", "*.class", ">",
                                  shell_quote(executable_filename)])]
         return [compile_command, zip_command]
Example #20
def _test_fuzz(workdir, target, dumb=False):
    require_commands('py-afl-fuzz', 'afl-fuzz')
    input_dir = workdir + '/in'
    output_dir = workdir + '/out'
    os.mkdir(input_dir)
    os.mkdir(output_dir)
    with open(input_dir + '/in', 'w') as file:
        file.write('0')
    crash_dir = output_dir + '/crashes'
    queue_dir = output_dir + '/queue'
    have_crash = False
    have_paths = False
    n_paths = 0
    with open('/dev/null', 'wb') as devnull:
        with open(workdir + '/stdout', 'wb') as stdout:
            cmdline = [
                'py-afl-fuzz', '-i', input_dir, '-o', output_dir, '--',
                sys.executable, target, token
            ]
            if dumb:
                cmdline[1:1] = ['-n']
            print('$ ' + ' '.join(shell_quote(arg) for arg in cmdline))
            afl = ipc.Popen(
                cmdline,
                stdout=stdout,
                stdin=devnull,
                preexec_fn=clean_environ,
            )
    try:
        timeout = 10
        while timeout > 0:
            if afl.poll() is not None:
                break
            have_crash = len(glob.glob(crash_dir + '/id:*')) >= 1
            n_paths = len(glob.glob(queue_dir + '/id:*'))
            have_paths = (n_paths == 1) if dumb else (n_paths >= 2)
            if have_crash and have_paths:
                break
            timeout -= sleep(0.1)
        if afl.returncode is None:
            afl.terminate()
            afl.wait()
    except:
        afl.kill()
        raise
    with open(workdir + '/stdout', 'rb') as file:
        stdout = file.read()
        if str is not bytes:
            stdout = stdout.decode('ASCII', 'replace')
        print(stdout)
    if not have_crash and '/proc/sys/kernel/core_pattern' in stdout:
        check_core_pattern()
    assert_true(have_crash, "target program didn't crash")
    assert_true(have_paths,
                'target program produced {n} distinct paths'.format(n=n_paths))
Example #21
    async def _shell_job(self, event, task_id):
        maskname = event.maskname.split("|", 1)[0]
        if hasattr(event, "src_pathname"):
            src_pathname = event.src_pathname
        else:
            src_pathname = ""

        cmd = self._cmd.replace("{maskname}", shell_quote(maskname)).replace(
            "{pathname}",
            shell_quote(event.pathname)).replace("{src_pathname}",
                                                 shell_quote(src_pathname))

        logger = SchedulerLogger(self._log, {"event": event, "id": task_id})

        logger.info(f"execute shell command, cmd={cmd}")
        try:
            proc = await asyncio.create_subprocess_shell(cmd, loop=self._loop)
            await proc.communicate()
        except Exception as e:
            logger.error(e)
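
The placeholder substitution is what keeps hostile path names from being interpreted by the shell. A standalone sketch of the same idea, assuming shell_quote is shlex.quote and using a made-up command template:

import shlex

cmd_template = "notify-send {maskname} {pathname}"   # hypothetical self._cmd
pathname = "/tmp/x; rm -rf ~"                        # hostile file name
cmd = (cmd_template
       .replace("{maskname}", shlex.quote("IN_CREATE"))
       .replace("{pathname}", shlex.quote(pathname)))
print(cmd)  # notify-send IN_CREATE '/tmp/x; rm -rf ~'
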
Example #22
    def upload(self, release_id, commit_message=None):
        import datetime

        warnings.warn(
            'The GitUploader has no way to follow the normal upload schema where changes to the files are '
            'only done when calling switch_release. This means the upload already applies the changes and '
            'then reverts back to the original state. THERE MIGHT BE DRAGONS!'
        )

        if commit_message is None:
            commit_message = datetime.datetime.now().isoformat()

        # TODO: Make sure no changes are done to the file system while uploading, which is bad design ;-)
        for local_path, remote_path in self.upload_paths:
            self._ensure_path_exists(remote_path)
            git_bin = self.git_bin()

            with self._cd(remote_path):
                current_release_sha = None
                if not self._exists(self._path_join(remote_path, '.git')):
                    self._run('{git_bin} init .'.format(git_bin=git_bin))
                else:
                    current_release_sha = self._run('{git_bin} rev-parse HEAD'.format(git_bin=git_bin))
                self._run('{git_bin} checkout master'.format(git_bin=git_bin))
                self._rsync_upload(local_path, remote_path, exclude=('.git',))
                self._run('{git_bin} add -A'.format(git_bin=git_bin))
                self._run('{git_bin} ls-files --deleted -z | xargs -r -0 git rm'.format(git_bin=git_bin))
                with fab.settings(warn_only=True):
                    self._run('{git_bin} commit --allow-empty -m {commit_message}'.format(
                        git_bin=git_bin,
                        commit_message=shell_quote(commit_message),
                    ))
                self._run('{git_bin} tag {release_id}'.format(
                    git_bin=git_bin,
                    release_id=shell_quote(self._release_tag(release_id)),
                ))
                if current_release_sha:  # rollback to previous release
                    self._run('{git_bin} checkout {sha}'.format(
                        git_bin=git_bin,
                        sha=shell_quote(current_release_sha),
                    ))
Example #23
def read_shell_config(config_path=None, whitelisted_keys=[]):
    commands = []
    if config_path is not None:
        commands.append(". %s" % shell_quote(config_path))
    commands.append("cat /proc/self/environ")
    for key in whitelisted_keys:
        commands.append(
            "test -z \"${KEY+x}\" || printf '%s\\0' \"KEY=$KEY\"".replace(
                "KEY", key))
    env = _read_nullsep_output(["sh", "-c", "; ".join(commands)])
    for key, value in env.items():
        yield key, value
Example #24
    def switch_release(self, release_id):
        upload_storage_path = self._abs_path(self.upload_storage_path)
        upload_storage_current_link = self._path_join(upload_storage_path, self.upload_storage_current_link)

        # make sure all symlinks are working
        for local_path, remote_path in self.upload_paths:
            remote_upload_pathname = self._get_remote_upload_pathname(release_id, remote_path)
            if self._exists(remote_path):
                self._run('{rm_bin} {path}'.format(
                    rm_bin=self.rm_bin(),
                    path=shell_quote(remote_path),
                ))
            self._run('{ln_bin} {ln_args} {current_link} {upload_path}'.format(
                ln_bin=self.ln_bin(),
                ln_args=self.ln_args,
                current_link=shell_quote(self._path_join(upload_storage_current_link, remote_upload_pathname)),
                upload_path=shell_quote(remote_path),
            ))

        # switch to current release
        if self._exists(upload_storage_current_link):
            self._run('{rm_bin} {path}'.format(
                rm_bin=self.rm_bin(),
                path=shell_quote(upload_storage_current_link),
            ))
        self._run('{ln_bin} {ln_args} {release_path} {current_link}'.format(
            ln_bin=self.ln_bin(),
            ln_args=self.ln_args,
            release_path=shell_quote(self._get_upload_release_pathname(release_id)),
            current_link=shell_quote(upload_storage_current_link),
        ))
Example #25
 def zfs_command(self, *args, pre_pipe=None, post_pipe=None):
     assert args[0] == 'zfs', 'Only supported zfs arg for now'
     remote_args = ('sudo', 'zfs') + tuple(args[1:])
     # Make the call into a single argument: "'zfs' 'list' '...'"
     remote_arg = ' '.join(shell_quote(i) for i in remote_args)
     # Put pre_pipe and post_pipe inside the (remote) shell command.
     if pre_pipe:
         remote_arg = '{} | {}'.format(pre_pipe, remote_arg)
     if post_pipe:
         remote_arg = '{} | {}'.format(remote_arg, post_pipe)
     # Prepend remote args: ('ssh', 'user@host', "'zfs' 'list' '...'")
     args = self._run_remote_args + (remote_arg,)
     return ZfsCommand(args, pre_pipe=None, post_pipe=None)
Example #26
    def upload(self, release_id):
        from fabric.contrib import files
        import posixpath

        upload_storage_path = self._abs_path(self.upload_storage_path)
        self._ensure_path_exists(upload_storage_path)
        upload_release_path = self._get_upload_release_path(release_id)
        self._ensure_path_exists(upload_release_path)

        upload_storage_current_link = self._path_join(
            upload_storage_path, self.upload_storage_current_link)

        for local_path, remote_path in self.upload_paths:
            if files.exists(remote_path) and not files.is_link(remote_path):
                raise RuntimeError(
                    'Remote path already exists, but is no symlink (%s)' %
                    remote_path)

            remote_upload_path = self._get_remote_upload_path(
                release_id, remote_path)

            if self._exists(upload_storage_current_link
                            ) and not self._exists(remote_upload_path):
                remote_upload_pathname = self._get_remote_upload_pathname(
                    release_id, remote_path)
                active_remote_upload_path = self._path_join(
                    upload_storage_current_link, remote_upload_pathname)
                cp_active_remote_upload_path = active_remote_upload_path.rstrip(
                    posixpath.sep) + posixpath.sep
                cp_remote_upload_path = remote_upload_path.rstrip(
                    posixpath.sep) + posixpath.sep
                self._run('{cp_bin} {cp_args} {from_path} {to_path}'.format(
                    cp_bin=self.cp_bin(),
                    cp_args=self.cp_args,
                    from_path=shell_quote(cp_active_remote_upload_path),
                    to_path=shell_quote(cp_remote_upload_path),
                ))

            self._rsync_upload(local_path, remote_upload_path)
Example #27
def _test_fuzz(workdir, target, dumb=False):
    require_commands('py-afl-fuzz', 'afl-fuzz')
    input_dir = workdir + '/in'
    output_dir = workdir + '/out'
    os.mkdir(input_dir)
    os.mkdir(output_dir)
    with open(input_dir + '/in', 'w') as file:
        file.write('0')
    crash_dir = output_dir + '/crashes'
    queue_dir = output_dir + '/queue'
    have_crash = False
    have_paths = False
    n_paths = 0
    with open('/dev/null', 'wb') as devnull:
        with open(workdir + '/stdout', 'wb') as stdout:
            cmdline = ['py-afl-fuzz', '-i', input_dir, '-o', output_dir, '--', sys.executable, target, token]
            if dumb:
                cmdline[1:1] = ['-n']
            print('$ ' + ' '.join(shell_quote(arg) for arg in cmdline))
            afl = ipc.Popen(
                cmdline,
                stdout=stdout,
                stdin=devnull,
                preexec_fn=clean_environ,
            )
    try:
        timeout = 10
        while timeout > 0:
            if afl.poll() is not None:
                break
            have_crash = len(glob.glob(crash_dir + '/id:*')) >= 1
            n_paths = len(glob.glob(queue_dir + '/id:*'))
            have_paths = (n_paths == 1) if dumb else (n_paths >= 2)
            if have_crash and have_paths:
                break
            timeout -= sleep(0.1)
        if afl.returncode is None:
            afl.terminate()
            afl.wait()
    except:
        afl.kill()
        raise
    with open(workdir + '/stdout', 'rb') as file:
        stdout = file.read()
        if str is not bytes:
            stdout = stdout.decode('ASCII', 'replace')
        print(stdout)
    if not have_crash and '/proc/sys/kernel/core_pattern' in stdout:
        check_core_pattern()
    assert_true(have_crash, "target program didn't crash")
    assert_true(have_paths, 'target program produced {n} distinct paths'.format(n=n_paths))
Example #28
    def test_repo_accessible_through_proxy(self):
        PORT = '4000'

        proxy = shell_quote(self.PROXY_PATH)
        key = shell_quote(self.SSH_KEY)

        self.env['PROXY_CMD'] = '{} {} -i {} --no-host-key-check relay %p %h %r'.format(PYTHON, proxy, key)
        self.env['GIT_SSH'] = os.path.join(self.ROOT_DIR, 'tests', 'git_ssh_proxy.sh')
        self.env['PYTHONPATH'] = self.ROOT_DIR
        self.env['PORT'] = PORT

        server = None
        try:
            # run the server
            server_cmd = os.path.join(self.ROOT_DIR, 'bin', 'simple-ssh-server.py')
            server = subprocess.Popen([PYTHON, server_cmd, PORT], env=self.env)
            # wait a second
            time.sleep(1)
            self.assertIsNone( server.poll() )

            self.check_git_clone(self.env)
        finally:
            if server:
                server.kill()
Example #29
def argsjoin(cmd):
    """
    Return cmd-tuple as a quoted string, safe to pass to a shell.
    """
    def is_safe(arg):
        return all(i in ('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
                         '0123456789_=-+.,/~') for i in arg)

    args = []
    for arg in cmd:
        if is_safe(arg):
            args.append(arg)
        else:
            args.append(shell_quote(arg))
    return ' '.join(args)
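
Usage sketch, run against the argsjoin above with shell_quote bound to shlex.quote (an assumption): obviously safe arguments stay untouched, everything else gets quoted.

import shlex
shell_quote = shlex.quote

print(argsjoin(('rsync', '-av', '/src dir/', 'host:/dst')))
# rsync -av '/src dir/' host:/dst
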
Example #30
    def shell(self, cmd, *args):
        """Call to os.system in 'safe mode'
           Replaced by subprocess.call so we can ensure use of BASH. Using DASH breaks everything.
        """
        full_cmd = "set -euo pipefail ; "
        if args:
            full_cmd += cmd.format(*[shell_quote(a) for a in args])
        else:
            # Assume that any curlies are bash expansions
            full_cmd += cmd
        status = subprocess.call(full_cmd, shell=True, executable="/bin/bash")
        if status:
            raise ChildProcessError("Exit status was %s running command:\n%s" %
                                    (status, cmd))

        return status
Example #31
def main() -> None:
    """Main entry point for the simple compiler test harness."""

    if 'SC' in environ:
        cmd = "{} --sc {}".format(argv[0], shell_quote(environ['SC']))
        warn("The SC environment variable is deprecated. Use: {}".format(cmd),
             DeprecationWarning)

        argv[1:1] = ['--sc', environ['SC']]

    args = _get_args()

    test_runner = TextTestRunner(verbosity=args.verbosity)
    test_suite = TestSuite([p(**args.__dict__) for p in args.phases])

    test_runner.run(test_suite)
Example #32
def init_docker_host(context, docker):

    # install ovs ; do in base image Dockerfile

    # start ovs server
    cmd = "/usr/local/share/openvswitch/scripts/ovs-ctl start --system-id=random"
    call_in_docker(docker, cmd)

    # copy wheel file
    vlcp_wheel = "*.whl"

    if "vlcp" in context.config.userdata:
        vlcp_wheel = context.config.userdata["vlcp"]

    cmd = "docker cp %s %s:/opt" % (vlcp_wheel, docker)
    subprocess.check_call(cmd, shell=True)

    # install vlcp
    cmd = "/opt/pip install --upgrade /opt/%s" % vlcp_wheel
    c = "docker exec %s bash -c %s" % (docker, shell_quote(cmd))
    subprocess.check_output(c, shell=True)

    if "coverage" in context.config.userdata:
        cmd = "docker cp %s %s:/opt" % ("coverage.conf", docker)
        subprocess.check_call(cmd, shell=True)

        cmd = "sed -i 's~/opt/python~/opt/coverage run --rcfile=/opt/coverage.conf~g' %s" % "supervisord.conf"
        subprocess.check_call(cmd, shell=True)
    else:
        cmd = "sed -i 's~/opt/coverage run --rcfile=/opt/coverage.conf~/opt/python~g' %s" % "supervisord.conf"
        subprocess.check_call(cmd, shell=True)

    # copy supervisor conf to host
    cmd = "docker cp %s %s:/etc" % ("supervisord.conf",docker)
    subprocess.check_call(cmd, shell=True)

    # start supervisord
    cmd = "supervisord -c /etc/supervisord.conf"
    call_in_docker(docker, cmd)

    # add ovs bridge br0
    cmd = "ovs-vsctl add-br br0"
    call_in_docker(docker, cmd)

    # set br0 controller to 127.0.0.1
    cmd = "ovs-vsctl set-controller br0 tcp:127.0.0.1"
    call_in_docker(docker, cmd)
Example #33
    def lines():
        line = ''
        for a in (shell_quote(a) for a in args):
            # If adding this argument will make the line too long,
            # yield the current line, and start a new one.
            if len(line) + len(a) + 1 > maxwidth:
                yield line
                line = ''

            # Append this argument to the current line, separating
            # it by a space from the existing arguments.
            if line:
                line += ' ' + a
            else:
                line = a

        yield line
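
lines() is a closure over args and maxwidth from its enclosing function, which the snippet does not show. Wrapped as a standalone helper (a sketch; everything except the loop body is an assumption), it could look like this:

from shlex import quote as shell_quote

def wrap_command(args, maxwidth=72):
    """Yield shell-quoted arguments packed into lines of at most maxwidth chars."""
    line = ''
    for a in (shell_quote(a) for a in args):
        # Flush the current line if adding this argument would exceed maxwidth.
        if len(line) + len(a) + 1 > maxwidth:
            yield line
            line = ''
        line = line + ' ' + a if line else a
    yield line

print('\n'.join(wrap_command(['tar', 'czf', 'backup.tar.gz', 'My Documents'], maxwidth=20)))
# tar czf
# backup.tar.gz
# 'My Documents'
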
Example #34
    def lines():
        line = ''
        for a in (shell_quote(a) for a in args):
            # If adding this argument will make the line too long,
            # yield the current line, and start a new one.
            if len(line) + len(a) + 1 > maxwidth:
                yield line
                line = ''

            # Append this argument to the current line, separating
            # it by a space from the existing arguments.
            if line:
                line += ' ' + a
            else:
                line = a

        yield line
Example #35
def execute_subprocess(args, log=None, failure=None, **kwargs):
    """Run `args`, handling logging and failures.

    :param args: Program argument array.
    :param log: An optional logger.
    :param failure: Raise `failure` as an exception if the command returns a
        nonzero value.  If omitted, nonzero return values are ignored.
    :param kwargs: Other keyword arguments passed on to `subprocess.call`.
    """
    if log is not None:
        log.debug("Executing: %s", " ".join(shell_quote(arg) for arg in args))
    retval = subprocess.call(args, **kwargs)
    if retval != 0:
        if log is not None:
            log.debug("Command returned %d.", retval)
        if failure is not None:
            if log is not None:
                log.debug("Command failed: %s", failure)
            raise failure
Example #36
def shell(prog, *args, **kwargs):
    """Runs a shell command.

    Positional arguments are used as command arguments.
    Keyword arguments are used as flags.

    :return: Output of the command (stdout)
    """
    cmd_parts = [prog]
    cmd_parts.extend(map(shell_quote, args))
    for flag, value in kwargs.iteritems():
        prefix = '-' if len(flag) == 1 else '--'
        cmd_parts.append((prefix + flag.replace('_', '-')))
        if value and value is not True:
            cmd_parts.append(shell_quote(value))

    cmd = ' '.join(cmd_parts)
    process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    out, _ = process.communicate()
    return out
Example #37
    def prettyformat_shell_command(self, executable, args=None, kwargs=None, _cwd=None):
        output = [executable]
        if args:
            output.extend([shell_quote(arg) for arg in args])
        if kwargs:
            for key, value in kwargs.items():
                if value:
                    if len(key) == 1:
                        prefix = '-'
                    else:
                        prefix = '--'
                    if value is True:
                        formatted_value = ''
                    else:
                        formatted_value = '={}'.format(value)
                    output.append('{prefix}{key}{formatted_value}'.format(
                        prefix=prefix,
                        key=key,
                        formatted_value=formatted_value
                    ))
        if _cwd:
            output.insert(0, '[CWD={}]'.format(_cwd))

        return ' '.join(output)
Example #38
    def switch_release(self, release_id):
        upload_storage_path = self._abs_path(self.upload_storage_path)
        upload_storage_current_link = self._path_join(
            upload_storage_path, self.upload_storage_current_link)

        # make sure all symlinks are working
        for local_path, remote_path in self.upload_paths:
            remote_upload_pathname = self._get_remote_upload_pathname(
                release_id, remote_path)
            if self._exists(remote_path):
                self._run('{rm_bin} {path}'.format(
                    rm_bin=self.rm_bin(),
                    path=shell_quote(remote_path),
                ))
            self._run('{ln_bin} {ln_args} {current_link} {upload_path}'.format(
                ln_bin=self.ln_bin(),
                ln_args=self.ln_args,
                current_link=shell_quote(
                    self._path_join(upload_storage_current_link,
                                    remote_upload_pathname)),
                upload_path=shell_quote(remote_path),
            ))

        # switch to current release
        if self._exists(upload_storage_current_link):
            self._run('{rm_bin} {path}'.format(
                rm_bin=self.rm_bin(),
                path=shell_quote(upload_storage_current_link),
            ))
        self._run('{ln_bin} {ln_args} {release_path} {current_link}'.format(
            ln_bin=self.ln_bin(),
            ln_args=self.ln_args,
            release_path=shell_quote(
                self._get_upload_release_pathname(release_id)),
            current_link=shell_quote(upload_storage_current_link),
        ))
Example #39
def WriteGenericNinja(path,
                      static_libraries,
                      executables,
                      cxx,
                      ar,
                      ld,
                      platform,
                      host,
                      options,
                      cflags=[],
                      ldflags=[],
                      libflags=[],
                      include_dirs=[],
                      solibs=[]):
    args = ' -d' if options.debug else ''
    for link_lib in options.link_libs:
        args += ' --link-lib=' + shell_quote(link_lib)

    ninja_header_lines = [
        'cxx = ' + cxx,
        'ar = ' + ar,
        'ld = ' + ld,
        '',
        'rule regen',
        '  command = %s ../build/gen.py%s' % (sys.executable, args),
        '  description = Regenerating ninja files',
        '',
        'build build.ninja: regen',
        '  generator = 1',
        '  depfile = build.ninja.d',
        '',
    ]

    template_filename = os.path.join(
        SCRIPT_DIR, {
            'msvc': 'build_win.ninja.template',
            'mingw': 'build_linux.ninja.template',
            'msys': 'build_linux.ninja.template',
            'darwin': 'build_mac.ninja.template',
            'linux': 'build_linux.ninja.template',
            'freebsd': 'build_linux.ninja.template',
            'aix': 'build_aix.ninja.template',
            'openbsd': 'build_openbsd.ninja.template',
            'haiku': 'build_haiku.ninja.template',
            'solaris': 'build_linux.ninja.template',
            'netbsd': 'build_linux.ninja.template',
        }[platform.platform()])

    with open(template_filename) as f:
        ninja_template = f.read()

    if platform.is_windows():
        executable_ext = '.exe'
        library_ext = '.lib'
        object_ext = '.obj'
    else:
        executable_ext = ''
        library_ext = '.a'
        object_ext = '.o'

    def escape_path_ninja(path):
        return path.replace('$ ', '$$ ').replace(' ', '$ ').replace(':', '$:')

    def src_to_obj(path):
        return escape_path_ninja('%s' % os.path.splitext(path)[0] + object_ext)

    def library_to_a(library):
        return '%s%s' % (library, library_ext)

    ninja_lines = []

    def build_source(src_file, settings):
        ninja_lines.extend([
            'build %s: cxx %s' %
            (src_to_obj(src_file),
             escape_path_ninja(
                 os.path.relpath(os.path.join(REPO_ROOT, src_file),
                                 os.path.dirname(path)))),
            '  includes = %s' % ' '.join([
                '-I' + escape_path_ninja(dirname) for dirname in include_dirs
            ]),
            '  cflags = %s' % ' '.join(cflags),
        ])

    for library, settings in static_libraries.items():
        for src_file in settings['sources']:
            build_source(src_file, settings)

        ninja_lines.append(
            'build %s: alink_thin %s' % (library_to_a(library), ' '.join(
                [src_to_obj(src_file) for src_file in settings['sources']])))
        ninja_lines.append('  libflags = %s' % ' '.join(libflags))

    for executable, settings in executables.items():
        for src_file in settings['sources']:
            build_source(src_file, settings)

        ninja_lines.extend([
            'build %s%s: link %s | %s' % (executable, executable_ext, ' '.join(
                [src_to_obj(src_file)
                 for src_file in settings['sources']]), ' '.join(
                     [library_to_a(library) for library in settings['libs']])),
            '  ldflags = %s' % ' '.join(ldflags),
            '  solibs = %s' % ' '.join(solibs),
            '  libs = %s' %
            ' '.join([library_to_a(library) for library in settings['libs']]),
        ])

    ninja_lines.append('')  # Make sure the file ends with a newline.

    with open(path, 'w') as f:
        f.write('\n'.join(ninja_header_lines))
        f.write(ninja_template)
        f.write('\n'.join(ninja_lines))

    with open(path + '.d', 'w') as f:
        f.write('build.ninja: ' + os.path.relpath(
            os.path.join(SCRIPT_DIR, 'gen.py'), os.path.dirname(path)) + ' ' +
                os.path.relpath(template_filename, os.path.dirname(path)) +
                '\n')
Example #40
    def input(self, **kwargs):

        encoding = self.default_encoding
        options = dict(self.options)

        if self.infile is None and "{infile}" in self.command:
            # create temporary input file if needed
            if self.filename is None:
                self.infile = NamedTemporaryFile(mode='wb')
                self.infile.write(self.content.encode(encoding))
                self.infile.flush()
                options["infile"] = self.infile.name
            else:
                # we use source file directly, which may be encoded using
                # something different than utf8. If that's the case file will
                # be included with charset="something" html attribute and
                # charset will be available as filter's charset attribute
                encoding = self.charset  # or self.default_encoding
                self.infile = open(self.filename)
                options["infile"] = self.filename

        if "{outfile}" in self.command and "outfile" not in options:
            # create temporary output file if needed
            ext = self.type and ".%s" % self.type or ""
            self.outfile = NamedTemporaryFile(mode='r+', suffix=ext)
            options["outfile"] = self.outfile.name

        # Quote infile and outfile for spaces etc.
        if "infile" in options:
            options["infile"] = shell_quote(options["infile"])
        if "outfile" in options:
            options["outfile"] = shell_quote(options["outfile"])

        try:
            command = self.command.format(**options)
            proc = subprocess.Popen(
                command, shell=True, cwd=self.cwd, stdout=self.stdout,
                stdin=self.stdin, stderr=self.stderr)
            if self.infile is None:
                # if infile is None then send content to process' stdin
                filtered, err = proc.communicate(
                    self.content.encode(encoding))
            else:
                filtered, err = proc.communicate()
            filtered, err = filtered.decode(encoding), err.decode(encoding)
        except (IOError, OSError) as e:
            raise FilterError('Unable to apply %s (%r): %s' %
                              (self.__class__.__name__, self.command, e))
        else:
            if proc.wait() != 0:
                # command failed, raise FilterError exception
                if not err:
                    err = ('Unable to apply %s (%s)' %
                           (self.__class__.__name__, self.command))
                    if filtered:
                        err += '\n%s' % filtered
                raise FilterError(err)

            if self.verbose:
                self.logger.debug(err)

            outfile_path = options.get('outfile')
            if outfile_path:
                with io.open(outfile_path, 'r', encoding=encoding) as file:
                    filtered = file.read()
        finally:
            if self.infile is not None:
                self.infile.close()
            if self.outfile is not None:
                self.outfile.close()
        return smart_text(filtered)
Example #41
def cmd_systemd(args):
    conf = load_conf(args)

    template = textwrap.dedent("""\
        [Unit]
        Description=Fishnet instance
        After=network.target

        [Service]
        User={user}
        Group={group}
        WorkingDirectory={cwd}
        Environment=PATH={path}
        ExecStart={start}
        KillSignal=SIGINT
        Restart=always

        [Install]
        WantedBy=multi-user.target""")

    config_file = os.path.abspath(args.conf or DEFAULT_CONFIG)

    # Prepare command line arguments
    builder = [shell_quote(sys.executable), shell_quote(os.path.abspath(sys.argv[0]))]

    if not args.no_conf:
        builder.append("--conf")
        builder.append(shell_quote(os.path.abspath(args.conf or DEFAULT_CONFIG)))
    else:
        builder.append("--no-conf")
        if args.key is not None:
            builder.append("--key")
            builder.append(shell_quote(validate_key(args.key, conf)))
        if args.engine_dir is not None:
            builder.append("--engine-dir")
            builder.append(shell_quote(validate_engine_dir(args.engine_dir)))
        if args.engine_command is not None:
            builder.append("--engine-command")
            builder.append(shell_quote(validate_engine_command(args.engine_command, conf)))
        if args.cores is not None:
            builder.append("--cores")
            builder.append(shell_quote(str(validate_cores(args.cores))))
        if args.memory is not None:
            builder.append("--memory")
            builder.append(shell_quote(str(validate_memory(args.memory, conf))))
        if args.threads is not None:
            builder.append("--threads")
            builder.append(shell_quote(str(validate_threads(args.threads, conf))))
        if args.endpoint is not None:
            builder.append("--endpoint")
            builder.append(shell_quote(validate_endpoint(args.endpoint)))
        if args.fixed_backoff:
            builder.append("--fixed-backoff")
    builder.append("run")

    start = " ".join(builder)

    # Virtualenv support
    if hasattr(sys, "real_prefix"):
        start = "while [ true ]; do %s; ret=$?; if [ $ret -eq 70 ]; then pip download fishnet || sleep 10; pip install --upgrade fishnet || sleep 10; else exit $ret; fi; sleep 5; done" % start
        shell_cmd = "source %s; %s" % (shell_quote(os.path.abspath(os.path.join(sys.prefix, "bin", "activate"))), start)
        start = "/bin/bash -c %s" % shell_quote(shell_cmd)

    print(template.format(
        user=getpass.getuser(),
        group=getpass.getuser(),
        cwd=os.path.abspath("."),
        path=shell_quote(os.environ.get("PATH", "")),
        start=start
    ))

    print(file=sys.stderr)

    if os.geteuid() == 0:
        print("# WARNING: Running as root is not recommended!", file=sys.stderr)
        print(file=sys.stderr)

    if not hasattr(sys, "real_prefix"):
        print("# WARNING: Using a virtualenv (to enable auto update) is recommended!", file=sys.stderr)
        print(file=sys.stderr)

    print("# Example usage:", file=sys.stderr)
    print("# python -m fishnet systemd | sudo tee /etc/systemd/system/fishnet.service", file=sys.stderr)
    print("# sudo systemctl enable fishnet.service", file=sys.stderr)
    print("# sudo systemctl start fishnet.service", file=sys.stderr)
Example #42
    def install_pyenv(self, versions=None, skip_pkg=False):
        """
        We will install pyenv by default. and with normal use lib

        Because pyenv is awesome!!!

        :param root:
        :param skip_pkg: skip install depends package, default is False

        :return:
        """
        remote_os = get_platform()

        if remote_os == 'centos':
            if not skip_pkg:
                run('yum install -y git')
                run("yum install -y gcc gcc-c++ make autoconf certbot"
                    " libffi-devel ncurses-devel expat-devel"
                    " zlib-devel zlib libzip-devel"
                    " bzip2 bzip2-devel bzip2-libs"
                    " mariadb-devel mariadb-libs"
                    " sqlite-devel"
                    " libxml2 libxml2-devel libxslt libxslt-devel"
                    " libcurl-devel"
                    " pcre-devel pcre"
                    " libmcrypt libmcrypt-devel openssl-devel openssl-libs"
                    " libjpeg libjpeg-devel libpng libpng-devel"
                    " freetype freetype-devel "
                    " libtiff-devel lcms2-devel libwebp-devel"
                    " tcl-devel tk-devel"
                    )
            run('export PYENV_ROOT=/usr/local/var/pyenv && '
                'curl -L http://nextoa.com/'
                'bin/pyenv-install.sh  | bash')
            run('ln -sfv /usr/local/var/pyenv/bin/pyenv /usr/local/bin/pyenv')

            pass
        elif remote_os == 'mac':
            with settings(warn_only=True):
                run('brew install git pyenv')
            pass

        run('test -d /usr/local/var/pyenv/plugins/pyenv-virtualenv ||'
            ' git clone https://github.com/yyuu/pyenv-virtualenv.git'
            ' /usr/local/var/pyenv/plugins/pyenv-virtualenv')

        run('export PYENV_ROOT="/usr/local/var/pyenv/" &&'
            ' eval "$(pyenv init -)"')

        if isinstance(versions, list):
            for v in versions:
                run('export PYENV_ROOT="/usr/local/var/pyenv/" &&'
                    ' pyenv install -s %s' % v)
        elif type(versions).__name__ in ['str', 'unicode']:
            run('export PYENV_ROOT="/usr/local/var/pyenv/" &&'
                ' pyenv install -s %s' % versions)
        else:
            run('export PYENV_ROOT="/usr/local/var/pyenv/" && '
                'pyenv install -s %s' % self.system_python_version)

        command_list = [
            """grep "PYENV_ROOT" /etc/profile || \
            echo 'export PYENV_ROOT="/usr/local/var/pyenv/"' \
            >> /etc/profile""",
            """grep "pyenv init" /etc/profile || \
            echo 'eval "$(pyenv init -)"' >> \
            /etc/profile""",
            """grep "pyenv virtualenv" /etc/profile || \
            echo 'eval "$(pyenv virtualenv-init -)"' \
            >> /etc/profile""",
        ]

        if remote_os == 'mac':
            command_list = ["sudo sh -c '%s'" % shell_quote(v) for v in
                            command_list]
            pass

        for cmd in command_list:
            run(cmd)
            pass

        pass
Example #43
    def run(
        self, args, stdin_string=None, env_extend=None, binary_output=False
    ):
        # Allow overriding default settings. If a piece of code really wants to
        # set own PATH or CIB_file, we must allow it. I.e. it wants to run
        # a pacemaker tool on a CIB in a file but cannot afford the risk of
        # changing the CIB in the file specified by the user.
        env_vars = self._env_vars.copy()
        env_vars.update(
            dict(env_extend) if env_extend else dict()
        )

        log_args = " ".join([shell_quote(x) for x in args])
        self._logger.debug(
            "Running: {args}\nEnvironment:{env_vars}{stdin_string}".format(
                args=log_args,
                stdin_string=("" if not stdin_string else (
                    "\n--Debug Input Start--\n{0}\n--Debug Input End--"
                    .format(stdin_string)
                )),
                env_vars=("" if not env_vars else (
                    "\n" + "\n".join([
                        "  {0}={1}".format(key, val)
                        for key, val in sorted(env_vars.items())
                    ])
                ))
            )
        )
        self._reporter.process(
            reports.run_external_process_started(
                log_args, stdin_string, env_vars
            )
        )

        try:
            # pylint: disable=subprocess-popen-preexec-fn
            # this is OK as pcs is only single-threaded application
            process = subprocess.Popen(
                args,
                # Some commands react differently if they get anything via stdin
                stdin=(
                    subprocess.PIPE
                    if stdin_string is not None
                    else subprocess.DEVNULL
                ),
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                preexec_fn=(
                    lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL)
                ),
                close_fds=True,
                shell=False,
                env=env_vars,
                # decodes newlines and in python3 also converts bytes to str
                universal_newlines=(not binary_output)
            )
            out_std, out_err = process.communicate(stdin_string)
            retval = process.returncode
        except OSError as e:
            raise LibraryError(
                reports.run_external_process_error(log_args, e.strerror)
            )

        self._logger.debug(
            (
                "Finished running: {args}\nReturn value: {retval}"
                + "\n--Debug Stdout Start--\n{out_std}\n--Debug Stdout End--"
                + "\n--Debug Stderr Start--\n{out_err}\n--Debug Stderr End--"
            ).format(
                args=log_args,
                retval=retval,
                out_std=out_std,
                out_err=out_err
            )
        )
        self._reporter.process(reports.run_external_process_finished(
            log_args, retval, out_std, out_err
        ))
        return out_std, out_err, retval
Example #44
def shell_join(command):
    return ' '.join([shell_quote(arg) for arg in command])
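
Since Python 3.8 the standard library provides the same behaviour as shlex.join, so this helper mainly matters on older interpreters; a quick check:

import shlex

print(shlex.join(['echo', 'hello world', '$HOME']))
# echo 'hello world' '$HOME'
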
Example #45
def quote(data):
    return shell_quote(data)
Example #46
def convert_rst_to_html_pandoc(input_path_storage=None,
                               include_bootstrap_css=True,
                               include_bootstrap_js=False,
                               include_highlight_js=False,
                               logger=None):
    """Convert Rst to HTML with Pandoc.

    Parameters
    ----------
    input_path_storage : str, optional
        Path to the folder where the .rst files to convert are stored.
    include_bootstrap_css : bool, optional
        Whether to include the Bootstrap CSS stylesheet.
    include_bootstrap_js : bool, optional
        Whether to include the Bootstrap JS script.
    include_highlight_js : bool, optional
        Whether to include the highlight.js JS script.
    logger : LogSystem
        The logger.
    """
    input_path = file_utils.expand_path(input_path_storage) if input_path_storage else \
        os.path.join(PATHS["convertions"], "rst_to_html")

    for dirname, dirnames, filenames in os.walk(input_path, topdown=False):
        for filename in filenames:
            if filename.endswith(".rst") or filename.endswith(".txt"):
                f_path = os.path.join(dirname, filename)
                f_name = os.path.basename(f_path)
                dst_name = os.path.splitext(f_name)[0]
                dst_path = os.path.join(os.path.dirname(f_path),
                                        dst_name + ".html")

                if file_utils.is_real_file(dst_path):
                    continue

                logger.info(shell_utils.get_cli_separator("-"), date=False)
                logger.info("**Converting:**")
                logger.info(f_path, date=False)

                cmd = [
                    "pandoc",
                    shell_quote(f_path),
                    "--output", shell_quote(dst_path),
                    "--template=%s" % shell_quote(PATHS["pandoc_html_template"]),
                    "--wrap=none",
                    "--no-highlight",
                    "--from=rst",
                    "--to=html5"
                ]

                # Honor the include_* parameters instead of always defining
                # every template variable.
                if include_bootstrap_css:
                    cmd.append("--variable=include-bootstrap-css")
                if include_bootstrap_js:
                    cmd.append("--variable=include-bootstrap-js")
                if include_highlight_js:
                    cmd.append("--variable=include-highlight-js")

                try:
                    logger.info(" ".join(cmd), date=False)

                    cmd_utils.run_cmd(" ".join(cmd),
                                      stdout=None,
                                      stderr=None,
                                      cwd=os.path.dirname(dst_path),
                                      shell=True,
                                      check=True)
                except CalledProcessError as err:
                    logger.error(err)
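
A minimal call sketch for the function above (the directory path is invented and LogSystem is assumed to come from the surrounding project; a logger is effectively required because the function logs unconditionally):

convert_rst_to_html_pandoc(
    input_path_storage="~/notes/rst",  # expanded via file_utils.expand_path()
    include_bootstrap_css=True,
    logger=LogSystem(),                # hypothetical constructor call
)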
Example #47
0
    def _restart_services(self):
        """Check if the services that are supposed to run on this
        machine are actually running. If not, start them.

        """
        # To avoid zombies, we poll the processes we launched. In any case,
        # we use the information from psutil to see whether the processes we
        # are interested in are alive (since if the user has already
        # launched another instance, we don't want to duplicate services).
        new_launched_processes = set([])
        for process in self._launched_processes:
            if process.poll() is None:
                new_launched_processes.add(process)
        self._launched_processes = new_launched_processes

        # Look for dead processes, and restart them.
        matcher = ProcessMatcher()
        for service in self._local_services:
            # We let the user start logservice and resourceservice.
            if service.name == "LogService" or \
                    service.name == "ResourceService" or \
                    (self.contest_id is None and
                     service.name == "ProxyService"):
                continue

            # If the user specified not to restart some service, we
            # ignore it.
            if not self._will_restart[service]:
                continue

            # If we don't have a previously found process, or the one we
            # have has terminated, we look for the process.
            proc = self._procs[service]
            if proc is None or not proc.is_running():
                proc = matcher.find(service, self._services_prev_cpu_times)
                self._procs[service] = proc
            # If we still do not find it, there is no process, and we
            # have nothing to do.
            if proc is None or not proc.is_running():
                # We give contest_id even if the service doesn't need
                # it, since it causes no trouble.
                logger.info("Restarting (%s, %s)...", service.name,
                            service.shard)
                command = "cms%s" % service.name
                if not config.installed:
                    command = os.path.join(".", "scripts",
                                           "cms%s" % service.name)
                args = [command, "%d" % service.shard]
                if self.contest_id is not None:
                    args += ["-c", str(self.contest_id)]
                else:
                    args += ["-c", "ALL"]
                try:
                    process = subprocess.Popen(args,
                                               stdout=DEVNULL,
                                               stderr=subprocess.STDOUT)
                except Exception:
                    logger.error("Error for command line %s",
                                 shell_quote(" ".join(args)))
                    raise
                self._launched_processes.add(process)

        # Run forever.
        return True
Example #48
0
"""

import unittest
import os
import shlex
import subprocess
import shutil
import time
import sys

try:
    from shlex import quote as shell_quote
except ImportError:
    from pipes import quote as shell_quote

PYTHON = shell_quote(sys.executable) # make sure to run with same python

class BitBucketTest(unittest.TestCase):

    ROOT_DIR = os.path.abspath(os.path.join(__file__, '..', '..'))
    REPO_DIR = os.path.join(ROOT_DIR, 'tests', 'test-git-repo')
    SSH_KEY = os.path.join(ROOT_DIR, 'tests', 'ssh-forward-proxy-test-key')
    PROXY_PATH = os.path.join(ROOT_DIR, 'bin', 'ssh-forward-proxy.py')

    BITBUCKET = 'bitbucket.org'
    REPO_URL = 'git@bitbucket.org:lincheney/ssh-forward-proxy-test.git'

    GIT_PATH = os.path.join(REPO_DIR, '.git')
    README_PATH = os.path.join(REPO_DIR, 'README')
    README_TEXT = 'This is a test for ssh-forward-proxy.\n'
Example #49
0
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}

NODE_ROOT = join(PROJECT_DIR, '..', 'node_modules')
HANDLEBARS_PATH = join(NODE_ROOT, 'django-ember-precompile', 'bin', 'django-ember-precompile')

# Quote the file path (it may contain spaces or other special characters)
# so the django-compressor precompiler command is not mangled by the shell
HANDLEBARS_PATH = shell_quote(HANDLEBARS_PATH)

COMPRESS_PRECOMPILERS = (
    ('text/x-handlebars', '{} {{infile}}'.format(HANDLEBARS_PATH)),
)

REST_FRAMEWORK = {
    'FILTER_BACKEND': 'rest_framework.filters.DjangoFilterBackend'
}
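
Circling back to the shell_quote(HANDLEBARS_PATH) step above, an illustration with invented paths: a plain path passes through unchanged, while a spaced one is wrapped so the precompiler command survives shell word splitting.

shell_quote('/srv/app/node_modules/django-ember-precompile/bin/django-ember-precompile')
# => /srv/app/node_modules/django-ember-precompile/bin/django-ember-precompile   (unchanged)
shell_quote('/srv/my app/node_modules/django-ember-precompile/bin/django-ember-precompile')
# => '/srv/my app/node_modules/django-ember-precompile/bin/django-ember-precompile'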
Example #50
0
    def input(self, **kwargs):
        encoding = self.default_encoding
        options = dict(self.options)

        if self.infile is None and "{infile}" in self.command:
            # create temporary input file if needed
            if self.filename is None:
                self.infile = NamedTemporaryFile(mode='wb')
                self.infile.write(self.content.encode(encoding))
                self.infile.flush()
                options["infile"] = self.infile.name
            else:
                # we use source file directly, which may be encoded using
                # something different than utf8. If that's the case file will
                # be included with charset="something" html attribute and
                # charset will be available as filter's charset attribute
                encoding = self.charset  # or self.default_encoding
                self.infile = open(self.filename)
                options["infile"] = self.filename

        if "{outfile}" in self.command and "outfile" not in options:
            # create temporary output file if needed
            ext = self.type and ".%s" % self.type or ""
            self.outfile = NamedTemporaryFile(mode='r+', suffix=ext)
            options["outfile"] = self.outfile.name

        # Quote infile and outfile for spaces etc.
        if "infile" in options:
            options["infile"] = shell_quote(options["infile"])
        if "outfile" in options:
            options["outfile"] = shell_quote(options["outfile"])

        try:
            command = self.command.format(**options)
            proc = subprocess.Popen(command,
                                    shell=True,
                                    cwd=self.cwd,
                                    stdout=self.stdout,
                                    stdin=self.stdin,
                                    stderr=self.stderr)
            if self.infile is None:
                # if infile is None then send content to process' stdin
                filtered, err = proc.communicate(self.content.encode(encoding))
            else:
                filtered, err = proc.communicate()
            filtered, err = filtered.decode(encoding), err.decode(encoding)
        except (IOError, OSError) as e:
            raise FilterError('Unable to apply %s (%r): %s' %
                              (self.__class__.__name__, self.command, e))
        else:
            if proc.wait() != 0:
                # command failed, raise FilterError exception
                if not err:
                    err = ('Unable to apply %s (%s)' %
                           (self.__class__.__name__, self.command))
                    if filtered:
                        err += '\n%s' % filtered
                raise FilterError(err)

            if self.verbose:
                self.logger.debug(err)

            outfile_path = options.get('outfile')
            if outfile_path:
                with io.open(outfile_path, 'r', encoding=encoding) as file:
                    filtered = file.read()
        finally:
            if self.infile is not None:
                self.infile.close()
            if self.outfile is not None:
                self.outfile.close()

        return smart_text(filtered)
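
To make the quoting step above concrete, here is a rough sketch of what the str.format() substitution produces for a hypothetical filter whose command template is "lessc {infile} {outfile}" (paths are invented):

options = {
    "infile": shell_quote("/tmp/compressor tmp1.less"),
    "outfile": shell_quote("/tmp/compressor tmp1.css"),
}
"lessc {infile} {outfile}".format(**options)
# => "lessc '/tmp/compressor tmp1.less' '/tmp/compressor tmp1.css'"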
Example #51
0
    def _restart_services(self):
        """Check if the services that are supposed to run on this
        machine are actually running. If not, start them.

        """
        # To avoid zombies, we poll the processes we launched. In any case,
        # we use the information from psutil to see whether the processes we
        # are interested in are alive (since if the user has already
        # launched another instance, we don't want to duplicate services).
        new_launched_processes = set([])
        for process in self._launched_processes:
            if process.poll() is None:
                new_launched_processes.add(process)
        self._launched_processes = new_launched_processes

        # Look for dead processes, and restart them.
        matcher = ProcessMatcher()
        for service in self._local_services:
            # We let the user start logservice and resourceservice.
            if service.name == "LogService" or \
                    service.name == "ResourceService" or \
                    (self.contest_id is None and
                     service.name == "ProxyService"):
                continue

            # If the user specified not to restart some service, we
            # ignore it.
            if not self._will_restart[service]:
                continue

            # If we don't have a previously found process, or the one we
            # have has terminated, we look for the process.
            proc = self._procs[service]
            if proc is None or not proc.is_running():
                proc = matcher.find(service, self._services_prev_cpu_times)
                self._procs[service] = proc
            # If we still do not find it, there is no process, and we
            # have nothing to do.
            if proc is None or not proc.is_running():
                # We give contest_id even if the service doesn't need
                # it, since it causes no trouble.
                logger.info("Restarting (%s, %s)...",
                            service.name, service.shard)
                command = os.path.join(BIN_PATH, "cms%s" % service.name)
                if not config.installed:
                    command = os.path.join(
                        ".",
                        "scripts",
                        "cms%s" % service.name)
                args = [command, "%d" % service.shard]
                if self.contest_id is not None:
                    args += ["-c", str(self.contest_id)]
                else:
                    args += ["-c", "ALL"]
                try:
                    process = subprocess.Popen(args,
                                               stdout=subprocess.DEVNULL,
                                               stderr=subprocess.STDOUT)
                except Exception:
                    logger.error("Error for command line %s",
                                 shell_quote(" ".join(args)))
                    raise
                self._launched_processes.add(process)

        # Run forever.
        return True
Example #52
0
    def run(
        self,
        args: Sequence[str],
        stdin_string: Optional[str] = None,
        env_extend: Optional[Mapping[str, str]] = None,
        binary_output: bool = False,
    ) -> Tuple[str, str, int]:
        # Allow overriding default settings. If a piece of code really wants to
        # set own PATH or CIB_file, we must allow it. I.e. it wants to run
        # a pacemaker tool on a CIB in a file but cannot afford the risk of
        # changing the CIB in the file specified by the user.
        env_vars = dict(self._env_vars)
        env_vars.update(dict(env_extend) if env_extend else {})

        log_args = " ".join([shell_quote(x) for x in args])
        self._logger.debug(
            "Running: {args}\nEnvironment:{env_vars}{stdin_string}".format(
                args=log_args,
                stdin_string=("" if not stdin_string else (
                    "\n--Debug Input Start--\n{0}\n--Debug Input End--"
                ).format(stdin_string)),
                env_vars=("" if not env_vars else ("\n" + "\n".join([
                    "  {0}={1}".format(key, val)
                    for key, val in sorted(env_vars.items())
                ]))),
            ))
        self._reporter.report(
            ReportItem.debug(
                reports.messages.RunExternalProcessStarted(
                    log_args,
                    stdin_string,
                    env_vars,
                )))

        try:
            # pylint: disable=subprocess-popen-preexec-fn, consider-using-with
            # this is OK as pcs is only a single-threaded application
            process = subprocess.Popen(
                args,
                # Some commands react differently if they get anything via stdin
                stdin=(subprocess.PIPE
                       if stdin_string is not None else subprocess.DEVNULL),
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                preexec_fn=(
                    lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL)),
                close_fds=True,
                shell=False,
                env=env_vars,
                # decodes newlines and in python3 also converts bytes to str
                universal_newlines=(not binary_output),
            )
            out_std, out_err = process.communicate(stdin_string)
            retval = process.returncode
        except OSError as e:
            raise LibraryError(
                ReportItem.error(
                    reports.messages.RunExternalProcessError(
                        log_args,
                        e.strerror,
                    ))) from e

        self._logger.debug(
            ("Finished running: {args}\nReturn value: {retval}" +
             "\n--Debug Stdout Start--\n{out_std}\n--Debug Stdout End--" +
             "\n--Debug Stderr Start--\n{out_err}\n--Debug Stderr End--"
             ).format(args=log_args,
                      retval=retval,
                      out_std=out_std,
                      out_err=out_err))
        self._reporter.report(
            ReportItem.debug(
                reports.messages.RunExternalProcessFinished(
                    log_args,
                    retval,
                    out_std,
                    out_err,
                )))
        return out_std, out_err, retval