Example 1
def installer(verbose=False):
    """
    An installer based on Vundle
    :param verbose: If True, print debugging output
    :return:
    """
    #print('Identifying Plugins...')
    root = dirname(realpath(__file__))
    plugin_dir = join(root, 'Plugins')
    if not exists(plugin_dir):  # Check if Plugin Directory Exists
        mkdir(plugin_dir)  # Make it if not
    current_plugins = listdir(plugin_dir)  # List plugins
    with open(join(root, 'config', 'plugins.flora')) as f:
        plugins = f.readlines()  # Get a list of installed plugins
    use_pull = []  # Plugins to use Pull Command
    plugins_new = []  # Plugins to use Clone command
    for x in plugins:  # Split them into plugins_new and use_pull
        if x.startswith('Plugin'):
            x = x[6:].strip().strip('"\'')  # Removes the "Plugin"
            if verbose:
                print('----------------')
                print(current_plugins)
                print(x)
                print('----------------')
            installed = yield from any2hard(current_plugins, x)
            if installed:  # Check if plugin is already installed
                use_pull += [x.rsplit(sep='\\', maxsplit=1)[-1]]  # if its installed, it goes here
            else:
                plugins_new += [x]  # otherwise here
    plugins = plugins_new
    del plugins_new
    del f
    if len(plugins) > 0:
        print('Installing New Plugins')
        for plugin in plugins:
            print('Installing', split(plugin)[-1])
            cmd = 'git clone https://github.com/{}.git'.format(plugin)
            if verbose:
                print('Executing', cmd)
            create = asyncio.create_subprocess_shell(cmd, stdout=asyncio.subprocess.PIPE, cwd=plugin_dir)
            proc = yield from create
            yield from proc.wait()
            print('Installed', split(plugin)[-1])
            if verbose:
                print(proc.stdout.read())
    if len(use_pull) > 0:
        print('Updating old plugins')
        for plugin in use_pull:
            plugin = split(plugin)[-1]
            print('Updating', plugin)
            cmd = 'git pull'
            create = asyncio.create_subprocess_shell(cmd, cwd=join(plugin_dir, plugin))
            proc = yield from create
            yield from proc.wait()
            if verbose:
                print(proc.stdout.read())
Example 2
async def execCmd(cmdDict):
    futures = []
    for item in cmdDict:
        cmd = cmdDict[item]["cmd"]
        futures.append(
            getCmdResults(
                item,
                create_subprocess_shell(
                    cmd,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                ),
            ))

    todo = futures
    while len(todo) > 0:
        done, todo = await wait(todo, return_when=FIRST_COMPLETED)
        for task in done:
            # print(dir(task.result()))
            try:
                item, results = task.result()
            except Exception as e:
                item = e
                results = ["[-] failed: {}".format(e)]
            cmdDict[item]['results'] = results
    printResults(cmdDict)
Example 3
def Popen(args, **kwargs):
    kwargs.setdefault('encoding', 'utf8')
    shell = kwargs.pop('shell', None)
    if shell:
        return asyncio.create_subprocess_shell(args, **kwargs)
    else:
        return asyncio.create_subprocess_exec(*args, **kwargs)
Example 4
def start(cmd, input=None, **kwds):
    kwds['stdout'] = PIPE
    kwds['stderr'] = PIPE
    if input is None and 'stdin' not in kwds:
        kwds['stdin'] = None
    else:
        kwds['stdin'] = PIPE
    proc = yield from asyncio.create_subprocess_shell(cmd, **kwds)

    tasks = []
    if input is not None:
        tasks.append(send_input(proc.stdin, input))
    else:
        print('No stdin')
    if proc.stderr is not None:
        tasks.append(log_errors(proc.stderr))
    else:
        print('No stderr')
    if proc.stdout is not None:
        tasks.append(read_stdout(proc.stdout))
    else:
        print('No stdout')

    if tasks:
        # feed stdin while consuming stdout to avoid hang
        # when stdin pipe is full
        yield from asyncio.wait(tasks)

    exitcode = yield from proc.wait()
    print("exit code: %s" % exitcode)
Example 5
def _run_command(command, cwd, output=True, decode=False, loop=None):
    """
    Run the command and returns a tuple with (stdout, stderr, returncode).

    If output is False, stdout and stderr are None.

    If output is True and decode is True, stdout and stderr are decoded using
    system's default encoding.
    """
    loop = loop or asyncio.get_event_loop()

    if output:
        out = asyncio.subprocess.PIPE
    else:
        out = None

    process = yield from asyncio.create_subprocess_shell(
        command, loop=loop, stdout=out, stderr=out,
        limit=GIT_COMMAND_BUFFER_SIZE, cwd=cwd)

    if output:
        # communicate() also waits on the process termination
        stdout, stderr = yield from process.communicate()
        if decode:
            stdout = stdout.decode(sys.getdefaultencoding())
            stderr = stderr.decode(sys.getdefaultencoding())
    else:
        stdout, stderr = None, None
        yield from process.wait()

    if process.returncode:
        raise base.AiogitException(
            (stderr or stdout).decode(sys.getdefaultencoding()))

    return stdout, stderr
Example 6
    def repo_update(repo):
        print("updating http server info in '%s'..." % repo)
        proc = yield from asyncio.create_subprocess_shell(
            "cd %s && git update-server-info" % repo
        )

        yield from proc.wait()
Example 7
async def run_async():
    timeout = 120
    out = ''
    create = asyncio.create_subprocess_shell('python envGLOB.py',
                                             stdout=asyncio.subprocess.PIPE)
    proc = await create
    try:

        await asyncio.wait_for(child(proc), timeout)
        (stdout, stderr) = await proc.communicate()
        out = stdout.decode()
    except asyncio.TimeoutError:
        out = f'```TimeoutExpired: Your code timed out after {timeout} seconds```'
        proc.terminate()
    except Exception as e:
        out = "Something went wrong"
        print("Unpredicted error:")
        print(e)
    if proc.returncode:  # equivalent of CalledProcessError in synchronous version
        proc = subprocess.Popen("python envGLOB.py",
                                stderr=subprocess.STDOUT,
                                stdout=subprocess.PIPE,
                                shell=True)
        encoded = proc.communicate()[0]
        out = '```' + encoded.decode() + '```'
    return out
Example 8
def Popen(args, **kwargs):
    kwargs.setdefault('encoding', 'utf8')
    shell = kwargs.pop('shell', None)
    if shell:
        return asyncio.create_subprocess_shell(args, **kwargs)
    else:
        return asyncio.create_subprocess_exec(*args, **kwargs)
Example 9
async def asyncserverexec(cmdlist, nice=19, wait=False, _wait=False):
    """Server execute command

    Arguments:
        cmdlist {list} -- Command split into list

    Keyword Arguments:
        nice {number} -- Process nice level (default: {19})
        wait {bool} -- Wait and return response (default: {False})
        _wait {bool} -- Wait until ended (default: {False})
    """
    fullcmdlist = ["/usr/bin/nice", "-n", str(nice)] + cmdlist
    cmdstring = " ".join(fullcmdlist)
    if wait:
        proc = await asyncio.create_subprocess_shell(
            cmdstring, stdout=asyncio.subprocess.PIPE, stderr=None
        )
        stdout, stderr = await proc.communicate()
        return {"returncode": proc.returncode, "stdout": stdout}
    elif _wait:
        proc = await asyncio.create_subprocess_shell(
            cmdstring, stdout=None, stderr=None, shell=True
        )
        await proc.communicate()
    else:
        proc = asyncio.create_subprocess_shell(
            cmdstring, stdout=None, stderr=None, shell=True
        )
        asyncio.create_task(proc)
Example 10
async def a_jq_post_process(fname):
    """
    Use jq command line to reprocess fname (a json) into ...
        - A pretty printed jsonl file for easy reading.
        - A ONE object per line file for parallel processing.
    """
    async with aiofiles.open(fname + 'l', "w") as fout_l:
        async with aiofiles.open(fname + '_per_line', "w") as fout_line:
            procs = await asyncio.gather(
                asyncio.create_subprocess_shell(f'jq . -S {fname}',
                                                stdout=fout_l),
                asyncio.create_subprocess_shell(f'jq -c .[] {fname}',
                                                stdout=fout_line),
            )
            # gather() above only awaits process creation; wait for both
            # jq processes to finish before the output files are closed.
            await asyncio.gather(*(proc.wait() for proc in procs))
    print("Created PRETTY file", fname + 'l')
    print("Created object PER LINE file", fname + '_per_line')
Example 11
def get_output(cmd):
    '''
    Performs async OS commands
    '''
    p = yield from asyncio.create_subprocess_shell(cmd, stdout=PIPE, stderr=PIPE)
    # Output returns in byte string so we decode to utf8
    return (yield from p.communicate())[0].decode('utf8')
Example 12
def start(cmd, input=None, **kwds):
    kwds['stdout'] = PIPE
    kwds['stderr'] = PIPE
    if input is None and 'stdin' not in kwds:
        kwds['stdin'] = None
    else:
        kwds['stdin'] = PIPE
    proc = yield from asyncio.create_subprocess_shell(cmd, **kwds)

    tasks = []
    if input is not None:
        tasks.append(send_input(proc.stdin, input))
    else:
        print('No stdin')
    if proc.stderr is not None:
        tasks.append(log_errors(proc.stderr))
    else:
        print('No stderr')
    if proc.stdout is not None:
        tasks.append(read_stdout(proc.stdout))
    else:
        print('No stdout')

    if tasks:
        # feed stdin while consuming stdout to avoid hang
        # when stdin pipe is full
        yield from asyncio.wait(tasks)

    exitcode = yield from proc.wait()
    print("exit code: %s" % exitcode)
Example 13
 def run_process():
     future = asyncio.create_subprocess_shell(
             CMD, stdin=subprocess.PIPE,
             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     proc = yield from future
     stdout, stderr = yield from proc.communicate(b'something\n')
     return stdout, stderr, proc
Example 14
File: hosts.py Project: 198d/mush
    def exec_command(self, command, **kwargs):
        self.logger.debug("Exec'ing command: %s", command)

        kwargs.setdefault('executable', pwd.getpwuid(os.getuid()).pw_shell)

        return asyncio.create_subprocess_shell(
            command, loop=self.loop, **kwargs)
Example 15
def _stream_subprocess(cmd, cwd, env, shell, stdout_cb, stderr_cb):
    """
        This function starts the subprocess, sets up the output stream
        handlers and waits until the process has exited
    """
    # pylint: disable=too-many-arguments

    if shell:
        # asyncio subprocess pipes are bytes-only (universal_newlines is not supported)
        process = yield from asyncio.create_subprocess_shell(
            cmd,
            env=env,
            cwd=cwd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)
    else:
        process = yield from asyncio.create_subprocess_exec(
            *cmd,
            cwd=cwd,
            env=env,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)

    yield from asyncio.wait([
        _read_stream(process.stdout, stdout_cb),
        _read_stream(process.stderr, stderr_cb)
    ])
    ret = yield from process.wait()
    return ret
Example 16
def update():
    print('Updating Base')
    create = asyncio.create_subprocess_shell('git pull', stdout=asyncio.subprocess.PIPE, cwd=root)
    proc = yield from create
    yield from proc.wait()
    print('Updating Plugins')
    yield from pl.installer()
Example 17
    def repo_update(repo):
        print("updating http server info in '%s'..." % repo)
        proc = yield from asyncio.create_subprocess_shell(
            "cd %s && git update-server-info" % repo
        )

        yield from proc.wait()
Example 18
    def execute(self, subcommand, args, timeout=60):

        # We use a lock prevent parallel execution due to strange errors
        # reported by a user and reproduced by us.
        # https://github.com/GNS3/gns3-gui/issues/261
        with (yield from self._execute_lock):
            vboxmanage_path = self.vboxmanage_path
            if not vboxmanage_path:
                vboxmanage_path = self.find_vboxmanage()
            command = [vboxmanage_path, "--nologo", subcommand]
            command.extend(args)
            log.debug("Executing VBoxManage with command: {}".format(command))
            try:
                vbox_user = self.config.get_section_config("VirtualBox").get("vbox_user")
                if vbox_user:
                    # TODO: test & review this part
                    sudo_command = "sudo -i -u {} ".format(vbox_user) + " ".join(command)
                    process = yield from asyncio.create_subprocess_shell(sudo_command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
                else:
                    process = yield from asyncio.create_subprocess_exec(*command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
            except (OSError, subprocess.SubprocessError) as e:
                raise VirtualBoxError("Could not execute VBoxManage: {}".format(e))

            try:
                stdout_data, stderr_data = yield from asyncio.wait_for(process.communicate(), timeout=timeout)
            except asyncio.TimeoutError:
                raise VirtualBoxError("VBoxManage has timed out after {} seconds!".format(timeout))

            if process.returncode:
                # only the first line of the output is useful
                vboxmanage_error = stderr_data.decode("utf-8", errors="ignore")
                raise VirtualBoxError("VirtualBox has returned an error: {}".format(vboxmanage_error))

            return stdout_data.decode("utf-8", errors="ignore").splitlines()
Example 19
def convert_all(ibasedir, obasedir, fpattern, sep, **copts):
    print("Converting:")
    for base, dirs, files in os.walk(ibasedir):
        for ifile in sorted(fnmatch.filter(files, fpattern)):
            odir, grfile, ofile = get_outputs(obasedir, ifile, sep)
            opath = os.path.join(odir, ofile)
            gpath = os.path.join(base, grfile)
            ipath = os.path.join(base, ifile)
            if os.path.exists(opath):
                print("File {ifi} already exists: {ofi}".format(ifi=ifile,
                                                                ofi=ofile))
            else:
                print("File {ifi}: {ofi}".format(ifi=ifile, ofi=ofile))
                with open(gpath, 'wb') as grbfile, bz2.BZ2File(ipath, 'rb') as file:
                    grbfile.write(file.read())
                grbfile.close()
                file.close()
                cdo = CDO.format(finput=gpath, foutput=opath, **copts)
                print(cdo)
                process = yield from asyncio.create_subprocess_shell(
                    cdo, stdout=subprocess.PIPE, stderr=subprocess.PIPE
                )
                stdout, stderr = yield from process.communicate()
                print(stdout)
                print(stderr)
                os.remove(gpath)

            print("-" * 30)
Example 20
def test_simulation(demand_file, agent_conf, tmpdir, event_loop):
    logging.basicConfig(level=logging.DEBUG)
    mosaik_mock = MosaikMock()

    # Setup and start the fake mosaik environment that performs the test:
    def cb(con):
        print('Starting test ...')
        mosaik_mock.test_task = aiomas.async(
            run_test(con, agent_conf, mosaik_mock, demand_file))

    server_sock = yield from aiomas.rpc.start_server(
        ADDR, mosaik_mock, cb)

    try:
        # Start the MAS :
        mas_proc = yield from asyncio.create_subprocess_shell(
            'openvpp-mosaik -l debug %s:%d' % ADDR)

        print('Waiting for MAS...')
        yield from mas_proc.wait()  # Wait for the MAS
        print('Waiting for fake mosaik...')
        yield from mosaik_mock.test_task  # Wait for the fake mosaik
    finally:
        server_sock.close()
        yield from server_sock.wait_closed()

    results_db = h5py.File(agent_conf['ObserverAgent']['log_dbfile'], 'r')
    expected_db = h5py.File(REFERENCE_DBFILE, 'r')
    special = {
       'dap_data': _compare_dap_data,
       'perf_data': _compare_perf_data,
    }
    _compare_hdf5(results_db, expected_db, special)
Example 21
 def test_start_new_session(self):
     # start the new process in a new session
     create = asyncio.create_subprocess_shell('exit 8',
                                              start_new_session=True)
     proc = self.loop.run_until_complete(create)
     exitcode = self.loop.run_until_complete(proc.wait())
     self.assertEqual(exitcode, 8)
Example 22
async def watch():
    cmd = 'fswatch -Ltux tmp'
    proc = await asyncio.create_subprocess_shell(
        cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
    while True:
        line = await proc.stdout.readline()
        print('STDOUT', line)
        yield line
Example 23
 def run_process():
     future = asyncio.create_subprocess_shell(CMD,
                                              stdin=subprocess.PIPE,
                                              stdout=subprocess.PIPE,
                                              stderr=subprocess.PIPE)
     proc = yield from future
     stdout, stderr = yield from proc.communicate(b'something\n')
     return stdout, stderr, proc
Example 24
 def test_start_new_session(self):
     # start the new process in a new session
     create = asyncio.create_subprocess_shell('exit 8',
                                              start_new_session=True,
                                              loop=self.loop)
     proc = self.loop.run_until_complete(create)
     exitcode = self.loop.run_until_complete(proc.wait())
     self.assertEqual(exitcode, 8)
Example 25
def run_cmd_async(cmd, cwd, env=None, fail=True, shell=False, liveupdate=True):
    """
        Run a command asynchronously.
    """
    # pylint: disable=too-many-arguments

    # Disable incorrect warning on asyncio.wait_for,
    # https://github.com/PyCQA/pylint/issues/996.
    # pylint: disable=not-an-iterable

    env = env or get_context().environ
    cmdstr = cmd
    if not shell:
        cmdstr = ' '.join(cmd)
    logging.info('%s$ %s', cwd, cmdstr)

    logo = LogOutput(liveupdate)

    try:
        if shell:
            # asyncio subprocess pipes are bytes-only (universal_newlines is not supported)
            process = yield from asyncio.create_subprocess_shell(
                cmd,
                env=env,
                cwd=cwd,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE)
        else:
            process = yield from asyncio.create_subprocess_exec(
                *cmd,
                cwd=cwd,
                env=env,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE)
    except FileNotFoundError as ex:
        if fail:
            raise ex
        return (errno.ENOENT, str(ex))
    except PermissionError as ex:
        if fail:
            raise ex
        return (errno.EPERM, str(ex))

    yield from asyncio.wait([
        _read_stream(process.stdout, logo.log_stdout),
        _read_stream(process.stderr, logo.log_stderr)
    ])
    ret = yield from process.wait()

    if ret and fail:
        msg = 'Command "{cwd}$ {cmd}" failed'.format(cwd=cwd, cmd=cmdstr)
        if logo.stderr:
            msg += '\n--- Error summary ---\n'
            for line in logo.stderr:
                msg += line
        logging.error(msg)

    return (ret, ''.join(logo.stdout))
Example 26
async def async_exec(cmd):
    create = asyncio.create_subprocess_shell(cmd,
                                             stdout=asyncio.subprocess.PIPE)
    proc = await create
    try:
        await proc.wait()
    except asyncio.TimeoutError:
        proc.kill()
        await proc.wait()
Example 27
async def archivate2(filenames_str):
    args = 'zip -r - ' + filenames_str
    archive_process = await asyncio.create_subprocess_shell(
        args,
        stdout=asyncio.subprocess.PIPE,
        limit=800)
    # Read the zip stream from stdout in fixed-size chunks instead of
    # iterating over the unawaited coroutine object.
    while True:
        chunk = await archive_process.stdout.read(800)
        if not chunk:
            break
        print(chunk)
    print(type(archive_process))
    await asyncio.sleep(2)
Example 28
def test_subprocess_co(loop):
    assert isinstance(threading.current_thread(), threading._MainThread)
    proc = yield from asyncio.create_subprocess_shell(
        "exit 0",
        loop=loop,
        stdin=asyncio.subprocess.DEVNULL,
        stdout=asyncio.subprocess.DEVNULL,
        stderr=asyncio.subprocess.DEVNULL)
    yield from proc.wait()
Example 29
File: async.py Project: pnsk/peru
def create_subprocess_with_handle(command, display_handle, *, shell=False, cwd,
                                  **kwargs):
    '''Writes subprocess output to a display handle as it comes in, and also
    returns a copy of it as a string. Throws if the subprocess returns an
    error. Note that cwd is a required keyword-only argument, on the theory that
    peru should never start child processes "wherever I happen to be running
    right now."'''

    # We're going to get chunks of bytes from the subprocess, and it's possible
    # that one of those chunks ends in the middle of a unicode character. An
    # incremental decoder keeps those dangling bytes around until the next
    # chunk arrives, so that split characters get decoded properly. Use
    # stdout's encoding, but provide a default for the case where stdout has
    # been redirected to a StringIO. (This happens in tests.)
    encoding = sys.stdout.encoding or 'utf8'
    decoder_factory = codecs.getincrementaldecoder(encoding)
    decoder = decoder_factory(errors='replace')

    output_copy = io.StringIO()

    # Display handles are context managers. Entering and exiting the display
    # handle lets the display know when the job starts and stops.
    with display_handle:
        stdin = asyncio.subprocess.DEVNULL
        stdout = asyncio.subprocess.PIPE
        stderr = asyncio.subprocess.STDOUT
        if shell:
            proc = yield from asyncio.create_subprocess_shell(
                command, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd,
                **kwargs)
        else:
            proc = yield from asyncio.create_subprocess_exec(
                *command, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd,
                **kwargs)

        # Read all the output from the subprocess as it comes in.
        while True:
            outputbytes = yield from proc.stdout.read(4096)
            if not outputbytes:
                break
            outputstr = decoder.decode(outputbytes)
            outputstr_unified = _unify_newlines(outputstr)
            display_handle.write(outputstr_unified)
            output_copy.write(outputstr_unified)

        returncode = yield from proc.wait()

    if returncode != 0:
        raise subprocess.CalledProcessError(
            returncode, command, output_copy.getvalue())

    if hasattr(decoder, 'buffer'):
        # The utf8 decoder has this attribute, but some others don't.
        assert not decoder.buffer, 'decoder nonempty: ' + repr(decoder.buffer)

    return output_copy.getvalue()
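The incremental-decoder comments in Example 29 describe the core trick: a chunk read from the pipe may end in the middle of a multi-byte character. A stripped-down, self-contained sketch of just that technique follows; the command and chunk size are arbitrary.

import asyncio
import codecs


async def stream_decoded(cmd):
    proc = await asyncio.create_subprocess_shell(
        cmd, stdout=asyncio.subprocess.PIPE)
    # The incremental decoder buffers bytes that end mid-character until
    # the rest of that character arrives with the next chunk.
    decoder = codecs.getincrementaldecoder('utf8')(errors='replace')
    pieces = []
    while True:
        chunk = await proc.stdout.read(4096)
        if not chunk:
            break
        pieces.append(decoder.decode(chunk))
    pieces.append(decoder.decode(b'', final=True))
    await proc.wait()
    return ''.join(pieces)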
Example 30
def run_command(loop, cmd):
    cmd_proc = yield from asyncio.create_subprocess_shell(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, loop=loop)
    stdout, stderr = yield from cmd_proc.communicate()
    if cmd_proc.returncode != 0:
        raise CommandFailed(
            "Starting async command (%s) failed (rc=%s). (stderr: %s)", cmd,
            cmd_proc.returncode, stderr)

    return stdout.decode(), stderr.decode()
Example 31
    def run(self):
        '''In this example we are starting another process via the shell.
        Again, as with the server example, we need to return the result of
        creating the Task.
        '''
        self._process = yield from asyncio.create_subprocess_shell('ls -l',
            stdout=asyncio.subprocess.PIPE)
        self._process_output, _ = yield from self._process.communicate()

        return self._process
Example 32
def run_command(loop, cmd):
    cmd_proc = yield from asyncio.create_subprocess_shell(
            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, loop=loop
            )
    stdout, stderr = yield from cmd_proc.communicate()
    if cmd_proc.returncode != 0:
        raise CommandFailed("Starting async command (%s) failed (rc=%s). (stderr: %s)",
                            cmd, cmd_proc.returncode, stderr)

    return stdout.decode(), stderr.decode()
Example 33
    def send_signal(self, target, code_id, state):
        switch_command = (
            'sudo pilight-send -p elro_800_switch -s 21 -u {} -{}'.format(
                code_id, state))

        logging.debug('{}: {}: {}'.format(datetime.now(), code_id,
                                          switch_command))

        yield from asyncio.create_subprocess_shell(switch_command,
                                                   loop=self.bot.loop)
Example 34
def main():
    cmds = ["ls", "lls"]
    for cmd in cmds:
        logger.info(cmd)
        p = yield from asyncio.create_subprocess_shell(cmd, stderr=PIPE, stdout=PIPE)
        yield from p.wait()
        ok = yield from check_status(p)
        if ok:
            stdout = yield from p.stdout.read()
            logger.info("output on pid=%s, %s", p.pid, stdout)
Example 35
    def exec_script(self, script, data):
        """Execute a shell script with the data as yaml input file"""
        self._log.debug("Execute script {} with data {}".format(script, data))

        #Make the script executable if it is not.
        perm = os.stat(script).st_mode
        if not (perm & stat.S_IXUSR):
            self._log.warning(
                "script {} without execute permission: {}".format(
                    script, perm))
            os.chmod(script, perm | stat.S_IXUSR)

        tmp_file = None
        with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
            tmp_file.write(
                yaml.dump(data, default_flow_style=True).encode("UTF-8"))

        cmd = "{} {}".format(script, tmp_file.name)
        self._log.debug("Running the CMD: {}".format(cmd))

        try:
            proc = yield from asyncio.create_subprocess_shell(
                cmd,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE)
            rc = yield from proc.wait()
            script_out, script_err = yield from proc.communicate()

        except Exception as e:
            msg = "Script {} caused exception: {}". \
                  format(script, e)
            self._log.exception(msg)
            rc = 1
            script_err = msg
            script_out = ''

        finally:
            # Remove the tempfile created
            try:
                if rc == 0:
                    os.remove(tmp_file.name)
            except OSError as e:
                self._log.info("Error removing tempfile {}: {}".format(
                    tmp_file.name, e))

        if rc != 0:
            if not os.path.exists(script):
                self._log.error("Script {} not found: ".format(script))
            else:
                self._log.error(
                    "Script {}: rc={}\nStdOut:{}\nStdErr:{} \nPermissions on script: {}"
                    .format(script, rc, script_out, script_err,
                            stat.filemode(os.stat(script).st_mode)))

        return rc, script_err
Example 36
    def run(self):
        if self.isRunning():
            raise Exception('Blockupdate is already running')

        self.__aborted = False
        start_time = time.time()
        cmd = self.__properties['_command']
        if cmd is None:
            self.__return(start_time)
            return

        self.__process = yield from asyncio.create_subprocess_shell(
            cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=None,
            env=self.__env,
            loop=self.__loop)
        #e = asyncio.Event(loop=self.__loop)
        #p = self.__loop.create_task(
        #    asyncio.create_subprocess_shell(
        #        cmd,
        #        stdout=asyncio.subprocess.PIPE,
        #        stderr=None,
        #        env=self.__env,
        #        loop=self.__loop)
        #)
        #p.add_done_callback(lambda _:e.set())
        #print('wait')
        #e.wait()
        #print('awake')
        #print(p.result())
        #self.__process = p.result()

        #print('grrr')

        try:
            #res, _ = self.__loop.run_until_complete(
            #    asyncio.wait_for(self.__process.communicate(),
            #                     self.__timeout,
            #                     loop=self.__loop))
            res, _ = yield from asyncio.wait_for(self.__process.communicate(),
                                                 self.__timeout,
                                                 loop=self.__loop)
        except asyncio.TimeoutError:
            # log timeout
            self.abort()
            return

        lines = res.decode().splitlines()
        lines.append('')
        lines.append('')
        lines.append('')

        self.__return(start_time, lines[0], lines[1], lines[2])
Example 37
def run_shell(
    cmd: str,
    out: Optional[Union[TeeCapture, IO[str]]] = sys.stdout,
    err: Optional[Union[TeeCapture, IO[str]]] = sys.stderr,
    raise_on_fail: bool = True,
    log_run_to_stderr: bool = True,
    **kwargs,
) -> CommandOutput:
    """Invokes a shell command and waits for it to finish.

    Args:
        cmd: The command line string to execute, e.g. "echo dog | cat > file".
        out: Where to write the process' stdout. Defaults to sys.stdout. Can be
            anything accepted by print's 'file' parameter, or None if the
            output should be dropped, or a TeeCapture instance. If a TeeCapture
            instance is given, the first element of the returned tuple will be
            the captured output.
        err: Where to write the process' stderr. Defaults to sys.stderr. Can be
            anything accepted by print's 'file' parameter, or None if the
            output should be dropped, or a TeeCapture instance. If a TeeCapture
            instance is given, the second element of the returned tuple will be
            the captured error output.
        raise_on_fail: If the process returns a non-zero error code
            and this flag is set, a CalledProcessError will be raised.
            Otherwise the return code is the third element of the returned
            tuple.
        log_run_to_stderr: Determines whether the fact that this shell command
            was executed is logged to sys.stderr or not.
        **kwargs: Extra arguments for asyncio.create_subprocess_shell, such as
            a cwd (current working directory) argument.

    Returns:
        A (captured output, captured error output, return code) triplet. The
        captured outputs will be None if the out or err parameters were not set
        to an instance of TeeCapture.

    Raises:
         subprocess.CalledProcessError: The process returned a non-zero error
            code and raise_on_fail was set.
    """
    if log_run_to_stderr:
        print('shell:', cmd, file=sys.stderr)
    result = asyncio.get_event_loop().run_until_complete(
        _async_wait_for_process(
            asyncio.create_subprocess_shell(cmd,
                                            stdout=asyncio.subprocess.PIPE,
                                            stderr=asyncio.subprocess.PIPE,
                                            **kwargs),
            out,
            err,
        ))
    if raise_on_fail and result[2]:
        raise subprocess.CalledProcessError(result[2], cmd)
    return result
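run_shell() above relies on project-specific helpers (TeeCapture, _async_wait_for_process). A minimal self-contained sketch of the underlying tee idea, echoing each stdout line as it arrives while also keeping a captured copy, could look like the following; it uses only the standard library and is not the project's actual implementation.

import asyncio


async def tee_shell(cmd):
    """Run cmd, echo its stdout live, and return the captured text."""
    proc = await asyncio.create_subprocess_shell(
        cmd, stdout=asyncio.subprocess.PIPE)
    captured = []
    while True:
        line = await proc.stdout.readline()
        if not line:
            break
        text = line.decode('utf8', errors='replace')
        print(text, end='')       # live echo, like writing to sys.stdout
        captured.append(text)     # captured copy, like TeeCapture
    await proc.wait()
    return ''.join(captured)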
Example 38
 def _shell_exec(self, cmd):
     shell = asyncio.create_subprocess_shell(cmd,
                                             stdin=asyncio.subprocess.PIPE,
                                             stderr=asyncio.subprocess.PIPE,
                                             stdout=asyncio.subprocess.PIPE)
     proc = yield from shell
     out, err = yield from proc.communicate()
     retcode = yield from proc.wait()
     if retcode:
         raise EtcdError(retcode, err.decode('ascii'))
     return out.decode('ascii').rstrip()
Example 39
 def _shell_exec(self, cmd):
     shell = asyncio.create_subprocess_shell(
         cmd, stdin=asyncio.subprocess.PIPE,
         stderr=asyncio.subprocess.PIPE, stdout=asyncio.subprocess.PIPE
     )
     proc = yield from shell
     out, err = yield from proc.communicate()
     retcode = yield from proc.wait()
     if retcode:
         raise EtcdError(retcode, err.decode('ascii'))
     return out.decode('ascii').rstrip()
Example 40
    def process_file(self, path, file):
        source_path = quote('{}{}'.format(self.source_path, path))
        proc = yield from asyncio.create_subprocess_shell(
            'lessc -s{} {}'.format(' -x' if self.compress else '', source_path),
            stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
        contents, err = yield from proc.communicate()

        if err:
            raise Exception('Error in less processing:\n{}\n{}'.format(path, err.decode('utf-8')))

        file['contents'] = io.BytesIO(contents)
Example 41
 def load_background(self):
     #
     # fbi
     #
     # This program displays images using the Linux framebuffer device.
     # Supported formats: PhotoCD, jpeg, ppm, gif, tiff, xwd, bmp, png,
     # webp. It tries to use ImageMagick's convert for unknown file formats.
     #cmd = 'fbi -noverbose -d /dev/fb0 -T 2 -a ' + self.__image_path
     cmd = 'sudo fbi -noverbose -d /dev/fb0 -T 1 ' + self.__image_path
     print(cmd)
     yield from asyncio.create_subprocess_shell(cmd)
Example 42
def run_command(shell_command):
    '''
    Use asyncio to execute a check.
    '''
    p = yield from asyncio.create_subprocess_shell(
        shell_command,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.STDOUT)
    stdout, stderr = yield from p.communicate()
    exit_code = p.returncode
    return (stdout, stderr, exit_code)
Example 43
    def play_media(self):

        playSecond = yield from self.get_media_duration()
        # print('playSecond='+ str(playSecond))
        yield from asyncio.sleep(self.__start_at)

        cmd = 'omxplayer ' + self.__media_path

        #print(cmd)
        yield from asyncio.create_subprocess_shell(cmd)
        yield from asyncio.sleep(playSecond)  # keep enough time for play movie
Example 44
    def test_072_qrexec_to_dom0_simultaneous_write(self):
        """Test for simultaneous write in dom0(src)<-VM(dst) connection

            Similar to test_071_qrexec_dom0_simultaneous_write, but with dom0
            as a "hanging" side.
        """

        self.loop.run_until_complete(self.testvm2.start())

        self.create_remote_file(
            self.testvm2, '/etc/qubes-rpc/test.write', '''\
            # first write a lot of data
            dd if=/dev/zero bs=993 count=10000 iflag=fullblock &
            # and only then read something
            dd of=/dev/null bs=993 count=10000 iflag=fullblock
            sleep 1;
            wait
            ''')

        # can't use subprocess.PIPE, because asyncio will claim those FDs
        pipe1_r, pipe1_w = os.pipe()
        pipe2_r, pipe2_w = os.pipe()
        try:
            local_proc = self.loop.run_until_complete(
                asyncio.create_subprocess_shell(
                    # first write a lot of data to fill all the buffers
                    "dd if=/dev/zero bs=993 count=10000 iflag=fullblock & "
                    # then, only when all written, read something
                    "dd of=/dev/null bs=993 count=10000 iflag=fullblock; ",
                    stdin=pipe1_r,
                    stdout=pipe2_w))

            service_proc = self.loop.run_until_complete(
                self.testvm2.run_service("test.write",
                                         stdin=pipe2_r,
                                         stdout=pipe1_w))
        finally:
            os.close(pipe1_r)
            os.close(pipe1_w)
            os.close(pipe2_r)
            os.close(pipe2_w)

        try:
            self.loop.run_until_complete(
                asyncio.wait_for(service_proc.wait(), timeout=10))
        except asyncio.TimeoutError:
            self.fail("Timeout, probably deadlock")
        else:
            self.assertEqual(service_proc.returncode, 0, "Service call failed")
        finally:
            try:
                service_proc.terminate()
            except ProcessLookupError:
                pass
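The comment in Example 44 about avoiding subprocess.PIPE and passing raw os.pipe() descriptors instead is the interesting detail. Below is a minimal, hypothetical sketch of wiring two shell commands together through such a pipe; the commands assume a POSIX shell and are chosen only for illustration.

import asyncio
import os


async def pipe_two_commands():
    read_fd, write_fd = os.pipe()
    try:
        # The producer writes to the raw write end of the pipe...
        producer = await asyncio.create_subprocess_shell(
            'echo hello', stdout=write_fd)
        # ...and the consumer reads from the raw read end.
        consumer = await asyncio.create_subprocess_shell(
            'tr a-z A-Z', stdin=read_fd,
            stdout=asyncio.subprocess.PIPE)
    finally:
        # Close the parent's copies; the children keep their own.
        os.close(read_fd)
        os.close(write_fd)
    out, _ = await consumer.communicate()
    await producer.wait()
    return out


if __name__ == '__main__':
    print(asyncio.run(pipe_two_commands()))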
Example 45
 def __call__(self, loop):
     start = pendulum.now()
     ps = yield from asyncio.create_subprocess_shell(
         self.cmd,
         stdout=subprocess.PIPE,
         stderr=subprocess.STDOUT,
         loop=loop)
     output = (yield from ps.communicate())[0]
     self.elapsed += pendulum.now() - start
     self.run_count += 1
     return ps, output
Example 46
 def start_process(self):
     if self.run_in_shell:
         cmd = ("{} {}".format(self.cmd, shlex.quote(self.name))
                .encode('utf8'))
         logging.info('Starting command: {}'.format(cmd))
         self.process = yield from asyncio.create_subprocess_shell(
             cmd, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE)
     else:
         cmd = self.cmd + [self.name]
         logging.info('Starting command: {}'.format(cmd))
         self.process = yield from asyncio.create_subprocess_exec(
             *cmd, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE)
Example 47
 def start_process(self):
     if self.run_in_shell:
         cmd = ("{} {}".format(self.cmd,
                               shlex.quote(self.name)).encode('utf8'))
         logging.info('Starting command: {}'.format(cmd))
         self.process = yield from asyncio.create_subprocess_shell(
             cmd, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE)
     else:
         cmd = self.cmd + [self.name]
         logging.info('Starting command: {}'.format(cmd))
         self.process = yield from asyncio.create_subprocess_exec(
             *cmd, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE)
Example 48
        def start_wait_process(loop):
            process = yield from asyncio.create_subprocess_shell(
                "true", stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                loop=loop)

            try:
                out, err = yield from asyncio.wait_for(
                    process.communicate(), timeout=.1, loop=loop)
            except:
                process.kill()
                os.waitpid(process.pid, os.WNOHANG)
                raise
Example 49
 def execute(self, pol, stderr, stdout, var_env = {}):
     logging.info("Locally executing command '%s'", pol.command)
     env = self.env
     env.update(var_env)
     proc = yield from asyncio.create_subprocess_shell(
             pol.command, stdout=stdout, stderr=stderr, env=env, start_new_session=True)
     try:
         ret = yield from proc.wait()
         pol.check_status(ret)
     except asyncio.CancelledError:
         os.killpg(os.getpgid(proc.pid), signal.SIGTERM)
         logging.info("Local command terminated due to timeout or stop_time.")
Example 50
def run_shell(cmd: str,
              out: Optional[Union[TeeCapture, IO[str]]] = sys.stdout,
              err: Optional[Union[TeeCapture, IO[str]]] = sys.stderr,
              raise_on_fail: bool = True,
              log_run_to_stderr: bool = True,
              **kwargs
              ) -> Tuple[Optional[str], Optional[str], int]:
    """Invokes a shell command and waits for it to finish.

    Args:
        cmd: The command line string to execute, e.g. "echo dog | cat > file".
        out: Where to write the process' stdout. Defaults to sys.stdout. Can be
            anything accepted by print's 'file' parameter, or None if the
            output should be dropped, or a TeeCapture instance. If a TeeCapture
            instance is given, the first element of the returned tuple will be
            the captured output.
        err: Where to write the process' stderr. Defaults to sys.stderr. Can be
            anything accepted by print's 'file' parameter, or None if the
            output should be dropped, or a TeeCapture instance. If a TeeCapture
            instance is given, the second element of the returned tuple will be
            the captured error output.
        raise_on_fail: If the process returns a non-zero error code
            and this flag is set, a CalledProcessError will be raised.
            Otherwise the return code is the third element of the returned
            tuple.
        log_run_to_stderr: Determines whether the fact that this shell command
            was executed is logged to sys.stderr or not.
        **kwargs: Extra arguments for asyncio.create_subprocess_shell, such as
            a cwd (current working directory) argument.

    Returns:
        A (captured output, captured error output, return code) triplet. The
        captured outputs will be None if the out or err parameters were not set
        to an instance of TeeCapture.

    Raises:
         subprocess.CalledProcessError: The process returned a non-zero error
            code and raise_on_fail was set.
    """
    if log_run_to_stderr:
        print('shell:', cmd, file=sys.stderr)
    result = asyncio.get_event_loop().run_until_complete(
        _async_wait_for_process(
            asyncio.create_subprocess_shell(
                cmd,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE,
                **kwargs),
            out,
            err))
    if raise_on_fail and result[2]:
        raise subprocess.CalledProcessError(result[2], cmd)
    return result
Example 51
def cat(loop):
    proc = yield from asyncio.create_subprocess_shell("cat",
                                                      stdin=PIPE,
                                                      stdout=PIPE)
    print("pid: %s" % proc.pid)

    message = "Hello World!"
    print("cat write: %r" % message)

    stdout, stderr = yield from proc.communicate(message.encode('ascii'))
    print("cat read: %r" % stdout.decode('ascii'))

    exitcode = yield from proc.wait()
    print("(exit code %s)" % exitcode)
Example 52
    def send_signal(self, target, code_id, state):
        switch_command = (
            'sudo pilight-send -p elro_800_switch -s 21 -u {} -{}'.format(
                code_id, state
            )
        )

        logging.debug(
            '{}: {}: {}'.format(datetime.now(), code_id, switch_command)
        )

        yield from asyncio.create_subprocess_shell(
            switch_command,
            loop=self.bot.loop
        )
Example 53
        def list_dir():
            proc = yield from asyncio.create_subprocess_shell(
                'ls',
                stdout=asyncio.subprocess.PIPE,
            )

            entries = set()
            line = yield from proc.stdout.readline()
            while line:
                entries.add(line.decode('utf-8').strip())
                line = yield from proc.stdout.readline()

            # Cleanup - close the transport.
            proc._transport.close()
            return entries
Example 54
def runshell(cmd):

    print('run shell', cmd )
    #print( 'Blastn' )
    #print( datetime.datetime.now() )

    # Create the subprocess, redirect the standard output into a pipe
    create = asyncio.create_subprocess_shell(
        cmd,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE)
    # Wait for create
    proc = yield from create  # proc is Process Instance

    out,err = yield from proc.communicate()

    return out,err
Example 55
    def restart_with_reloader(self):
        """Spawn a new Python interpreter with the same arguments as this one,
        but running the reloader thread.
        """
        _log('info', ' * Restarting with %s' % self.name)
        args = [sys.executable] + sys.argv
        new_environ = os.environ.copy()
        new_environ['WERKZEUG_RUN_MAIN'] = 'true'

        exit_code = 3
        while exit_code == 3:
            self.process = yield from asyncio.create_subprocess_shell(' '.join(args), env=new_environ,
                                                                      cwd=os.getcwd(),
                                                                      stdout=sys.stdout)
            exit_code = yield from self.process.wait()
        return exit_code
Example 56
def async_exec(command, stdoutCallback):
    fork = yield from asyncio.create_subprocess_shell((command),
                                                      stdout=subprocess.PIPE,
                                                      stderr=subprocess.STDOUT)

    tasks = []
    if fork.stdout is not None:
        tasks.append(read_stdout(fork.stdout, stdoutCallback))
    else:
        print('No stdout')

    yield from asyncio.wait(tasks)

    retCode = yield from fork.wait()

    return retCode
Example 57
def _get_git_changeset():
    """Returns a numeric identifier of the latest git changeset.

    The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
    This value isn't guaranteed to be unique, but collisions are very unlikely,
    so it's sufficient for generating the development version numbers.
    """
    repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    git_log = yield from asyncio.create_subprocess_shell('git log --pretty=format:%ct --quiet -1 HEAD',
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE, shell=True, cwd=repo_dir, universal_newlines=False)
    (stdout, stderr) = yield from git_log.communicate()
    try:
        timestamp = datetime.datetime.utcfromtimestamp(int(stdout))
    except ValueError:
        return None
    return timestamp.strftime('%Y%m%d%H%M%S')
Example 58
def run_cmd_async(cmd, cwd, env=None, fail=True, shell=False, liveupdate=True):
    """
        Run a command asynchronously.
    """
    # pylint: disable=too-many-arguments

    env = env or {}
    cmdstr = cmd
    if not shell:
        cmdstr = ' '.join(cmd)
    logging.info('%s$ %s', cwd, cmdstr)

    logo = LogOutput(liveupdate)

    if shell:
        # asyncio subprocess pipes are bytes-only (universal_newlines is not supported)
        process = yield from asyncio.create_subprocess_shell(
            cmd,
            env=env,
            cwd=cwd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)
    else:
        process = yield from asyncio.create_subprocess_exec(
            *cmd,
            cwd=cwd,
            env=env,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE)

    yield from asyncio.wait([
        _read_stream(process.stdout, logo.log_stdout),
        _read_stream(process.stderr, logo.log_stderr)
    ])
    ret = yield from process.wait()

    if ret and fail:
        msg = 'Command "{cwd}$ {cmd}" failed'.format(cwd=cwd, cmd=cmdstr)
        if logo.stderr:
            msg += '\n--- Error summary ---\n'
            for line in logo.stderr:
                msg += line
        logging.error(msg)

    return (ret, ''.join(logo.stdout))