def _handle_pack_options(self, src, options):
    # Normalize user-supplied packaging (PyInstaller-style) options:
    # shell-split each option string, split "key=value" tokens into two
    # entries, reject options this wrapper reserves for itself, and
    # absolutize/quote the arguments of path-valued options.
    result = []
    for item in options:
        for x in shell_split(item):
            if x:
                # "a=b" becomes ["a", "b"]; find('=') > 0 means a leading
                # "=" (or no "=") leaves the token untouched.
                result.extend(x.split('=', 1) if x.find('=') > 0 else [x])
    i = 0
    # Length is fixed from here on: the loop below mutates entries in place
    # but never appends or removes.
    n = len(result)

    def _quote_path(s):
        # Resolve a relative path against `src`, force forward slashes, and
        # single-quote the result when it contains a space.
        if not os.path.isabs(s):
            s = os.path.join(src, s)
        s = s.replace('\\', '/')
        return ("'%s'" % s) if s.find(' ') > -1 else s

    while i < n:
        v = str(result[i])
        if v in ('--onefile', '-F', '--onefolder', '-D', '--name', '-N',
                 '--noconfirm', '-y', '--distpath', '--specpath'):
            # These options are controlled by the caller; a user override
            # would conflict, so refuse them outright.
            raise RuntimeError('Option "%s" could not be used here' % v)
        if v in ('--add-data', '--add-binary'):
            # These take a SRC<os.pathsep>DEST argument; default DEST to "."
            # when the separator is absent, then fix up the path.
            i += 1
            if result[i].find(os.pathsep) == -1:
                result[i] += os.pathsep + '.'
            result[i] = _quote_path(result[i])
        elif v in ('-i', '--icon', '-p', '--paths', '--runtime-hook',
                   '--additional-hooks-dir', '--version-file', '-m',
                   '--manifest', '-r', '--resource'):
            # Single-path-argument options: fix up the following token.
            i += 1
            result[i] = _quote_path(result[i])
        i += 1
    return result
def exec_commands(*args: str) -> 'str | list[str]':
    """Run each command in the software directory and collect its output.

    Args:
        args: one or more command strings; each is shell-split and executed
            with ``cwd=running_context.soft_path``.

    Returns:
        The decoded stdout of the single command when exactly one command
        is given, otherwise a list of outputs (one per command, in order).
        (The original annotation ``str or None`` evaluated to plain ``str``
        and did not describe the list-returning case.)

    Raises:
        subprocess.CalledProcessError: if any command exits non-zero.
        IndexError: if called with no arguments at all.
    """
    results: list[str] = [
        check_output(shell_split(command),
                     cwd=running_context.soft_path).decode('utf-8')
        for command in args
    ]
    # Historical contract: a single command returns a bare string.
    return results if len(results) > 1 else results[0]
def swap_argv(replacement):
    """Temporarily replace ``sys.argv`` with the shell-split *replacement*.

    Generator-based context helper: the original argv is restored on exit,
    even when the body raises.
    """
    new_argv = shell_split(replacement)
    saved_argv, sys.argv = sys.argv, new_argv
    try:
        yield
    finally:
        sys.argv = saved_argv
def __add__(self, other):
    """Concatenate with a string (shell-split), a list, or another ShellList.

    Returns a new ShellList; neither operand is mutated.

    Raises:
        TypeError: if *other* is not a str, list, or ShellList.
    """
    if isinstance(other, str):
        other = shell_split(other)
    elif isinstance(other, ShellList):
        other = other.list
    elif not isinstance(other, list):
        # BUG FIX: the original referenced the undefined name ``arg`` here,
        # so this branch raised NameError instead of the intended TypeError.
        raise TypeError('arg is an instance of %s' % type(other).__name__)
    return ShellList(self.list + other)
def __init__(self, arg):
    """Build the wrapped argument list from a str, list, or ShellList.

    A string is shell-split; a list is shallow-copied; another ShellList's
    backing list is shallow-copied.

    Raises:
        TypeError: for any other argument type.
    """
    if isinstance(arg, str):
        self.list = shell_split(arg)
        return
    if isinstance(arg, list):
        self.list = list(arg)
        return
    if isinstance(arg, ShellList):
        self.list = list(arg.list)
        return
    raise TypeError('arg is an instance of %s' % type(arg).__name__)
def call_cryptominisat_docker(input_file: Path) -> CompletedProcess:
    """Run CryptoMiniSAT in a Docker container with the file's bytes on stdin.

    Args:
        input_file: path to the input problem file.

    Returns:
        The CompletedProcess produced by docker_run.
    """
    docker_args = shell_split("--rm -i -a stdin -a stdout")
    problem_bytes = input_file.read_bytes()
    return docker_run('msoos/cryptominisat', docker_args, problem_bytes)
def parse_args(parser, command):
    """Parse *command* as if its words had been typed at a unix shell.

    :param parser: argparse.ArgumentParser
    :param command: string of shell-style arguments
    :return: parsed args Namespace
    """
    argv = shell_split(command)
    return parser.parse_args(argv)
def call_unigen_docker(input_file: Path) -> CompletedProcess:
    """Run Unigen in a Docker container with the file's bytes on stdin.

    Args:
        input_file: path to the input problem file.

    Returns:
        The CompletedProcess produced by docker_run.
    """
    docker_args = shell_split("--rm -i -a stdin -a stdout")
    return docker_run('msoos/unigen', docker_args, input_file.read_bytes())
def __init__(self, command):
    """Prepare a subprocess wrapper for *command*.

    The command is split with posix=False so Windows-style quoting and
    backslashes survive; execution settings start unconfigured.
    """
    self._args = shell_split(command, posix=False)
    self._env = environ.copy()
    # Standard streams: all default to "not redirected".
    self._stderr = self._stdout = self._stdin = None
    self._shell = False
    self._cwd = None
    self._universal_newlines = True
    # Handle to the eventual subprocess object, created later.
    self._inner = None
def command(self, string_as_is, ignore_errors=False, once=False):
    """Runs a drush command string.

    If the class is not in verbose mode, -q argument will be added

    ignore_errors may want to be used for commands that exit with non-zero
    status but are not always errors (like reverting a view that may not
    exist)

    command('en -y module_name')
    command('views-revert my_nonexisting_view', ignore_errors=True)
    """
    split = shell_split(string_as_is)

    def _run(command_line):
        # Shared runner for the default site and each URI: add -q when
        # quiet, echo the full command in verbose mode, and optionally
        # swallow non-zero exit statuses.
        if not self._verbose:
            command_line.append('-q')
        command_line.extend(split)
        if self._verbose and self._stdout:
            self._stdout.write(' '.join(command_line) + '\n')
        try:
            sp.check_call(command_line, stdout=self._stdout)
        except sp.CalledProcessError:
            if not ignore_errors:
                raise  # bare raise preserves the original traceback

    with pushd(self._path):
        _run(['drush'])
        if once:
            return
        for uri in set(self._uris):
            # The placeholder "default" site is never targeted directly.
            if re.match(r'^https?\://default$', uri):
                continue
            _run(['drush', '--uri=%s' % (uri)])
def process():
    """Supervise the managed software: poll for updates and (re)start it."""
    while True:
        try:
            if try_update(running_context.prod_branch):
                # RESTART stops the running instance first; START_NEW only
                # drops the handle so a fresh process is launched below.
                if running_context.update_action is UpdateAction.RESTART:
                    terminate_and_wait()
                    running_context.pipe = None
                if running_context.update_action is UpdateAction.START_NEW:
                    running_context.pipe = None
            if running_context.pipe is None:
                argv = shell_split(running_context.startup_command)
                running_context.pipe = Popen(
                    argv, cwd=running_context.soft_path)
        except Exception as ex:
            # Best-effort daemon loop: report and keep polling.
            print(ex)
        sleep(running_context.interval_sec)
def run_named_gist(gist, args=()):
    """Run the gist specified by owner/name string.

    This function does not return, because the whole process
    is replaced by the gist's executable.

    :param gist: Gist as :class:`Gist` object or <owner>/<name> string
    :param args: Arguments to pass to the gist
    """
    if isinstance(gist, Gist):
        gist = gist.ref
    logger.info("running gist %s ...", gist)

    executable = bytes(BIN_DIR / gist)
    try:
        # Replace the current process with the gist binary; only returns
        # (via exception) if exec fails.
        os.execv(executable, [executable] + list(args))
    except OSError as e:
        if e.errno != 8:  # Exec format error
            raise
        logger.warning(
            "couldn't run gist %s directly -- "
            "does it have a proper hashbang?", gist)

    # try to figure out the interpreter to use based on file extension
    # contained within the gist name
    extension = Path(gist).suffix
    if not extension:
        # TODO(xion): use MIME type from GitHub as additional hint
        # as to the choice of interpreter
        error(
            "can't deduce interpreter for gist %s "
            "without file extension", gist)
    # NOTE(review): code after the error() calls assumes error() terminates
    # the process (otherwise `interpreter` could be used while None) — verify.
    interpreter = COMMON_INTERPRETERS.get(extension)
    if not interpreter:
        error("no interpreter found for extension '%s'", extension)

    # format an interpreter-specific command line
    # and execute it within current process (hence the argv shenanigans)
    cmd = interpreter % dict(
        script=str(BIN_DIR / gist),
        args=' '.join(map(shell_quote, args)))
    cmd_argv = shell_split(cmd)
    os.execvp(cmd_argv[0], cmd_argv)
def command_output(self, string_as_is, once=False):
    """Like command() but returns the output, in a list format:
    [(site_name, stripped_data)]"""
    split = shell_split(string_as_is)

    def _capture(command_line):
        # Shared runner for the default site and each URI: add -q when
        # quiet, echo the full command in verbose mode, and return the
        # command's stripped stdout.
        if not self._verbose:
            command_line.append('-q')
        command_line.extend(split)
        if self._verbose and self._stdout:
            self._stdout.write(' '.join(command_line) + '\n')
        return sp.Popen(command_line,
                        stdout=sp.PIPE).communicate()[0].strip()

    with pushd(self._path):
        ret = [('default', _capture(['drush']))]
        if once:
            return ret
        for uri in set(self._uris):
            # The placeholder "default" site is never targeted directly.
            if re.match(r'^https?\://default$', uri):
                continue
            ret.append((uri, _capture(['drush', '--uri=%s' % (uri)])))
        return ret
def __container_args(self):
    """Assemble the keyword arguments used to create this service's container."""
    raw_command = self._service.param('command')
    cmd = []
    if raw_command is not None:
        # Wrap the command in `sh -c "..."` so shell features keep working;
        # embedded double quotes are escaped first.
        escaped = raw_command.replace('"', '\\"')
        cmd = shell_split("sh -c \"{}\"".format(escaped))
    logging.info("{} >> Wants to run: {}".format(self, cmd))
    return {
        'image': self._service.param('image'),
        'command': cmd,
        'detach': True,
        'stdin_open': True,
        'tty': True,
        'ports': [int(p) for p in self._service.params('expose')],
        # Only the container-side mount point of each host:container pair.
        'volumes': [v.split(':')[1] for v in self._service.params('volume')],
        'environment': [self.__resolve_environment(e)
                        for e in self._service.params('env')],
        'name': self._service.name(),
    }
def dl(self, module_names, cache=True, ignore_errors=False):
    """Downloads modules.

    Arguments:
    module_names -- str or list, a module name or a list of module names

    Keyword Arguments:
    cache -- boolean, if Drush's cache should be used
    ignore_errors -- boolean, ignore hash errors

    Raises DrushError if an error occurs when downloading, unless
    ignore_errors is True.
    """
    # Accept a single space-separated string as well as a list.
    # (isinstance replaces the original `type(...) is str`; also accepts
    # str subclasses, which is backward compatible.)
    if isinstance(module_names, str):
        module_names = shell_split(module_names)
    command_line = ['drush', 'dl', '-y']
    if cache:
        command_line.append('--cache')
    if not self._verbose:
        command_line.append('-q')
    command_line.extend(module_names)
    # Some projects must not be downloaded from inside the site directory,
    # so the pushd into self._path is skipped for them.
    dir_exceptions = [
        'registry_rebuild',
    ]
    if len(module_names) == 1 and \
            module_names[0].lower() in dir_exceptions:
        return self._handle_dl(command_line, ignore_errors)
    with pushd(self._path):
        self._handle_dl(command_line, ignore_errors)
def _spawn_process(self):
    """Launch the service command via `sh -c`, logging both streams to the logfile."""
    escaped = self._service.command().replace('"', '\\"')
    argv = shell_split("sh -c \"{}\"".format(escaped))
    # setsid detaches the child into its own session so it can be signalled
    # as a group.
    self.__process = Popen(
        argv,
        shell=False,
        preexec_fn=setsid,
        stdout=self._logfile,
        stderr=self._logfile,
    )
def mongo_restore(
    stream: BinaryIO,
    uri: str,
    collection: str,
    db: str = None,
    buffer_size=None,
    chunk_size=None,
    drop=False,
    cmd_prefix='',
) -> MongoStats:
    '''
    Executes mongorestore, restoring a previously gzipped dump from the
    given stream.

    Args:
    - stream: stream to read from
    - uri: Mongo connection string
    - collection: name of the collection to be restored
    - db: name of the database (defaults to the one in the URI or 'admin')
    - buffer_size: size of the subprocess stdin buffer (default: 10MB)
    - chunk_size: size of each chunk read from the stream (default: 10MB)
    - drop: drop current collection
    - cmd_prefix: prefix to be added to the mongorestore base command

    Returns: MongoStats
    '''
    buffer_size = buffer_size or 10_000_000
    chunk_size = chunk_size or 10_000_000
    process = None
    parts = parse_uri(uri)
    db = db or parts.get('db', 'admin')
    stats = MongoStats(db=db, collection=collection)

    args = shell_split(cmd_prefix + 'mongorestore')
    args += get_cmd_args(uri)
    if drop:
        args += ['--drop']
    args += [
        '--archive', '--gzip',
        '--nsInclude', '*.*',
        '--nsFrom', '$db$.$col$',
        '--nsTo', f'{db}.{collection}',
    ]

    with stats.measure():
        try:
            stderr = b''
            process = subprocess.Popen(
                args,
                errors=None,
                stdin=subprocess.PIPE,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.PIPE,
                bufsize=buffer_size,
            )
            # Stream the dump into mongorestore's stdin in bounded chunks.
            for chunk in iter(lambda: stream.read(chunk_size), b''):
                process.stdin.write(chunk)
            _, stderr = process.communicate()
        finally:
            if process:
                process.terminate()

    if process.returncode != 0:
        raise Exception('mongorestore exited with error code = '
                        + str(process.returncode))

    stderr = stderr.decode('utf-8')
    num_match = re.search(
        f'finished restoring {db}.{collection} '
        + r'\((?P<num>\d+) documents\)',
        stderr,
        re.MULTILINE,
    )
    stats.num_docs = int(num_match.group('num')) if num_match else None
    dup_match = re.findall(
        r"_id_ dup key: \{ : ObjectId\('(?P<id>[0-9a-fA-F]+)'\) \}",
        stderr,
        re.MULTILINE,
    )
    stats.duplicated_ids = [ObjectId(id) for id in dup_match if id]
    # BUG FIX: the original subtracted unconditionally, raising TypeError
    # whenever the "finished restoring" line was absent (num_docs is None).
    if stats.num_docs is not None:
        stats.num_docs -= len(stats.duplicated_ids)
    return stats
def mongo_dump(
    uri: str,
    collection: str,
    db: str = None,
    query: Mapping[str, Any] = None,
    buffer_size=None,
    count=True,
    cmd_prefix: str = '',
) -> ContextManager[MongoDumpOutput]:
    '''
    Executes mongodump, yielding a stream to read its output.

    Args:
    - uri: Mongo connection string
    - collection: name of the collection to be dumped
    - db: name of the database (defaults to the one in the URI or 'admin')
    - query: query to select the documents to be dumped
    - buffer_size: size of the buffer for the stdout (default: 10MB)
    - count: count the number of documents
    - cmd_prefix: prefix to be added to the mongodump base command

    Yields: MongoDumpOutput
    '''
    # NOTE(review): this is a generator (yield below) annotated as returning
    # a ContextManager — presumably decorated with @contextmanager at the
    # definition site; confirm. Also, `query` is accepted but never used in
    # this body — verify whether it should be passed to mongodump.
    buffer_size = buffer_size or 10_000_000
    process = None
    parts = parse_uri(uri)
    db = db or parts.get('db', 'admin')
    stats = MongoStats(db=db, collection=collection)
    args = shell_split(cmd_prefix + 'mongodump')
    args += get_cmd_args(uri)
    args += [
        '--db', db,
        '--collection', collection,
        '--archive',
        '--gzip',
    ]
    try:
        # stderr is only captured when the caller wants the document count,
        # since the count is parsed out of mongodump's progress output.
        process = subprocess.Popen(
            args,
            errors=None,
            stdout=subprocess.PIPE,
            stderr=(subprocess.PIPE if count else subprocess.DEVNULL),
            text=None,
            encoding=None,
            universal_newlines=None,
            bufsize=buffer_size,
        )
        # Hand the live stdout stream to the caller; timing covers the
        # caller's consumption of the dump.
        with stats.measure():
            yield MongoDumpOutput(
                stream=process.stdout,
                stats=stats,
            )
        _, stderr = process.communicate(timeout=1.0)
        stderr = stderr.decode('utf-8') if stderr else ''
        if process.returncode != 0:
            LOGGER.error(stderr)
            raise Exception(
                f'mongodump exited with error code = {process.returncode}')
        # Parse "done dumping <db>.<col> (N documents)" for the count.
        m = re.search(
            f'done dumping {db}.{collection} '
            + r'\((?P<num>\d+) documents\)',
            stderr,
            re.MULTILINE,
        )
        if m:
            stats.num_docs = int(m.group('num'))
    finally:
        # Always reap the child, even if the caller's block raised.
        if process:
            process.terminate()