def git_clone(name, url, rev):
    """Clone a git repository and check out a specific revision.

    Args:
        name (str): directory name the clone is created in
        url (str): git repository URL
        rev (str): revision (branch, tag or commit) to check out

    Raises:
        Exception: if either ``git clone`` or ``git checkout`` exits non-zero.
    """
    # BUG FIX: the Py2-only ``print`` statement is replaced with the
    # parenthesized form, valid on both Python 2 and 3.
    print('git clone %s' % url)
    clone_process = capture_both('git clone %s' % url)
    if clone_process.returncode != 0:
        raise Exception('`git clone %s` failed' % url)
    with cd(name):
        checkout_process = capture_both('git checkout %s' % rev)
        if checkout_process.returncode != 0:
            raise Exception('`git checkout %s` failed' % rev)
def compile_tex(tex, cwd, bibtex=True):
    """Compile a TeX document with pdflatex, optionally running bibtex.

    Args:
        tex (str): TeX source filename (e.g. ``paper.tex``)
        cwd (str): working directory for the commands
        bibtex (bool): when True, run bibtex followed by two extra
            pdflatex passes (the usual cite-resolution sequence)
    """
    pdflatex = "pdflatex --shell-escape {}".format(tex)
    # BUG FIX: the command string used to be assigned to ``bibtex``,
    # shadowing the boolean parameter — the bibtex pass therefore ran
    # unconditionally. A distinct local name restores the opt-out.
    bibtex_cmd = "bibtex {}".format(tex.split('.')[0])
    cmds = [pdflatex, pdflatex]
    if bibtex:
        cmds = [pdflatex, bibtex_cmd] + cmds
    for cmd in cmds:
        sarge.capture_both(cmd, cwd=cwd)
def check_google_mx(domain):
    """
    Check Google MX DNS records

    Args:
        domain (str): DNS domain name
    Returns:
        int: 0 if OK, >0 on error (one per invalid/missing record)

    | https://support.google.com/a/topic/2716885?hl=en&ref_topic=2426592
    """
    cmd = sarge.shell_format("dig {0} mx +short", domain)
    log.info('cmd', cmd=cmd)
    output = sarge.capture_both(cmd).stdout.text.rstrip()
    log.debug('MX', record=output)
    result = 0
    check_domain1 = "aspmx.l.google.com."
    check_domain2 = "googlemail.com."
    # BUG FIX: ``''.split('\n')`` is ``['']``, so the "no records" branch
    # below was unreachable; treat empty dig output as "no MX records".
    lines = output.split('\n') if output else []
    if not lines:
        log.error('err', msg="No MX records found for %r" % domain)
        result += 1
    for l in lines:
        l = l.lower()
        if not (l.endswith(check_domain1) or l.endswith(check_domain2)):
            result += 1
            log.error('err', msg="%r does not end with %r or %r" % (
                l, check_domain1, check_domain2))
    # removed dead ``if result is None`` check: result is always an int here
    return result
def check_google_spf(domain):
    """
    Check a Google SPF DNS TXT record

    Args:
        domain (str): DNS domain name
    Returns:
        int: 0 if OK, 1 on error

    | https://support.google.com/a/answer/178723?hl=en
    """
    expected = u"\"v=spf1 include:_spf.google.com ~all\""
    cmd = sarge.shell_format("dig {0} txt +short", domain)
    log.info('cmd', op='check_google_spf', cmd=cmd)
    records = sarge.capture_both(cmd).stdout.text.rstrip().split('\n')
    for record in records:
        log.debug('TXT', record=record)
        # an exact match on the canonical Google SPF record passes
        if record == expected:
            return 0
    log.error('err', msg="%r != %r" % (records, expected))
    return 1
def carto_css(mml, name):
    """
    Takes MML string input and writes it to a Mapnik XML file.

    :param mml: an mml string, containing the proper CartoCSS styling and
        connection attributes required by the CartoCSS conversion program
    :param name: the unique name of the layer (standard method is to name it
        with its database schema and table name)
    :return mapfile: a cascadenik-ready document.
    """
    # BUG FIX: capture_both is called below but was missing from this local
    # import (it previously imported the unused ``run`` and ``Capture``).
    from sarge import shell_format, capture_both
    create_static_content_subdir('cartocss')
    mml_file = "{0}/cartocss/{1}.mml".format(settings.MEDIA_ROOT, name)
    xml_file = mml_file.replace(".mml", ".xml")
    # with-block guarantees the file is closed even if the write raises
    with open(mml_file, 'w+') as f:
        f.write(mml)
    if not settings.FOOTPRINT_INIT:
        carto_css_command = shell_format("{0}/carto {1}".format(settings.BIN_DIR, mml_file))
        logger.debug("Running carto: %s" % carto_css_command)
        carto_css_content = None
        try:
            carto_result = capture_both(carto_css_command)
            # any non-zero code in the pipeline means the conversion failed
            assert not any(carto_result.returncodes)
            carto_css_content = carto_result.stdout.text
            logger.debug("Carto xml content: %s" % carto_css_command)
            with open(xml_file, 'w') as f:
                f.write(carto_css_content)
        except AssertionError as e:  # ``as`` works on Python 2.6+ and 3
            logger.error("Failed to generate cartocss for {mml}. Exception: {message}. {carto_output}".format(
                mml=mml_file, message=carto_result.stderr.text, carto_output=carto_css_content))
            raise e
def run_cmd(cmd, args, channel):
    """Run a deployment command from DEPLOYMENT_PATH and report to *channel*.

    Args:
        cmd (str): shell command; it is wrapped in ``su ubuntu -c '...'``
        args: parsed CLI args; ``.app``, ``.env`` and ``.verbose`` are read
        channel: chat channel handle forwarded to ``notify()``
    """
    old_cwd = os.getcwd()
    os.chdir(DEPLOYMENT_PATH)
    # NOTE(review): cmd is interpolated into the shell string unescaped —
    # assumes callers never pass untrusted input; confirm.
    cmd = "su ubuntu -c '%s'" % cmd
    resp = []
    try:
        p = sarge.capture_both(cmd)
        if p.returncode == 0:
            resp.append('Successfully deployed %s to %s on Azure' % (
                args.app, args.env))
            if args.verbose:
                resp.append(':\n%s\n%s' % (p.stdout.text, p.stderr.text))
        else:
            resp.append('Failed to deploy %s to %s:\n%s\n%s' % (
                args.app, args.env, p.stdout.text, p.stderr.text
            ))
        # map the deployed app to the repo/subdomain used for the UI test
        subdomain = None
        repo = None
        if args.app == 'tac':
            repo = 'api'
            subdomain = 'auth-stage-az' if args.env == 'live' else 'auth-az'
        elif args.app == 'middle':
            repo = 'detalytics-middle'
            subdomain = 'team-az' if args.env == 'live' else 'dev-az'
        if repo:
            msg = e2e_ui_test(repo, subdomain)
            resp.append(msg)
    finally:
        # always notify and restore the working directory, even on failure
        notify('\n'.join(resp), channel=channel, now=True)
        os.chdir(old_cwd)
def check_google_dmarc(domain):
    """
    Check a Google DMARC DNS TXT record

    Args:
        domain (str): DNS domain name
    Returns:
        int: 0 if OK, 1 on error

    | https://support.google.com/a/answer/2466580
    | https://support.google.com/a/answer/2466563
    """
    expected = u"\"v=DMARC1"  # ... "\; p=none\; rua=mailto:"
    record_name = "_dmarc." + domain
    cmd = sarge.shell_format("dig {0} txt +short", record_name)
    log.info('cmd', op='check_google_dmarc', cmd=cmd)
    records = sarge.capture_both(cmd).stdout.text.rstrip().split('\n')
    for record in records:
        log.debug('TXT', record=record)
        # a DMARC1 record must also carry a policy (p=) tag to pass
        if record.startswith(expected) and 'p=' in record:
            return 0
    log.error('err', msg="%r != %r" % (records, expected))
    return 1
def export(self, name, statement, output_format, geom="wkb_geometry"):
    """Export the rows produced by *statement* via an OGR-style command.

    Args:
        name (str): base name of the output file
        statement (str): SQL query selecting the data to export
        output_format (str): key into ``self.FORMATS`` (extension/options)
        geom (str): geometry column; pass a falsy value to skip geometry typing

    Returns:
        the finished ``capture_both`` pipeline of the export command.

    Raises:
        AssertionError: if the export command exits non-zero.
    """
    if geom:
        # ask the database for the geometry type so the command gets an -nlt hint
        geom_type = '-nlt ' + report_sql_values_as_dict(
            "SELECT GeometryType({geom}) from ({query}) b group by GeometryType({geom});".format(
                geom=geom, query=statement)
        )[0]['geometrytype']
    else:
        geom_type = ''
    output_file = '{dir}/{name}.{ext}'.format(name=name,
                                              ext=self.FORMATS[output_format]['ext'],
                                              dir=settings.SENDFILE_ROOT)
    # shell-quote the SQL so it survives as a single argument
    source = "-sql {statement}".format(statement=pipes.quote(statement))
    logger.info("Exporting with sql argument %s" % source)
    options_formatting = dict(name=name)
    formatted_options = [option.format(**options_formatting)
                         for option in self.FORMATS[output_format].get('options', [])]
    ogr_cmd = self.EXPORT_COMMAND.format(
        output_format=output_format,
        geom_type=geom_type,
        source=source,
        export_file=output_file,
        options=" ".join(formatted_options)
    )
    logger.info(ogr_cmd)
    result = capture_both(ogr_cmd)
    # NOTE(review): assert is stripped under ``python -O``; callers appear to
    # rely on AssertionError — confirm before converting to an explicit raise.
    assert not result.returncode, "ERROR: {name} > {output}: {err}".format(name=name,
                                                                           output=output_format,
                                                                           err=result.stderr.text)
    return result
def _run_cmd(self, cmd):
    """Run *cmd*, capturing output on POSIX; plain run on Windows.

    Returns:
        the finished sarge pipeline object.

    Raises:
        NotImplementedError: on platforms other than Linux/macOS/Windows.
    """
    # BUG FIX: startswith() also matches the "linux2"/"linux3" values that
    # older Python versions report, which the exact-membership test missed.
    if sys.platform.startswith(("linux", "darwin")):  # Linux and MacOs
        return sarge.capture_both(cmd)
    elif sys.platform.startswith("win"):
        # output is deliberately not captured on Windows — presumably to
        # avoid pipe issues there; confirm before unifying the branches
        return sarge.run(cmd)
    else:
        msg = ("Cannot handle platform '{}' - please report to "
               "developers").format(sys.platform)  # pragma: no cover
        raise NotImplementedError(msg)
def reload(self, branch='master'):
    """ get or update plugins from git

    Clones ``self._repo`` on first use, then fetch/checkout/pull *branch*,
    and installs requirements.txt if present.

    Returns:
        str: newline-joined status/error messages.
    """
    resp = []
    old_cwd = os.getcwd()
    os.chdir(self.basedir)
    try:
        # clone for the first time
        if not os.path.exists(self._name):
            sarge.run('git clone %s' % self._repo)
        os.chdir(self._name)
        # one shell pipeline; p.returncodes holds one exit code per command
        p = sarge.capture_both(
            'git fetch origin && git checkout %s && git pull' % branch
        )
        ok = True
        for code in p.returncodes:
            if code != 0:
                resp.extend([
                    'sth went wrong when pulling plugins from %s:' % (
                        self._repo),
                    p.stdout.text, p.stderr.text])
                ok = False
                break
        if ok:
            # report the short commit hash and last commit message
            resp.append('git plugins pulled from %s@%s: "%s"' % (
                self._repo,
                sarge.get_stdout('git rev-parse HEAD').strip('\n')[:8],
                sarge.get_stdout('git log -1 --pretty=%B').strip('\n')
            ))
            if os.path.exists('requirements.txt'):
                pip = sarge.capture_both(
                    'pip install -r requirements.txt')
                if pip.returncode != 0:
                    resp.extend([
                        'sth went wrong when installing plugin requirements',
                        pip.stdout.text, pip.stderr.text])
            else:
                current_app.logger.info('file not found')
        return '\n'.join(resp)
    finally:
        # restore the caller's working directory no matter what happened
        os.chdir(old_cwd)
def verify(challenge, filename):
    """Run *filename* against each of the challenge's input/output tests.

    Args:
        challenge (dict): mapping with a "tests" list of {"input", "output"}
        filename (str): path to the executable solution; it is chmod'ed 0o755

    Returns:
        tuple: ``(True, None, None, None)`` when every test passes, otherwise
        ``(False, input_text, output_text, command)`` for the first failure.
    """
    os.chmod(filename, 0o755)
    for test in challenge["tests"]:
        input_text = str(test["input"])
        output_text = str(test["output"])
        # NOTE(review): input_text is concatenated into the command line
        # without quoting — shell metacharacters in test input would be
        # interpreted; consider sarge.shell_format. Confirm inputs are trusted.
        command = sarge.capture_both(filename + " " + input_text)
        # whitespace-insensitive comparison of expected vs actual stdout
        if command.stdout.text.strip() != output_text.strip():
            return False, input_text, output_text, command
    return True, None, None, None
def get_test_status():
    """ Returns the number of test failures and coverage level. """
    result = capture_both(NOSE_CMD.format(MODULE))
    # nose writes its report (failures + coverage) to stderr
    output = result.stderr.read()
    failed = 0
    if result.returncode != 0:
        failed = sum(int(count) for count in findall(FAILED_TESTS, output, MULTILINE))
    coverage = int(findall(COVERAGE, output, MULTILINE)[0])
    return failed, coverage
def _get_passwd(cmd):
    """Run *cmd* and return its stripped stdout as the password.

    Raises:
        FatalError: when the command exits non-zero.
    """
    proc = sarge.capture_both(cmd)
    proc.wait()
    passwd = proc.stdout.text.strip()
    if proc.returncode == 0:
        return passwd
    raise FatalError(
        'Command "{}" failed\n'
        'We cannot get a password\n'
        'stdout:\n{}\n'
        'stderr:\n{}\n'
        .format(cmd, passwd, proc.stderr.text.strip()))
def check_multiple_alts(chrom, start, end, alt, samples, reference_file=None):
    """Resolve a multi-allelic ALT field against the reference VCF.

    Args:
        chrom, start, end: coordinates of the variant region
        alt (str): comma-separated ALT alleles from the call
        samples: sample names restricting the bcftools view
        reference_file: VCF/BCF to query; when None, *alt* is returned as-is

    Returns:
        str or None: the single observed ALT, or None when the locus still
        shows multiple alleles (treated as a false positive).
    """
    # FIXME: PyVCF can't be used as it loads the wrong coordinate with
    # fetch()
    if reference_file is None:
        return alt
    alt = alt.split(",")
    if len(alt) == 1:
        return alt[0]
    region = "{0}:{1}-{2}".format(chrom, start, end)
    sample_display = ",".join(samples)
    bcftools = ("/usr/bin/bcftools view {0} -r {1} -a -s {2}"
                " --exclude-uncalled -H")
    bcftools = sarge.shell_format(bcftools, reference_file, region,
                                  sample_display)
    command = sarge.capture_both(bcftools)
    mutation_table = pd.read_table(command.stdout, header=None,
                                   names=["CHROM", "POS", "ID", "REF", "ALT"],
                                   usecols=["CHROM", "POS", "ID", "REF", "ALT"])
    if mutation_table.empty:
        # Try alternative approach: sometimes bcftools asserts when
        # launched from sarge
        import subprocess
        # BUG FIX: os.devnull must be opened for *writing* to serve as the
        # child's stderr (the default mode 'r' yields a read-only fd).
        with open(os.devnull, "w") as null:
            cmd = subprocess.Popen(bcftools, shell=True,
                                   stdout=subprocess.PIPE,
                                   stderr=null)
            mutation_table = pd.read_table(cmd.stdout, header=None,
                                           names=["CHROM", "POS", "ID", "REF", "ALT"],
                                           usecols=["CHROM", "POS", "ID", "REF", "ALT"])
    seen_alt = mutation_table["ALT"].item()
    if len(seen_alt.split(",")) > 1:
        message = ("Detected more than one allele for sample pair {}: {},"
                   " mutation position {}-{}, ref {}")
        print(message.format(", ".join(samples), seen_alt, chrom, start,
                             mutation_table["REF"].item()), file=sys.stderr)
        # This is a false positive! Same locus but different ALTs
        return
    return seen_alt
def reload(self, branch='master'):
    """ get or update plugins from git

    Variant that installs requirements through the configured ``pip_bin``.

    Returns:
        str: newline-joined status/error messages.
    """
    resp = []
    old_cwd = os.getcwd()
    os.chdir(self.basedir)
    try:
        # clone for the first time
        if not os.path.exists(self._name):
            sarge.run('git clone %s' % self._repo)
        os.chdir(self._name)
        # one shell pipeline; p.returncodes has one exit code per command
        p = sarge.capture_both(
            'git fetch origin && git checkout %s && git pull' % branch)
        ok = True
        for code in p.returncodes:
            if code != 0:
                resp.extend([
                    'sth went wrong when pulling plugins from %s:' % (self._repo),
                    p.stdout.text, p.stderr.text
                ])
                ok = False
                break
        if ok:
            # report the short commit hash and last commit message
            resp.append(
                'git plugins pulled from %s@%s: "%s"' %
                (self._repo,
                 sarge.get_stdout('git rev-parse HEAD').strip('\n')[:8],
                 sarge.get_stdout('git log -1 --pretty=%B').strip('\n')))
            if os.path.exists('requirements.txt'):
                pip = sarge.capture_both('%s install -r requirements.txt' %
                                         pip_bin)
                if pip.returncode != 0:
                    resp.extend([
                        'sth went wrong when installing plugin requirements',
                        pip.stdout.text, pip.stderr.text
                    ])
        return '\n'.join(resp)
    finally:
        # restore the caller's working directory no matter what happened
        os.chdir(old_cwd)
def whois(domain):
    """
    Get whois information with whois

    Args:
        domain (str): DNS domain

    Returns:
        the completed ``sarge.capture_both`` pipeline; the text output is on
        ``.stdout.text`` (not a plain str, as previously documented)
    """
    cmd = sarge.shell_format('whois {0}', domain)
    log.info('cmd', cmd=cmd)
    output = sarge.capture_both(cmd)
    return output
def dig_txt(domain):
    """
    Get DNS TXT records with dig

    Args:
        domain (str): DNS domain

    Returns:
        the completed ``sarge.capture_both`` pipeline; the dig text output is
        on ``.stdout.text`` (not a plain str, as previously documented)
    """
    cmd = sarge.shell_format(
        "dig {0} txt +cmd +nocomments +question +noidentify +nostats",
        domain)
    log.info('cmd', cmd=cmd)
    output = sarge.capture_both(cmd)
    return output
def dig_dnskey(zone):
    """
    Get DNSSEC DNS records with dig

    Args:
        zone (str): DNS zone

    Returns:
        the completed ``sarge.capture_both`` pipeline; the dig text output is
        on ``.stdout.text`` (not a plain str, as previously documented)
    """
    cmd = sarge.shell_format(
        "dig {0} +dnssec dnskey +cmd +nocomments +question +noidentify +nostats",
        zone)
    log.info('cmd', cmd=cmd)
    output = sarge.capture_both(cmd)
    return output
def generate_png(tempdir: str, filename: str) -> str:
    """Render a PNG preview of an STL file via OpenSCAD.

    Args:
        tempdir: directory holding the intermediate .scad and the output .png
        filename: path of the source STL file

    Returns:
        str: path of the generated PNG file.
    """
    # BUG FIX: the message previously contained the literal text "(unknown)"
    # instead of interpolating the filename.
    log.info(
        f"generating png from stl: {filename} and storing it in {tempdir}")
    scad_file = generate_scad(tempdir, filename)
    base_png = get_base_filename(filename)
    png_filename = f"{tempdir}/{base_png}.png"
    log.debug(f"saving {png_filename}")
    # argument-list form: no shell involved, so no quoting issues
    cmd = [
        OPENSCAD, '-o', png_filename, '--autocenter', '--viewall', '--quiet',
        scad_file
    ]
    out = sarge.capture_both(cmd)
    log.debug(f"openscad output: {out.stdout.text}")
    # openscad emits progress/warnings on stderr; kept at error level as in
    # the original so it stays visible under quiet log configs
    log.error(f"openscad output: {out.stderr.text}")
    return png_filename
def nslookup(domain, nameserver=''):
    """
    Get nslookup information with nslookup (resolve a domainname to an IP)

    Args:
        domain (str): DNS domain; a trailing dot is appended if missing so
            the name is treated as fully qualified
        nameserver (str): DNS domain name server to query (default: ``''``)

    Returns:
        the completed ``sarge.capture_both`` pipeline; the text output is on
        ``.stdout.text`` (not a plain str, as previously documented)
    """
    if not domain.endswith('.'):
        domain = domain + '.'
    cmd = sarge.shell_format('nslookup {0} {1}', domain, nameserver)
    log.info('cmd', cmd=cmd)
    output = sarge.capture_both(cmd)
    return output
def dig_mx(domain):
    """
    Get MX DNS records with dig

    Args:
        domain (str): DNS domain

    Returns:
        the completed ``sarge.capture_both`` pipeline; the dig text output is
        on ``.stdout.text`` (not a plain str, as previously documented)

    | https://en.wikipedia.org/wiki/MX_record
    """
    cmd = sarge.shell_format(
        "dig {0} mx +cmd +nocomments +question +noidentify +nostats",
        domain)
    log.info('cmd', cmd=cmd)
    output = sarge.capture_both(cmd)
    return output
def _reboot(env, channel):
    """Reboot the *env* instance and report the outcome to *channel*."""
    previous_dir = os.getcwd()
    os.chdir(DEPLOYMENT_PATH)
    cmd = COMMAND.format(env=env)
    notify('About to reboot %s' % env, channel=channel, now=True)
    resp = []
    try:
        proc = sarge.capture_both(cmd)
        if proc.returncode != 0:
            # include the command's output so the failure is diagnosable
            resp.append('Failed to reboot %s:\n%s\n%s' % (
                env, proc.stdout.text, proc.stderr.text
            ))
        else:
            resp.append('Successfully rebooted instance in %s environment' % env)
    finally:
        # always notify and restore the working directory
        notify('\n'.join(resp), channel=channel, now=True)
        os.chdir(previous_dir)
def export(self, name, statement, output_format, geom="wkb_geometry"):
    """Export the rows produced by *statement* via an OGR-style command.

    Variant that quotes bare approval-status words in the SQL first.

    Args:
        name (str): base name of the output file
        statement (str): SQL query selecting the data to export
        output_format (str): key into ``self.FORMATS`` (extension/options)
        geom (str): geometry column; pass a falsy value to skip geometry typing

    Returns:
        the finished ``capture_both`` pipeline of the export command.

    Raises:
        AssertionError: if the export command exits non-zero.
    """
    if geom:
        # ask the database for the geometry type so the command gets an -nlt hint
        geom_type = '-nlt ' + report_sql_values_as_dict(
            "SELECT GeometryType({geom}) from ({query}) b group by GeometryType({geom});"
            .format(geom=geom, query=statement))[0]['geometrytype']
    else:
        geom_type = ''
    output_file = '{dir}/{name}.{ext}'.format(
        name=name,
        ext=self.FORMATS[output_format]['ext'],
        dir=settings.SENDFILE_ROOT)
    # Quote bare approval-status literals so the SQL stays valid.
    # CLEANUP: removed the dead ``else: statement = statement`` no-op branch.
    # NOTE(review): this is plain substring replacement and would also touch
    # these words inside identifiers — confirm statements never contain them.
    for approval_status in ['pending', 'approved', 'rejected']:
        if approval_status in statement:
            statement = statement.replace(approval_status,
                                          "'" + approval_status + "'")
    source = "-sql {statement}".format(statement=pipes.quote(statement))
    logger.info("Exporting with sql argument %s" % source)
    options_formatting = dict(name=name)
    formatted_options = [
        option.format(**options_formatting)
        for option in self.FORMATS[output_format].get('options', [])
    ]
    ogr_cmd = self.EXPORT_COMMAND.format(
        output_format=output_format,
        geom_type=geom_type,
        source=source,
        export_file=output_file,
        options=" ".join(formatted_options))
    logger.info(ogr_cmd)
    result = capture_both(ogr_cmd)
    assert not result.returncode, "ERROR: {name} > {output}: {err}".format(
        name=name, output=output_format, err=result.stderr.text)
    return result
def moveto(path, pattern, write_changes=False):
    """
    Move a pattern (glob) of files to a directory

    Args:
        path (str) -- directory path
        pattern (str) -- filename glob pattern

    Yields:
        sarge.run outputs
    """
    log.debug('moveto()', path=path, pattern=pattern,
              write_changes=write_changes)
    files = match_file_pattern(pattern)
    # never try to move the destination directory into itself
    dest_name = pathlib.Path(path).name
    if dest_name in files:
        files.remove(dest_name)
    log.info('patternmatch', files=files)
    if not files:
        return
    if not os.path.exists(path):
        log.info('mkdir', path=path)
        os.makedirs(path)
    # dry-run (-n) unless the caller explicitly asked for real changes
    git_mv_opts = '' if write_changes else '-n'
    for filename in files:
        cmd = sarge.shell_format(
            "git mv %s {0} {1}" % git_mv_opts,
            filename, os.path.join(path, filename))
        log.info('cmd', cmd=cmd)
        yield sarge.capture_both(cmd)
def carto_css(mml, name):
    """
    Takes MML string input and writes it to a Mapnik XML file.

    :param mml: an mml string, containing the proper CartoCSS styling and
        connection attributes required by the CartoCSS conversion program
    :param name: the unique name of the layer (standard method is to name it
        with its database schema and table name)
    :return mapfile: a cascadenik-ready document.
    """
    # BUG FIX: capture_both is called below but was missing from this local
    # import (it previously imported the unused ``run`` and ``Capture``).
    from sarge import shell_format, capture_both
    create_static_content_subdir('cartocss')
    mml_file = "{0}/cartocss/{1}.mml".format(settings.MEDIA_ROOT, name)
    xml_file = mml_file.replace(".mml", ".xml")
    # with-block guarantees the file is closed even if the write raises
    with open(mml_file, 'w+') as f:
        f.write(mml)
    if not settings.FOOTPRINT_INIT:
        carto_css_command = shell_format("{0}/carto {1}".format(
            settings.BIN_DIR, mml_file))
        logger.debug("Running carto: %s" % carto_css_command)
        carto_css_content = None
        try:
            carto_result = capture_both(carto_css_command)
            # any non-zero code in the pipeline means the conversion failed
            assert not any(carto_result.returncodes)
            carto_css_content = carto_result.stdout.text
            logger.debug("Carto xml content: %s" % carto_css_command)
            with open(xml_file, 'w') as f:
                f.write(carto_css_content)
        except AssertionError as e:  # ``as`` works on Python 2.6+ and 3
            logger.error(
                "Failed to generate cartocss for {mml}. Exception: {message}. {carto_output}"
                .format(mml=mml_file,
                        message=carto_result.stderr.text,
                        carto_output=carto_css_content))
            raise e
def inner():
    """Run the closed-over *cmd* from DEPLOYMENT_PATH, then dispatch to the
    success/error handler.

    Closes over: cmd, verbose, channel, on_success, on_error, cb_args.
    """
    old_cwd = os.getcwd()
    os.chdir(DEPLOYMENT_PATH)
    if verbose:
        notify('running cmd `%s`..' % cmd, channel)
    try:
        p = sarge.capture_both(cmd)
    finally:
        # restore the cwd even if the command itself raised
        os.chdir(old_cwd)
    if p.returncode == 0:
        handler = on_success
        if verbose:
            notify(
                'stdout:\n%s\nstderr:\n%s' % (
                    p.stdout.text, p.stderr.text),
                channel)
    else:
        handler = on_error
        # on failure, always show the output regardless of verbosity
        notify('stdout:\n%s\nstderr:\n%s' % (
            p.stdout.text, p.stderr.text), channel)
    # a handler may be a callable or a canned message string
    if callable(handler):
        handler(p, **cb_args)
    elif isinstance(handler, basestring):  # NOTE: basestring => Python 2 code
        notify(handler, channel)
def check_google_dkim(domain, prefix=DEFAULT_GOOGLE_DKIM_PREFIX):
    """
    Check a Google DKIM DNS TXT record

    Args:
        domain (str): DNS domain name
        prefix (str): DKIM ``s=`` selector ('DKIM prefix')
    Returns:
        int: 0 if OK, 1 on error

    | https://support.google.com/a/answer/174126
    | https://admin.google.com/AdminHome?fral=1#AppDetails:service=email&flyout=dkim

    .. note:: This check function only finds "v=DKIM1" TXT records;
        it defaults to the default ``google`` prefix and
        **does not validate DKIM signatures**.

    | http://dkim.org/specs/rfc4871-dkimbase.html#rfc.section.3.6.2.1
    | http://dkim.org/specs/rfc4871-dkimbase.html#rfc.section.A.3
    """
    expected = u"\"v=DKIM1"  # ... "\; p=none\; rua=mailto:"
    record_name = "%s._domainkey.%s" % (prefix, domain)
    cmd = sarge.shell_format("dig {0} txt +short", record_name)
    log.info('cmd', op='check_google_dkim', cmd=cmd)
    records = sarge.capture_both(cmd).stdout.text.rstrip().split('\n')
    for record in records:
        log.debug('TXT', record=record)
        # a DKIM1 record must also carry key (k=) and pubkey (p=) tags
        if record.startswith(expected) and 'k=' in record and 'p=' in record:
            return 0
    log.error('err', msg="%s is not a valid DKIM record" % (records))
    return 1
def extract_images(self, input_path, password=None, prefix=None, directJPEG=False):
    """
    Extract all images from a PDF file.

    Parameters
    ----------
    input_path : str
        Input PDF file.
    password : str
        PDF password.
    prefix : str
        The prefix to the image file (default: name of PDF document).
    directJPEG: bool
        Forces the direct extraction of JPEG images regardless of
        colorspace (default: False).

    Returns
    -------
    text : str
        The tool's stderr output (timing / diagnostic messages).
    """
    # Build the PDFBox option string; each falsy argument omits its flag.
    # BUG FIX: the -directJPEG flag was previously emitted twice
    # (" -directJPEG -directJPEG") because the format slot was filled with
    # the flag text itself.
    options = (' -password {password}'.format(password=password) if password else '') + \
              (' -prefix {prefix}'.format(prefix=prefix) if prefix else '') + \
              (' -directJPEG' if directJPEG else '')
    cmd = '"{java_path}" -jar "{pdfbox_path}" ExtractImages {options} "{input_path}"'.format(
        java_path=self.java_path, pdfbox_path=self.pdfbox_path, options=options,
        input_path=input_path)
    p = sarge.capture_both(cmd)
    return p.stderr.text
#!/usr/bin/env python import re import sarge for n in xrange(50, 4000, 50): c = sarge.capture_both('python -u brian2genn_demo.py -n %s %s %s' % (n, n, n)) for line in c.stdout.readlines(): if re.match('^\d.*', line): print line,
def test_capture_both(self):
    """capture_both() must collect stdout and stderr into separate captures."""
    self.ensure_emitter()
    pipeline = capture_both('"%s" emitter.py' % sys.executable)
    # emitter.py writes 'foo' to stdout and 'bar' to stderr
    self.assertEqual('foo', pipeline.stdout.text.strip())
    self.assertEqual('bar', pipeline.stderr.text.strip())
#!/usr/bin/env python import re import sarge for n in xrange(50, 4000, 50): c = sarge.capture_both('python -u neurodriver_demo.py -n %s %s %s' % (n, n, n)) for line in c.stdout.readlines(): if re.match('^\d.*', line): print line,
def execute_with_stdin(command_and_args, stdin):
    """
    Executes a system command that requires input given to STDIN, such as psql

    NOTE(review): despite the name, *stdin* is appended to the command line
    as a trailing argument rather than written to the process's standard
    input — confirm whether callers rely on this before switching to
    capture_both's ``input=`` parameter.

    Returns the sarge pipeline from ``capture_both`` (output is available as
    ``.stdout.text`` / ``.stderr.text``), not a ``(stdout, stderr)`` tuple
    as previously documented.
    """
    return capture_both(command_and_args + ((' ' + stdin) if stdin else ''))
def pdf_to_images(self, input_path, password=None, imageType=None, outputPrefix=None,
                  startPage=None, endPage=None, page=None, dpi=None, color=None,
                  cropbox=None, time=True):
    """
    Extract all pages of PDF file as images.

    Parameters
    ----------
    input_path : str
        Input PDF file.
    password : str
        PDF password.
    imageType : str
        The image type to write to. Currently only jpg or png (default: jpg).
    outputPrefix : str
        The prefix to the image file (default: name of PDF document).
        e.g >> outputPrefix = '/output/': Images saved in `output` directory
        as 1.jpg, 2.jpg, etc.
        >> outputPrefix = '/output' : Images saved in `output` directory as
        output1.jpg, output2.jpg, etc. in the same location where the input
        file is.
    startPage : int
        The first page to convert, one-based (default: 1).
    endPage : int
        The last page to convert, one-based (default: last).
    page : int
        The only page to extract, one-based.
    dpi : int
        DPI resolution of exported images (default: detected from screen,
        or 96 if headless).
    color : str
        The color depth; may be set to `bilevel`, `gray`, `rgb`, `rgba`
        (default: `rgb`)
    cropbox : str
        The page area to export, e.g "34 45 56 67"
    time : bool
        Prints timing information to stdout.

    Returns
    -------
    text : str
        The tool's stderr output (contains the timing information when
        ``time`` is set).
    """
    # Build the PDFBox option string; each falsy argument omits its flag.
    options = (' -password {password}'.format(password=password) if password else '') + \
              (' -imageType {imageType}'.format(imageType=imageType) if imageType else '') + \
              (' -outputPrefix {outputPrefix}'.format(outputPrefix=outputPrefix) if outputPrefix else '') + \
              (' -startPage {startPage}'.format(startPage=startPage) if startPage else '') + \
              (' -endPage {endPage}'.format(endPage=endPage) if endPage else '') + \
              (' -page {page}'.format(page=page) if page else '') + \
              (' -dpi {dpi}'.format(dpi=dpi) if dpi else '') + \
              (' -color {color}'.format(color=color) if color else '') + \
              (' -cropbox {cropbox}'.format(cropbox=cropbox) if cropbox else '') + \
              (' {time}'.format(time="-time") if time else '')
    cmd = '"{java_path}" -jar "{pdfbox_path}" PDFToImage {options} "{input_path}"'.format(
        java_path=self.java_path, pdfbox_path=self.pdfbox_path, options=options,
        input_path=input_path)
    p = sarge.capture_both(cmd)
    return p.stderr.text
def run_subprocess(
    args: List[str],
    log_output_live: bool,
    cwd: Optional[Union[bytes, str]] = None,
    env: Optional[Dict[str, str]] = None,
    pipe_output: bool = True,
) -> CompletedProcess:
    """
    Run a command in a subprocess.

    Args:
        args: See :py:func:`subprocess.run`.
        log_output_live: If `True`, log output live.
        cwd: See :py:func:`subprocess.run`.
        env: See :py:func:`subprocess.run`.
        pipe_output: If ``True``, pipes are opened to stdout and stderr.
            This means that the values of stdout and stderr will be in
            the returned ``subprocess.CompletedProcess`` and optionally
            sent to a logger, given ``log_output_live``.
            If ``False``, no output is sent to a logger and the values are
            not returned.

    Returns:
        See :py:func:`subprocess.run`.

    Raises:
        subprocess.CalledProcessError: See :py:func:`subprocess.run`.
        Exception: An exception was raised in getting the output from the
            call.
    """
    stdout_list = []  # type: List[bytes]
    stderr_list = []  # type: List[bytes]
    stdout_logger = _LineLogger(LOGGER.debug)
    stderr_logger = _LineLogger(LOGGER.warning)

    def _read_output(process: sarge.Pipeline, block: bool) -> None:
        # drain whatever is currently available from both captures,
        # accumulating it and optionally forwarding it to the loggers
        stdout_line = process.stdout.read(block=block)
        stderr_line = process.stderr.read(block=block)
        if stdout_line:
            stdout_list.append(stdout_line)
            if log_output_live:
                stdout_logger.log(stdout_line)
        if stderr_line:
            stderr_list.append(stderr_line)
            if log_output_live:
                stderr_logger.log(stderr_line)

    try:
        if pipe_output:
            process = sarge.capture_both(args, cwd=cwd, env=env, async_=True)
            # poll until every command in the pipeline has an exit code,
            # draining output as we go so the pipes never fill up
            while all(command.returncode is None
                      for command in process.commands):
                _read_output(process=process, block=False)
                process.poll_all()
                time.sleep(0.05)
            # block on final read to ensure all data read.
            _read_output(process=process, block=True)
        else:
            process = sarge.run(args, cwd=cwd, env=env, async_=True)
        stdout_logger.flush()
        stderr_logger.flush()
        # stderr/stdout are not readable anymore which usually means
        # that the child process has exited. However, the child
        # process has not been wait()ed for yet, i.e. it has not yet
        # been reaped. That is, its exit status is unknown. Read its
        # exit status
        process.wait()
    except Exception:  # pragma: no cover pylint: disable=broad-except
        for popen_process in process.processes:
            # We clean up if there is an error while getting the output.
            # This may not happen while running tests so we ignore coverage.
            # Attempt to give the subprocess(es) a chance to terminate.
            popen_process.terminate()
            try:
                popen_process.wait(1)
            except subprocess.TimeoutExpired:
                # If the process cannot terminate cleanly, we just kill it.
                popen_process.kill()
        raise
    stdout = b''.join(stdout_list) if pipe_output else None
    stderr = b''.join(stderr_list) if pipe_output else None
    if process.returncode != 0:
        raise subprocess.CalledProcessError(
            returncode=process.returncode,
            cmd=args,
            output=stdout,
            stderr=stderr,
        )
    return CompletedProcess(args, process.returncode, stdout, stderr)
def get_hostname():
    """Return the machine's hostname (raw ``hostname`` stdout)."""
    p = run("hostname", stdout=Capture())
    return p.stdout.text

def get_free_space(location):
    """Return the first field of ``du -hs location`` (human-readable size)."""
    p = run(shell_format('du -hs {0}', location), stdout=Capture())
    return p.stdout.text.split()[0]

def run_command(cmd, input=None, async=False, **kwargs):
    """Run *cmd* via capture_both and summarise it as a dict.

    NOTE(review): ``async`` is a reserved word from Python 3.7 on — this
    code only runs on Python 2 / early Python 3.
    """
    # popped but never used — presumably a planned feature; confirm
    timeout = kwargs.pop('timeout', None)
    ts = time.time()
    p = capture_both(cmd, input=input, async=async, **kwargs)
    # stdout/stderr are returned split into lines
    return {
        "command": cmd,
        "returncode": p.returncode,
        "stdout": p.stdout.text.split("\n"),
        "stderr": p.stderr.text.split("\n"),
        "tstart": ts,
        "tstop": time.time()
    }

def run_command_raw_output(cmd, input=None, async=False, **kwargs):
    """Like run_command() but with raw output (definition truncated in this chunk)."""
    timeout = kwargs.pop('timeout', None)
    ts = time.time()
    p = capture_both(cmd, input=input, async=async, **kwargs)
    return {
def get_hostname():
    """Return the machine's hostname (raw ``hostname`` stdout)."""
    p = run("hostname", stdout=Capture())
    return p.stdout.text

def get_free_space(location):
    """Return the first field of ``du -hs location`` (human-readable size)."""
    p = run(shell_format('du -hs {0}', location), stdout=Capture())
    return p.stdout.text.split()[0]

def run_command(cmd, input=None, async=False, **kwargs):
    """Run *cmd* via capture_both and summarise it as a dict.

    NOTE(review): ``async`` is a reserved word from Python 3.7 on — this
    code only runs on Python 2 / early Python 3.
    """
    # popped but never used — presumably a planned feature; confirm
    timeout = kwargs.pop('timeout', None)
    ts = time.time()
    p = capture_both(cmd, input=input, async=async, **kwargs)
    # stdout/stderr are returned split into lines
    return {"command": cmd,
            "returncode": p.returncode,
            "stdout": p.stdout.text.split("\n"),
            "stderr": p.stderr.text.split("\n"),
            "tstart": ts,
            "tstop": time.time()}

def run_command_raw_output(cmd, input=None, async=False, **kwargs):
    """Like run_command() but keeping the raw output text (definition
    truncated in this chunk)."""
    timeout = kwargs.pop('timeout', None)
    ts = time.time()
    p = capture_both(cmd, input=input, async=async, **kwargs)
    return {"command": cmd,
            "returncode": p.returncode,
            "stdout": p.stdout.text,
def _do_analysis(self, high_priority=False):
    """Run the built-in OctoPrint gcode analysis plus any configured external
    analyzers, merging every analyzer's JSON stdout into one results dict.

    Args:
        high_priority: forwarded to the built-in superclass analysis.

    Returns:
        dict: merged analysis results; filament-time and compensated-time
        fields are added when the required keys are present.

    Raises:
        AnalysisAborted: re-raised when a print starts during analysis.
    """
    self._aborted = False
    logger = self._plugin._logger
    results = {'analysisPending': True}
    self._finished_callback(self._current, results)
    if self._plugin._settings.get(["enableOctoPrintAnalyzer"]):
        logger.info("Running built-in analysis.")
        try:
            results.update(
                super(GeniusAnalysisQueue, self)._do_analysis(high_priority))
        except AnalysisAborted as e:
            logger.info(
                "Probably starting printing, aborting built-in analysis.")
            raise  # Reraise it
        logger.info("Result: {}".format(results))
        self._finished_callback(self._current, results)
    else:
        logger.info("Not running built-in analysis.")
    for analyzer in self._plugin._settings.get(["analyzers"]):
        command = analyzer["command"].format(
            gcode=self._current.absolute_path,
            mcodes=self._plugin.get_printer_config())
        if not analyzer["enabled"]:
            logger.info("Disabled: {}".format(command))
            continue
        logger.info("Running: {}".format(command))
        results_err = ""
        # NOTE(review): if an exception fires before sarge_job is assigned,
        # the ``finally`` below would hit a NameError — confirm.
        try:
            if parse_version(sarge.__version__) >= parse_version('0.1.5'):
                # Because in version 0.1.5 the name was changed in sarge.
                async_kwarg = 'async_'
            else:
                async_kwarg = 'async'
            sarge_job = sarge.capture_both(command, **{async_kwarg: True})
            # Wait for sarge to begin
            while not sarge_job.processes or not sarge_job.processes[0]:
                time.sleep(0.5)
            try:
                # demote the analyzer (and its children) to idle priority so
                # it cannot starve an active print
                process = psutil.Process(sarge_job.processes[0].pid)
                for p in [process] + process.children(recursive=True):
                    try:
                        if "IDLE_PRIORITY_CLASS" in dir(psutil):
                            p.nice(psutil.IDLE_PRIORITY_CLASS)
                        else:
                            p.nice(19)
                    except psutil.NoSuchProcess:
                        pass
            except psutil.NoSuchProcess:
                pass
            # poll the analyzer, aborting it if a print has started
            while sarge_job.commands[0].poll() is None:
                if self._aborted and not _allow_analysis(
                        self._plugin._printer, self._plugin._settings):
                    for p in process.children(recursive=True) + [process]:
                        p.terminate()
                    sarge_job.close()
                    raise AnalysisAborted(reenqueue=self._reenqueue)
                time.sleep(0.5)
            sarge_job.close()
            results_text = sarge_job.stdout.text
            results_err = sarge_job.stderr.text
            if sarge_job.returncode != 0:
                raise Exception(results_err)
            logger.info("Sarge output: {}".format(results_err))
            logger.info("Result: {}".format(results_text))
            # analyzer stdout is expected to be a JSON dict of results
            new_results = json.loads(results_text)
            results.update(new_results)
            logger.info("Merged result: {}".format(results))
            self._finished_callback(self._current, results)
        except AnalysisAborted as e:
            logger.info("Probably started printing, aborting: '{}'".format(
                command))
            raise  # Reraise it
        except Exception as e:
            logger.warning("Failed to run '{}'".format(command), exc_info=e)
        finally:
            if sarge_job:
                sarge_job.close()
    # Before we potentially modify the result from analysis, save them.
    results.update({'analysisPending': False})
    try:
        if not all(x in results
                   for x in ["progress", "firstFilament", "lastFilament"]):
            return results
        results["analysisPrintTime"] = results["estimatedPrintTime"]
        results["analysisFirstFilamentPrintTime"] = (
            results["analysisPrintTime"] - _interpolate_list(
                results["progress"], results["firstFilament"])[1])
        results["analysisLastFilamentPrintTime"] = (
            results["analysisPrintTime"] - _interpolate_list(
                results["progress"], results["lastFilament"])[1])
        self.compensate_analysis(results)  # Adjust based on history
        logger.info("Compensated result: {}".format(results))
    except Exception as e:
        logger.warning("Failed to compensate", exc_info=e)
    results["compensatedPrintTime"] = results["estimatedPrintTime"]
    if self._plugin._printer._estimator and isinstance(
            self._plugin._printer._estimator, GeniusEstimator):
        self._plugin._printer._estimator.recheck_metadata = True
    return results