def process(args):
    local.USER = args['<login>']
    opts = {
        'batch': int(args.get('-b')),
        'threads': int(args.get('-t')),
    }
    if args['gmail'] and args['set']:
        gmail.save_credentials(args['<username>'], args['<password>'])
    elif args['gmail']:
        gmail.fetch(**opts)
        if args.get('--parse'):
            local.parse(**opts)
    elif args['parse']:
        # local.save_msgids()
        # local.save_uid_pairs()
        local.parse(args.get('<criteria>'), **opts)
    elif args['threads']:
        with local.client() as con:
            local.update_threads(con, criteria=args.get('<criteria>'))
    elif args['icons']:
        icons()
    elif args['web']:
        web()
    elif args['test']:
        run('pytest')
    elif args['lint']:
        ci = args['--ci'] and 1 or ''
        run('ci=%s bin/lint' % ci)
    else:
        raise SystemExit('Target not defined:\n%s' % args)
def main(args=None):
    parser = argparse.ArgumentParser('Manage CLI')
    cmds = parser.add_subparsers(title='commands')

    def cmd(name, **kw):
        p = cmds.add_parser(name, **kw)
        p.set_defaults(cmd=name)
        p.arg = lambda *a, **kw: p.add_argument(*a, **kw) and p
        p.exe = lambda f: p.set_defaults(exe=f) or p
        return p

    cmd('icons').exe(lambda a: icons())
    cmd('web').exe(lambda a: web())
    cmd('test')\
        .exe(lambda a: run('''
            pytest="pytest -q --cov=mailur"
            $pytest -n2 -m "not no_parallel"
            $pytest --cov-append --cov-report=term-missing -m "no_parallel"
        '''))
    cmd('lint')\
        .exe(lambda a: run('ci=%s bin/run-lint' % (1 if a.ci else '')))\
        .arg('--ci', action='store_true')

    # Use the "args" parameter when given; argparse falls back to sys.argv[1:]
    # when it is None.
    args = parser.parse_args(args)
    if not hasattr(args, 'cmd'):
        parser.print_usage()
        exit(2)
    elif hasattr(args, 'exe'):
        try:
            args.exe(args)
        except KeyboardInterrupt:
            raise SystemExit('^C')
    else:
        raise ValueError('Wrong subcommand')
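# Hedged usage sketch for the cmd() builder above (not from the source): a new
# target would be registered inside main() in the same chained style, e.g.
#
#     cmd('sloc', help='count lines of code')\
#         .exe(lambda a: run('cloc --vcs=git .'))\
#         .arg('--json', action='store_true')
#
# The 'sloc' target and the cloc invocation are illustrative assumptions.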
def generate_config(port: int) -> SynapseConfig:
    server_dir = synapse_base_dir.joinpath(f"localhost-{port}")
    server_dir.mkdir(parents=True, exist_ok=True)

    server_name = f"localhost:{port}"
    admin_credentials = get_admin_credentials(server_name)

    # Always overwrite config file to ensure we're not using a stale version
    config_file = server_dir.joinpath("synapse_config.yaml").resolve()
    config_template = _SYNAPSE_CONFIG_TEMPLATE.read_text()
    config_file.write_text(
        config_template.format(
            server_dir=server_dir, port=port, admin_credentials=admin_credentials
        )
    )

    tls_key_file = server_dir.joinpath(f"{server_name}.tls.crt")
    if not tls_key_file.exists():
        subprocess.run(
            [
                sys.executable,
                "-m",
                "synapse.app.homeserver",
                f"--server-name={server_name}",
                f"--config-path={config_file!s}",
                "--generate-keys",
            ],
            cwd=server_dir,
            timeout=30,
            check=True,
            stderr=DEVNULL,
            stdout=DEVNULL,
        )

    return server_name, config_file
def test_run_with_shell_timeout_and_capture_output(self):
    # Output capturing after a timeout mustn't hang forever on open filehandles.
    with self.runs_in_given_time(0.1):
        with self.assertRaises(subprocess.TimeoutExpired):
            subprocess.run('sleep 3', shell=True, timeout=0.1,
                           capture_output=True)  # New session unspecified.
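# Hedged sketch of the runs_in_given_time() helper assumed by the test above
# (it would live on the same TestCase class): a context manager asserting the
# wrapped block returns close to the expected duration, i.e. the timeout fired
# instead of hanging on the child's still-open pipes. The 1-second tolerance is
# an arbitrary assumption, not taken from the source.
import contextlib
import time

@contextlib.contextmanager
def runs_in_given_time(self, expected, tolerance=1.0):
    start = time.monotonic()
    yield
    elapsed = time.monotonic() - start
    self.assertLess(elapsed, expected + tolerance)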
def gocr_response(resp, pattern, convert=which('convert'), gocr=which('gocr')):
    cmd = run('{} - pbm:- | {} -C "{}" -'.format(convert, gocr, pattern),
              stdout=PIPE, input=resp.content, shell=True, check=True)
    return cmd.stdout.strip().decode()
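# Hedged usage sketch: feeding an HTTP image response through gocr_response()
# to OCR digits out of a small PNG. The session/url arguments and the "0-9"
# pattern are illustrative assumptions, not taken from the source.
def read_digits(session, url):
    resp = session.get(url)  # e.g. a requests.Session
    resp.raise_for_status()
    return gocr_response(resp, '0-9')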
def process(args):
    conf['USER'] = args['<login>']
    opts = {
        'batch': int(args.get('-b')),
        'threads': int(args.get('-t')),
    }
    if args['remote'] and args['set']:
        remote.data_account({
            'username': args['<username>'],
            'password': args['<password>'],
            'imap_host': args['--imap'],
            'smtp_host': args['--smtp'],
        })
    elif args['remote']:
        select_opts = dict(tag=args['--tag'], box=args['--box'])
        fetch_opts = dict(opts, **select_opts)
        fetch_opts = {k: v for k, v in fetch_opts.items() if v}
        remote.fetch(**fetch_opts)
        if args['--parse']:
            local.parse(**opts)
    elif args['parse']:
        local.parse(args.get('<criteria>'), **opts)
    elif args['sync']:
        timeout = args.get('--timeout')
        params = [int(timeout)] if timeout else []
        sync(*params)
    elif args['sync-flags']:
        if args['--reverse']:
            local.sync_flags_to_src()
        else:
            local.sync_flags_to_all()
    elif args['clean-flags']:
        local.clean_flags(args['<flag>'])
    elif args['metadata']:
        local.update_metadata(args.get('<uids>'))
    elif args['icons']:
        icons()
    elif args['web']:
        web()
    elif args['test']:
        run('pytest -q --cov=mailur --cov-report=term-missing')
    elif args['lint']:
        ci = args['--ci'] and 1 or ''
        run('ci=%s bin/run-lint' % ci)
    else:
        raise SystemExit('Target not defined:\n%s' % args)
def process(args):
    conf['USER'] = args['<login>']
    opts = {
        'batch': int(args.get('-b')),
        'threads': int(args.get('-t')),
    }
    if args['gmail'] and args['set']:
        gmail.save_credentials(args['<username>'], args['<password>'])
    elif args['gmail']:
        select_opts = dict(tag=args['--tag'], box=args['--box'])
        fetch_opts = dict(opts, **select_opts)
        fetch_opts = {k: v for k, v in fetch_opts.items() if v}
        gmail.fetch(**fetch_opts)
        if args['--parse']:
            local.parse(**opts)
    elif args['parse']:
        local.parse(args.get('<criteria>'), **opts)
    elif args['sync']:
        sync(int(args['--timeout']))
    elif args['sync-flags']:
        if args['--reverse']:
            local.sync_flags_to_src()
        else:
            local.sync_flags_to_all()
    elif args['clean-flags']:
        local.clean_flags()
    elif args['update-links']:
        local.update_links()
    elif args['update-metadata']:
        local.save_addrs()
        local.save_msgids()
        local.save_uid_pairs()
    elif args['threads']:
        with local.client() as con:
            local.update_threads(con, criteria=args.get('<criteria>'))
    elif args['icons']:
        icons()
    elif args['web']:
        web()
    elif args['test']:
        run('pytest -n2 -q --cov=mailur --cov-report=term-missing')
    elif args['lint']:
        ci = args['--ci'] and 1 or ''
        run('ci=%s bin/run-lint' % ci)
    else:
        raise SystemExit('Target not defined:\n%s' % args)
def process(args):
    conf['USER'] = args['<login>']
    opts = {
        'batch': int(args.get('-b')),
        'threads': int(args.get('-t')),
    }
    if args['remote'] and args['set']:
        remote.data_account({
            'username': args['<username>'],
            'password': args['<password>'],
            'imap_host': args['--imap'],
            'smtp_host': args['--smtp'],
        })
    elif args['remote']:
        select_opts = dict(tag=args['--tag'], box=args['--box'])
        fetch_opts = dict(opts, **select_opts)
        fetch_opts = {k: v for k, v in fetch_opts.items() if v}
        remote.fetch(**fetch_opts)
        if args['--parse']:
            local.parse(**opts)
    elif args['parse']:
        local.parse(args.get('<criteria>'), **opts)
    elif args['sync']:
        sync(int(args['--timeout']))
    elif args['sync-flags']:
        if args['--reverse']:
            local.sync_flags_to_src()
        else:
            local.sync_flags_to_all()
    elif args['clean-flags']:
        local.clean_flags(args['<flag>'])
    elif args['metadata']:
        local.update_metadata(args.get('<uids>'))
    elif args['icons']:
        icons()
    elif args['web']:
        web()
    elif args['test']:
        run('pytest -q -n2 --cov=mailur --cov-report=term-missing')
    elif args['lint']:
        ci = args['--ci'] and 1 or ''
        run('ci=%s bin/run-lint' % ci)
    else:
        raise SystemExit('Target not defined:\n%s' % args)
def put_settings():
    with state_lock:
        state = get_state_data()
        expected_cas = cas_hash(state['v1']['settings'])
        received_cas = request.headers.get('x-hpos-admin-cas')
        if received_cas != expected_cas:
            app.logger.warning(
                'CAS mismatch: {} != {}'.format(received_cas, expected_cas))
            return '', 409
        state['v1']['settings'] = request.get_json(force=True)
        state_json = json.dumps(state, indent=2)
        try:
            subprocess.run(['hpos-config-is-valid'],
                           check=True, input=state_json, text=True)
        except CalledProcessError:
            return '', 400
        replace_file_contents(get_state_path(), state_json)
        # FIXME: see next FIXME
        # rebuild(priority=5, args=[])
        return '', 200
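# Hedged sketch of the replace_file_contents() helper used above: an atomic
# write via a temporary file in the same directory followed by os.replace(),
# so a crash mid-write cannot leave a truncated state file. This illustrates
# the pattern only; the project's real helper may differ.
import os
import tempfile

def replace_file_contents(path, contents):
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(os.path.abspath(path)))
    try:
        with os.fdopen(fd, 'w') as f:
            f.write(contents)
        os.replace(tmp_path, path)
    except BaseException:
        os.unlink(tmp_path)
        raise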
def query_db(query):
    output = subprocess.run(["sqlite3", "data/historian.sqlite", query],
                            text=True, capture_output=True)
    # check_returncode() will raise a CalledProcessError if the query fails
    # see https://docs.python.org/3/library/subprocess.html#subprocess.CompletedProcess.returncode
    output.check_returncode()
    return output.stdout
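# Hedged usage sketch: query_db() returns raw sqlite3 CLI output, one row per
# line with "|"-separated columns (sqlite3's default list mode). The table and
# column names below are illustrative assumptions, not taken from the source.
def latest_readings(limit=10):
    rows = query_db(
        'SELECT timestamp, value FROM readings '
        'ORDER BY timestamp DESC LIMIT {};'.format(int(limit)))
    return [line.split('|') for line in rows.splitlines() if line]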
def proxy_worker(self, ip_url, port, **kwargs):
    assert ip_url.startswith('/')
    resp = self.session.get(self.ROOT_URL + ip_url)
    resp.raise_for_status()
    assert resp.headers['content-type'] == 'image/png'
    cmd = run('{} - pbm:- | {} -C "0-9." -'.format(self.convert, self.gocr),
              stdout=PIPE, input=resp.content, shell=True, check=True)
    # "_" is gocr's placeholder for an unrecognized glyph; here it is always a "7"
    ip = cmd.stdout.strip().decode().replace('_', '7')
    yield Proxy(ip + ':' + port, **kwargs)
def _check_hdr(self, from_path):
    p = Popen(
        f'ffmpeg -loglevel panic -i "{from_path}" -c:v copy -vbsf hevc_mp4toannexb -f hevc - | hdr10plus_parser --verify -',
        shell=True,
        stdin=PIPE,
        stdout=PIPE,
        stderr=STDOUT,
        close_fds=True,
    )
    output = p.stdout.read().decode("utf8")
    dynamic_hdr = None
    if "Dynamic HDR10+ metadata detected." in output:
        dynamic_hdr = True
        run(
            f'ffmpeg -i "{from_path}" -c:v copy -vbsf hevc_mp4toannexb -f hevc - | hdr10plus_parser -o /tmp/metadata.json -',
            shell=True,
        )
    else:
        p = Popen(
            f'ffprobe -v error -show_streams -select_streams v:0 -of json -i "{from_path}"',
            shell=True,
            stdin=PIPE,
            stdout=PIPE,
            stderr=STDOUT,
            close_fds=True,
        )
        try:
            output = json.loads(p.stdout.read().decode("utf8"))
        except json.decoder.JSONDecodeError:
            pass
        else:
            streams = output.get("streams", [{}])
            if len(streams) > 0 and streams[0].get("color_primaries") == "bt2020":
                dynamic_hdr = False
    return dynamic_hdr
def process_chat_message(self, sender_name, message_text):
    self.last_requests = {
        k: v for k, v in self.last_requests.items() if time.time() - v < 5
    }
    generic_error_reply = 'Something went wrong. Please contact the administrator of this bot or try again later.'
    if message_text == 'authcode':
        try:
            if sender_name in self.last_requests:
                return 'Jeez.. I just gave you an authcode five seconds ago! Stop being so pushy!'
            else:
                output = sp.run('getauthcode.py %s' % sender_name,
                                shell=True, check=True,
                                capture_output=True, text=True).stdout
                if output.startswith('Received authcode'):
                    authcode = output.split()[2]
                    self.last_requests[sender_name] = time.time()
                    return 'Your authcode is %s' % authcode
                else:
                    self.logger.error(
                        'authbot: unexpected output from getauthcode.py: %s'
                        % output)
                    return generic_error_reply
        except sp.CalledProcessError as e:
            error_message = e.stderr if e.stderr else str(e)
            self.logger.error('authbot: failed to run getauthcode.py: %s'
                              % error_message)
            return generic_error_reply
    elif message_text == 'status':
        server_info = json.loads(
            urlreq.urlopen('http://localhost:9080/status').read())
        try:
            return 'There are %s players and %s servers online' % (
                server_info['online_players'], server_info['online_servers'])
        except KeyError as e:
            self.logger.error(
                'authbot: invalid status received from server: %s' % e)
            return generic_error_reply
    else:
        return 'Hi %s. Valid commands are "authcode" or "status".' % sender_name
def webpack():
    run('command -v yarn && yarn run dev || npm run dev', shell=True)
def api():
    run('bin/run-web', shell=True)
def rebuild_worker():
    while True:
        (_, cmd) = rebuild_queue.get()
        rebuild_queue.queue.clear()
        subprocess.run(cmd)
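# Hedged sketch of the producer side assumed by rebuild_worker() above: a
# priority queue of (priority, cmd) tuples. After taking the highest-priority
# item the worker clears the backlog, so queued duplicates are coalesced into a
# single run. The queue object, the rebuild() signature (mirroring the
# commented-out call in put_settings()) and the 'do-rebuild' base command are
# illustrative assumptions, not the project's code.
import queue

rebuild_queue = queue.PriorityQueue()

def rebuild(priority, args):
    rebuild_queue.put((priority, ['do-rebuild'] + list(args)))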
def reset():
    try:
        subprocess.run(['hpos-reset'], check=True)
    except CalledProcessError:
        return '', 500
def run_python(self, code, **kwargs):
    """Run Python code in a subprocess using subprocess.run"""
    argv = [sys.executable, "-c", code]
    return subprocess.run(argv, **kwargs)
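# Hedged usage sketch (would live on the same test class as run_python above):
# the **kwargs pass straight through to subprocess.run, so callers can capture
# output and assert on it. capture_output/text/check are standard
# subprocess.run keywords (Python 3.7+); the helper name is an assumption.
def assert_python_prints(self, code, expected):
    proc = self.run_python(code, capture_output=True, text=True, check=True)
    self.assertEqual(proc.stdout.strip(), expected)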
def setup_validator():
    """Install nodejs deps if they do not exist."""
    if not os.path.exists('./node_modules/.bin/bids-validator'):
        subprocess.run(['yarn'])
def api():
    cmd = ('gunicorn mailur.web:app -b :5000 '
           ' -k gevent --timeout=300 --reload --access-logfile=-'
           ' --access-logformat="%(m)s %(U)s %(s)s %(L)ss %(b)sb"')
    run(cmd, shell=True)
def webpack():
    run('which yarn && yarn run dev || npm run dev', shell=True)
def zerotier_info():
    proc = subprocess.run(['sudo', 'zerotier-cli', '-j', 'info'],
                          capture_output=True, check=True)
    return json.loads(proc.stdout)
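# Hedged usage sketch: "zerotier-cli -j info" returns a JSON object describing
# the local node; the "address" and "online" fields used here are assumptions
# about that payload rather than guaranteed keys.
def zerotier_summary():
    info = zerotier_info()
    return '{} (online={})'.format(info.get('address'), info.get('online'))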
def _scheduled_builder(
    # The name of the builder to use.
    builder,
    # An absolute path to the file which contains code to test. The file resides in a
    # temporary directory, which should be used to hold any additional files
    # produced by the test.
    file_path,
    # An absolute path to the Sphinx root directory.
    sphinx_base_path,
    # A relative path to the Sphinx source path from the ``sphinx_base_path``.
    sphinx_source_path,
    # A relative path to the Sphinx output path from the ``sphinx_base_path``.
    sphinx_out_path,
    # A relative path to the source file from the ``sphinx_source_path``, based
    # on the submitting web page.
    source_path,
):
    cwd = os.path.dirname(file_path)
    if builder == "unsafe-python" and os.environ.get("WEB2PY_CONFIG") == "test":
        # Run the test in Python. This is for testing only, and should never be
        # used in production; instead, this should be run in a limited Docker
        # container. For simplicity, it lacks a timeout.
        #
        # First, copy the test to the temp directory. Otherwise, running the
        # test file from its book location means it will import the solution,
        # which is in the same directory.
        test_file_name = os.path.splitext(os.path.basename(file_path))[0] + "-test.py"
        dest_test_path = os.path.join(cwd, test_file_name)
        shutil.copyfile(
            os.path.join(
                sphinx_base_path,
                sphinx_source_path,
                os.path.dirname(source_path),
                test_file_name,
            ),
            dest_test_path,
        )
        try:
            str_out = subprocess.check_output(
                [sys.executable, dest_test_path],
                stderr=subprocess.STDOUT,
                universal_newlines=True,
                cwd=cwd,
            )
            return str_out, 100
        except subprocess.CalledProcessError as e:
            # from gluon.debug import dbg; dbg.set_trace()
            return e.output, 0
    elif builder != "pic24-xc16-bullylib":
        raise RuntimeError("Unknown builder {}".format(builder))

    # Assemble or compile the source. We assume that the binaries are already in the path.
    xc16_path = ""
    # Compile in the temporary directory, in which ``file_path`` resides.
    sp_args = dict(stderr=subprocess.STDOUT, universal_newlines=True, cwd=cwd)
    o_path = file_path + ".o"
    extension = os.path.splitext(file_path)[1]
    if extension == ".s":
        args = [
            os.path.join(xc16_path, "xc16-as"),
            "-omf=elf",
            "-g",
            "--processor=33EP128GP502",
            file_path,
            "-o" + o_path,
        ]
    elif extension == ".c":
        args = [
            os.path.join(xc16_path, "xc16-gcc"),
            "-mcpu=33EP128GP502",
            "-omf=elf",
            "-g",
            "-O0",
            "-msmart-io=1",
            "-Wall",
            "-Wextra",
            "-Wdeclaration-after-statement",
            "-I" + os.path.join(sphinx_base_path, sphinx_source_path, "lib/include"),
            "-I" + os.path.join(sphinx_base_path, sphinx_source_path, "tests"),
            "-I" + os.path.join(
                sphinx_base_path, sphinx_source_path, "tests/platform/Microchip_PIC24"
            ),
            "-I" + os.path.join(
                sphinx_base_path, sphinx_source_path, os.path.dirname(source_path)
            ),
            file_path,
            "-c",
            "-o" + o_path,
        ]
    else:
        raise RuntimeError("Unknown file extension in {}.".format(file_path))
    out = _subprocess_string(args, **sp_args)
    try:
        out += subprocess.check_output(args, **sp_args)
    except subprocess.CalledProcessError as e:
        out += e.output
        return out, 0

    # Build the test code with a random verification code.
    verification_code = get_verification_code()
    waf_root = os.path.normpath(
        os.path.join(
            sphinx_base_path, sphinx_out_path, BUILD_SYSTEM_PATH, sphinx_source_path
        )
    )
    test_file_path = os.path.join(
        sphinx_base_path,
        sphinx_source_path,
        os.path.splitext(source_path)[0] + "-test.c",
    )
    test_object_path = os.path.join(
        waf_root,
        "{}-test.c.{}.o".format(os.path.splitext(source_path)[0], verification_code),
    )
    args = [
        os.path.join(xc16_path, "xc16-gcc"),
        "-mcpu=33EP128GP502",
        "-omf=elf",
        "-g",
        "-O0",
        "-msmart-io=1",
        "-Wall",
        "-Wextra",
        "-Wdeclaration-after-statement",
        "-I" + os.path.join(sphinx_base_path, sphinx_source_path, "lib/include"),
        "-I" + os.path.join(sphinx_base_path, sphinx_source_path, "tests"),
        "-I" + os.path.join(
            sphinx_base_path, sphinx_source_path, "tests/platform/Microchip_PIC24"
        ),
        "-I" + os.path.join(
            sphinx_base_path, sphinx_source_path, os.path.dirname(source_path)
        ),
        test_file_path,
        "-DVERIFICATION_CODE=({}u)".format(verification_code),
        "-c",
        "-o" + test_object_path,
    ]
    out += _subprocess_string(args, **sp_args)
    try:
        out += subprocess.check_output(args, **sp_args)
    except subprocess.CalledProcessError as e:
        out += e.output
        return out, 0

    # Link.
    elf_path = file_path + ".elf"
    args = [
        os.path.join(xc16_path, "xc16-gcc"),
        "-omf=elf",
        "-Wl,--heap=100,--stack=16,--check-sections,--data-init,--pack-data,--handles,--isr,--no-gc-sections,--fill-upper=0,--stackguard=16,--no-force-link,--smart-io",
        "-Wl,--script=" + os.path.join(
            sphinx_base_path, sphinx_source_path, "lib/lkr/p33EP128GP502_bootldr.gld"
        ),
        test_object_path,
        o_path,
        os.path.join(waf_root, "lib/src/pic24_clockfreq.c.1.o"),
        os.path.join(waf_root, "lib/src/pic24_configbits.c.1.o"),
        os.path.join(waf_root, "lib/src/pic24_serial.c.1.o"),
        os.path.join(waf_root, "lib/src/pic24_timer.c.1.o"),
        os.path.join(waf_root, "lib/src/pic24_uart.c.1.o"),
        os.path.join(waf_root, "lib/src/pic24_util.c.1.o"),
        os.path.join(waf_root, "tests/test_utils.c.1.o"),
        os.path.join(waf_root, "tests/test_assert.c.1.o"),
        "-o" + elf_path,
        "-Wl,-Bstatic",
        "-Wl,-Bdynamic",
    ]
    out += "\n" + _subprocess_string(args, **sp_args)
    try:
        out += subprocess.check_output(args, **sp_args)
    except subprocess.CalledProcessError as e:
        out += e.output
        return out, 0

    # Simulate. Create the simulation commands.
    simout_path = file_path + ".simout"
    ss = get_sim_str_sim30("dspic33epsuper", elf_path, simout_path)
    # Run the simulation. This is a re-coded version of ``wscript.sim_run`` -- I
    # couldn't find a way to re-use that code.
    sim_ret = 0
    args = [os.path.join(xc16_path, "sim30")]
    out += "\nTest results:\n" + _subprocess_string(args, **sp_args)
    try:
        cp = subprocess.run(
            args, input=ss, stdout=subprocess.PIPE, timeout=5, **sp_args
        )
        sim_ret = cp.returncode
    except subprocess.TimeoutExpired:
        sim_ret = 1
        timeout_str = "\n\nTimeout."
    else:
        timeout_str = ""
    with open(simout_path, encoding="utf-8") as f:
        out += f.read().rstrip()
    # Put the timeout string at the end of all the simulator output.
    out += timeout_str
    return out, (100 if not sim_ret and check_sim_out(out, verification_code) else 0)
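# Hedged sketch of the _subprocess_string() helper used throughout
# _scheduled_builder(): it only renders the command line (and working
# directory) as text for inclusion in the build log before the command is run.
# The real helper may format this differently.
def _subprocess_string(args, cwd=None, **kwargs):
    return 'cwd={}\n{}\n'.format(cwd, ' '.join(args))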