def main(argv):
    """Program entry point: parse *argv*, run the pipeline, and exit.

    :param argv: command-line argument list (excluding the program name)
    """
    # NOTE: If parsing arguments fails or if the program is run with `-h`
    # switch to just get the help message, the rest of the function will
    # not be executed.  Parsing arguments before importing and defining
    # everything thus saves time if the user runs the program with `-h`
    # flag or if the user makes a mistake in command line arguments.
    parsed_args = parse_argv(argv)
    # NOTE: Putting imports here seems to be against Style Guide for
    # Python Code (PEP 8).  However, having imports in the body of
    # `main` function looks more justifiable than in the bodies of
    # other functions.
    from .runner import go
    go(
        parsed_args.input_file_name,
        parsed_args.sampling_number,
        parsed_args.zero_threshold,
        parsed_args.signature_parameter,
        parsed_args.periodicity_parameter,
        parsed_args.caution
    )
    # NOTE: Apparently according to current practices, `main` function
    # is expected to return the exit status (with `return`, instead of
    # calling `sys.exit` itself).  However, since `parse_args` is called
    # inside this function, in some cases this function will be exiting
    # through `sys.exit` anyway (for example, if the program is called
    # with `-h` flag to get the help message).  Thus it seems unreasonable
    # to try to return normally from this function in other situations.
    # TODO: make sure to return the correct exit status in all situations,
    # based on the outcome of `go` execution
    sys_exit(0)
def scan_file(file_path):
    """Collect token literals from ``chunk_tag_t`` tables in a C/C++ file.

    Scans *file_path* for array definitions of the form
    ``static const chunk_tag_t name[] = { { "token", CT_XXX }, ... }``
    and records every token literal together with its position in the
    array.

    :param file_path: path of the source file to scan
    :return: list of ``[token, 'array_name[index]']`` pairs
    """
    cur_token = ''
    token_idx = 0
    args = []
    # `with` guarantees the descriptor is released even if parsing raises;
    # the original opened the file and never closed it.
    with open(file_path, 'r') as fd:
        for line in fd:
            line = line.strip()
            if line.startswith('static const chunk_tag_t'):
                # Table header: the array name sits between the fixed
                # 25-character prefix and the opening bracket.
                idx = line.find('[')
                if idx > 0:
                    cur_token = line[25:idx].strip()
                    token_idx = 0
            elif cur_token:
                # Entry line inside a table: `{ "tok", CT_TYPE },`
                idx1 = line.find('{')
                idx2 = line.find('CT_')
                if idx1 >= 0 and idx2 > idx1:
                    tok = line[idx1 + 1:idx2].strip()
                    if tok.startswith('R"'):
                        # C++11 raw string literal: keep the text between
                        # the outermost parentheses.
                        pos_paren_open = tok.find('(')
                        pos_paren_close = tok.rfind(')')
                        if pos_paren_open == -1 or pos_paren_close == -1:
                            print("raw string parenthesis not found", file=stderr)
                            sys_exit(-1)
                        tok = tok[pos_paren_open + 1:pos_paren_close]
                    else:
                        tok = tok[1:-2]  # strip off open quotes and commas
                    args.append([tok, '%s[%d]' % (cur_token, token_idx)])
                    token_idx += 1
    return args
def parse_commandline():
    """Parse the command line and return the resulting namespace.

    Prints the usage text and exits when no input file was supplied.

    :return: (obj) argparse.Namespace
    """
    parser = argparse.ArgumentParser(
        description='Validate `LCONF files`',
        formatter_class=RawDescriptionHelpFormatter,
        epilog='''EXAMPLES:
    lconf-validate path-to-first.lconf path-to-second.lconf
'''
    )
    parser.add_argument(
        'in_files',
        nargs='*',
        default=[],
        help='List of files to be validates',
    )
    parsed = parser.parse_args()
    # Guard clause: without any input files there is nothing to validate.
    if not parsed.in_files:
        parser.print_help()
        sys_exit()
    return parsed
def main(args=None):
    """Fetch new job instances from the workflow DB and submit them to the batch farm.

    :param args: argument list for the parser (defaults to sys.argv when None)
    """
    parser = ArgumentParser(usage="Usage: %(prog)s taskName xmlFile [options]", description="create new job in DB")
    parser.add_argument("-d", "--dry", dest="dry", action='store_true', default=False, help='if dry, do not try interacting with batch farm')
    parser.add_argument("-l", "--local", dest="local", action='store_true', default=False, help='run locally')
    parser.add_argument("-p", "--pythonbin", dest="python", default=None, type=str, help='the python executable if non standard is chosen')
    parser.add_argument("-c", "--chunk", dest="chunk", default=100, type=int, help='number of jobs to process per cycle')
    parser.add_argument("-m", "--maxJobs", dest="maxJobs", default=None, type=int, help='number of jobs that can be in the system')
    parser.add_argument("-u", "--user", dest="user", default=None, type=str, help='name of user that submits jobs')
    parser.add_argument("-s", "--skipDBcheck", dest="skipDBcheck", action='store_true', default=False, help='skip DB check for jobs')
    send_heartbeat("JobFetcher")  # encapsulates the heartbeat update!
    opts = parser.parse_args(args)
    log = logging.getLogger("script")
    batchsite = BATCH_DEFAULTS['name']
    BEngine = HPC.BatchEngine()
    if opts.user is not None:
        BEngine.setUser(opts.user)
    # Throttling is only enforced when --maxJobs is given.
    if opts.maxJobs is not None:
        try:
            # Fast path: ask the batch engine directly for pending jobs.
            val = BEngine.checkJobsFast(pending=True)
        except Exception as err:
            print 'EXCEPTION during getRunningJobs, falling back to DB check, reason follows: %s' % str(err)
            val = 0
            if opts.skipDBcheck:
                print 'skipping DB check, assume no jobs to be in the system'
            else:
                # Fallback: count active jobs through the workflow DB, one
                # status at a time.
                for stat in ['Running', 'Submitted', 'Suspended']:
                    res = get("%s/newjobs/" % DAMPE_WORKFLOW_URL, data={"site": str(batchsite), "limit": opts.chunk, "status": stat})
                    res.raise_for_status()
                    res = res.json()
                    if not res.get("result", "nok") == "ok":
                        log.error(res.get("error"))
                    val += len(res.get("jobs"))
        # NOTE(review): original indentation was lost; the DB fallback above is
        # assumed to live inside the except branch — confirm against upstream.
        log.info('found %i jobs running or pending', val)
        if val >= opts.maxJobs:
            log.warning("reached maximum number of jobs per site, not submitting anything, change this value by setting it to higher value")
            sys_exit();
    # Pull the next chunk of freshly created job instances for this site.
    res = get("%s/newjobs/" % DAMPE_WORKFLOW_URL, data={"site": str(batchsite), "limit": opts.chunk})
    res.raise_for_status()
    res = res.json()
    if not res.get("result", "nok") == "ok":
        log.error(res.get("error"))
    jobs = res.get("jobs")
    log.info('found %i new job instances to deploy this cycle', len(jobs))
    njobs = 0
    for job in jobs:
        j = DmpJob.fromJSON(job)
        # j.__updateEnv__()
        j.write_script(pythonbin=opts.python, debug=opts.dry)
        try:
            ret = j.submit(dry=opts.dry, local=opts.local)
            j.updateStatus("Submitted", "WaitingForExecution", batchId=ret, cpu=0., memory=0.)
            njobs += 1
        except Exception, e:
            # Keep going on a failed submission; the job stays unsubmitted.
            log.exception(e)
def add_node(logger, args):
    """Register a new node (IP + plugin) in the database.

    Exits with status 1 when the plugin cannot be imported or the node
    already exists.

    :param logger: logger used for progress/error reporting
    :param args: remaining CLI arguments for this sub-command
    """
    logger.debug("action: add_node")
    p = argparse.ArgumentParser(usage="%(prog)s add_node IP PLUGIN [-u USERNAME] [-p PASSWORD]")
    p.add_argument("ip")
    p.add_argument("plugin")
    p.add_argument("-u", "--username")
    p.add_argument("-p", "--password")
    o, a = p.parse_known_args(args)
    logger.debug("action opts: %s", o)
    logger.debug("action args: %s", a)
    if o.ip and o.plugin:
        # Validate the plugin before touching the database.
        try:
            load_plugin(o.plugin)
        except ImportError:
            logger.error("%s is not a valid plugin", o.plugin)
            sys_exit(1)
        node = Node(o.ip, o.plugin)
        try:
            session.add(node)
            node.username = o.username
            node.password = o.password
            session.commit()
            logger.info("Node added")
        except IntegrityError:
            # Unique constraint hit: a node with this identity is already stored.
            logger.error("Node already exists")
            sys_exit(1)
def grep_for(self, exp):
    """
    Execute a grep command to search for the given expression.
    Results are returned as a list.
    """
    # NOTE(review): no `return` statement is visible in this view of the
    # function; either the chunk is truncated or results are consumed via
    # side effects — confirm against the full file.
    cmd = self._grep_cmd(exp, self.file_patterns)
    if self.debug:
        print "=== Grep command ==="
        print " $ %s\n" % cmd
    try:
        # shell=True because the command string may contain pipes/globs.
        response = subprocess.check_output(
            [cmd],
            shell=True
        )
        results = response.splitlines()
        if self.debug:
            print "=== Grep results ==="
            print response, "Total results: %d\n" % len(results)
    except subprocess.CalledProcessError, err:
        # grep exits 1 for "no matches" and >1 for real errors.
        if err.returncode == 1:
            print "Couldn't find anything matching '%s'" % exp
        else:
            print "Whoops, grep returned errorcode %d" % err.returncode
        # NOTE(review): reconstructed indentation — sys_exit() is assumed to
        # terminate on any grep failure, including "no matches". Confirm.
        sys_exit()
def main(args=None):
    """Collect per-site job-status frequencies and push them to InfluxDB.

    :param args: argument list for the parser (defaults to sys.argv when None)
    """
    parser = ArgumentParser(usage="Usage: %(prog)s [options]", description="query datacatalog")
    parser.add_argument("-H","--host",dest='host',help="hostname of influxdb instance")
    parser.add_argument("-u","--user",dest="user",help="username")
    parser.add_argument("-p","--password",dest="pw",help="password")
    parser.add_argument("-P","--port",dest="port",type=int,default=8086,help="influxdb ingest port")
    parser.add_argument("-n","--dbname", dest="dbname",help="name of DB to store data in.")
    parser.add_argument("-d", "--dry", dest="dry", action="store_true", default=False, help="do not report results to grafana")
    parser.add_argument("-v", "--verbose", dest="verbose", action="store_true", default=False, help="verbose mode")
    opts = parser.parse_args(args)
    json_bdy = []
    for site in batchSites:
        # Start every known status at 0 so each site reports a full set of
        # measurements even when no job carries that status.
        status_dict = {key: 0 for key in statii}
        stats = JobInstance.objects.filter(site=site).item_frequencies("status")
        status_dict.update(stats)
        for stat, freq in status_dict.iteritems():
            json_bdy.append(__makeEntry__(stat, site, freq))
    print 'found %i measurements to add'%len(json_bdy)
    pp = PrettyPrinter(indent=2)
    if opts.verbose:
        pp.pprint(json_bdy)
    if opts.dry:
        return
    if influxdb:
        client = InfluxDBClient(opts.host,opts.port,opts.user,opts.pw,opts.dbname)
        client.create_database(opts.dbname)
        ret = client.write_points(json_bdy)
        if not ret:
            # Raise-and-catch to get a traceback printed without aborting.
            try:
                raise Exception("Could not write points to DB")
            except Exception:
                print_exc()
        # NOTE(review): write_points returns True on success, so this exits
        # with status 1 on success and 0 on failure — looks inverted; confirm.
        sys_exit(int(ret))
def send_blueprint_script(src_path,target_path):
    """Upload a local blueprint script to *target_path* on the remote host.

    Validates that the local file is readable, asks for confirmation when
    the remote file already exists, then pushes it with fabric.
    """
    with settings(show('warnings', 'running', 'stdout', 'stderr'),
                  warn_only=False,
                  shell='/bin/bash -lc',
                  user='******',
                  ):
        ## try/except wrapped check to validate existance of local file
        try:
            with open(src_path, 'rt') as f:
                f.readline()
        except IOError as e:
            print "I/O error [{0}] {1}: {2}".format(e.errno, e.strerror, e.filename)
            sys_exit(1)
        # Probe the remote side for an existing file at the destination.
        test_exist = fab_run('ls -l %s' % (target_path))
        if test_exist.succeeded:
            replace_yesno = fab_prompt(
                'File <{0}> already Exists. OK to Replace? [yes/no]'.format(target_path),
                default='no')
            if debug == True:
                print 'You said [{0}], exiting.'.format(replace_yesno)
            if 'yes' in replace_yesno.lower():
                replace_yesno = True
            else:
                replace_yesno = False
                #ch = lambda x: 'yes' if x == True else 'no'
                # NOTE(review): reconstructed indentation — sys_exit(0) is
                # assumed to belong to the "user declined" branch. Confirm.
                sys_exit(0)
            test = fab_put(src_path,target_path,use_sudo=False, mirror_local_mode=False, mode=None)
        else:
            # NOTE(review): `lsbdata` is not defined anywhere in this function;
            # this looks like a bug — `target_path` was probably intended.
            test = fab_run('[ -f %s ]' % (lsbdata))
def generate_config_file(configdb, config_content):
    """Write *config_content* to its configured file path and terminate.

    Logs the rendered configuration, saves it via save_config(), then
    exits the process with status 0 — generating a config file is a
    terminal action.
    """
    target = config_content.config_file
    rendered = config.pretty(config_content, '\n\t')
    _logger.info(
        'save following config to file %s:\n\t%s',
        target,
        rendered
    )
    save_config(configdb, config_content, target)
    sys_exit(0)
def run(self):  # suppress(unused-function)
    """Run linters."""
    import parmap
    from prospector.formatters.pylint import PylintFormatter
    cwd = os.getcwd()
    files = self._get_files_to_lint([os.path.join(cwd, "test")])
    # Nothing to lint: succeed immediately.
    if len(files) == 0:
        sys_exit(0)
        return  # defensive: unreachable unless sys_exit is stubbed
    # Parallelize only when it pays off: enough files and enough cores,
    # and not explicitly disabled via the environment.
    use_multiprocessing = (
        not os.getenv("DISABLE_MULTIPROCESSING", None) and
        multiprocessing.cpu_count() < len(files) and
        multiprocessing.cpu_count() > 2
    )
    if use_multiprocessing:
        mapper = parmap.map
    else:
        # suppress(E731)
        mapper = lambda f, i, *a: [f(*((x,) + a)) for x in i]
    with _patched_pep257():
        keyed_messages = dict()
        # Certain checks, such as vulture and pyroma cannot be
        # meaningfully run in parallel (vulture requires all
        # files to be passed to the linter, pyroma can only be run
        # on /setup.py, etc).
        non_test_files = [f for f in files if not _file_is_test(f)]
        mapped = (
            mapper(_run_prospector, files, self.stamp_directory) +
            mapper(_run_flake8, files, self.stamp_directory) +
            [_stamped_deps(self.stamp_directory, _run_prospector_on, non_test_files, ["vulture"])] +
            [_stamped_deps(self.stamp_directory, _run_pyroma, "setup.py")]
        )
        # This will ensure that we don't repeat messages, because
        # new keys overwrite old ones.
        for keyed_messages_subset in mapped:
            keyed_messages.update(keyed_messages_subset)
    messages = []
    for _, message in keyed_messages.items():
        if not self._suppressed(message.location.path, message.location.line, message.code):
            message.to_relative_path(cwd)
            messages.append(message)
    sys.stdout.write(
        PylintFormatter(dict(), messages, None).render(messages=True, summary=False, profile=False) + "\n"
    )
    # Non-zero exit when any lint message survived suppression.
    if len(messages):
        sys_exit(1)
def __init__(self, msg):  # Object constructor initialized with a custom user message
    """
    Catch an IsisConfException, print a description of the error and exit
    without python error printing traceback
    @param msg specific error message
    """
    # Build a user-readable report and terminate quietly (no traceback).
    # Fixed typos in the user-facing message: "occured" -> "occurred",
    # "ajust" -> "adjust".
    err_msg = "IsisConfException : An error occurred while parsing Isis options!\n"
    err_msg += "\t{}.\n\tPlease adjust your settings\n".format(msg)
    print(err_msg)
    sys_exit()
def selectlayers(args, layers):
    """Resolve the user's layer specs to a de-duplicated list of layers.

    :param args: parsed CLI namespace (uses .layers, .reverse, .strict)
    :param layers: mapping with a ':layers' list plus spec->index entries
    :return: (top_most_layer_id, selected_layers) where the id is None when
             nothing was selected
    """
    layer_specs = args.layers
    if layer_specs is None:
        # No specs: select every layer, optionally reversed.
        selected_indexes = list(range(len(layers[':layers'])))
        if args.reverse:
            selected_indexes.reverse()
    else:
        selected_indexes = []
        last_num_selected_indexes = num_selected_indexes = len(selected_indexes)
        # Each spec is (left, right, verbatim-text); a single layer when
        # right is None, otherwise an inclusive range.
        for l, r, v in layer_specs:
            last_num_selected_indexes = num_selected_indexes
            li = layerspec2index(args, layers, l)
            ri = layerspec2index(args, layers, r)
            if li is None:
                continue
            if r is None:
                selected_indexes.append(li)
            elif ri is None:
                continue
            elif ri < li:
                # Descending range: emit indexes high-to-low.
                selected_indexes.extend(reversed(range(ri, li + 1)))  # upper bounds are inclusive
            else:
                selected_indexes.extend(range(li, ri + 1))  # upper bounds are inclusive
            num_selected_indexes = len(selected_indexes)
            # A spec that added nothing resolves to an empty range: fatal
            # in strict mode, a warning otherwise.
            if num_selected_indexes == last_num_selected_indexes:
                empty_layer_range_msg = '"%s" resolves to an empty range'
                if args.strict:
                    _LOGGER.error(empty_layer_range_msg, v)
                    sys_exit(_EXIT_LAYER_SPEC)
                else:
                    _LOGGER.warning(empty_layer_range_msg, v)
        # NOTE(review): reconstructed indentation — this final reverse is
        # assumed to apply only to the explicit-specs branch. Confirm.
        if not args.reverse:
            selected_indexes.reverse()
    # Take the last of each index specified (so we don't have to look at
    # each distinct layer more than once)
    seen = OrderedDict()
    for i in selected_indexes:
        if i not in seen:
            seen[i] = None  # use OrderedDict as an ordered set
    top_most_layer_id = None if not seen else layers[':layers'][min(seen)][':id']
    selected_layers = [ layers[':layers'][i] for i in seen ]
    return top_most_layer_id, selected_layers
def update_probe(logger, args):
    """Update fields of an existing probe, matched by id (name as fallback).

    :param logger: logger used for progress/error reporting
    :param args: remaining CLI arguments for this sub-command
    """
    logger.debug("action: update_probe")
    p = argparse.ArgumentParser(usage="%(prog)s update_probe [options]")
    p.add_argument("id", help="probe id")
    p.add_argument("-n", help="name")
    p.add_argument("-x", help="x position")
    p.add_argument("-y", help="y position")
    p.add_argument("-z", help="z position")
    p.add_argument("-w", help="width")
    p.add_argument("-d", help="depth")
    p.add_argument("-t", help="height")
    o, a = p.parse_known_args(args)
    logger.debug("action opts: %s", o)
    logger.debug("action args: %s", a)
    # Chained `is` works here because all unset options share the same
    # None singleton; require at least one field to update.
    if o.n is o.x is o.y is o.z is o.w is o.d is o.t is None:
        p.print_help()
    else:
        probe = session.query(Probe).filter(Probe.id == o.id).first()
        # Try name if id doesn't match any probes
        if not probe:
            logger.info("No probe found with id '%s', trying to match by name", o.id)
            probe = session.query(Probe).filter(Probe.name == o.id).all()
            if len(probe) > 1:
                logger.error("%d probes found with the name '%s', use ID to update each in turn", len(probe), o.id)
                # NOTE(review): this loop rebinds `p` (the parser) to each
                # probe — harmless since the parser is no longer used, but
                # confusing; consider renaming.
                for p in probe:
                    print p
                sys_exit(1)
            # NOTE(review): raises IndexError when the name query returned an
            # empty list — the "no probe found" path below is then unreachable.
            probe = probe[0]
        if probe:
            if o.n: probe.name = o.n
            if o.x: probe.x = o.x
            if o.y: probe.y = o.y
            if o.z: probe.z = o.z
            if o.w: probe.w = o.w
            if o.t: probe.h = o.t  # -t ("height") is stored in .h
            if o.d: probe.d = o.d
            session.commit()
            logger.info("Probe updated")
        else:
            logger.error("No probe found with id or name '%s'", o.id)
def salir(self):
    """Finalize the scene objects and pygame, then close the application."""
    # Let the scene release whatever it holds before dropping it.
    self.escena.destruir()
    # Clear the remaining references; with nothing pointing at them the
    # objects are reclaimed once the core forgets them.
    for attr in ('escena', 'mapa_eve', 'reloj'):
        setattr(self, attr, None)
    # Shut pygame down and leave the process.
    pygame.quit()
    sys_exit()
def shutdown(signum, frame):
    """Signal handler: close the listening socket, drop the pidfile, exit 0.

    :param signum: delivered signal number (unused)
    :param frame: interrupted stack frame (unused)
    """
    global pidfile, s
    try:
        s.close()
    except:
        print 'Cannot shutdown the socket'
    # Best-effort pidfile removal; ignore a missing/unremovable file.
    try:
        unlink(pidfile)
    except:
        pass
    print 'Shutdown done'
    sys_exit(0)
def list_markets():
    """Print the supported Bing market codes with their names and exit 0."""
    # extracted from Bing Account settings page
    markets = (
        ("es-AR", "Argentina",),
        ("en-AU", "Australia",),
        ("de-AT", "Austria",),
        ("nl-BE", "Belgium - Dutch",),
        ("fr-BE", "Belgium - French",),
        ("pt-BR", "Brazil",),
        ("en-CA", "Canada - English",),
        ("fr-CA", "Canada - French",),
        ("es-CL", "Chile",),
        ("zh-CN", "China",),
        ("da-DK", "Denmark",),
        ("ar-EG", "Egypt",),
        ("fi-FI", "Finland",),
        ("fr-FR", "France",),
        ("de-DE", "Germany",),
        ("zh-HK", "Hong Kong SAR",),
        ("en-IN", "India",),
        ("en-ID", "Indonesia",),
        ("en-IE", "Ireland",),
        ("it-IT", "Italy",),
        ("ja-JP", "Japan",),
        ("ko-KR", "Korea",),
        ("en-MY", "Malaysia",),
        ("es-MX", "Mexico",),
        ("nl-NL", "Netherlands",),
        ("en-NZ", "New Zealand",),
        ("nb-NO", "Norway",),
        ("en-PH", "Philippines",),
        ("pl-PL", "Poland",),
        ("pt-PT", "Portugal",),
        ("ru-RU", "Russia",),
        ("ar-SA", "Saudi Arabia",),
        ("en-SG", "Singapore",),
        ("en-ZA", "South Africa",),
        ("es-ES", "Spain",),
        ("sv-SE", "Sweden",),
        ("fr-CH", "Switzerland - French",),
        ("de-CH", "Switzerland - German",),
        ("zh-TW", "Taiwan",),
        ("tr-TR", "Turkey",),
        ("ar-AE", "United Arab Emirates",),
        ("en-GB", "United Kingdom",),
        ("en-US", "United States - English",),
        ("es-US", "United States - Spanish",),
    )
    print('Available markets:')
    for k, v in markets:
        print(k, ' ', v)
    # Listing markets is a terminal action: stop with success status.
    sys_exit(0)
def load_config(config_db, args=None):
    """Build the effective run configuration from defaults, CLI and file.

    Priority (highest last applied): defaults < config file < CLI options.

    :param config_db: schema describing the known configuration entries
    :param args: CLI argument list (defaults to sys.argv[1:] when None)
    :return: merged configuration namespace
    """
    args = argv[1:] if args is None else args
    set_debug_details(args.count('--debug') + args.count('-d'))
    default_config = config.DefaultValueLoader().load(config_db)
    _logger.debug('default config:\n\t%s', config.pretty(default_config, '\n\t'))
    # parse cli options at first because we need the config file path in it
    # NOTE(review): argv[1:] is used here directly, ignoring the `args`
    # parameter normalized above — confirm whether `args` was intended.
    cli_config = config.CommandLineArgumentsLoader().load(config_db, argv[1:])
    _logger.debug('cli arg parsed:\n\t%s', config.pretty(cli_config, '\n\t'))
    run_config = config.merge_config(default_config, cli_config)
    # Both of these helpers terminate the process themselves.
    if run_config.list_markets:
        list_markets()
    if run_config.generate_config:
        generate_config_file(config_db, run_config)
    config_file = run_config.config_file
    if not isfile(config_file):
        _logger.warning("can't find config file %s, use default settings and cli settings", config_file)
    else:
        try:
            conf_config = config.from_file(config_db, run_config.config_file)
        except config.ConfigFileLoader.ConfigValueError as err:
            _logger.error(err)
            sys_exit(1)
        # noinspection PyUnboundLocalVariable
        _logger.debug('config file parsed:\n\t%s', config.pretty(conf_config, '\n\t'))
        run_config = config.merge_config(run_config, conf_config)
        # override saved settings again with cli options again, because we want
        # command line options to take higher priority
        run_config = config.merge_config(run_config, cli_config)
    # Normalize setter_args to a flat, comma-split list.
    if run_config.setter_args:
        run_config.setter_args = ','.join(run_config.setter_args).split(',')
    else:
        run_config.setter_args = list()
    # backward compatibility modifications
    if run_config.size_mode == 'collect':
        _logger.warning(
            'size_mode=collect is obsolete, considering use collect=accompany instead'
        )
        run_config.size_mode = 'highest'
        if 'accompany' not in run_config.collect:
            run_config.collect.append('accompany')
    _logger.info('running config is:\n\t%s', config.pretty(run_config, '\n\t'))
    return run_config
def shutdown(signum, frame):
    """Signal handler: close the socket, drop the pidfile, log time, exit 0.

    :param signum: delivered signal number (unused)
    :param frame: interrupted stack frame (unused)
    """
    global pidfile, s
    try:
        s.close()
    except:
        print "Cannot shutdown the socket"
    # Best-effort pidfile removal; ignore a missing/unremovable file.
    try:
        unlink(pidfile)
    except:
        pass
    # Timestamp the shutdown message for the log.
    thistime = datetime.now()
    print thistime.strftime("%Y-%m-%d %H:%M:%S") + " Shutdown done"
    sys_exit(0)
def run():
    """Main loop of the Blobular game: init pygame, joysticks, then run at 60 FPS
    until ESC or a window-close event exits the process."""
    pygame.init()
    windowRect = pygame.Rect((0, 0), (1440, 900))
    screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)  # Main game surface
    clock = pygame.time.Clock()  # Clock to regulate FPS
    caption = "Blobular"  # Window caption
    pygame.display.set_caption(caption)  # Set the caption
    bob = moveblob.MoveBlob(windowRect)  # Test blob
    num_of_players = 1
    # Create and initialize joysticks
    joysticks = []  # List to hold joystick objects
    pygame.mouse.set_visible(False)
    for x in range(0, pygame.joystick.get_count()):
        joysticks.append(pygame.joystick.Joystick(x))  # Append new joystick object to joysticks with ID of x
        joysticks[x].init()  # Initialize newly created joystick object
    while True:
        clock.tick(60)  # Cap game to 60 fps
        screen.fill(WHITE)  # Fill screen with white
        events = pygame.event.get()  # Holds a list of pygame events
        keys = pygame.key.get_pressed()  # Holds a list of keys being held down
        # If there's a QUIT event, quit Pygame and exit program
        for event in events:
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    pygame.quit()
                    sys_exit()
            if event.type == pygame.QUIT:
                pygame.quit()
                sys_exit()
        bob.update(keys, events, joysticks)
        bob.draw(screen)
        pygame.display.flip()
def clean():
    """Clean the developers' environment after confirmation.

    Walks the files listed in the contents XML, syncs changed file contents
    back into the XML (when the user agrees), deletes the working copies and
    the sqlite DB, and rewrites the XML.

    :return: True on completion; exits 0 if the user declines the prompt
    """
    response = yes_no_quiz(
        question="Are you sure you want to clean up "
        "the developers' environment?"
    )
    if not response:
        print('Exited at user\'s request with code 0.')
        sys_exit(0)
    from xml.etree.ElementTree import parse
    from xml.sax.saxutils import escape
    contents_xml = parse(XML_FILE_ABSOLUTE_PATH)
    root = contents_xml.getroot()
    for file in root:
        relative_path = [node.text for node in file.iterfind('path')]
        absolute_path = join_path(BASE_DIR, *relative_path)
        display_path = join_path('markdownx', *relative_path)
        # Already gone: nothing to delete or sync.
        if not exists(absolute_path):
            continue
        with open(absolute_path, mode='r') as data_file:
            file_content = data_file.read()
        # Unchanged on disk: just remove the working copy.
        if file.find('contents').text.strip() == escape(file_content.strip()):
            delete(absolute_path, display_path)
            continue
        # Changed on disk: optionally fold the change back into the XML.
        if replace_contents_or_not(display_path):
            file.find('contents').text = escape(file_content)
            print('> UPDATED in default setting:', display_path)
            delete(absolute_path, display_path)
            continue
        delete(absolute_path, display_path)
    delete(join_path(BASE_DIR, 'db.sqlite3'), 'db.sqlite3')
    contents_xml.write(
        file_or_filename=XML_FILE_ABSOLUTE_PATH,
        xml_declaration=True,
        encoding='UTF-8',
        method='xml'
    )
    return True
def update_xml_linestyle(xml_in, xml_out, template, new_fields, title="Yes"):
    """Read an XML file, merge in *new_fields*, and write the result.

    Exits the process when there is nothing to add.
    """
    head, tree = clean_xml(xml_in)
    added = add_item(tree, new_fields, template, title)
    # Guard clause: no additions means no output file is produced.
    if added == 0:
        print("Nothing to change, script ended")
        sys_exit()  # Kill script
    print("{} new element[s] to add, now continue".format(added))
    indent(tree)
    serialized = ET.tostring(tree, encoding="unicode")
    with open(xml_out, "w") as out_file:
        out_file.write(head)
        out_file.write(serialized)
def parseParams(config_Path):
    """Locate the configuration directory and load every recipe file in it.

    :param config_Path: preferred config directory; /etc/secdd/conf is the fallback
    :return: dict mapping file basename (without extension) to parsed YAML content
    """
    # NOTE: Python 2 `filter` returns a list; under Python 3 this truthiness
    # test and the indexing below would silently misbehave (filter object).
    found = filter(lambda x: isdir(x), (config_Path, '/etc/secdd/conf'))
    if not found:
        # NOTE(review): message typo — presumably "does not exist"; also note
        # the process exits with status 0 despite this being an error path.
        print "configuration directory is not exit!"
        sys_exit(0)
    recipe = found[0]
    trmap = dict()
    # Load every *ml file (e.g. .yml/.yaml/.xml-suffixed) under the directory.
    for root, dirs, files in os.walk(recipe):
        for filespath in files:
            if re_match('.*ml$', filespath):
                trmap[filespath.split('.')[0]] = expYaml(os.path.join(root, filespath))
    return trmap
def do_shutdown(res=0):
    """Archiver system shutdown

    Removes the pidfile (non-Windows), closes worker threads and the log,
    then either exits the process (service mode) or returns *res*.

    :param res: exit status / return value
    """
    if platform != "win32" and pidfile is not None:
        # Best-effort pidfile removal.
        try:
            unlink(pidfile)
        except:
            pass
    LOG(E_ALWAYS, "[Main] Waiting for child threads")
    multiplex(serverPoll, "close")
    LOG(E_ALWAYS, "[Main] Shutdown complete")
    LOG.close()
    # Running as the main service: terminate the process; otherwise let the
    # caller decide what to do with the status.
    if main_svc:
        sys_exit(res)
    else:
        return res
def main():
    """CLI entry point: run the command and map uncaught errors to exit 1."""
    try:
        # https://urllib3.readthedocs.org
        # /en/latest/security.html#insecureplatformwarning
        requests.packages.urllib3.disable_warnings()
        cli(None)
    except Exception as exc:  # pylint: disable=W0703
        # ReturnErrorCode is the "already reported" marker: exit silently.
        if not isinstance(exc, exception.ReturnErrorCode):
            maintenance.on_platformio_exception(exc)
            if isinstance(exc, exception.PlatformioException):
                detail = str(exc)
            else:
                detail = format_exc()
            click.secho("Error: " + detail, fg="red", err=True)
        sys_exit(1)
def class_init (self): """ init class method for instantiation from command line. Parse arguments parse CL arguments """ ### Define parser usage, options optparser = optparse.OptionParser(usage = self.USAGE, version = self.VERSION) optparser.add_option('-f', '--fasta', dest="fasta", help= "Path to the fasta file contaning the complete genome (can be gzipped)") optparser.add_option('-g', '--gff', dest="gff", help= "Path to the gff file containing annotations of the genome file (can be gzipped)") optparser.add_option('-o', '--offset', dest="offset", default = 0, help= "Bases to extract before and after the feature (default 0)") optparser.add_option('--fusion', dest="fusion", action='store_true', default = False, help= "Fuse overlapping features in a meta-feature (default False)") optparser.add_option('--output_gff', dest="output_gff", action='store_true', default = False, help= "Output the gff file corresponding to the extracted features sequences (default False)") optparser.add_option('--features', dest="features", default = "exon", help= "Restrict extraction to a list of features. The list must be SPACE separated and quoted (default exon)") optparser.add_option('--chromosomes', dest="chromosomes", default = "", help= "Restrict extraction to a list of chromosomes. The list must be SPACE separated and quoted (default all)") ### Parse arguments opt, args = optparser.parse_args() # Verify the presence of mandatory options try: assert opt.fasta, "Missing fasta (-f) option" assert opt.gff, "Missing gff (-g) option" except AssertionError as E: print (E) optparser.print_help() sys_exit() ### Init a RefMasker object return GffFastaExtractor ( fasta = opt.fasta, gff = opt.gff, offset = opt.offset, fusion = opt.fusion, output_gff = opt.output_gff, features = opt.features.split(), chromosomes = opt.chromosomes.split())
def main():
    """Game driver: clear the screen, run rounds of gameloop(7), and offer
    a replay after each game-over (signalled via a Warning exception)."""
    while True:
        os_system(CLEAR_SCREEN)
        raw_input(START_MSG)  # wait for the player before starting
        try:
            gameloop(7)
        except Warning:
            # The game loop raises Warning to signal game over.
            os_system(CLEAR_SCREEN)
            print "\r" + _GAME_OVER
            print "\rYou lost!(press %s)" % _NEXT_MOVE
            # Busy-wait until the input thread has finished consuming keys.
            while input_thread.isAlive():
                continue
            a = raw_input("\rWant to play again?(y/n)")
            if a.lower().startswith("y"):
                reset()
                continue
            else:
                sys_exit(0)
def layerspec2index(args, layers, layer_spec_part):
    """Resolve one half of a layer spec to its layer index.

    Returns None for a missing spec part; an unknown spec is fatal in
    strict mode and a warning (returning None) otherwise.
    """
    if layer_spec_part is None:
        return None
    try:
        return layers[layer_spec_part]
    except KeyError:
        no_layer_msg = '"%s" does not resolve to any layer associated with image "%s"'
        if args.strict:
            _LOGGER.error(no_layer_msg, layer_spec_part, args.image)
            sys_exit(_EXIT_LAYER_SPEC)
        _LOGGER.warning(no_layer_msg, layer_spec_part, args.image)
        return None
def farmout(num, setup, worker, isresult, attempts=3, pickletest=None):
    """Distribute *num* work items to a process pool, retrying failures.

    :param num: number of work items
    :param setup: callable i -> argument tuple for item i
    :param worker: callable executed in the pool for each item
    :param isresult: predicate deciding whether a result is acceptable
    :param attempts: maximum number of pool passes over unfinished items
    :param pickletest: object probed for picklability (defaults to worker)
    :return: list of accepted results, or the KeyboardInterrupt when a
             daemon child is interrupted
    """
    try:
        if pickletest is None:
            pickletest = worker
        results = [None] * num
        # NOTE: Python 2 semantics — `range` here yields a concrete list.
        undone = range(num)
        for _ in range(attempts):
            pool = create_pool(pickletest)
            for i in undone:
                results[i] = pool.apply_async(worker, setup(i))
            pool.close()
            pool.join()
            # 0xFFFF-second timeout so a hung child cannot block forever.
            for i in undone:
                results[i] = results[i].get(0xFFFF)
            if any([isinstance(r, KeyboardInterrupt) for r in results]):
                raise KeyboardInterrupt
            else:
                # Retry only the items whose results were rejected.
                undone = [i for i, r in enumerate(results) if not isresult(r)]
                if not len(undone):
                    break
        # Surface the first child-side exception, if any slipped through.
        excs = [e for e in results if isinstance(e, Exception)]
        if len(excs):
            raise excs[0]
        if not all([isresult(r) for r in results]):
            raise RuntimeError("Random and unknown weirdness happened while trying to farm out work to child processes")
        return results
    except KeyboardInterrupt as e:
        # NOTE(review): `pool` is unbound if the interrupt fires before the
        # first create_pool() call — this line would then raise NameError.
        if pool is not None:
            pool.terminate()
            pool.join()
        # Daemon children must not exit the interpreter; hand the interrupt
        # back to the parent instead.
        if current_process().daemon:
            return e
        else:
            print('caught ^C (keyboard interrupt), exiting ...')
            sys_exit(-1)
def daemonize(logfile):
    """Fork into the background, detaching stdio and logging to *logfile*.

    The parent process exits immediately; the child redirects stdout/stderr
    into Log-wrapped file handles and chdirs to /.
    """
    try:
        from os import fork
        from posix import close
    except:
        print 'Daemon mode is not supported on this platform (missing fork() syscall or posix module)'
        sys_exit(-1)
    import sys
    if (fork()):
        sys_exit(0) # parent return to shell
    ### Child
    # Detach stdin and point it at /dev/null.
    close(sys.stdin.fileno())
    sys.stdin = open('/dev/null')
    # Redirect stdout/stderr into the logfile via the Log wrapper.
    close(sys.stdout.fileno())
    sys.stdout = Log(open(logfile, 'a+'))
    close(sys.stderr.fileno())
    sys.stderr = Log(open(logfile, 'a+'))
    # Leave the launch directory so it can be unmounted.
    chdir('/')
def check_release(): """ Check that only full git version (x.x or x.x.x) are used for 'register, """ # noinspection PySetFunctionToLiteral options_to_check = set({'register', 'upload', 'upload_docs'}) for option_ in options_to_check: if option_ in sys_argv: # Check if '-' in _version: sys_exit(''' === Error === check_release(): option_: <{}> in options_to_check: <{}> For a release: only full git version (x.x or x.x.x) are supported: You must commit any changes before and TAG the release. _version: <{}>. '''.format(option_, options_to_check, _version) )
Experimental micro-ecosystem for open FPGAs """ # Update help structure if ctx.invoked_subcommand is None: env_help = [] env_commands = ['boards', 'config', 'drivers', 'examples', 'init', 'install', 'system', 'uninstall', 'upgrade'] help = ctx.get_help() help = help.split('\n') # Find env commands' lines for line in list(help): for command in env_commands: if (' ' + command) in line: if line in help: help.remove(line) env_help.append(line) help = '\n'.join(help) help = help.replace('Commands:\n', 'Code commands:\n') help += "\n\nEnvironment commands:\n" help += '\n'.join(env_help) click.secho(help) if __name__ == '__main__': # pragma: no cover sys_exit(cli())
def __error__(self, msg):
    """Log *msg* through the worker's logger and exit with status 1."""
    self.worker.logger.error(msg)
    sys_exit(1)
def main():
    """Pycordexer listener entry point.

    Parses CLI options, configures logging, builds the Listener and runs it
    either in the foreground or as a daemon (python-daemon required).
    """
    # Read the options from command line
    parser = _create_input_parser()
    args = parser.parse_args()
    args = vars(args)
    # File verbosity falls back to console verbosity when unspecified.
    if args['file_verbosity'] is None:
        args['file_verbosity'] = args['verbosity']
    # If no arguments are submitted, print help
    if len(argv) <= 1:
        parser.print_help()
        sys_exit(1)
    # Set the logging system
    streamhandler, filehandler = _prepare_logger(args['verbosity'],
                                                 args['file_verbosity'],
                                                 args['silent'],
                                                 args['logfile'])
    LOGGER.info(
        'Starting execution. Executed with the following parameters: '
        + ' '.join(argv))
    LOGGER.debug('This execution will use %s processes', args['processes'])
    if get_localzone is None:
        LOGGER.warning(
            'Packet tzlocal is missing. Dates and times will be saved without '
            'timezones. To install this packet, use:\n"pip install tzlocal"')
        LOGGER.debug(
            'Import of tzlocal package failed with the following error:\n{}'.
            format(tzlocal_import_error))
    try:
        pycordexer_options, input_dir = _read_pycordexer_options(args)
    except Exception as e:
        LOGGER.error(
            'Error reading command line input. Execution will be terminated!\n'
            '{}'.format(e))
        sys_exit(1)
    LOGGER.info('Pycordexer will run with the following options: %s',
                pycordexer_options)
    LOGGER.debug('Manual mode is: ' + ('ON' if args['manual_mode'] else 'OFF'))
    if args['remove_files']:
        LOGGER.warning(
            'This script will remove the RegCM files during its execution!')
    listener = Listener(input_dir, pycordexer_options, args['manual_mode'],
                        args['remove_files'], args['timer'])
    if args['daemon'] is False:
        # Foreground mode: run directly and log any failure.
        utilities.log_utilities.collect_logs()
        try:
            listener.run(args['processes'])
        except:
            LOGGER.error(format_exc())
    else:
        LOGGER.debug('Preparing to go in daemon mode')
        if daemon is None:
            LOGGER.error('No daemon module found. Please install it with:\n'
                         ' pip install python-daemon\n'
                         'Execution aborted')
            LOGGER.debug(
                'Import of daemon package failed with the following error:\n{}'
                .format(daemon_import_error))
            sys_exit(2)
        LOGGER.debug('Creating a context for the daemon')
        context_options = {
            'working_directory': os.getcwd(),
            'umask': 0o007,
        }
        # Keep the logfile handle open across the daemon's fd cleanup.
        if filehandler is not None:
            context_options['files_preserve'] = [
                filehandler.stream,
            ]
        if args['pidfile'] is not None:
            try:
                pidfile = daemon.pidfile.PIDLockFile(args['pidfile'])
            except:
                LOGGER.error('Error creating the pid file:\n{}'.format(
                    format_exc()))
                raise
            context_options['pidfile'] = pidfile
        else:
            LOGGER.warning(
                'No PID file specified. This script will run anyway, but there '
                'is no way to call the cordex_listener_stop script to stop '
                'this run (beside using the kill command)')
        LOGGER.debug('Context will be created with the following options: %s',
                     context_options)
        context = daemon.DaemonContext(**context_options)
        LOGGER.debug('Preparing the daemon to handling signals')
        context.signal_map = {
            signal.SIGTERM: stop_execution_on_signal,
            signal.SIGINT: stop_execution_on_signal,
            signal.SIGUSR1: stop_execution_when_done,
        }
        # Console logging is meaningless once detached from the terminal.
        if streamhandler is not None:
            LOGGER.debug('Detaching from standard output')
            streamhandler.flush()
            LOGGER.removeHandler(streamhandler)
        LOGGER.info('Starting daemon')
        try:
            with context:
                utilities.log_utilities.collect_logs()
                listener.run(args['processes'])
        except:
            LOGGER.error(format_exc())
            raise
def unix_startup(config, user=None, debug=False):
    """ Unix specific startup actions

    Optionally drops privileges to *user*, enforces a pidfile-based single
    instance, daemonizes (unless *debug*), and records the new pid.

    :return: (user, mypid) tuple
    """
    global pidfile
    if user:
        # Drop privileges to the requested account (gid first, then uid).
        try:
            userpw = getpwnam(user)
            setegid(userpw[3])
            seteuid(userpw[2])
        except:
            t, val, tb = exc_info()
            del t, tb
            print 'Cannot swith to user', user, str(val)
            sys_exit(-2)
    else:
        user = getpwuid(getuid())[0]
    try:
        pidfile = config.get('global', 'pidfile')
    except:
        LOG(E_ALWAYS, '[Main] Missing pidfile in config')
        do_shutdown(-4)
    # Single-instance check: a pidfile naming a live process blocks startup.
    locked = 1
    try:
        pid = int(open(pidfile).read().strip())
        LOG(E_TRACE, '[Main] Lock: Sending signal to the process')
        try:
            # Signal 0 only probes for existence — it delivers nothing.
            kill(pid, 0)
            LOG(E_ERR, '[Main] Stale Lockfile: Process is alive')
        except:
            LOG(E_ERR, '[Main] Stale Lockfile: Old process is not alive')
            locked = 0
    except:
        locked = 0
    if locked:
        LOG(
            E_ALWAYS,
            '[Main] Unable to start Netfarm Archiver, another instance is running'
        )
        do_shutdown(-5)
    ## Daemonize - Unix only - win32 has service
    if not debug:
        try:
            pid = fork()
        except:
            t, val, tb = exc_info()
            del t
            print 'Cannot go in background mode', str(val)
        if pid:
            sys_exit(0)
        chdir('/')
        # Detach stdio: stdin from /dev/null, stdout/stderr into the log fd.
        null = open('/dev/null', 'r')
        close(stdin.fileno())
        dup(null.fileno())
        null.close()
        close(stdout.fileno())
        dup(LOG.fileno())
        close(stderr.fileno())
        dup(LOG.fileno())
    ## Save my process id to file
    mypid = str(getpid())
    try:
        open(pidfile, 'w').write(mypid)
    except:
        LOG(E_ALWAYS, '[Main] Pidfile is not writable')
        do_shutdown(-6)
    return user, mypid
def check_arguments(self):
    """Validate the supplied arguments against the menu configuration.

    Performs, in order: stripping of the leading '-' sign, the
    "first argument required" check, the "argument is known" check, the
    "second argument required" checks, and submenu dispatch.  On every
    failure the user is shown a message and routed to help() (when
    launched from init) or back to the interactive menu() otherwise.
    """
    # REMOVE '-' SIGN FROM FIRST ARGUMENT #
    if self.arguments_list:
        self.arguments_list = self.arguments_list.replace(
            menu_signs.sign_minus, menu_signs.sign_empty)
    # IF EVEN ONE ARGUMENT NOT GIVEN BUT FIRST ARGUMENT NEED #
    if self.menu_config.need_first_argument:
        if not self.arguments_list:
            if self.if_lunched_from_init:
                print(
                    menu_signs.sign_empty.join([
                        menu_signs.sign_newline,
                        menu_text.no_first_argument_given_text,
                        menu_signs.sign_newline
                    ]))
                self.help()
            elif not self.if_lunched_from_init:
                print(
                    menu_signs.sign_empty.join([
                        menu_signs.sign_newline,
                        menu_text.no_first_argument_given_text,
                        menu_signs.sign_newline
                    ]))
                self.menu()
    # IF GIVEN FIRST ARGUMENT NOT IN POSSIBLE ARGUMENT LIST #
    # NOTE(review): iterating `self.arguments_list` (a string) checks each
    # character; presumably arguments are single letters — confirm.
    possible_arguments_list = list(self.menu_config.argument_list.keys())
    for argument in self.arguments_list:
        if argument not in possible_arguments_list:
            if self.if_lunched_from_init:
                print(
                    menu_signs.sign_empty.join([
                        menu_signs.sign_newline,
                        menu_text.not_found_argument_text,
                        menu_signs.sign_newline
                    ]))
                self.help()
            elif not self.if_lunched_from_init:
                print(
                    menu_signs.sign_empty.join([
                        menu_signs.sign_newline,
                        menu_text.not_found_argument_text,
                        menu_signs.sign_newline
                    ]))
                self.menu()
    # ADDITIONAL ARGUMENT CHECK IF EXIST #
    if self.menu_config.need_second_argument:
        if not self.additional_argument:
            if self.if_lunched_from_init:
                print(
                    menu_signs.sign_empty.join([
                        menu_signs.sign_newline,
                        config_text.this_app_always_need_second_argument,
                        menu_signs.sign_newline
                    ]))
                self.help()
            elif not self.if_lunched_from_init:
                print(
                    menu_signs.sign_empty.join([
                        menu_signs.sign_newline,
                        menu_text.no_second_argument_given_text,
                        menu_signs.sign_newline,
                    ]))
                self.menu()
    # ADDITIONAL ARGUMENT CHECK FOR ARGUMENTS THAT NEED ADDITIONAL ARGUMENT #
    for argument in self.arguments_list:
        if any(argument in additional_argument
               for additional_argument in
               self.menu_config.additional_argument_list):
            if not self.additional_argument:
                if self.if_lunched_from_init:
                    print(
                        menu_signs.sign_empty.join([
                            menu_signs.sign_newline,
                            menu_text.no_second_argument_given_text,
                            menu_signs.sign_newline,
                        ]))
                    self.help()
                elif not self.if_lunched_from_init:
                    print(
                        menu_signs.sign_empty.join([
                            menu_signs.sign_newline,
                            menu_text.no_second_argument_given_text,
                            menu_signs.sign_newline,
                        ]))
                    self.menu()
    # ARGUMENTS WHICH HAVE LIST TO BE PRINTED AS SUBMENU #
    if self.menu_config.any_argument_have_submenu:
        key_submenu_list = list(
            self.menu_config.submenu_arguments_list.keys())
        for argument in self.arguments_list:
            if any(argument in key_submenu
                   for key_submenu in key_submenu_list):
                additional_arguments_list = self.menu_config.submenu_arguments_list[
                    argument]
                if self.additional_argument:
                    if any(self.additional_argument ==
                           additional_argument_possible
                           for additional_argument_possible in
                           additional_arguments_list):
                        # Valid second argument: dispatch to the handler.
                        # NOTE(review): `self` is passed explicitly in
                        # addition to being bound — confirm `self.main
                        # .arguments` expects the menu instance as its
                        # first positional argument.
                        self.main.arguments(self, self.arguments_list,
                                            self.additional_argument)
                        if self.if_lunched_from_init:
                            sys_exit(
                                menu_signs.sign_empty.join([
                                    menu_signs.sign_newline,
                                    menu_text.close_text,
                                    menu_signs.sign_newline
                                ]))
                        elif not self.if_lunched_from_init:
                            self.menu()
                    else:
                        # Second argument not in the submenu: print the
                        # numbered list of possibilities.
                        # NOTE(review): the enumerate loop below reuses
                        # the name `argument`, clobbering the outer loop
                        # variable — confirm intended.
                        print(
                            menu_signs.sign_empty.join([
                                menu_signs.sign_newline,
                                menu_text.
                                second_argument_not_found_on_submenu_list,
                                menu_signs.sign_newline
                            ]))
                        for number, argument in enumerate(
                                additional_arguments_list):
                            print(
                                menu_signs.sign_dot.join(
                                    [str(number + 1), str(argument)]))
                        if self.if_lunched_from_init:
                            sys_exit(
                                menu_signs.sign_empty.join([
                                    menu_signs.sign_newline,
                                    menu_text.close_text,
                                    menu_signs.sign_newline
                                ]))
                        elif not self.if_lunched_from_init:
                            self.menu()
                elif not self.additional_argument:
                    if self.if_lunched_from_init:
                        # No second argument at all: show the submenu list
                        # then exit.
                        print(
                            menu_signs.sign_empty.join([
                                menu_signs.sign_newline,
                                menu_text.second_argument_not_exist_at_all,
                                menu_signs.sign_newline,
                                menu_text.submenu_list,
                                menu_signs.sign_newline
                            ]))
                        for number, argument in enumerate(
                                additional_arguments_list):
                            print(
                                menu_signs.sign_dot.join(
                                    [str(number + 1), str(argument)]))
                        sys_exit(
                            menu_signs.sign_empty.join([
                                menu_signs.sign_newline,
                                menu_text.close_text,
                                menu_signs.sign_newline
                            ]))
                    elif not self.if_lunched_from_init:
                        self.submenu(argument)
""" py_knecht_exe_updater - console tool to update application executable Copyright (C) 2017 Stefan Tapper, All rights reserved. This file is part of RenderKnecht Strink Kerker. RenderKnecht Strink Kerker is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. RenderKnecht Strink Kerker is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with RenderKnecht Strink Kerker. If not, see <http://www.gnu.org/licenses/>. """ from os import replace as os_replace from subprocess import Popen from pathlib import Path from sys import argv as sys_argv from sys import exit as sys_exit from time import sleep, time from datetime import datetime HELPER_DIR = Path('_RenderKnecht-Work') if __name__ == '__main__': if len(sys_argv) < 2: print('Not enough arguments. Provide absolute path to executable to replace.') input('Press Enter to continue...') sys_exit() else: print('py_knecht_exe_updater - updating:\n' + sys_argv[1])
parsed_list.append(line) return parsed_list def main(): """Opens the file, runs fileparser, download links, status print, etc""" print("Downloader.py....") with open(FILE_NAME, 'r', encoding="utf-8") as file_: dl_loc = Updater.folder_check_empty(FILE_DESINATION, 'Downloader', 'pics') links = file_parser(file_) length = len(links) for (i, link) in enumerate(links): percent = Updater.percent_former(i + 1, length) if os.path.exists(os.path.join(dl_loc, (str(i) + '.png'))): msg = 'Skipping ' + link[-30:] Updater.status_print(msg, percent, dl_loc) else: msg = "Downloading " + link[-30:] Updater.status_print(msg, percent, dl_loc) Updater.html_download(link.strip(), True, str(i) + '.png', dl_loc, False) print('\nCompleted') if __name__ == "__main__": try: main() except KeyboardInterrupt as keyb_err: sys_exit(keyb_err)
# Create the mailbox user row in the virtual_users table.
db = MySQLdb.connect(host=options.dbhost,
                     user=options.dbuser,
                     passwd=options.dbpassword,
                     db=options.dbname)
cursor = db.cursor()

# Quota: --quota is given in GB; default is 1 GiB.
if options.quota is None:
    quota = 1073741824  # 1 GB
else:
    # multiplicamos quota por el la cantidad de bytes en un GB
    quota = int(options.quota) * 1073741824
print(f"new quota is: {quota} bytes")

# Password: generate a random one when not supplied; enforce a minimum
# length of 10 characters otherwise.  Only the hash is stored.
if options.password is None:
    raw_password = pw_gen()
    print(f'new random password: {raw_password}')
    hashed_password = crypt_pass(raw_password)
elif len(options.password) < 10:
    sys_exit("Password length is less than 10 characters")
else:
    print(f"the new password is: {options.password}")
    hashed_password = crypt_pass(options.password)

# SECURITY FIX: the original interpolated domain/user/hash directly into
# the SQL string (f-string), which is vulnerable to SQL injection.  Use a
# parameterized query instead; MySQLdb uses the %s placeholder style.
query = """
INSERT INTO `virtual_users` (`id`, `domain_id`, `email`, `password`, `quota`)
VALUES (NULL,
        (SELECT id FROM virtual_domains WHERE name=%s),
        %s, %s, %s);
"""
cursor.execute(query,
               (domain, f'{user}@{domain}', hashed_password, quota))
db.commit()
db.close()
@cli.command()
def optimisesyncperiod():
    """CLI command: run the sync-period optimisation maintenance task."""
    maintenance.optimise_sync_period()


@cli.command()
def purge_cache():
    """CLI command: purge the application cache."""
    maintenance.purge_cache()


@cli.command()
@click.argument('search_query', type=str, nargs=1)
@click.option('--min-repo-stars', type=int, default=5)
def githubterrier(search_query, min_repo_stars):
    """CLI command: run GithubTerrier over `search_query`, considering
    only repositories with at least `min_repo_stars` stars.

    Credentials come from the module-level `config` mapping.
    """
    gh = GithubTerrier(config['GITHUB_LOGIN'], config['GITHUB_PASSWORD'],
                       search_query, min_repo_stars)
    gh.run()


def main():
    """Entry point: silence urllib3 platform warnings, then run the CLI."""
    # https://urllib3.readthedocs.org
    # /en/latest/security.html#insecureplatformwarning
    requests.packages.urllib3.disable_warnings()
    cli()


if __name__ == "__main__":
    sys_exit(main())
def cli():
    """Command-line entry point: benchmark the window-lookup helper,
    print the timing report, and exit with status 0."""
    display_benchmark(find_window)
    sys_exit(0)
DIM = "" NORMAL = "" BRIGHT = "" RESET_ALL = "" def deinit(): pass pass try: from tqdm import tqdm except ImportError: print(Style.BRIGHT + Fore.RED + "Please install tqdm: " + Fore.WHITE + "pip install tqdm") sleep(3) sys_exit("Please install tqdm:\tpip install tqdm") parser = ArgumentParser() # DEF: -1 (or "-1" for strings) is the default - it means that merge_params() used either the preset # from file or from GUI/CLI parser.add_argument("--preset", "-pres", dest="preset", default="default", help="Preset name") parser.add_argument( "--source", "-in", "-i", action="append", dest="source",
devfile = 'devlist.cache' pidfile = '/var/run/binlsrv.pid' ## Parse command line arguments shortopts = 'hdl:a:p:' longopts = ['help', 'daemon', 'logfile=', 'address=', 'port='] try: opts, args = getopt(argv[1:], shortopts, longopts) if len(args) > 1: raise getopt_error, 'Too many device lists files specified %s' % ','.join( args) except getopt_error, errstr: print 'Error:', errstr print __usage__ % argv[0] sys_exit(-1) for opt, arg in opts: opt = opt.split('-').pop() if opt in ('h', 'help'): print __usage__ % argv[0] sys_exit(0) if opt in ('d', 'daemon'): daemon = True continue if opt in ('l', 'logfile'): logfile = arg continue if opt in ('a', 'address'):
def quit(self):
    """Shut down the game: print final results, autosave if the
    'game.autosave' setting is enabled, then tear down pygame and exit."""
    self.print_results()
    if request('game.autosave'):
        self.save()
    pygame.quit()
    sys_exit()
#!/usr/bin/env python3 from os import environ from sys import exit as sys_exit from django import setup from django.conf import settings from django.test.utils import get_runner if __name__ == '__main__': environ['DJANGO_SETTINGS_MODULE'] = 'test_template_pdf.settings' setup() TestRunner = get_runner(settings) test_runner = TestRunner() failures = test_runner.run_tests(['test_template_pdf.tests']) sys_exit(bool(failures))
import os
from sys import exit as sys_exit
from time import sleep, time
from grpc._channel import _Rendezvous
from flask import flash
from configparser import ConfigParser
from vectorcloud.models import Command, Output, Status, ApplicationStore,\
    Settings
from vectorcloud.paths import list_folder, sdk_config_file
from vectorcloud import db

# The anki_vector SDK is a hard dependency: abort immediately with a
# helpful message when it cannot be imported.
try:
    import anki_vector
    from anki_vector.util import degrees, radians
except ImportError:
    sys_exit("Cannot import from anki_vector: Install per Anki instructions")


# establishes routes decorated w/ @public_route as accessible while not signed
# in. See login and register routes for usage
def public_route(decorated_function):
    """Mark a view function as reachable without authentication."""
    decorated_function.is_public = True
    return decorated_function


# initiate config parser
config = ConfigParser()

# ------------------------------------------------------------------------------
# Main functions
# ------------------------------------------------------------------------------
""" __title__ = 'pefile_scripts' __version__ = '0.0.1' __author__ = 'Evgeny Drobotun' __author_email__ = '*****@*****.**' __license__ = 'MIT' __copyright__ = 'Copyright (C) 2020 Evgeny Drobotun' from importlib.util import find_spec from sys import version_info from sys import exit as sys_exit if version_info.major < 3: print('Используйте python версии 3.0 и выше') sys_exit() if find_spec('pefile') is None: print('Необходимо загрузить пакет pefile') sys_exit() from .get_time_info import ( get_compile_time, get_debug_compile_time, get_delphi_compile_time ) from .get_section_info import( get_section_num, get_section_info )
log.info("Signal %d received", sigcode) log.info("Exiting gracefully now...") for key in threads: threads[key].exit_event.set() sys_exit(0) # Handle signals signal.signal(signal.SIGINT, exit_gracefully) signal.signal(signal.SIGTERM, exit_gracefully) # issued by docker stop # Check for config dir if not isdir(config_directory): log.fatal("No config found") sys_exit(2) # Start web server for prometheus metrics start_http_server(8000) # Create threads threads = {} # key is path to config file, value is Thread object for config_file in listdir(config_directory): config_path = join(config_directory, config_file) if isfile(config_path) and config_path.endswith(".yaml"): log.info("Starting thread for %s...", config_path) threads[config_file] = ServicesMonitoring(config_path) threads[config_file].daemon = True threads[config_file].start() log.info("Thread started") sleep(5) # Sleep 5s to avoid mixed logs
def main(args):
    """Run the uncrustify CLI-option regression tests.

    Locates the uncrustify binary under the build directory, then runs a
    sequence of check_uncrustify_output() comparisons against expected
    output files, exiting with EX_OK when all pass or EX_SOFTWARE when
    any fail (EX_USAGE when no binary is found).
    """
    # set working dir to script dir
    sc_dir = dirname(relpath(__file__))

    parser = argparse.ArgumentParser(description='Test CLI Options')
    parser.add_argument('--diff',
                        action='store_true',
                        help='show diffs when there is a test mismatch')
    parser.add_argument(
        '--apply',
        action='store_true',
        help=
        'auto apply the changes from the results folder to the output folder')
    parser.add_argument('--build',
                        default=s_path_join(sc_dir, '../../build'),
                        help='specify location of the build directory')
    parsed_args = parser.parse_args()

    # find the uncrustify binary: probe the usual single- and
    # multi-configuration CMake output locations.
    bin_found = False
    uncr_bin = ''
    bd_dir = parsed_args.build
    bin_paths = [
        s_path_join(bd_dir, 'uncrustify'),
        s_path_join(bd_dir, 'Debug/uncrustify'),
        s_path_join(bd_dir, 'Debug/uncrustify.exe'),
        s_path_join(bd_dir, 'Release/uncrustify'),
        s_path_join(bd_dir, 'Release/uncrustify.exe'),
        s_path_join(bd_dir, 'RelWithDebInfo/uncrustify'),
        s_path_join(bd_dir, 'RelWithDebInfo/uncrustify.exe'),
        s_path_join(bd_dir, 'MinSizeRel/uncrustify'),
        s_path_join(bd_dir, 'MinSizeRel/uncrustify.exe')
    ]
    for uncr_bin in bin_paths:
        if not isfile(uncr_bin):
            eprint("is not a file: %s" % uncr_bin)
        else:
            print("Uncrustify binary found: %s" % uncr_bin)
            bin_found = True
            break
    if not bin_found:
        eprint("No Uncrustify binary found")
        sys_exit(EX_USAGE)

    clear_dir(s_path_join(sc_dir, "./results"))

    # return_flag accumulates the overall pass/fail state.
    return_flag = True

    #
    # Test help
    #   -h -? --help --usage
    if not check_uncrustify_output(
            uncr_bin,
            parsed_args,
            out_expected_path=s_path_join(sc_dir, 'output/help.txt'),
            out_result_path=s_path_join(sc_dir, 'results/help.txt'),
            out_result_manip=[
                string_replace(
                    ' --mtime : Preserve mtime on replaced files.\n', ''),
                string_replace('.exe', '')
            ]):
        return_flag = False

    #
    # Test false parameter
    #   --xyz
    if not check_uncrustify_output(
            uncr_bin,
            parsed_args,
            args_arr=['--xyz'],
            err_expected_path=s_path_join(sc_dir, 'output/xyz-err.txt'),
            err_result_path=s_path_join(sc_dir, 'results/xyz-err.txt')):
        return_flag = False

    #
    # Test --show-config
    #
    if not check_uncrustify_output(
            uncr_bin,
            parsed_args,
            args_arr=['--show-config'],
            out_expected_path=s_path_join(sc_dir, 'output/show_config.txt'),
            out_result_path=s_path_join(sc_dir, 'results/show_config.txt'),
            out_result_manip=reg_replace(r'\# Uncrustify.+', '')):
        return_flag = False

    #
    # Test --update-config
    #
    if not check_uncrustify_output(
            uncr_bin,
            parsed_args,
            args_arr=[
                '-c', s_path_join(sc_dir, 'config/mini_d.cfg'),
                '--update-config'
            ],
            out_expected_path=s_path_join(sc_dir, 'output/mini_d_uc.txt'),
            out_result_path=s_path_join(sc_dir, 'results/mini_d_uc.txt'),
            out_result_manip=reg_replace(r'\# Uncrustify.+', ''),
            err_expected_path=s_path_join(sc_dir, 'output/mini_d_error.txt'),
            err_result_path=s_path_join(sc_dir, 'results/mini_d_error0.txt'),
            err_result_manip=string_replace('\\', '/')):
        return_flag = False

    if not check_uncrustify_output(
            uncr_bin,
            parsed_args,
            args_arr=[
                '-c', s_path_join(sc_dir, 'config/mini_nd.cfg'),
                '--update-config'
            ],
            out_expected_path=s_path_join(sc_dir, 'output/mini_nd_uc.txt'),
            out_result_path=s_path_join(sc_dir, 'results/mini_nd_uc.txt'),
            out_result_manip=reg_replace(r'\# Uncrustify.+', ''),
            err_expected_path=s_path_join(sc_dir, 'output/mini_d_error.txt'),
            err_result_path=s_path_join(sc_dir, 'results/mini_d_error1.txt'),
            err_result_manip=string_replace('\\', '/')):
        return_flag = False

    #
    # Test --update-config-with-doc
    #
    if not check_uncrustify_output(
            uncr_bin,
            parsed_args,
            args_arr=[
                '-c', s_path_join(sc_dir, 'config/mini_d.cfg'),
                '--update-config-with-doc'
            ],
            out_expected_path=s_path_join(sc_dir, 'output/mini_d_ucwd.txt'),
            out_result_path=s_path_join(sc_dir, 'results/mini_d_ucwd.txt'),
            out_result_manip=reg_replace(r'\# Uncrustify.+', ''),
            err_expected_path=s_path_join(sc_dir, 'output/mini_d_error.txt'),
            err_result_path=s_path_join(sc_dir, 'results/mini_d_error2.txt'),
            err_result_manip=string_replace('\\', '/')):
        return_flag = False

    if not check_uncrustify_output(
            uncr_bin,
            parsed_args,
            args_arr=[
                '-c', s_path_join(sc_dir, 'config/mini_nd.cfg'),
                '--update-config-with-doc'
            ],
            out_expected_path=s_path_join(sc_dir, 'output/mini_nd_ucwd.txt'),
            out_result_path=s_path_join(sc_dir, 'results/mini_nd_ucwd.txt'),
            out_result_manip=reg_replace(r'\# Uncrustify.+', ''),
            err_expected_path=s_path_join(sc_dir, 'output/mini_d_error.txt'),
            err_result_path=s_path_join(sc_dir, 'results/mini_d_error3.txt'),
            err_result_manip=string_replace('\\', '/')):
        return_flag = False

    #
    # Test -p
    #
    if not check_uncrustify_output(
            uncr_bin,
            parsed_args,
            args_arr=[
                '-c', s_path_join(sc_dir, 'config/mini_nd.cfg'),
                '-f', s_path_join(sc_dir, 'input/testSrcP.cpp'),
                '-p', s_path_join(sc_dir, 'results/p.txt')
            ],
            gen_expected_path=s_path_join(sc_dir, 'output/p.txt'),
            gen_result_path=s_path_join(sc_dir, 'results/p.txt'),
            gen_result_manip=reg_replace(r'\# Uncrustify.+[^\n\r]', '')):
        return_flag = False

    #
    # Test --replace
    #
    # Restore the pristine input first, since --replace edits it in place.
    copyfile("input/backup.h-save", "input/backup.h")
    if not check_uncrustify_output(
            uncr_bin,
            parsed_args,
            args_arr=[
                '-c', s_path_join(sc_dir, 'config/replace.cfg'),
                '-F', s_path_join(sc_dir, 'input/replace.list'),
                '--replace'
            ],
            gen_expected_path=s_path_join(sc_dir, 'output/backup.h'),
            gen_result_path=s_path_join(sc_dir, 'input/backup.h')):
        return_flag = False

    # The flag CMAKE_BUILD_TYPE must be set to "Release", or all lines with
    # 'Description="<html>(<number>)text abc.</html>" must be changed to
    # 'Description="<html>text abc.</html>"
    #
    # OR it is possible to introduce a new parameter: gen_expected_manip
    #
    # The last "reg_replace(r'\r', '')" is necessary under Windows, because
    # fprintf puts a \r\n at the end of a line. To make the check, we use
    # output/universalindent.cfg, generated under Linux, with only \n at the
    # end of a line.
    if not check_uncrustify_output(
            uncr_bin,
            parsed_args,
            args_arr=[
                '-o', s_path_join(sc_dir, 'results/universalindent.cfg'),
                '--universalindent'
            ],
            gen_expected_path=s_path_join(sc_dir,
                                          'output/universalindent.cfg'),
            gen_result_path=s_path_join(sc_dir,
                                        'results/universalindent.cfg'),
            gen_result_manip=[
                reg_replace(r'version=U.+', ''),
                reg_replace(r'\(\d+\)', ''),
                reg_replace(r'\r', '')
            ]):
        return_flag = False

    # Debug Options:
    #   -L
    # look at src/log_levels.h
    Ls_A = ['9', '21', '25', '28', '31', '36', '66', '92']
    for L in Ls_A:
        if not check_uncrustify_output(
                uncr_bin,
                parsed_args,
                args_arr=[
                    '-c', NULL_DEVICE, '-L', L, '-o', NULL_DEVICE, '-f',
                    s_path_join(sc_dir, 'input/testSrc.cpp')
                ],
                err_expected_path=s_path_join(sc_dir, 'output/%s.txt' % L),
                err_result_path=s_path_join(sc_dir, 'results/%s.txt' % L),
                err_result_manip=[
                    reg_replace(r'[0-9]', ''),
                    reg_replace(RE_CALLSTACK, '[CallStack]'),
                    reg_replace(RE_DO_SPACE, '')
                ]):
            return_flag = False

    # Test logger buffer overflow
    if not check_uncrustify_output(
            uncr_bin,
            parsed_args,
            args_arr=[
                '-c', NULL_DEVICE, '-L', '99', '-o', NULL_DEVICE, '-f',
                s_path_join(sc_dir, 'input/logger.cs')
            ],
            err_expected_path=s_path_join(sc_dir,
                                          'output/logger_cs_L_99.txt'),
            err_result_path=s_path_join(sc_dir,
                                        'results/logger_cs_L_99.txt'),
            err_result_manip=reg_replace(r'[0-9]', '')):
        return_flag = False

    # misc error_tests
    error_tests = ["I-842", "unmatched_close_pp"]
    for test in error_tests:
        if not check_uncrustify_output(
                uncr_bin,
                parsed_args,
                args_arr=[
                    '-c', s_path_join(sc_dir, 'config/%s.cfg' % test),
                    '-f', s_path_join(sc_dir, 'input/%s.cpp' % test),
                    '-o', NULL_DEVICE, '-q'
                ],
                err_expected_path=s_path_join(sc_dir,
                                              'output/%s.txt' % test),
                err_result_path=s_path_join(sc_dir,
                                            'results/%s.txt' % test)):
            return_flag = False

    if return_flag:
        print("all tests are OK")
        sys_exit(EX_OK)
    else:
        print("some problem(s) are still present")
        sys_exit(EX_SOFTWARE)
with open(args.input_file_path, 'r') as f: grammar_string = f.read() grammar = CFG.fromstring(grammar_string) for sentence in generate(grammar, depth=args.depth): print(''.join(sentence)) return 0 if __name__ == "__main__": arg_parser = argparse.ArgumentParser() arg_parser.add_argument('-i', '--input_file_path', metavar='<path>', type=lambda x: valid_file(arg_parser, x), help="Path to the grammar file", required=False) arg_parser.add_argument('-d', '--depth', metavar='<nr>', type=int, default=9, help='Max depth of grammar tree.') FLAGS, unparsed = arg_parser.parse_known_args() sys_exit(main(FLAGS))
master.print_status() sleep(10) # Step 9: Sort the completed tasks completed_tasks: List[Task] = master.get_completed_tasks() completed_tasks.sort(key=byID) # Step 10: Create image files for the tasks i = 0 task_filenames = [] for task in completed_tasks: output_filename = working_dir + "/" + f"output_{i}.bmp" i += 1 with open(output_filename, "wb") as output: output.write(task.payload) task_filenames.append(output_filename) output.flush() # Step 11: Combine the images using slab recombination combine_slabs(task_filenames, image_width, image_height) end_time = time.time() print("{} seconds to complete".format(end_time - start_time)) sys_exit(0) except KeyboardInterrupt: # call master.exit print("exiting") sys_exit(0)
!Assembly World: - Second Line !Assembly World: - Third Line Resulting output: Hello: - First Line - Second Line - Third Line See https://assemyaml.nz for details on document syntax. Options: --help Show this usage information. --no-local-tag | -l Ignore !Transclude and !Assembly local tags and use global tags only. --output <filename> | -o <filename> Write output to filename instead of stdout. """ % {"argv0": basename(argv[0])}) fd.flush() return if __name__ == "__main__": # pragma: nocover sys_exit(main(argv[1:]))
files += 1 out += dir_path.name + '\n' iterator = inner(dir_path, level=level) for line in islice(iterator, length_limit): out += line + '\n' if next(iterator, None): out += f'... length_limit, {length_limit}, reached, counted:' + '\n' out += f'\n{directories} directories' + (f', {files} files' if files else '') + '\n' # replace the first line with `.` out = '.\n' + out.split('\n', 1)[-1] tree_f = open(project_dir + '/tree.txt', 'w') tree_f.write(out) tree_f.close() tree(project_dir) print('Done!') generate_status_table(project_dir) generate_contributors_table(project_dir) if exit_code != 0: print( "Warning: some of info.json files are not valid. Process will be exited with 1 exit code" ) sys_exit(exit_code)
def exit(self):
    """Terminate the application.  (Method name intentionally mirrors,
    and locally shadows, the builtin `exit`.)"""
    sys_exit()
def display_error(message: str, exit: Optional[int] = None) -> None:
    """Print an error message in red and optionally terminate the process.

    Args:
        message: Text to display, prefixed with "ERROR: ".
        exit: Exit status to terminate the process with, or None to keep
            running after displaying the message.
    """
    click.secho("ERROR: {}".format(message), fg="red")
    # FIX: test for None explicitly — the original truthiness check
    # silently ignored an explicit exit code of 0, which the
    # Optional[int] annotation says is a valid terminating status.
    if exit is not None:
        sys_exit(exit)
"actions": ["make -C docs {posargs}"], "doc": "Run a target in subdir 'doc'", "uptodate": [False], "pos_arg": "posargs", } def task_DeployToGitHubPages(): cwd = str(ROOT / "public") return { "actions": [ CmdAction(cmd, cwd=cwd) for cmd in [ "git init", "cp ../.git/config ./.git/config", "touch .nojekyll", "git add .", 'git config --local user.email "push@gha"', 'git config --local user.name "GHA"', "git commit -am '{posargs}'", "git push -u origin +HEAD:gh-pages", ] ], "doc": "Create a clean branch in subdir 'public' and push to branch 'gh-pages'", "pos_arg": "posargs", } if __name__ == '__main__': sys_exit(DoitMain(ModuleTaskLoader(globals())).run(sys_argv[1:]))
def nazad2(self):
    """Go back: re-launch the main script, then terminate this process.
    NOTE(review): os.system blocks until main.py exits, so sys_exit()
    only runs afterwards — confirm this ordering is intended."""
    os.system('python3 main.py')
    sys_exit()
def loop_callback(self):
    """Event-loop tick handler: once a result window has been found,
    activate it via the remote interface and exit with success."""
    if self.result:
        self.remote.activate_window(self.result)
        sys_exit(0)
default=False, help= "NOT IMPLEMENTED: return the result as json data, Default is in a table" ) parser.add_option( "-s", "--shell", # Someing fun todo one day. action="store_true", dest="shell", default=False, help="NOT IMPLEMENTED: interactive shell just like MySQL but to mongo") (options, args) = parser.parse_args() if not args: parser.print_help() sys_exit(1) ## Reconstruct the query string query = ' '.join( [arg if ' ' not in arg else "'" + arg + "'" for arg in args]) if query[0] in ['"', "'"] and query[0] == query[-1]: query = query.strip(query[0]) if not options.verbose: DEBUG = False # TODO: Reverse this logic when DEBUG defaults to false. query_dict = sql_to_spec(query) if options.no_db or options.verbose: print "The SQL query: ", query print "is this mongo query: ", create_mongo_shell_query(query_dict) elif not options.no_db: result = execute_query(query_dict, options.mongo_server)
def graceful_exit(self, *args, **kwargs):
    """Log a fatal shutdown message, run registered callbacks, then exit
    with status 1.  Extra args/kwargs are accepted (and ignored) so this
    can be used directly as a signal/error handler."""
    shutdown_msg = 'cannot recover from error, shutting down...'
    self.logger.error(shutdown_msg, 'BlueShift')
    self.run_callbacks()
    sys_exit(1)
    # NOTE(review): unreachable unless SystemExit is caught upstream;
    # presumably a belt-and-braces hard kill — confirm intent.
    os_exit(1)
self.font10 = parent.font10 self.left = parent.left self.top = int(parent.height * 0.2) self.width = parent.width self.height = int(parent.height * 0.8) self.setGeometry(self.left, self.top, self.width, self.height) self.allFont = QFont("Times New Roman") self.allFont.setPointSize(14) # Create tabs self.tabMain = MainTab(self) self.tabMain.setFont(self.allFont) self.tabPLC = PLCTab(self) self.tabTraining = TrainingTab(self) self.tabPrinting = PrintTab(self) # Add tabs self.addTab(self.tabMain, "Main") self.addTab(self.tabPLC, "PLC") self.addTab(self.tabTraining, "Training") self.addTab(self.tabPrinting, "In ấn") if __name__ == '__main__': app = QApplication(argv) window = MainWindow(app) sys_exit(app.exec_())