def get_contract(client):
    """Look up the configured continuous future via the IB client.

    Returns a (contract, tradingHours) pair taken from the first matching
    contract-details record; exits the process if nothing matches.
    """
    details = client.ib.reqContractDetails(
        ContFuture(symbol=config.SYMBOL, exchange=config.EXCHANGE))
    if not details:
        log.error("Failed to Grab Continuous Future {}".format(config.SYMBOL))
        sysexit()
    first = details[0]
    return first.contract, first.tradingHours
def onPlayBackError(self):
    """Kodi player callback: clean up addon state after a playback error, then exit."""
    # control.homeWindow.clearProperty('venom.isplaying.playlist') # not used atm
    # Drop the pre-resolved "next episode" URL so a stale value is not reused.
    control.homeWindow.clearProperty('venom.preResolved_nextUrl')
    # Persist the resume point before bailing out.
    Bookmarks().reset(self.current_time, self.media_length, self.name, self.year)
    log_utils.error()
    xbmc.log('[ plugin.video.venom ] onPlayBackError callback', LOGINFO)
    sysexit(1)
def create_if_missing(filename):
    """Create an empty file at *filename* unless one already exists.

    Exits with status 2 when the file cannot be created.
    """
    if exists(filename):
        return
    try:
        # Touch the file: open for binary write and immediately close.
        with open(filename, 'wb'):
            pass
    except IOError:
        print("No write permission: %s" % (filename))
        sysexit(2)
def get_binary(sock):
    """Get binary name and contents from server, create binary.

    Protocol: read a 3-char length, then the filename, open it locally,
    acknowledge with a status code ('fcs' = file-create success,
    'fna' = file not accessible), then read the payload and write it.
    Returns 0 on success, 1 if the local file could not be opened.
    """
    exit_code = 0
    en_data = get_en_data(sock, 3) #Filename length up to 999 chars
    en_data = get_en_data(sock, int(en_data))
    try:
        bin_to_write = open(en_data, 'wb')
        stdout = 'fcs'
    except IOError:
        stdout = 'fna'
        exit_code = 1
    # Report open-status back to the server; a return of 1 means the
    # control channel failed, so abort the whole client.
    en_stdout = make_en_stdout(stdout, sock)
    if en_stdout == 1:
        sysexit()
    if stdout == 'fcs':
        b_size = get_en_data(sock, 13) #Binary size up to 9999999999999 symbols
        en_data = get_en_bin_data(sock, int(b_size))
        bin_to_write.write(en_data)
        bin_to_write.close()
        stdout = "fsw"
        # Final "file successfully written" acknowledgement.
        en_stdout = make_en_stdout(stdout, sock)
        if en_stdout == 1:
            sysexit()
    return exit_code
def changefile(filename, key, value, dummyrun=False):
    """Set *key* to *value* in a newline-separated key/value file.

    If dummyrun is True, nothing is written; instead return True when
    the change would have modified the file. Exits with status 2 on
    read or write failure.
    """
    key = bs(key)
    value = bs(value)
    # Read the file
    try:
        with open(filename, 'rb') as f:
            data = f.read()
            lines = data.split(NL)[:-1]
    except IOError:
        print("Can't read %s" % (filename))
        sysexit(2)
    # Track whether the original file ended with a newline so the
    # rewritten file can preserve that exactly.
    final_nl = True
    if NL not in data:
        lines = [data]
        final_nl = False
    elif not data.endswith(NL):
        final_nl = False
    # Change and write the file
    changed_contents = NL.join(change(lines, key, value))
    # Only add a final newline if the original contents had one at the end
    if final_nl:
        changed_contents += NL
    if dummyrun:
        return data != changed_contents
    try:
        with open(filename, 'wb') as f:
            f.write(changed_contents)
    except IOError:
        print("No write permission: %s" % (filename))
        sysexit(2)
def addtofile(filename, line):
    """Tries to add a line to a file. UTF-8. No questions asked.

    Appends *line* (byte-encoded via bs()) as a new NL-terminated line,
    rewriting the whole file. Exits with status 2 on read/write failure.
    """
    line = bs(line)
    # Read the file
    try:
        with open(filename, 'rb') as f:
            data = f.read()
            lines = data.split(NL)[:-1]
    except IOError:
        print("Can't read %s" % (filename))
        sysexit(2)
    # Normalise degenerate cases: empty file -> no lines; a file with no
    # newline at all -> a single line.
    if data.strip() == b"":
        lines = []
    elif NL not in data:
        lines = [data]
    # Change and write the file
    try:
        with open(filename, 'wb') as f:
            lines.append(line)
            added_data = NL.join(lines) + NL
            f.write(added_data)
    except IOError:
        print("No write permission: %s" % (filename))
        sysexit(2)
def main():
    """Main function: parse CLI options, run the server console loop.

    Cleanup (cli.trash()) always runs; the process exits non-normally on
    KeyError or KeyboardInterrupt, mirroring the original behaviour.
    """
    parser = argparse.ArgumentParser(description='RSPET Server module.')
    parser.add_argument("-c", "--clients", nargs=1, type=int, metavar='N',
                        help="Number of clients to accept.", default=[5])
    parser.add_argument("--ip", nargs=1, type=str, metavar='IP',
                        help="IP to listen for incoming connections.",
                        default=["0.0.0.0"])
    parser.add_argument("-p", "--port", nargs=1, type=int, metavar='PORT',
                        help="Port number to listen for incoming connections.",
                        default=[9000])
    args = parser.parse_args()
    cli = Console(args.clients[0], args.ip[0], args.port[0])
    exit_after = False
    try:
        cli.loop()
    except KeyError:
        print("Got KeyError")
        exit_after = True
    except KeyboardInterrupt:
        exit_after = True
    finally:
        # Fix: the original duplicated this trash()/del cleanup in three
        # places; a single finally block guarantees it runs exactly once.
        cli.trash()
        del cli
    if exit_after:
        sysexit()
def cli():
    """CLI entry point.

    Checks each requirement's licence compatibility and reports it;
    exits 1 only when --zero is set and an incompatible licence exists.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--requirements-file", "-r",
                        help="requirements file")
    parser.add_argument("--zero", "-0",
                        help="Return non zero exit code if an incompatible license is found",
                        action="store_true")
    args = parser.parse_args()
    requirements_path = args.requirements_file if args.requirements_file else "requirements.txt"
    reqsDict = checkRequirements(requirements_path)
    if not reqsDict:
        if lazyPrint is None:
            print("/ WARN: No requirements")
        else:
            lazyPrint("No requirements", LogType.WARNING)
    incompat = False
    for req in reqsDict:
        name = req["name"]
        if req["compatible"]:
            if lazyPrint is None:
                print("+ OK: " + name)
            else:
                lazyPrint(name, LogType.SUCCESS)
        else:
            if lazyPrint is None:
                print("+ ERROR: " + name)
            else:
                lazyPrint(name, LogType.ERROR)
            incompat = True
    sysexit(1 if incompat and args.zero else 0)
def __init__(self, max_conns=5, ip="0.0.0.0", port="9000"):
    """Start listening on socket.

    Loads config.json, imports the configured plugins, then binds and
    listens on (ip, port). Exits the process if binding fails.
    """
    self.ip = ip
    self.port = port
    self.max_conns = max_conns
    self.sock = socket(AF_INET, SOCK_STREAM)
    self.serial = 0
    self.hosts = {} # List of hosts
    self.selected = [] # List of selected hosts
    self.plugins = [] # List of active plugins
    self.log_opt = [] # List of Letters. Indicates logging level
    with open("config.json") as json_config:
        self.config = json.load(json_config)
    self.log_opt = self.config["log"]
    self._log("L", "Session Start.")
    # Import each configured plugin module; a failed import is logged
    # but does not abort startup.
    for plugin in self.config["plugins"]:
        try:
            __import__("Plugins.%s" % plugin)
            self._log("L", "%s plugin loaded." % plugin)
        except ImportError:
            self._log("E", "%s plugin failed to load." % plugin)
    try:
        self.sock.bind((ip, int(port)))
        self.sock.listen(max_conns)
        self._log("L", "Socket bound @ %s:%s." %(self.ip, self.port))
    except sock_error:
        print("Something went wrong during binding & listening")
        self._log("E", "Error binding socket @ %s:%s." %(self.ip, self.port))
        sysexit()
def build_cache(self, path=None):
    """
    Build the reverse symlink cache by walking through the filesystem
    and finding all symlinks and put them into a cache dictionary for
    reference later.

    Args:
        path: root to walk; when None, use the configured bindpoint
            (exits with status 1 if no bindpoint is configured).
    """
    working_directory = getcwd()
    if path is None:
        bindpoint = get_bindpoint()
        if bindpoint is None:
            getLogger('files').error(
                "No bindpoint found in the filesystem "
                "section of the configuration file, "
                "exiting")
            sysexit(1)
    else:
        bindpoint = path
    try:
        for dirname, directories, files in walk(bindpoint):
            for entry in directories + files:
                linkpath = abspath(join(dirname, entry))
                if islink(linkpath):
                    # chdir so a *relative* readlink() target resolves
                    # against the link's own directory.
                    chdir(dirname)
                    destpath = abspath(readlink(linkpath))
                    if destpath in self.cache:
                        self.cache[destpath].append(linkpath)
                    else:
                        self.cache[destpath] = [linkpath]
    finally:
        # Fix: restore the caller's working directory even if the walk
        # raises; the original leaked the chdir() on any exception.
        chdir(working_directory)
def exit(s):
    """Redraw the board, show the final message *s*, pause, then quit."""
    for square in board:
        square.draw()
    message(s)
    pygame.display.update()
    # Leave the final screen visible for a moment before quitting.
    sleep(3)
    sysexit()
def build_cache(self, path=None):
    """
    Build the reverse symlink cache by walking through the filesystem
    and finding all symlinks and put them into a cache dictionary for
    reference later.

    Args:
        path: root to walk; when None, use the configured bindpoint
            (exits with status 1 if no bindpoint is configured).
    """
    working_directory = getcwd()
    if path is None:
        bindpoint = get_bindpoint()
        if bindpoint is None:
            getLogger('files').error("No bindpoint found in the filesystem "
                                     "section of the configuration file, "
                                     "exiting")
            sysexit(1)
    else:
        bindpoint = path
    try:
        for dirname, directories, files in walk(bindpoint):
            for entry in directories + files:
                linkpath = abspath(join(dirname, entry))
                if islink(linkpath):
                    # chdir so a *relative* readlink() target resolves
                    # against the link's own directory.
                    chdir(dirname)
                    destpath = abspath(readlink(linkpath))
                    if destpath in self.cache:
                        self.cache[destpath].append(linkpath)
                    else:
                        self.cache[destpath] = [linkpath]
    finally:
        # Fix: restore the caller's working directory even if the walk
        # raises; the original leaked the chdir() on any exception.
        chdir(working_directory)
def onPlayBackError(self):
    """Kodi player callback: clean up addon state after a playback error, then exit."""
    playerWindow.clearProperty('venom.preResolved_nextUrl')
    # Persist the resume point before bailing out.
    Bookmarks().reset(self.current_time, self.media_length, self.name, self.year)
    log_utils.error()
    xbmc.log('[ plugin.video.venom ] onPlayBackError callback', LOGINFO)
    # NOTE(review): the callback is logged twice — once via xbmc.log above
    # and again via log_utils.log below; possibly redundant, confirm intent.
    log_utils.log('[ plugin.video.venom ] onPlayBackError callback', level=log_utils.LOGDEBUG)
    sysexit(1)
def onPlayBackError(self):
    """Kodi player callback: clean up addon state after a playback error, then exit."""
    # Drop the pre-resolved "next episode" URL so a stale value is not reused.
    control.homeWindow.clearProperty('dg.preResolved_nextUrl')
    # Persist the resume point before bailing out.
    Bookmarks().reset(self.current_time, self.media_length, self.name, self.year)
    log_utils.error()
    xbmc.log('[ plugin.video.dg ] onPlayBackError callback', LOGINFO)
    sysexit(1)
def killall(cls):
    """Set the global kill flag and terminate the process immediately."""
    global __KILL_RECEIVED__
    # Flag presumably checked by worker loops elsewhere in the module —
    # TODO confirm against the rest of the file.
    __KILL_RECEIVED__ = True
    try:
        sysexit(0)
    except SystemExit:
        # sys.exit() always raises SystemExit, so this branch always runs:
        # os._exit() bypasses interpreter cleanup and kills any non-daemon
        # threads that would otherwise keep the process alive.
        osexit(0)
def bin_spectrum(self, binning, bintype='mean'):
    """Bin (downsample) the spectrum along the velocity axis.

    binning : number of adjacent channels to combine.
    bintype : 'mean' averages each group of `binning` channels;
              'resample' interpolates via congrid.
    The pre-binning arrays are stashed on self.Original.
    """
    from string import lower
    from scipy import alen, arange, array
    from sys import exit as sysexit
    binning = int(binning)
    self.binning = binning
    ##### temporary solution, saving old stuff
    class Original: pass
    Original.d = self.d
    Original.v_arr = self.v_arr
    Original.v_cdelt = self.v_cdelt
    Original.v_cdeltkms = self.v_cdeltkms
    self.Original = Original
    #
    if lower(bintype) == 'resample':
        from congridding import congrid
        # congridding, proper resampling of data
        self.d = congrid(self.d, (alen(self.d) / binning, ),
                         centre=True, method='neighbour')
        self.v_arr = congrid(self.v_arr, (alen(self.v_arr) / binning, ))
        #
        self.v_cdeltkms = self.v_cdeltkms * binning
        self.v_cdelt = self.v_cdelt * binning
    elif lower(bintype) == 'mean':
        # Simple mean requires the channel count to be a multiple of the
        # bin width.
        if alen(self.d) % binning != 0:
            print 'Bin has to be evenly devide the number of channels: %d' % alen(
                self.d)
            sysexit()
        # Old method - simple binning, just average
        indices = arange(0, alen(self.d), binning)
        self.d = array(
            [self.d[x:x + binning].sum(axis=0) / binning for x in indices])
        self.v_arr = array([
            self.v_arr[x:x + binning].sum(axis=0) / binning for x in indices
        ])
        #
        self.v_cdeltkms = self.v_cdeltkms * binning
    elif binning == 0 or binning < 0:
        print stylify(
            "\nERROR:\n Variable \"bin\" has to be 1 for no binning, or above 1 \n\
for the number of channels to bin")
    # print out information about the binning
    print '=' * 40
    print ' ' * 11, "Binning of data\n"
    print "No channels to bin : %d" % self.binning
    print "Velocity step : %f" % self.v_cdeltkms
    if bintype == 'mean':
        print 'Type of binning : Simple mean over selected no. bin channels'
    elif bintype == 'resample':
        print 'Type of binning : Resampling - 1D interpolation'
    # set the "binned" flag to True! (i.e. larger than 0)
    # every time we bin, it increases with the number of the binning parameter
    # hence the number of channels that it has been binned is repr
    # by this parameter
    self.binned += self.binning
def __init__(self, max_conns=5, ip="0.0.0.0", port="9000"):
    """Start listening on socket.

    Loads config.json, imports the configured plugins, then binds and
    listens on (ip, port). Exits the process if binding fails.
    """
    self.ip = ip
    self.port = port
    self.max_conns = max_conns
    self.sock = socket(AF_INET, SOCK_STREAM)
    self.serial = 0
    self.quit_signal = False
    self.hosts = {} # Dictionary of hosts
    self.selected = [] # List of selected hosts
    self.plugins = [] # List of active plugins
    self.log_opt = [] # List of Letters. Indicates logging level
    with open("config.json") as json_config:
        self.config = json.load(json_config)
    self.log_opt = self.config["log"]
    self._log("L", "Session Start.")
    # Import each configured plugin module; a failed import is logged
    # but does not abort startup.
    for plugin in self.config["plugins"]:
        try:
            __import__("Plugins.%s" % plugin)
            self._log("L", "%s plugin loaded." % plugin)
        except ImportError:
            self._log("E", "%s plugin failed to load." % plugin)
    try:
        self.sock.bind((ip, int(port)))
        self.sock.listen(max_conns)
        self._log("L", "Socket bound @ %s:%s." %(self.ip, self.port))
    except sock_error:
        print("Something went wrong during binding & listening")
        self._log("E", "Error binding socket @ %s:%s." %(self.ip, self.port))
        sysexit()
def decode(self, fname: str, mp3_dir: str, save_dir: str):
    """Convert an mp3 file to a 16-bit wav file.

    :param fname: name of the mp3 file that will be converted
    :param mp3_dir: directory of the mp3 file
    :param save_dir: directory of the wav file to be saved
    :return: the new (wav) filename
    """
    file_path = ospathjoin(mp3_dir, fname)
    if not ospathexists(file_path):
        print("File not found.", file=sysstderr)
        sysexit(1)
    fname = self.__change_extention(fname)
    print('New-Filename:', fname)
    with audioread.audio_open(file_path) as source:
        print('Input file: %i channels at %i Hz; %.1f seconds.'
              % (source.channels, source.samplerate, source.duration),
              file=sysstderr)
        print('Backend:', str(type(source).__module__).split('.')[1],
              file=sysstderr)
        out_path = ospathjoin(save_dir, fname)
        with contextlib_closing(waveopen(out_path, 'w')) as sink:
            sink.setnchannels(source.channels)
            sink.setframerate(source.samplerate)
            sink.setsampwidth(2)  # 2 bytes -> 16-bit PCM samples
            for chunk in source:
                sink.writeframes(chunk)
    return fname
def cli():  # pragma: no cover
    """Cli entry point: dump textures for the skin source path given on the command line."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("filepath", help="Path to skin source")
    arguments = parser.parse_args()
    dumpTex(arguments.filepath)
    sysexit(0)
def changefile(filename, key, value, dummyrun=False, define=False):
    """Set *key* to *value* in a newline-separated key/value file.

    If dummyrun is True, nothing is written; instead return True when
    the change would have modified the file. The *define* flag is passed
    through to change(). Exits with status 2 on read or write failure.
    """
    key = bs(key)
    value = bs(value)
    # Read the file
    try:
        with open(filename, 'rb') as f:
            data = f.read()
            lines = data.split(NL)[:-1]
    except IOError:
        print("Can't read %s" % (filename))
        sysexit(2)
    # Track whether the original file ended with a newline so the
    # rewritten file can preserve that exactly.
    final_nl = True
    if NL not in data:
        lines = [data]
        final_nl = False
    elif not data.endswith(NL):
        final_nl = False
    # Change and write the file
    changed_contents = NL.join(change(lines, key, value, define=define))
    # Only add a final newline if the original contents had one at the end
    if final_nl:
        changed_contents += NL
    if dummyrun:
        return data != changed_contents
    try:
        with open(filename, 'wb') as f:
            f.write(changed_contents)
    except IOError:
        print("No write permission: %s" % (filename))
        sysexit(2)
def _set_inflow_conditions_from_bounds(self,bounds): ''' Set initial conditions based on Inflow-type boundary conditions. Search a Bounds object for Inflow boundary conditions, and generate an initial condition for the simulation just inside those boundaries. At present, only Bounds objects with only one Inflow boundary are supported. Args: bounds: Initialized Bounds object Returns: out: Initialized array corresponding the points just bordering the Inflow boundary condition. ''' inflows = [] for face in bounds: for patch in face: if patch.type is 'Inflow': if not (patch.which_face == 'left' or patch.which_face == 'right'): print "Inflow condition detected on eta or zeta boundary!" sysexit() inflows.append(patch) sorted(inflows, key=lambda inflow: min(inflow.bounding_points[:][2])) # try: # if len(inflows)>1: # raise IndexError('More than 1 Inflow condition!') # except IndexError: # print "Multiple Inflow conditions not supported!" # sysexit() initial_condition = numpy.concatenate( [inflow.flow_state.copy() for inflow in inflows],axis=1) return initial_condition
def cli():
    """Cli entry point: convert ANSI input to the format chosen by --plugin."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("input", nargs="?", help="Filename to read from",
                        type=argparse.FileType('r', encoding="utf-8"),
                        default=stdin)
    parser.add_argument("output", help="Filename to write to")
    parser.add_argument("--plugin", "-p", help=PLUGIN_HELP)
    parser.add_argument("--theme", "-t",
                        help="Enter the path to a base24 theme")
    parser.add_argument("--wide", "-w", action="store_true",
                        help="Use a 'wide' virtual terminal (89 chars vs 49)")
    args = parser.parse_args()
    ansi = args.input.read()
    # Dispatch table mapping plugin names to converter callables.
    pluginMap = {
        "svg": ansiToSVG,
        "raster": ansiToRaster,
        "svgraster": ansiToSVGRaster,
        "html": ansiToHTML,
        "htmlraster": ansiToHTMLRaster,
    }
    if args.plugin is None:
        converter = ansiToSVG  # default output format
    elif args.plugin in pluginMap:
        converter = pluginMap[args.plugin]
    else:
        print(PLUGIN_HELP)
        sysexit(1)
    converter(ansi, args.output, args.theme, args.wide)
def runexp(args=None):
    """Usage: discodop runexp <parameter file> [--rerun]
    If a parameter file is given, an experiment is run. See the file
    sample.prm for an example parameter file. To repeat an experiment with
    an existing grammar, pass the option --rerun. The directory with the
    name of the parameter file without extension must exist in the current
    path; its results will be overwritten."""
    import io
    import os
    from .parser import readparam
    from .runexp import startexp, parsetepacoc
    if args is None:
        # Invoked as a subcommand: argv[0]=program, argv[1]=subcommand name.
        args = argv[2:]
    if len(args) == 0:
        print('error: incorrect number of arguments', file=stderr)
        print(runexp.__doc__)
        sysexit(2)
    elif '--tepacoc' in args:
        parsetepacoc()
    else:
        rerun = '--rerun' in args
        if rerun:
            args.remove('--rerun')
        params = readparam(args[0])
        # Result directory = parameter filename minus its extension.
        resultdir = args[0].rsplit('.', 1)[0]
        top = startexp(params, resultdir=resultdir, rerun=rerun)
        if not rerun: # copy parameter file to result dir
            paramlines = io.open(args[0], encoding='utf8').readlines()
            # Drop an existing top='...' line: it is re-written below with
            # the value actually used for this run.
            if paramlines[0].startswith("top='"):
                paramlines = paramlines[1:]
            outfile = os.path.join(resultdir, 'params.prm')
            with io.open(outfile, 'w', encoding='utf8') as out:
                out.write("top='%s',\n" % top)
                out.writelines(paramlines)
def main():
    """Script's main block

    Starts the connection-accepting thread, then runs an interactive
    command loop reading commands from stdin (Python 2: raw_input).
    """
    make_logo()
    # Optional first CLI argument: maximum simultaneous clients.
    try:
        max_conns = int(argv[1])
    except IndexError:
        max_conns = 5
    sock = socket(AF_INET, SOCK_STREAM)
    sock.bind(("0.0.0.0", 9000))
    sock.listen(max_conns)
    handler = ClientHandler()
    # Accept connections on a background thread while this thread runs
    # the interactive prompt.
    start_new_thread(conn_accept, (sock, handler))
    list_root_commands()
    while True:
        handler.rebuild()
        comm_body = ""
        comm_args = []
        command = raw_input("~$ ")
        command = command.split(" ")
        comm_body = command[0]
        # NOTE(review): comm_args is populated here but never used below;
        # handlers receive the full `command` list instead.
        for i in range(1, len(command)):
            comm_args.append(command[i])
        try:
            ROOT_COMMAND_DICT[comm_body](handler, command)
        except KeyError:
            # Unknown command; "Exit" is handled here as a special case.
            if comm_body == "Exit":
                sock.close()
                sysexit()
            print("Command not recognised! Try List_Commands for help")
            continue
def __init__(self,lines):
    """Parse a Patch definition from its text lines.

    The first line is the patch type; the remaining lines are scanned for
    the three known field headers, each followed by its value lines.
    """
    self.type = lines[0].strip()
    self.bounding_points = None
    self.boundary_surface = None
    self.flow_state = None
    # 'junk' is a placeholder; fields found in *lines* are parsed by
    # read_field, fields absent from *lines* are reset to None below.
    self.fields = {'Bounding Points:':'junk',
                   'Boundary Surface:':'junk',
                   'Flow State:':'junk'}
    inds = []
    for field in self.fields.keys():
        inds.append(index_substring(lines,field))
        # NOTE(review): the append above is immediately overwritten here —
        # the accumulating list appears to be dead code; confirm.
        inds = index_substring(lines,field)
        if len(inds)>1:
            msg = "Duplicate field entries detected in Patch.__init__!"
            try:
                raise InputFormatError(msg)
            except InputFormatError as e:
                print e.msg
                print 'Inds = ',inds
                print 'Field = ',field
                print 'Lines = ',lines
                sysexit()
        elif not inds:
            self.fields[field] = None
        for ind in inds:
            # Each field occupies three lines: header, value, (extra) line.
            self.read_field(lines[ind:ind+3])
    # NOTE(review): this unconditional exit aborts the process at the end
    # of every construction — it looks like a leftover debugging statement;
    # confirm against the original (un-flattened) source.
    sysexit()
def throwError(msg):
    """Print *msg* as an error banner and terminate the process."""
    drawline('#', msg)
    print("ERROR :", msg)
    sysexit()
def main():
    """Reads and writes from and to a test file.

    Usage: format.py file.txt 80 — re-wraps the body of a LaTeX document
    (between \\begin{document} and \\end{document}) to the given width.
    """
    # Fix: the original evaluated isint(argv[2]) before checking that two
    # arguments were actually given, so running with fewer arguments raised
    # IndexError instead of printing the usage message; it also left
    # max_len unbound for argument counts other than exactly 3.
    if len(argv) != 3 or not isint(argv[2]):
        print("Usage: format.py file.txt 80")
        sysexit(1)
    max_len = int(argv[2])
    with open(argv[1], 'r') as infile:
        lines = infile.readlines()
    lines = [strip(x) for x in lines]
    try:
        start = lines.index(r"\begin{document}")
        end = lines.index(r"\end{document}")
    except ValueError:
        print("Document does not have matching begin/end document tags.")
        sysexit(1)
    with open(argv[1], 'w') as outfile:
        # Preamble through \begin{document} is copied verbatim.
        for i in range(0, start + 1):
            outfile.write(lines[i] + '\n')
        # Body is re-wrapped to max_len characters.
        writer(outfile, lines[start + 1:end], char_count=max_len)
        # \end{document} and anything after it is copied verbatim.
        for i in range(end, len(lines)):
            outfile.write(lines[i] + '\n')
def load_config(configdb, args = None):
    """Build the effective runtime configuration.

    Precedence (lowest to highest): built-in defaults, config file,
    command-line options. Exits with status 1 on a bad config file.

    Args:
        configdb: configuration schema/database passed to the loaders.
        args: argument list to parse; defaults to sys.argv[1:].
    """
    args = argv[1:] if args is None else args
    set_debug_details(args.count('--debug')+args.count('-d'))
    default_config = config.DefaultValueLoader().load(configdb)
    _logger.info('default config:\n\t%s', config.pretty(default_config, '\n\t'))
    # parse cli options at first because we need the config file path in it
    # Fix: parse the `args` parameter instead of always argv[1:], so callers
    # that pass an explicit argument list are honoured.
    cli_config = config.CommandLineArgumentsLoader().load(configdb, args)
    _logger.info('cli arg parsed:\n\t%s', config.pretty(cli_config, '\n\t'))
    run_config = config.merge_config(default_config, cli_config)
    if run_config.generate_config:
        generate_config_file(configdb, run_config)
    try:
        conf_config = config.from_file(configdb, run_config.config_file)
    except config.ConfigFileLoader.ConfigValueError as err:
        _logger.error(err)
        sysexit(1)
    _logger.info('config file parsed:\n\t%s', config.pretty(conf_config, '\n\t'))
    run_config = config.merge_config(run_config, conf_config)
    # override saved settings again with cli options again, because we want
    # command line options to take higher priority
    run_config = config.merge_config(run_config, cli_config)
    if run_config.setter_args:
        run_config.setter_args = ','.join(run_config.setter_args).split(',')
    else:
        run_config.setter_args = list()
    _logger.info('running config is:\n\t%s', config.pretty(run_config, '\n\t'))
    return run_config
def main():
    """ Prompts for a series of details required to generate a session token """
    try:
        print("\nTerraform Session Token\n")
        if not ARGS.s:
            ARGS.s = 'tf-%s' % ARGS.p
        profile_configured_role, source_profile, mfa_serial = get_profile_details(
            AWS_CONFIG_FILE, ARGS.p)
        # Fix: removed a stray bare `exit` expression that was here — it
        # referenced the builtin without calling it and had no effect.
        entered_role = None
        if profile_configured_role is None:
            entered_role = input("Role [%s%s%s] (enter for default): " % (
                Fore.YELLOW, profile_configured_role, Style.RESET_ALL))
        selected_role = entered_role if entered_role else profile_configured_role
        if selected_role is None:  # Fix: PEP 8 identity test for None
            print("Role not selected, exiting")
            sysexit(1)
        print('Selected role is: %s%s%s' % (Fore.GREEN, selected_role,
                                            Style.RESET_ALL))
        mfa_code = input("\nMFA code [%s%s%s]: " % (Fore.YELLOW, mfa_serial,
                                                    Style.RESET_ALL))
        session_token = get_session_token(
            selected_role, source_profile, mfa_serial, mfa_code)
        tf_profile_name = ARGS.s
        write_token(AWS_CREDENTIALS_FILE, '[%s]' % tf_profile_name, session_token)
        # NOTE(review): shell command built by string interpolation; ARGS.d
        # and the profile name are local CLI inputs, but prefer
        # subprocess.run([...]) if these ever become untrusted.
        os.system('sleep %s && notify-send -i emblem-urgent "AWS profile [%s] expires in 5 minutes" &' % ((ARGS.d-300), tf_profile_name))
        print("Completed.")
    except KeyboardInterrupt:
        print("\nKeyboard Interrupted, Exiting")
        sysexit(0)
def main():
    """Validate input; exit 0 on success, return silently on a syntax error."""
    try:
        check_input()
    except SyntaxError as e:
        # Report the problem and return without the success exit below.
        print_red(e.msg)
        return
    sysexit(0)
def main():
    """Parse CLI options, configure logging, and run the test daemon forever.

    Exits 1 on any unexpected error (with a traceback when --debug is set)
    and 2 on Ctrl-C.
    """
    debug = False
    try:
        argparser = ArgumentParser(description=modules[__name__].__doc__)
        argparser.add_argument('-c', '--config', dest='config', required=True,
                               type=FileType('rt'), help='configuration file')
        argparser.add_argument('-d', '--debug', action='store_true',
                               help='enable debug mode')
        args = argparser.parse_args()
        # Fix: honour the --debug flag. It was parsed but never assigned to
        # `debug`, so the traceback branch below was unreachable.
        debug = args.debug
        cfgparser = EasyConfigParser()
        cfgparser.read_file(args.config)
        logger = logger_factory(logtype=cfgparser.get('logger', 'type', 'stderr'),
                                logfile=cfgparser.get('logger', 'file'),
                                level=cfgparser.get('logger', 'level', 'info'))
        bt = HttpdTestDaemon(logger, cfgparser)
        bt.start()
        # Idle forever; the daemon does its work on background threads.
        import time
        while True:
            time.sleep(5)
    except Exception as exc:
        print('\nError: %s' % exc, file=stderr)
        if debug:
            print(format_exc(chain=False), file=stderr)
        sysexit(1)
    except KeyboardInterrupt:
        print("\nAborting...", file=stderr)
        sysexit(2)
def generate_config_file(configdb, config_content):
    """Write *config_content* to its configured file path, then exit 0."""
    filename = config_content.config_file
    _logger.info('save following config to file %s:\n\t%s',
            filename, config.pretty(config_content, '\n\t'))
    save_config(configdb, config_content, filename)
    # Generating the config file is a terminal action for this run.
    sysexit(0)
def get_studies(self, subj_ID, modality=None, unique=True, verbose=False):
    """Return the studies for *subj_ID*.

    Without *modality*: return the full list of study IDs. With *modality*
    and unique=True: return the first study containing that modality, or
    None if no study matches. unique=False is not implemented and aborts.
    """
    url = 'studies?' + self._login_code + '\\&projectCode=' + self.proj_code + '\\&subjectNo=' + subj_ID
    output = self._wget_system_call(url)
    # Split at '\n'
    stud_list = output.split('\n')
    # Remove any empty entries!
    stud_list = [x for x in stud_list if x]
    if modality:
        for study in stud_list:
            # Query the modalities present in each study in turn.
            url = 'modalities?' + self._login_code + '\\&projectCode=' + self.proj_code + '\\&subjectNo=' + subj_ID + '\\&study=' + study
            output = self._wget_system_call(url).split('\n')
            #print output, '==', modality
            for entry in output:
                if entry == modality:
                    if unique:
                        return study
                        ### NB!! This only matches first hit! If subject contains several studies with this modality,
                        ### only first one is returned... Fixme
                    else:
                        # must re-write code a bit to accommodate the existence of
                        # several studies containing the desired modality...
                        print "Error: non-unique modalities not implemented yet!"
                        sysexit(-1)
        # If we get this far, no studies found with the desired modality
        return None
    else:
        return stud_list
def main(bin_bwa, n_threads, ref_fa, sample_id, out_dir, interval_dir, log_prefix, platform_id):
    """Run bwa alignment + sorting for one sample and log timing to a file."""
    #TODO: check if sambamba will be better/faster at sorting!
    # NOTE(review): `index_str` is not defined in this function or its
    # parameters — presumably a module-level global; confirm.
    log_path = "{}.{}.log".format(log_prefix, index_str)
    log_output = open(log_path, 'w')
    fastq_prefix = "{}/Fastq/{}".format(out_dir, sample_id)
    # Paired-end inputs: R1/R2 fastq files for this sample and index.
    in_fq = [
        "{}_{}_{}.fq.gz".format(fastq_prefix, index_str, "R1"),
        "{}_{}_{}.fq.gz".format(fastq_prefix, index_str, "R2")
    ]
    output_dir = "{}/Bwa/".format(out_dir)
    interval_paths = _get_interval_paths(interval_dir, "m3bp.intervals")
    log_output.write("Strating align_and_sort\n")
    start = time()
    output_paths = align_and_sort(bin_bwa, n_threads, ref_fa, platform_id,
                                  in_fq, sample_id, output_dir,
                                  interval_paths, log_output)
    end = time()
    if output_paths:
        log_output.write(
            "Alignment and sorting completed in {} seconds\n".format(
                round(end - start, 2)))
    else:
        log_output.write("ERROR: No output files generated\n")
    log_output.close()
    sysexit()
def __init__(self,default_bounds,stl_bounds_file=None,num_faces=0):
    '''
    Initialize Bounds object.

    Args:
        default_bounds: Description of the six boundary surfaces, one
            element per Face. May be superceded by solid walls defined
            in an STL file.
        stl_bounds_file: Filename string of an ASCII .stl file describing
            any solid wall geometries present. Not yet supported: passing
            one aborts the program (see below).
        num_faces: Integer number of individual faces in the .stl file.
            Must be present if an STL file is given.
    Returns:
        self.left_face, self.right_face, self.top_face, self.bottom_face,
        self.back_face, self.front_face
    Raises:
        STLError: There was an error reading the .stl file.
    '''
    # Read STL file, returning lists of triangulation vertices, indices of
    # the vertices associated with triangles, and the normal vectors of
    # those triangles.
    if stl_bounds_file:
        print " Warning: STL boundaries are not yet implemented."
        sysexit()
        # NOTE(review): everything below in this branch is unreachable —
        # the sysexit() above always fires first. Kept for when STL
        # support lands. Also `str=` shadows the builtin.
        num_nodes = num_faces*3
        [self.stl_nodes,self.stl_face_nodes,self.stl_face_normal,
         error]=stl.stla_read(stl_bounds_file,num_nodes,num_faces)
        if error:
            try:
                str="STLError: stla_read failed in BoundaryConditions.__init__"
                raise STLError(str)
            except STLError as e:
                print e.msg
                sysexit()
    # Isolate the parts of the boundary specification pertaining to each
    # Side and pass them on.
    self.left_face = Face('left',default_bounds.left_face)
    self.right_face = Face('right',default_bounds.right_face)
    self.bottom_face = Face('bottom',default_bounds.bottom_face)
    self.top_face = Face('top',default_bounds.top_face)
    self.back_face = Face('back',default_bounds.back_face)
    self.front_face = Face('front',default_bounds.front_face)
    # Assemble Fortran source for the patches' normal-vector routines and
    # compile it into an importable module via f2py.
    fortran_normal_src = "! -*- f90 -*-\n"
    for face in self:
        for patch in face:
            try:
                fortran_normal_src += patch.gradsrc
            except AttributeError:
                # Some patches don't have normal vectors
                pass
    f2py.compile(fortran_normal_src,modulename='FortranNormalVectors',
                 verbose=False,source_fn='FortranNormalVectors.f90',
                 extra_args='--quiet')
def merge(data, metronome):
    """Merge several parallel note-track files into one de-lagged track.

    data: list of tracks, each a list of rows [note, timing, x, y].
    metronome: tick interval; -1 means infer it from the files (aborts if
    the files disagree). Returns the merged list of rows.
    """
    if metronome == -1:
        # Infer the metronome from the first file and require every other
        # file to agree.
        new_metronome = noteblock_music_utility.get_metronome_info(
            data[0], -1, False)
        for i in data[1:]:
            previous_metronome = new_metronome
            new_metronome = noteblock_music_utility.get_metronome_info(
                i, -1, False)
            if previous_metronome != new_metronome:
                sysexit(
                    "Please specify a metronome value, it's ambiguous for me!")
        metronome = new_metronome
    lengths = [len(i) for i in data]
    max_lines = max(lengths)
    new_data = []
    for row_number in range(max_lines):
        # Gather this row from every file that is long enough.
        current_line = []
        for i, d in enumerate(data):
            if lengths[i] > row_number:
                current_line.append(d[row_number])
        # Everything except the timing (element 1) must match across files.
        non_timing_compare = [[i[0], i[2], i[3]] for i in current_line]
        if not is_all_items_same(non_timing_compare):
            sysexit("The note at line " + str(row_number + 1) +
                    " is not the same across the files, it is " +
                    str(non_timing_compare))
        timing_compare = [i[1] for i in current_line]
        # Timings that land exactly on a metronome tick are trusted.
        good_metronome_timings = [
            i for i in timing_compare if int(i) % metronome == 0
        ]
        best_candidate = most_frequent(good_metronome_timings)
        second_candidate = most_frequent([
            i for i in good_metronome_timings if i != best_candidate
        ]) if len(good_metronome_timings) > timing_compare.count(
            best_candidate) else ""
        antilagged_delay = -1
        # NOTE(review): reconstructed if/else pairing — the else is read as
        # belonging to the *outer* if (ask the user only when no clear
        # majority exists); confirm against the un-flattened original.
        if timing_compare.count(second_candidate) < timing_compare.count(
                best_candidate):
            antilagged_delay = best_candidate
            if timing_compare.count(best_candidate) != len(timing_compare):
                print("Guessing best value here at line " +
                      str(row_number + 1) + " from " +
                      ", ".join(timing_compare) + ": " + str(best_candidate))
        else:
            antilagged_delay = input(
                "What's the best value from here at line " +
                str(row_number + 1) + " from " + ", ".join(timing_compare) +
                ": ")
        new_data.append([
            current_line[0][0], antilagged_delay, current_line[0][2],
            current_line[0][3]
        ])
    return new_data
def _next_crossing(a,lowlim): for a_ind,val in enumerate(a): if val > lowlim: return a_ind print 'ERROR: No analogue trigger found within %d samples of the digital trigger' % a_ind print 'Cannot continue, aborting...' sysexit(-1)
def throwError(msg):
    """Print *msg* as a red error banner (via colorama) and terminate the process."""
    drawline(colorama.Fore.RED + '#', msg)
    print(colorama.Fore.RED + colorama.Style.BRIGHT + "ERROR :", colorama.Fore.RED + msg)
    sysexit()
def sanity_check():
    """Verify the product directory exists and contains at least one .exe.

    Exits with status 1 when either check fails.
    """
    if not isdir(productdir):
        print("Product directory doesn't exist.")
        sysexit(1)
    executables = [name for name in listdir(productdir)
                   if match(r"^.*\.exe$", name)]
    # Idiom fix: truth-test the list instead of comparing against [].
    if not executables:
        print("No executables in product directory.")
        sysexit(1)
def loadMap(self,bgsize,mapname):
    """Load map *mapname* and return its generated background surface.

    If the map file does not exist, the player has cleared every map:
    print a win message and exit.
    """
    self.current = mapname
    if os.path.exists(os.path.join('mapfiles',str(self.current))):
        self.getmovelist()
        self.getmapproperties()
        return self.backgroundGen(bgsize)
    else:
        print "You Won!!!"
        sysexit(1)
def __init__(self,default_bounds,stl_bounds_file=None,num_faces=0, init=dict()): ''' Initialize Bounds object. Args: default_bounds: Default_bounds is a description of a boundary surface. It may be superceded by other boundary conditions such as solid walls defined in an STL file. It may be either a filename string or or a list of strings (the results of a file.readlines() command). stl_bounds_file: Filename string of an ASCII .stl file describing any solid wall geometries present. Will eventually support the results of an stl_read command (a list of arrays of nodes and faces). num_faces: Integer number of individual faces in the .stl file. Must be present if an STL file is given. Returns: self.left_face self.right_face self.top_face self.bottom_face self.back_face self.front_face Raises: None as yet. ''' # Read STL file, returning lists of triangulation vertices, indices of # the vertices associated with triangles, and the normal vectors of # those triangles. if stl_bounds_file: num_nodes = num_faces*3 [self.stl_nodes,self.stl_face_nodes,self.stl_face_normal, error]=stl.stla_read(stl_bounds_file,num_nodes,num_faces) if error: try: str="STLError: stla_read failed in BoundaryConditions.__init__" raise STLError(str) except STLError as e: print e.msg sysexit() # Isolate the parts of the boundary specification pertaining to each # Side and pass them on. try: infile = open(default_bounds,'r') lines = infile.readlines() except TypeError: print ''' Note: Bounds.__init__ received default boundaries as lines instead of as a file ''' lines = default_bounds for inda in range(6): substr = self._pick_sides(lines) cmd = 'self.'+substr[0].split()[0].strip().lower() cmd = cmd+'_face = Face(substr)' # print cmd exec(cmd)
def get_map_template():
    """Return the cached HTML map template, loading it from disk on first use.

    Exits with status 1 if the template file yields no content.
    """
    global map_template
    if map_template is None:
        with open("map-template.html", 'r') as infile:
            map_template = infile.read()
        # Defensive check kept from the original (read() returns '' rather
        # than None, so this should never fire in practice).
        if map_template is None:
            # Fix: stderr is a file object, not a callable — the original
            # `stderr(...)` call would have raised TypeError here.
            stderr.write("ERROR: cannot find HTML template: map-template.html\n")
            sysexit(1)
    return map_template
def clean_exit():
    """Handle user abort: kill any running mdk3 processes and exit cleanly."""
    # NOTE(review): the two bare `print` statements below print blank lines
    # under Python 2 only; under Python 3 they are no-op expressions.
    print
    print
    print("\nAction aborted by user. Exiting now")
    # Find every mdk3 PID via the shell pipeline and force-kill it.
    for pid in getoutput("ps aux | grep mdk3 | grep -v grep | awk '{print $2}'").splitlines():
        system("kill -9 " + pid)
    print("Hope you enjoyed it ;-)")
    sleep(2)
    system("clear")
    sysexit(0)
def loadMap(self,bgsize,mapname):
    """Load map *mapname* and return its generated background surface.

    Also resets the XP bar rect and held-icon state. If the map file does
    not exist, the player has cleared every map: print a win message and
    exit.
    """
    self.xpbar = pygame.Rect(300,8,350,22)
    self.current = mapname
    # (icon, hold-count) pair cleared on every map load.
    self.iconhold = (None,0)
    if os.path.exists(os.path.join('mapfiles',str(self.current))):
        self.getmovelist()
        self.getmapproperties()
        return self.backgroundGen(bgsize)
    else:
        print "You Won!!!"
        sysexit(1)
def keyfn(self, event):
    """Handle a keypress: arrows adjust ratio/step, escape quits, then redraw."""
    key = event.key
    if key == 'up':
        self.ratio += self.step
    elif key == 'down':
        self.ratio -= self.step
    elif key == 'right':
        # Finer adjustment granularity.
        self.step *= 0.1
    elif key == 'left':
        # Coarser adjustment granularity.
        self.step *= 10
    elif key == 'escape':
        sysexit()
    self.draw()
def sig_handler(signum, frame): # pylint: disable=W0613
    """ Handle POSIX signal signum. Frame is ignored. """
    log_extra = {'user': None, 'ip': None, 'path': None}
    if signum != SIGINT:
        # Anything other than SIGINT is logged and otherwise ignored.
        getLogger('api').info('Verbosely ignoring signal ' + str(signum),
                              extra=log_extra)
        return
    getLogger('api').info('SIGINT received, shutting down', extra=log_extra)
    # TODO: Graceful shutdown that lets people finish their things
    sysexit(1)
def main():
    """Initialise pygame, open the 800x800 Hanjie window, and enter the menu."""
    # Centre the SDL window on screen.
    environ["SDL_VIDEO_CENTERED"] = "1"
    pygame.init()
    # Fonts are required for the game; bail out if unavailable.
    if not pygame.font:
        print "Warning, fonts disabled, game not playable"
        pygame.time.delay(1500)
        sysexit()
    if not pygame.mixer:
        print "Warning, sound disabled"
    screen = pygame.display.set_mode((800, 800))
    pygame.display.set_caption("Hanjie")
    pygame.display.update()
    menu.menu(screen)
def ensure_result_dir(dname):
    """Ensure directory `dname` exists and return its name.

    Creates the directory when missing. Exits with status 1 if creation
    fails or if `dname` exists but is not a directory.
    """
    if not os.path.exists(dname):
        try:
            os.mkdir(dname)
        except OSError as ose:
            stderr.write("ERROR: cannot create result directory %s: %s\n" % (dname, ose))
            sysexit(1)
    if os.path.isdir(dname):
        return dname
    stderr.write("ERROR: %s is not a directory.\n" % dname)
    sysexit(1)
def _wget_system_call(self, url,verbose=False):
    """Run the configured wget command against `url` and return its stdout.

    The full shell command is self._wget_cmd + url. If the instance's
    _wget_error_handling() reports a negative status for the output, the
    process exits with status -1.
    """
    cmd = self._wget_cmd + url
    if verbose:
        print cmd
    # NOTE(review): shell=True with a string built from `url` is a command
    # injection risk if `url` can come from untrusted input -- confirm.
    pipe = subp.Popen(cmd, stdout=subp.PIPE,stderr=subp.PIPE,shell=True)
    # NOTE(review): local `stderr` shadows any module-level sys.stderr alias;
    # the captured stderr text is discarded.
    output, stderr = pipe.communicate()
    #output = subp.call([cmd,opts], shell=True)
    if self._wget_error_handling(output) < 0:
        sysexit(-1)
    return output
def read_field(self,lines):
    """Parse one labeled field from `lines` and store it on self.

    lines[0] selects the field by its trailing label; lines[1] (and for an
    initialized flow state, lines[2]) carry the value. Unrecognized labels
    print an error and terminate the process.
    """
    if lines[0].rstrip().endswith('Bounding Points:'):
        # NOTE(review): eval() on file content executes arbitrary code if
        # the input file is untrusted -- consider ast.literal_eval.
        self.bounding_points = eval(lines[1])
    elif lines[0].rstrip().endswith('Boundary Surface:'):
        # Stored verbatim as a string (presumably parsed later).
        self.boundary_surface_string = lines[1]
    elif lines[0].rstrip().endswith('Flow State:'):
        if lines[1].strip() == 'Initialized:':
            # lines[2] names a .npy file holding the initialized state.
            self.flow_state = np.load(lines[2].strip())
        else:
            # Same eval() caveat as above.
            self.flow_state = eval(lines[1])
    else:
        print "Bad field value in read_field!"
        print "Value = ", lines[0]
        sysexit()
def bin_spectrum(self, binning, bintype='mean'):
    """Bin the spectrum in self.d / self.v_arr by `binning` channels.

    bintype 'mean' averages each group of `binning` channels (the channel
    count must divide evenly); 'resample' interpolates via congrid. The
    pre-binning arrays are stashed on self.Original, and self.binned is
    incremented by the binning factor.

    NOTE(review): when binning == 0 with bintype 'mean', the modulo below
    raises ZeroDivisionError before the `elif binning == 0` guard at the
    end can run -- that branch looks unreachable for 'mean'; confirm.
    """
    from string import lower
    from scipy import alen, arange, array
    from sys import exit as sysexit
    binning = int(binning)
    self.binning = binning
    ##### temporary solution, saving old stuff
    # Snapshot the unbinned data so it can be recovered/inspected later.
    class Original: pass
    Original.d = self.d
    Original.v_arr = self.v_arr
    Original.v_cdelt = self.v_cdelt
    Original.v_cdeltkms = self.v_cdeltkms
    self.Original = Original
    #
    if lower(bintype) == 'resample':
        from congridding import congrid
        # congridding, proper resampling of data
        self.d = congrid(self.d,(alen(self.d)/binning,),centre=True,
                         method='neighbour')
        self.v_arr = congrid(self.v_arr,(alen(self.v_arr)/binning,))
        #
        # Channel width grows by the binning factor.
        self.v_cdeltkms = self.v_cdeltkms*binning
        self.v_cdelt = self.v_cdelt*binning
    elif lower(bintype) == 'mean':
        if alen(self.d)%binning!=0:
            print 'Bin has to be evenly devide the number of channels: %d' % alen(self.d)
            sysexit()
        # Old method - simple binning, just average
        indices = arange(0,alen(self.d),binning)
        self.d = array([self.d[x:x+binning].sum(axis=0)/binning for x in indices])
        self.v_arr = array([self.v_arr[x:x+binning].sum(axis=0)/binning for x in indices])
        #
        self.v_cdeltkms = self.v_cdeltkms*binning
    elif binning == 0 or binning <0:
        print stylify("\nERROR:\n Variable \"bin\" has to be 1 for no binning, or above 1 \n\
for the number of channels to bin")
    # print out information about the binning
    print '='*40
    print ' '*11,"Binning of data\n"
    print "No channels to bin : %d" % self.binning
    print "Velocity step : %f" % self.v_cdeltkms
    if bintype=='mean':
        print 'Type of binning : Simple mean over selected no. bin channels'
    elif bintype=='resample':
        print 'Type of binning : Resampling - 1D interpolation'
    # set the "binned" flag to True! (i.e. larger than 0)
    # every time we bin, it increases with the number of the binning parameter
    # hence the number of channels that it has been binned is repr
    # by this parameter
    self.binned +=self.binning
def main():
    """Entry point: build a Client from CLI arguments and run its loop.

    argv[1] is the remote host (exit silently when absent); argv[2] is an
    optional extra argument forwarded to Client. A socket error on the
    first connect falls back to the reconnect path.
    """
    if len(argv) < 2:
        sysexit()
    rhost = argv[1]
    if len(argv) > 2:
        client = Client(rhost, argv[2])
    else:
        client = Client(rhost)
    try:
        client.connect()
    except sock_error:
        # Initial connection failed -- retry via the reconnect logic.
        client.reconnect()
    client.loop()
def load_config(configdb, args = None):
    """Build the effective run configuration.

    Merge order (later wins): built-in defaults, config file, command-line
    arguments. `args` defaults to argv[1:] when not supplied.

    Args:
        configdb: configuration schema/database passed to the loaders.
        args: optional list of CLI tokens; defaults to the process argv.
    Returns:
        The merged run configuration object.
    Exits:
        status 1 when the config file exists but fails to parse.
    """
    args = argv[1:] if args is None else args
    set_debug_details(args.count('--debug')+args.count('-d'))

    default_config = config.DefaultValueLoader().load(configdb)
    _logger.debug('default config:\n\t%s', config.pretty(default_config, '\n\t'))

    # parse cli options at first because we need the config file path in it
    # BUG FIX: previously parsed argv[1:] directly, silently ignoring a
    # caller-supplied `args` list.
    cli_config = config.CommandLineArgumentsLoader().load(configdb, args)
    _logger.debug('cli arg parsed:\n\t%s', config.pretty(cli_config, '\n\t'))
    run_config = config.merge_config(default_config, cli_config)

    if run_config.generate_config:
        generate_config_file(configdb, run_config)

    config_file = run_config.config_file
    if not isfile(config_file):
        _logger.warning("can't find config file %s, use default settings and cli settings",
                        config_file)
    else:
        try:
            conf_config = config.from_file(configdb, run_config.config_file)
        except config.ConfigFileLoader.ConfigValueError as err:
            _logger.error(err)
            sysexit(1)
        _logger.debug('config file parsed:\n\t%s', config.pretty(conf_config, '\n\t'))
        run_config = config.merge_config(run_config, conf_config)
        # override saved settings again with cli options again, because we want
        # command line options to take higher priority
        run_config = config.merge_config(run_config, cli_config)

    # Normalize setter_args into a flat list of comma-separated tokens.
    if run_config.setter_args:
        run_config.setter_args = ','.join(run_config.setter_args).split(',')
    else:
        run_config.setter_args = list()

    # backward compatibility modifications
    if run_config.size_mode == 'collect':
        _logger.warning(
            'size_mode=collect is obsolete, considering use collect=accompany instead'
        )
        run_config.size_mode = 'highest'
        if 'accompany' not in run_config.collect:
            run_config.collect.append('accompany')

    _logger.info('running config is:\n\t%s', config.pretty(run_config, '\n\t'))
    return run_config
def _scroll_credits(screen, scrRect, clock, lines, numFrames, step, fpsLimit):
    """Scroll the credits sprite group for `numFrames` frames.

    Fills the screen with JEOP_BLUE and uses a snapshot of it as the erase
    background, advancing `lines` by `step` each frame at most `fpsLimit`
    frames per second. Closing the window or pressing Q exits with status 1.
    """
    screen.fill(JEOP_BLUE)
    backdrop = screen.copy()
    for _ in xrange(numFrames):
        lines.update(step, scrRect.h)
        lines.clear(screen, backdrop)
        lines.draw(screen)
        pygame.display.update()
        clock.tick_busy_loop(fpsLimit)
        for ev in pygame.event.get():
            wants_quit = ev.type == QUIT
            if not wants_quit:
                wants_quit = ev.type == KEYDOWN and ev.key == K_q
            if wants_quit:
                sysexit(1)
        pygame.event.pump()