def __require_valid_keywords(key_args: Tuple[str]):
    """Validate the supplied keyword tuple.

    Fails (via ``require``) when no keywords were given, or on the first
    keyword whose format check does not pass.
    """
    # An empty tuple means the caller supplied no keywords at all.
    require(len(key_args), 'No keywords were given')
    for kw in key_args:
        # __is_keyword_valid returning None signals an invalid keyword
        # (presumably a regex match result — verify against its definition).
        is_valid = __is_keyword_valid(kw) is not None
        require(is_valid, f'Keyword "{kw}" has wrong format')
def __parse_arg_num(number: Any) -> int:
    """Coerce a command-line argument to ``int``.

    Accepts an ``int`` as-is, or a string holding an optionally signed
    decimal integer; anything else fails via ``require``.
    """
    def _is_decimal(s: str) -> bool:
        # Accept at most ONE leading sign.  The previous check used
        # lstrip("-+"), which also let malformed values such as "--5" or
        # "+-5" through validation, only for int() to then raise a raw
        # ValueError instead of the require() message.
        body = s[1:] if s[:1] in ('-', '+') else s
        return body.isdigit()

    require(
        isinstance(number, int) or (isinstance(number, str) and _is_decimal(number)),
        f'Argument "{number}" is not a valid number'
    )
    return number if not isinstance(number, str) else int(number)
def buildBin(testname, cmdToBuild = None, subdir = None, cmdToInstall = 'default', pkg2build = None):
    '''Build the test's binary if the testcase requires one.

    testname     -- testcase whose attributes drive the build
    cmdToBuild   -- override for the build command (default: 'make all')
    subdir       -- optional sub-directory containing the makefiles
    cmdToInstall -- 'default' (make install), 'manual' (copy files), or a
                    custom install command string
    pkg2build    -- optional package whose source dir we chdir into first

    Raises Exception when no Makefile/makefile/configure is found.
    '''
    if pkg2build is not None:
        # To double ensure in the right place
        os.chdir(u.codeIn(srcDir(testname), pkg2build))
    Makefile = 'Makefile'
    makefile = 'makefile'
    configure = 'configure'
    if testcaseAttr(testname, 'binary2build') == 'Yes':
        u.require('make')
        # FIXME - CLR doesn't search sub-folders in env PATH
        #installTo = binDir() + '/' + testcaseAttr(testname, 'name')
        installTo = binDir()
        if cmdToBuild is not None:
            MAKE = cmdToBuild
        else:
            MAKE = 'make all'
        # Fixed: use 'is None' instead of '== None' for the singleton test.
        if cmdToInstall == 'default' or cmdToInstall is None:
            INSTALL = "make install --eval='DESTDIR=%s'" % installTo
            #u.mkdir(installTo)
        elif cmdToInstall == 'manual':
            INSTALL = None
        else:
            INSTALL = cmdToInstall
        if subdir is not None:
            MAKE = "%s -C %s" % (MAKE, subdir)
            if INSTALL is not None:
                INSTALL = "%s -C %s" % (INSTALL, subdir)
            # Fixed: string comparison must use '==', not 'is'. Identity
            # comparison of str literals only worked by interning accident
            # and raises a SyntaxWarning on modern CPython.
            if subdir[-1] == '/':
                Makefile = subdir + 'Makefile'
                makefile = subdir + 'makefile'
                configure = subdir + 'configure'
            else:
                Makefile = '/'.join([subdir, 'Makefile'])
                makefile = '/'.join([subdir, 'makefile'])
                configure = '/'.join([subdir, 'configure'])
        # Make sure there is make file before it really starts
        if u.isfile(Makefile) or u.isfile(makefile) or u.isfile(configure):
            os.system(MAKE)
            if INSTALL is not None:
                u.RunCommand(INSTALL).run()
            else:
                # manual install: copy artifacts from source and subdir
                u.manualInstall(srcDir(testname), binDir())
                u.manualInstall(subdir, binDir())
        else:
            raise Exception("No necessary Makefile found!")
    else:
        u.logger.warning("%s -- Not necessary to build running binaries."% testname)
def profit(d: Dict[str, any]) -> int:
    """Compute total profit from a product record.

    ``d`` must be a non-empty dict with float 'sell_price' and
    'cost_price' plus an int 'inventory'.  Returns revenue minus cost.
    """
    require(isinstance(d, dict), f"Expected a 'dictionary', '{type(d).__name__}' given")
    require(bool(d), 'Cannot perform profit calculation, empty dictionary given')
    require_dictionary_keys_exist(d, 'sell_price', 'cost_price', 'inventory')
    # Validate each field's type with a single data-driven pass.
    for key, expected_type in (
        ('sell_price', float),
        ('cost_price', float),
        ('inventory', int),
    ):
        require_dictionary_value_type(d, key, expected_type)
    return d['sell_price'] * d['inventory'] - d['cost_price'] * d['inventory']
def validate_id(item, errors):
    """Check one item for id presence and validity.

    A missing id raises ValueError (through utils.require); a malformed
    or non-unique id only appends a message to ``errors``.
    """
    item_id = item.id()
    utils.require(item_id is not None, ValueError("Some items do not own the id"))
    match = const.ID_REGEXP.match(item_id)
    if match is None:
        errors.add("Item id doesn't match ID_REGEXP")
    occurrences = item_index["id"][item_id]
    if len(occurrences) != 1:
        errors.add("Item id is not unique")
def int_to_str(number: int) -> str:
    """Convert an integer to its decimal string form without using str().

    Builds the digits via the sibling helpers digit_count()/digit() and
    prefixes '-' for negative input.
    """
    require(number is not None, "Expected an integer, 'None' given")
    require(
        isinstance(number, int),
        f"Expected an integer, '{type(number).__name__}' given"
    )
    magnitude = abs(number)
    total = digit_count(number)
    # digit(magnitude, i) yields the i-th digit from the least-significant
    # end, so walk positions high-to-low to emit the most-significant
    # digit first (assumes digit() is side-effect-free).
    chars = [chr(ord('0') + digit(magnitude, pos)) for pos in range(total - 1, -1, -1)]
    body = ''.join(chars)
    return body if number >= 0 else '-' + body
def check_id(item, errors):
    """Validate an item's id.

    Raises ValueError via utils.require when the id is absent; format and
    uniqueness violations are recorded in ``errors`` instead of raising.
    """
    item_id = item.id()
    utils.require(
        item_id is not None,
        ValueError("Some items do not own the id")
    )
    valid_format = bool(const.ID_REGEXP.match(item_id))
    if not valid_format:
        errors.add("Item id doesn't match ID_REGEXP")
    registered = item_index["id"][item_id]
    if len(registered) != 1:
        errors.add("Item id is not unique")
def __convert_int_to_base(number: int, base: int) -> str:
    """Render a non-negative integer in the given base.

    Digits are produced least-significant first via repeated divmod and
    then reversed; each digit is mapped to its character by the sibling
    __map_number_to_letter helper.
    """
    require(
        number >= 0,
        f'Only positive integers and zero are allowed, "{number}" given'
    )
    if number == 0:
        return "0"
    letters = []
    remaining = number
    while remaining:
        remaining, rem = divmod(remaining, base)
        letters.append(__map_number_to_letter(rem))
    # Reverse to most-significant-first order before joining.
    return __tuple_to_string(tuple(reversed(letters)))
def getStnDates(request):
    """HTTP handler: return the available data dates for one station.

    Requires the 'stn' query parameter; responds with an ErrorResponse
    when it is missing or when no data file is readable.
    """
    from wrcc.wea_server.libwea.products.listers import getStnDates

    # 'stn' is the only mandatory query parameter for this endpoint.
    missing = require(request, ['stn'])
    if missing:
        return ErrorResponse(missing)

    station = request.args.get('stn')
    try:
        dates = getStnDates(station)
    except IOError:
        # Station exists but its data files could not be read.
        return ErrorResponse("No data available.")
    return JsonResponse(dates)
def getDataSingleDay(request):
    """HTTP handler: return one day of station data as JSON.

    Required query parameters: 'stn' (station id) and 'sD' (the day).
    Optional: 'units' — defaults to 'N' (native units).
    """
    from wrcc.wea_server.libwea.products.listers import getDataSingleDay

    missing = require(request, ['stn', 'sD'])
    if missing:
        return ErrorResponse(missing)

    station = request.args.get('stn')
    day = parse_date(request.args.get('sD'))
    units = request.args.get('units', 'N')  # N (native) units by default
    try:
        payload = getDataSingleDay(station, day, units_system=units)
    except IOError:
        return ErrorResponse("No data available.")
    return JsonResponse(payload)
def getMostRecentData(request):
    """HTTP handler: return the most recent data for a station.

    Requires 'stn'; 'eD' (end date) and 'units' are optional — units
    default to 'N' (native).
    """
    from wrcc.wea_server.libwea.products.listers import getMostRecentData

    missing = require(request, ['stn'])
    if missing:
        return ErrorResponse(missing)

    station = request.args.get('stn')
    end_date = parse_date(request.args.get('eD', None))
    units = request.args.get('units', 'N')  # N (native) units by default
    try:
        payload = getMostRecentData(station, end_date, units_system=units)
    except IOError:
        return ErrorResponse("No data available.")
    return JsonResponse(payload)
def setup(config):
    """Load the VHosts from every directory in Application.vhostDirs.

    Each non-private *.py file in a vhost directory must provide a VHost
    class; one instance per file is created, loaded and built.  Populates
    Application.vhosts (ip -> port -> [vhost]), Application.lookup
    ("host:port:ip" -> vhost) and Application.defaultHost (int port ->
    first vhost seen on that port).
    """
    # Accumulate across ALL directories.  Previously these dicts were reset
    # inside the loop while being assigned after it, so only the last
    # directory's vhosts survived — and an unreadable first (and only)
    # directory led to a NameError on the final assignment.
    vhosts = {}
    lookup = {}
    for directory in Application.vhostDirs:
        # get files in the directory
        try:
            files = os.listdir(directory)
        except Exception as ex:
            # .warning(): .warn() is a deprecated alias
            logging.getLogger().warning(
                'Invalid VHosts Directory: %s. Error: %s' % (directory, ex))
            continue
        files.sort()
        for fname in files:  # renamed: 'file' shadowed the builtin
            # Only VHost modules: *.py and not '_'-prefixed (private).
            if fname[-3:] != '.py' or fname[0:1] == '_':
                continue
            klass = utils.require(directory + '/' + fname, 'VHost')
            vhost = klass(config)
            vhost.load()
            vhost.build()
            # vhosts[ip][port][i] = vhost — dict.has_key() was removed in
            # Python 3 (the file already uses py3 'except ... as' syntax).
            bucket = vhosts.setdefault(vhost.ip, {}).setdefault(vhost.port, [])
            iPort = int(vhost.port)
            if iPort not in Application.defaultHost:
                Application.defaultHost[iPort] = vhost
            pos = len(bucket)
            bucket.append(vhost)
            for host in vhost.host:
                # @todo: the order is missing here
                lookup["%s:%s:%s" % (host, vhost.port, vhost.ip)] = bucket[pos]
    Application.vhosts = vhosts
    Application.lookup = lookup
def setup(config):
    """Load the VHosts.

    Scans each directory in Application.vhostDirs for non-private *.py
    modules exposing a VHost class, instantiates/loads/builds each one,
    and fills Application.vhosts, Application.lookup and
    Application.defaultHost.
    """
    # Hoisted out of the directory loop: resetting these per directory and
    # assigning after the loop kept only the last directory's results (and
    # NameError'd if the sole directory was unreadable).
    vhosts = {}
    lookup = {}
    for directory in Application.vhostDirs:
        # get files in the directory
        try:
            files = os.listdir(directory)
        except Exception as ex:
            # warning() instead of the deprecated warn() alias
            logging.getLogger().warning('Invalid VHosts Directory: %s. Error: %s' % (directory,ex))
            continue
        files.sort()
        for fname in files:  # 'file' shadowed the builtin name
            if fname[-3:]!='.py' or fname[0:1] == '_':
                continue
            klass = utils.require(directory+'/'+fname,'VHost')
            vhost = klass(config)
            vhost.load()
            vhost.build()
            # vhosts[ip][port][i] = vhost; has_key() no longer exists on
            # Python 3 dicts, so use 'in' / setdefault instead.
            per_ip = vhosts.setdefault(vhost.ip, {})
            bucket = per_ip.setdefault(vhost.port, [])
            iPort = int(vhost.port)
            if iPort not in Application.defaultHost:
                Application.defaultHost[iPort] = vhost
            pos = len(bucket)
            bucket.append(vhost)
            for host in vhost.host:
                # @todo: the order is missing here
                lookup["%s:%s:%s" % (host, vhost.port, vhost.ip)] = bucket[pos]
    Application.vhosts = vhosts
    Application.lookup = lookup
def str_to_int(number: str) -> int:
    """Parse a signed decimal integer from a string without int() parsing.

    Accepts an optional single leading '+' or '-' followed by ASCII
    digits; anything else fails via ``require``.
    """
    require(number is not None, "Expected a string, 'None' given")
    require(
        isinstance(number, str),
        f"Expected a string, '{type(number).__name__}' given"
    )
    # Accept at most ONE leading sign character.  The previous code used
    # lstrip("-+"), which stripped any run of signs, so malformed inputs
    # like "--5" or "+-5" passed validation and silently parsed as -5.
    digits = number[1:] if number[:1] in ('-', '+') else number
    require(
        digits.isdigit(),
        f"Given string does not contain an integer, '{number}' given"
    )
    value = 0
    for char_dig in digits:
        # Horner accumulation: equivalent to summing digit * 10**position.
        value = value * 10 + (ord(char_dig) - ord('0'))
    return -value if number.startswith('-') else value
from visitor import visitor
from entity import entity, action, cmd
from utils import require
from os import environ
from logger import logger
import event
from namespace import namespace
from test import suite, test, run_test

# Preload user-supplied modules before anything else runs.
# DVPY_PRELOAD is a colon-separated list of module names (without the
# '.py' extension); each one is loaded through the project's require().
if 'DVPY_PRELOAD' in environ:
    for f in environ['DVPY_PRELOAD'].split(':'):
        logger.info('loading file ' + f + '.py')
        require(f)
def main(): """ Toil RNA-seq Workflow Computational Genomics Lab, Genomics Institute, UC Santa Cruz RNA-seq samples are trimmed, QCed, combined, aligned, and quantified: - CutAdapt - FastQC - STAR -> RSEM - Kalisto - Hera Quickstart: 1. Type `toil-rnaseq generate` to create an editable manifest and config. 2. Parameterize the workflow by editing the config. 3. Fill in the manifest with information pertaining to your samples. 4. Type `toil-rnaseq run ./jobStore` to execute the workflow locally. Please read the README before use and check the github wiki for additional details: https://github.com/BD2KGenomics/toil-scripts/tree/master/src/toil_scripts/rnaseq_cgl """ # Process command line arguments args = cli() # Parse subparsers related to config and manifest config_path = os.path.join(os.getcwd(), 'config-toil-rnaseq.yaml') manifest_path = os.path.join(os.getcwd(), 'manifest-toil-rnaseq.tsv') if args.command == 'generate': generate_file(config_path, generate_config) generate_file(manifest_path, generate_manifest) elif args.command == 'config-input': user_input_config(config_path) elif args.command == 'manifest-input': user_input_manifest(manifest_path) # Workflow execution elif args.command == 'run': # Parse manifest require( os.path.exists(args.manifest), '{} not found. Run "toil-rnaseq generate"'.format(args.manifest)) samples = parse_samples(args.manifest) # Parse config and store as Expando object for dot attribute accession require(os.path.exists(args.config), '{} not found. Run "toil-rnaseq generate"'.format(args.config)) config = rexpando(yaml.load(open(args.config).read())) config.maxCores = int( args.maxCores ) if args.maxCores else sys.maxint # Set maxCores from args # Sanity check configuration file config = configuration_sanity_checks(config) # Start the workflow, calling map_job() to run the workflow for each sample with Toil(args) as toil: if args.restart: toil.restart() else: toil.start(Job.wrapJobFn(map_job, workflow, samples, config))
def main():
    """Entry point for dv.py.

    Starts the agent server process, configures the GCF and job engines,
    evaluates the requested expressions/tests under the visitor/join/spawn
    framework, drives the engines to completion, and reports pass/fail.
    """
    global server_p
    # parsing arguments
    (opts, args) = args_parse()
    in_q = Queue()
    out_q = Queue()
    logger.info('running dv.py')
    # start agent server in its own process; it reports its listening
    # address back through in_q once it is up
    #loop = asyncio.get_event_loop()
    server_p = Process(target=start_agent_server, args=(
        in_q,
        out_q,
        path.abspath(opts.out_dir),
        opts.verbose,
    ))
    #server_p = Thread(target=start_agent_server, args=(loop, in_q, out_q,))
    server_p.start()
    try:
        # waiting for server started
        host, port = in_q.get()
        #logger.info("agent server started on {}:{}".format(host, port))
        # set gcf engine — 'local' and 'lsf' are the only supported backends
        if opts.gcf == 'local':
            GCFEngine.set_imp(
                Local(host, port, path.abspath(opts.out_dir), opts.verbose))
        else:
            if opts.gcf == 'lsf':
                GCFEngine.set_imp(
                    LSF(host, port, path.abspath(opts.out_dir), opts.verbose))
            else:
                raise Exception('unsupported gcf engine {}'.format(opts.gcf))
        # config job engine
        JobEngine.connect(in_q, out_q)
        JobEngine.out_dir = path.abspath(opts.out_dir)
        logger.info('max agents = {}'.format(opts.max_agents))
        JobEngine.max_cmds = opts.max_agents
        # load files
        require('loader')
        if opts.patchfile:
            for f in opts.patchfile:
                require(f)
        # evaluate expressions
        @visitor
        def top():
            @join
            def body(self):
                if opts.expr:
                    for e in opts.expr:
                        # 'ee=e' binds each expression as a default argument so
                        # every spawned closure keeps its own value
                        @spawn(self)
                        def body(ee=e):
                            res = eval(ee, get_ns(), get_ns())
                            if type(res) == GeneratorType:
                                yield from res
                            return res
                if opts.test:
                    @spawn(self)
                    def body():
                        res = run_test(*opts.test, action=opts.action, where=opts.where)
                        if type(res) == GeneratorType:
                            yield from res
                        return res
            yield from body()
        # run: drive both engines until neither is waiting on work
        while True:
            JobEngine.run()
            Scheduler.run()
            if JobEngine.is_waiting() or Scheduler.is_waiting():
                # NOTE(review): bare 'next' is a no-op expression here;
                # 'continue' was probably intended (the loop continues anyway)
                next
            else:
                break
        for t in Test.test_status:
            if Test.test_status[t] == 'passed':
                logger.info("*** test '{}' passed".format(t))
            else:
                logger.error("*** test '{}' failed".format(t))
        if top.exception:
            # Recursively log a (possibly nested) exception tree: Exceptions
            # get their traceback at debug level and their args recursed into;
            # non-Exception leaves are logged as errors.
            def print_exception(e, indent=0):
                if isinstance(e, Exception):
                    for l in extract_tb(e.__traceback__):
                        logger.debug((" " * indent) + str(l))
                if not isinstance(e, Exception):
                    logger.error((" " * indent) + str(e))
                    return
                for i in e.args:
                    if not isinstance(i, list):
                        i = [i]
                    for j in i:
                        print_exception(j, indent + 2)
            print_exception(top.exception)
            logger.error('dv.py failed')
            #raise top.exception
        else:
            logger.info('dv.py passed')
    finally:
        # always signal completion and clean up the server process
        event.notify('dvpy_done')
        cleanup()
#!/usr/bin/env python # -*- coding: utf-8 -*- """ @author: Serge Watchou """ from utils import require require("pypubsub") from pubsub import pub from model import ChromaAnalyse from view import RootView, ConfirmAnalyseFrame, ConfigTimeFrame, Popup, InsertUSBFrame, GraphFrame, HomeFrame from hardware import Broche import time __all__ = [ 'ProviderActionForFrame', ] class ProviderActionForFrame(object): def __init__(self, controller=None, *args, **kw): super(ProviderActionForFrame).__init__(*args, **kw) self.controller = controller self.animationForGraphFrameFunction = None def setController(self, controller): self.controller = controller def getActionWhenQuit(self, frameName): method_name = 'action_when_quit_' + frameName method = getattr( self, method_name,
def require_dictionary_value_type(d: dict, k: any, t):
    """Require that the value stored under key ``k`` is an instance of ``t``."""
    value = d[k]
    require(
        isinstance(value, t),
        f"Expected a '{t.__name__}', '{type(value).__name__}' given"
    )
def __check_console_input(args: List[str]):
    """Validate raw console arguments: a filename plus at least one keyword."""
    # Check the two minimum-length requirements in order, so the most
    # fundamental problem (no filename at all) is reported first.
    for minimum, message in ((1, 'No filename was given'),
                             (2, 'No keywords were given')):
        require(len(args) >= minimum, message)
def main():
    """dv.py driver: launch the agent server, wire up the engines, run the
    requested expressions/tests, then log per-test results and overall status.
    """
    global server_p
    # parsing arguments
    (opts, args) = args_parse()
    in_q = Queue()
    out_q = Queue()
    logger.info('running dv.py')
    # start agent server (separate process; address arrives via in_q)
    #loop = asyncio.get_event_loop()
    server_p = Process(target=start_agent_server, args=(in_q, out_q, path.abspath(opts.out_dir), opts.verbose,))
    #server_p = Thread(target=start_agent_server, args=(loop, in_q, out_q,))
    server_p.start()
    try:
        # waiting for server started
        host, port = in_q.get()
        #logger.info("agent server started on {}:{}".format(host, port))
        # set gcf engine: only 'local' and 'lsf' backends are supported
        if opts.gcf == 'local':
            GCFEngine.set_imp(Local(host, port, path.abspath(opts.out_dir), opts.verbose))
        else:
            if opts.gcf == 'lsf':
                GCFEngine.set_imp(LSF(host, port, path.abspath(opts.out_dir), opts.verbose))
            else:
                raise Exception('unsupported gcf engine {}'.format(opts.gcf))
        # config job engine
        JobEngine.connect(in_q, out_q)
        JobEngine.out_dir = path.abspath(opts.out_dir)
        logger.info('max agents = {}'.format(opts.max_agents))
        JobEngine.max_cmds = opts.max_agents
        # load files
        require('loader')
        if opts.patchfile:
            for f in opts.patchfile:
                require(f)
        # evaluate expressions
        @visitor
        def top():
            @join
            def body(self):
                if opts.expr:
                    for e in opts.expr:
                        # default arg 'ee=e' pins the loop value per closure
                        @spawn(self)
                        def body(ee=e):
                            res = eval(ee, get_ns(), get_ns())
                            if type(res) == GeneratorType:
                                yield from res
                            return res
                if opts.test:
                    @spawn(self)
                    def body():
                        res = run_test(*opts.test, action=opts.action, where=opts.where)
                        if type(res) == GeneratorType:
                            yield from res
                        return res
            yield from body()
        # run until neither engine is waiting on outstanding work
        while True:
            JobEngine.run()
            Scheduler.run()
            if JobEngine.is_waiting() or Scheduler.is_waiting():
                # NOTE(review): bare 'next' evaluates the builtin and does
                # nothing; 'continue' was likely meant (behavior unaffected)
                next
            else:
                break
        for t in Test.test_status:
            if Test.test_status[t] == 'passed':
                logger.info("*** test '{}' passed".format(t))
            else:
                logger.error("*** test '{}' failed".format(t))
        if top.exception:
            # Walk the nested exception structure: log tracebacks of real
            # Exceptions at debug level, non-Exception leaves as errors,
            # and recurse into each Exception's args.
            def print_exception(e, indent=0):
                if isinstance(e, Exception):
                    for l in extract_tb(e.__traceback__):
                        logger.debug((" "*indent)+str(l))
                if not isinstance(e, Exception):
                    logger.error((" "*indent)+str(e))
                    return
                for i in e.args:
                    if not isinstance(i, list):
                        i = [i]
                    for j in i:
                        print_exception(j, indent+2)
            print_exception(top.exception)
            logger.error('dv.py failed')
            #raise top.exception
        else:
            logger.info('dv.py passed')
    finally:
        # notify listeners and tear down the server process regardless
        event.notify('dvpy_done')
        cleanup()
def __is_argument_a_filename(filename: str):
    """Require that *filename* is non-blank and uses only safe characters.

    Allowed shape: word characters/hyphens, optionally followed by one or
    more dot-separated alphanumeric extensions.
    """
    require(filename.strip(), "Filename cannot be empty")
    pattern = r'^[\w-]+(\.[a-zA-Z0-9]+)*$'
    matched = re.match(pattern, filename)
    require(matched is not None, 'Filename has invalid characters')
from visitor import visitor
from entity import entity, action, cmd
from utils import require
from os import environ
from logger import logger
import event
from namespace import namespace
from test import suite, test, run_test

# Optional preload hook: DVPY_PRELOAD holds a colon-separated list of
# module names (no '.py' suffix), each loaded via the project's require().
if "DVPY_PRELOAD" in environ:
    for f in environ["DVPY_PRELOAD"].split(":"):
        logger.info("loading file " + f + ".py")
        require(f)
def __check_console_input(str_numbers: list):
    """Require at least one argument beyond the program name itself."""
    arg_count = len(str_numbers)
    require(arg_count > 1, "No arguments provided")
def require_dictionary_keys_exist(d: dict, *key_args):
    """Require every key in ``key_args`` to be present in dictionary ``d``.

    Fails (via ``require``) on the first missing key.
    """
    template = "'{}' data is missing from dictionary"
    for key in key_args:
        require(key in d, template.format(key))