def main():
    try:
        print(t.bold("{} version {}\n").format(name, version))
        if len(sys.argv) == 1 or (len(sys.argv) == 2 and sys.argv[1] in ["-h", "help", "--help"]):
            print(
                "ChemProfileSeq toolchain for analysing chemically induced "
                "stops or mutations from NGS sequencing data.\n\n"
            )
        argh.dispatch_commands(
            sorted([
                align,
                check,
                count,
                enrichment_table,
                plotStops,
                logo,
            ], key=lambda x: x.__name__))
    except KeyboardInterrupt:
        print("\nInterrupted; data might be corrupted.")
    except QurallkyException as e:
        print(t.red("An error occurred:"))
        print("\t", e)
    except Exception as e:
        print(e)
        if debug:
            raise e
def dispatch_commands(_globals, _name_):
    argh.dispatch_commands(
        sorted([
            v for k, v in _globals.items()
            if type(v) == types.FunctionType
            and v.__module__ == _name_
            and not k.startswith('_')
        ], key=lambda x: x.__name__))
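# A minimal sketch of how a helper like the one above is typically wired up;
# 'myutils' is a hypothetical module holding that helper. Every public function
# defined in the calling module becomes a subcommand, while imported and
# underscore-prefixed names are filtered out by the list comprehension.
from myutils import dispatch_commands  # the helper defined above


def greet(name='world'):
    print('Hello, {}!'.format(name))


def _helper():
    pass  # skipped: name starts with an underscore


if __name__ == '__main__':
    dispatch_commands(globals(), __name__)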
def main():
    if '--version' in sys.argv:
        print("This is bioconda-utils version", VERSION)
        sys.exit(0)
    argh.dispatch_commands([
        build, dag, dependent, do_lint, duplicates, update_pinning,
        bioconductor_skeleton, clean_cran_skeleton, autobump, bot
    ])
def main(): """Main entry point""" _create_components_objects() argh.dispatch_commands([ validate_command, install, configure, remove, start, stop, restart, create_internal_certs, create_external_certs, create_pkcs12, sanity_check, add_networks ])
def main():
    sys.path.append(os.getcwd())
    argh.dispatch_commands([
        init,
        build,
        runserver,
        extract_messages,
    ])
def __main__():
    argh.dispatch_commands([
        ssh,
        remote,
        local,
        rehash
    ])
def main():
    with contextualized_tracebacks(
            ['submission', 'table', 'reading_file']) as dcontext:
        shared_dcontext.dcontext = dcontext
        argh.dispatch_commands([
            add,
            add_demo_subset,
            add_demo_mini,
        ])
def main():
    greeting()
    argh.dispatch_commands([
        run,
        summarize,
        compare,
        combine_comparisons,
    ])
def main():
    argh.dispatch_commands([
        add_generic,
        animal_info,
        animals_id,
        animals_info,
        append_parameter,
        cage_info,
        further_cages,
    ])
def main():
    argh.dispatch_commands([
        build, dag, dependent, lint, duplicates,
        bioconductor_skeleton, pypi_check, clean_cran_skeleton,
    ])
def _main():
    import argh
    argh.dispatch_commands([
        replicate_sample_man,
        compute_sigs_run,
        check_manifest_replicate_equality,
        check_fasta_replicate_equality,
        check_within_run_replicate_equality,
        compute_run_sigs,
        check_runs_for_equality
    ])
def execute():
    argh.dispatch_commands([
        build,
        serve,
        shell,
        stop,
        pip_build,
        pip_publish,
        push,
        deploy,
    ])
def main():
    """Command dispatcher."""
    setup_console_logger()
    argh.dispatch_commands([
        pre.create_files,
        pre.generate_snp_file,
        cmd.gene_heritability,
        cmd.gw_heritability,
    ])
def _main():
    utils.init_logging(r'logs\main.log')
    out = six.StringIO()
    # Expose all functions in the current module that don't begin with an underscore "_"
    argh.dispatch_commands([
        obj for name, obj in inspect.getmembers(sys.modules[__name__])
        if inspect.isfunction(obj)
        and obj.__module__ == '__main__'
        and not name.startswith('_')
    ], output_file=out)
    print(out.getvalue())
def configure(*args):
    for arg in args:
        add_testcase(*arg)
    cmds = [_test_from_stdio, _test_from_files]
    if len(_testcases):
        @argh.named('included')
        def _test_from_given_testcase():
            for f in _testcases:
                _test_from_files(f[0], f[1])
                break
        cmds.append(_test_from_given_testcase)
    argh.dispatch_commands(cmds)
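# A hedged usage sketch for configure() above: each positional argument is an
# (input, expected-output) pair handed to add_testcase, and registering at
# least one pair makes the extra 'included' command available alongside the
# stdio- and file-based test commands. The file paths are hypothetical.
configure(
    ('tests/case1.in', 'tests/case1.expected'),
    ('tests/case2.in', 'tests/case2.expected'),
)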
def _main():
    logutils.init_logging(f'logs/vgmapf.log')
    out = six.StringIO()
    argh.dispatch_commands(
        [obj for name, obj in inspect.getmembers(sys.modules[__name__])
         if inspect.isfunction(obj) and obj.__module__ == '__main__'
         and not name.startswith('_')],
        output_file=out
    )
    try:
        print(out.getvalue())
    except Exception:
        pprint.pprint(out.getvalue())
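# The two introspection-driven dispatchers above rely on argh.dispatch_commands
# forwarding its keyword arguments to argh.dispatch, whose output_file
# parameter defaults to sys.stdout; pointing it at a StringIO captures command
# output for post-processing. A self-contained sketch (the command itself is
# hypothetical):
import sys
from io import StringIO

import argh


def hello():
    # argh writes a command's return value to output_file
    return 'hello from the command'


if __name__ == '__main__':
    out = StringIO()
    argh.dispatch_commands([hello], output_file=out)
    sys.stdout.write(out.getvalue().upper())  # post-process before printing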
def main(self):
    try:
        argh.dispatch_commands([
            self.companies_sync,
            self.departments_sync,
            self.events_sync,
            self.invoices_sync,
            self.projects_sync,
            self.users_sync,
            self.teamleader_sync,
            self.teamleader_status
        ])
    except PSQLError as e:
        logger.error(e)
        raise e
def dispatch_funcs(funcs, interface=None, *args, **kwargs):
    if interface is None:
        print("Choices of interface:")
        print(", ".join(['cli', 'ws', 'dash']))
        return
    if not isinstance(funcs, (tuple, list)):
        funcs = [funcs]
    if interface == 'cli':
        print('Hi!')
        from argh import dispatch_commands
        return dispatch_commands(funcs)
    elif interface == 'ws':
        from py2api.py2rest.app_maker import dflt_run_app_kwargs, dispatch_funcs_to_web_app
        app = dispatch_funcs_to_web_app(funcs, *args, **kwargs)
        run_app_kwargs = kwargs.get('run_app_kwargs', dflt_run_app_kwargs)
        app.run(**run_app_kwargs())
    elif interface == 'dash':
        from py2dash.app_makers import dispatch_funcs
        app = dispatch_funcs(funcs)
        debug = kwargs.get('debug', False)
        app.run_server(debug=debug)
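# A minimal driver for dispatch_funcs above; the add function is hypothetical
# and stands in for any callable worth exposing. The 'cli' branch routes
# through argh.dispatch_commands, while 'ws' and 'dash' require py2api and
# py2dash respectively.
def add(x, y):
    return int(x) + int(y)


if __name__ == '__main__':
    dispatch_funcs([add], interface='cli')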
def main():
    argh.dispatch_commands([
        # network summary and graph file
        cmd.network_summary,
        cmd.network_graphml,
        cmd.get_connected_components,
        # geneset network topology analyses
        cmd.test_topology_total_degree,
        cmd.test_topology_internal_degree,
        cmd.test_topology_module,
        cmd.test_topology_sp,
        cmd.test_topology_rwr,
        cmd.test_diffusion_hotnet,
        # comparison analysis
        cmd.test_association_sp,
        cmd.test_association_rwr,
        # building functions
        cmd.build_distance_matrix,
        cmd.build_rwr_diffusion,
        # paint
        paint.paint_datasets_stats,
        paint.paint_comparison_matrix,
        paint.plot_adjacency,
        paint.paint_volcano_plot,
        paint.paint_summary_gnt,
        # utils
        utils.convert_gmt,
        utils.geneset_from_table,
        utils.convert_csv,
        utils.generate_group_gmt,
        # simulations
        bm.generate_gnt_sbm,
        bm.generate_gna_sbm,
        dm.generate_hdn_network,
        bm.generate_sbm_network,
        bm.generate_sbm2_network,
        dm.hdn_add_partial,
        dm.hdn_add_extended,
        dm.hdn_add_branching,
    ])
def main():
    """Main entry point"""
    # Set the umask to 0022; restore it later.
    current_umask = os.umask(CFY_UMASK)
    argh.dispatch_commands([
        validate_command,
        install,
        configure,
        remove,
        start,
        stop,
        restart,
        create_internal_certs,
        create_external_certs,
        create_pkcs12,
        sanity_check,
        add_networks,
        update_encryption_key,
    ])
    os.umask(current_umask)
def main():
    import argh
    from gurgle.util import resolve_str_specification

    def list_possible_inputs():
        print(*obj_store, sep='\n')
        print('''
Example usages:
    To get a list of elements:
        gurgle-terminal list-possible-inputs
    For volume tracking:
        gurgle-terminal launch chk_std print_ascii_levels_01_70
    For online spectral projector outliers tracking:
        gurgle-terminal launch compute_outlier print_ascii_levels_2_70
''')

    argh.dispatch_commands(
        [resolve_str_specification(obj_store)(launch), list_possible_inputs])
def main():
    argh.dispatch_commands([kmer_lca, annotate_diamond, filter_taxa, matrix])
    generate_text(model, generate_length, vocab_size, ix_to_char)

    if not weights == '':
        model.load_weights(weights)
        epochs = int(weights[weights.rfind('_') + 1:weights.find('.')])
    else:
        epochs = 0

    # Training if there are no trained weights specified
    if mode == 'train' or weights == '':
        while True:
            print('\n\nEpoch: {}\n'.format(epochs))
            model.fit(X, y, batch_size=batch_size, verbose=1, epochs=1)
            epochs += 1
            generate_text(model, generate_length, vocab_size, ix_to_char)
            if epochs % 10 == 0:
                model.save_weights('checkpoint_layer_{}_hidden_{}_epoch_{}.hdf5'.format(
                    layer_num, hidden_dim, epochs))
    # Else, load the trained weights and perform generation only
    elif weights != '':
        model.load_weights(weights)
        generate_text(model, generate_length, vocab_size, ix_to_char)
        print('\n\n')
    else:
        print('\n\nNothing to do!')


if __name__ == '__main__':
    argh.dispatch_commands([run, mangle_data])
def main():
    argh.dispatch_commands([
        build, dag, dependent, lint, duplicates, update_pinning,
        bioconductor_skeleton, clean_cran_skeleton, autobump, bot
    ])
    'GetFollowerIDsPaged', 'GetFriendIDsPaged',
    'GetFollowerIDs', 'GetFriendIDs',
    'GetFollowersPaged', 'GetFriendsPaged',
    'GetFollowers', 'GetFriends',
    'GetUser',
    'GetDirectMessages', 'GetSentDirectMessages',
    'GetFavorites', 'GetMentions',
    'GetSubscriptions', 'GetMemberships',
    'GetListsList', 'GetListTimeline',
    'GetListMembersPaged', 'GetListMembers',
    'GetListsPaged', 'GetLists',
    'GetStreamSample', 'GetStreamFilter', 'GetUserStream'
]

if __name__ == '__main__':
    cmds = [getattr(api, k) for k in cmds]
    import argh
    argh.dispatch_commands(cmds)
def main():
    dispatch_commands([list_packages, search_packages, install_package, cdn_snippet])
# print(" copy create date from video file to jpg geotag file") cmd("-o", dst, TAG_FILE, _err=sys.stderr) #, _out=sys.stdout if tag_file_time_shift != 0: # print(f" time shift {tag_file_time_shift} for tmp jpg geotag file") cmd = sh.exiftool.bake( "-overwrite_original", *_exiftool_time_shift_option(tag_file_time_shift, EXIF_DATE_TAGS)) cmd(dst, _out=sys.stdout, _err=sys.stderr) print(f'\t{dst} created') print('====== geotag for all tmp jpg files ======') image(video2tag.values(), gpslog, overwrite_original=True) print('====== copy GPS from tmp jpg to video ======') for vfile in fpath: geotag_jpg_file = video2tag[vfile] copy_gps(geotag_jpg_file, vfile, time_shift=time_shift) os.unlink(geotag_jpg_file) if __name__ == "__main__": argh.dispatch_commands([ shift_time, copy_time, copy_gps, video, image, ])
def main():
    argh.dispatch_commands([show, run, clean, image, version, rehash])
    driver.find_element_by_id('button-BTC').click()
    maybe_close(close)


def ghs(close=True):
    start()

    def _get_sell_orders():
        while True:
            so = sell_orders().next()
            if so:
                return so

    while True:
        if bitcoins_top() < config.balance_threshold:
            logging.debug(
                "Bitcoin balance ({0}) less than {1}. Exiting".format(
                    bitcoins_top(), config.balance_threshold))
            break
        else:
            logging.info("Getting sell orders")
            so = _get_sell_orders()
            logging.info("Ordering hashes")
            order_hashes(so)
    maybe_close(close)


if __name__ == '__main__':
    argh.dispatch_commands([ghs, withdraw])
def xcodebuild(scheme, workspace, config, commands):
    cmd = [
        "xcrun", "xcodebuild",
        "-workspace", workspace,
        "-scheme", scheme,
        "-configuration", config
    ]
    cmd = cmd + commands
    cmd.append('BUILD_DIR=%s' % (build_base_dir))
    try:
        output = check_string_output(cmd)
        return output
    except subprocess.CalledProcessError as e:
        raise BuildError(str(e))


def check_string_output(command):
    return subprocess.check_output(command).decode().strip()


def sign_app(app_path):
    sign.sign_everything_in_app(app_path, key=signing_key)


def package_app(app_path, image_path, image_name):
    package.package(app_path, image_path, image_name)


if __name__ == "__main__":
    script_dir = os.path.dirname(os.path.abspath(__file__))
    argh.dispatch_commands([clean, build])
def main():
    argh.dispatch_commands([diagnose])
with open(os.path.join(scratch,"input.oligos"),"w") as out: out.write("primer {} {}\n".format( primer_forward, primer_reverse) ) shutil.copy(pcr_target_fasta,os.path.join(scratch,"pcr_target.fasta")) shutil.copy(reference_ali,os.path.join(scratch,"ref.align")) call_mothur_cmds(mothur_path,[ "get.seqs(fasta=pcr_target.fasta,accnos=pcr_target.accnos)", "pcr.seqs(fasta=current,oligos=input.oligos,keepprimer=F)", "align.seqs(fasta=current,reference=pcr_target.pick.fasta)" ], cwd=scratch) ali_report_file = find_mothur_output(os.path.join(scratch,"*.align.report")) tpl_range = extract_template_ali_range(template_name=pcr_target_idseq,ali_report_file=ali_report_file) start = max(1,tpl_range["start"]-pcr_target_padding) end = min(tpl_range["template_length"],tpl_range["end"]+pcr_target_padding) call_mothur_cmds(mothur_path,[ "pcr.seqs(fasta=pcr_target.pick.fasta,start={},end={})".format(start,end), "align.seqs(fasta=current,reference=ref.align)", "pcr.seqs(fasta=ref.align,ecoli=pcr_target.pick.pcr.align,keepdots=F)" ], cwd=scratch) shutil.move(os.path.join(scratch,"ref.pcr.align"),".") if __name__ == "__main__": argh.dispatch_commands([pcr_reference_alignment])
            d[x['login']] = list(get_followers(user=x['login'], depth=depth - 1))
    return d


def get_following(user=libcache._username, depth=0):
    print('get_following {} depth={}'.format(user, depth))
    d = dict()
    d[user] = list(libcache.get_following(user))
    if depth > 0:
        for x in d[user]:
            d[x['login']] = list(get_following(user=x['login'], depth=depth - 1))
    return d


def run_command_get_output(cmd, shell=True, splitlines=True, do_raise=True):
    print('running: {}'.format(cmd))
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell)
    out, err = p.communicate()
    status = p.returncode
    out = out.decode()
    err = err.decode()
    if splitlines:
        out = out.split('\n')
        err = err.split('\n')
    res = dict(out=out, err=err, status=status)
    if res['status'] != 0 and do_raise:
        raise Exception('some problem: {}'.format(res))
    return res


if __name__ == '__main__':
    import argh
    argh.dispatch_commands([clone_user])
def main():
    # Adapting function names to 1-level hierarchy
    generic.__name__ = 'generic-prep'
    legacy.__name__ = 'legacy-prep'
    argh.dispatch_commands([diagnose, bru2bids, l1, generic, legacy])
def main():
    argh.dispatch_commands([
        filter_comparisons,
        combine_comparisons
    ])
def main():
    argh.dispatch_commands([
        build, dag, dependent, lint, duplicates,
        bioconductor_skeleton, clean_cran_skeleton, autobump
    ])
def dispatch(self):
    argh.dispatch_commands([self.get_args, self.maybe_get_all, self.maybe_get_one,
                            self.get_file, self.check_local_dirty_clean])
        resolve_entity(data)
    else:
        print response.text


def callback(channel, method, header, body):
    data = xmltodict.parse(body)
    print "{0}:".format(data['vmext:Notification']['@type'])
    print body
    resolver = data['vmext:Notification']['vmext:Link']['@href']
    for entity in data['vmext:Notification']['vmext:EntityLink']:
        name = entity['@type']
        id = entity['@id']
        resolve(resolver + id, name)


def consume(host, username, password, virtual_host, queue):
    connection = AmqpConnection(host=host, username=username, password=password,
                                virtual_host=virtual_host)
    connection.receive(callback, queue=queue)


def drain_call(*args, **kwargs):
    print "draining"


def drain(host, username, password, virtual_host, queue):
    connection = AmqpConnection(host=host, username=username, password=password,
                                virtual_host=virtual_host)
    connection.receive(drain_call, queue=queue)


argh.dispatch_commands([consume, drain])
def main():
    argh.dispatch_commands([diagnose, bru2bids])
def main():
    dispatch_commands([start_engine])
def _main():
    import argh
    argh.dispatch_commands(
        [annotate_plasmids, annotate_resistance_genes, annotate_cge])
        './octopress_site_3',
        './octopress_site_4',
    ]
    for site in sites:
        print genpost(gen_posts_dir="{}/source/_posts".format(site), num_posts=num_posts)
        sh.cd(site)
        print sh.rake('generate')
        print sh.rake('push')
        sh.cd("../")


def clock(time_minutes=15):
    from apscheduler.scheduler import Scheduler
    from apscheduler.events import EVENT_JOB_EXECUTED
    print "Starting scheduler"
    sched = Scheduler()
    sched.start()

    def a_g():
        print autogen()

    sched.add_interval_job(a_g, minutes=time_minutes)
    while True:
        pass


if __name__ == "__main__":
    import argh
    argh.dispatch_commands([shell, genpost, autogen, clock])
n_chains, "n_events": n_events, "n_event_types": n_event_types, "batch_size": batch_size, "n_samples": n_samples, "n_burnin": n_burnin, "n_moments": n_moments, "tiny_lognormal_sigma": tiny_lognormal_sigma, "beta_sum": beta_sum, "lognormal_sigma": lognormal_sigma, "rate_gamma_prior": rate_gamma_prior, "hawkes": hawkes } results += [result] if __name__ == "__main__": argh.dispatch_commands([stein, mcmc])
#!/usr/bin/env python
import argh


@argh.aliases('t', 'te')
def test(module, settings=None):
    '''run tests'''
    pass


def run(host='localhost', port=8000, no_reload=False, settings=None):
    '''run dev server'''
    pass


if __name__ == '__main__':
    argh.dispatch_commands([run, test])
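# The aliases above can be exercised in-process: argh.dispatch_commands passes
# extra keyword arguments through to argh.dispatch, whose argv parameter
# overrides sys.argv[1:]. The module name 'myapp' below is hypothetical.
argh.dispatch_commands([run, test], argv=['t', 'myapp'])  # 't' resolves to 'test'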
    And that weblinks go into df.url. That is what we need to recurse into.
    """
    url = urls[name]
    filename = get_hash_pickle_name(name, url)
    if os.path.exists(filename) and not force:
        print("found {}".format(filename))
    else:
        rightmove_object = rightmove_data(url)
        pickle.dump(rightmove_object, open(filename, 'wb'))
    if read:
        print("reading {}".format(filename))
        return pickle.load(open(filename, 'rb'))


def get_hash_pickle_name(name, url):
    # round to hour or day and use that as throttle
    name = name.lower().replace(' ', '_')
    date = datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0).isoformat()
    filename = 'data_appendonly/rightmove/{}_{}_{}.pickle'.format(
        name, hashlib.sha1(url.encode()).hexdigest(), date)
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    return filename


def get_all():
    for k in urls:
        get_data(k, read=False)


if __name__ == '__main__':
    argh.dispatch_commands([get_data, get_all])
    elif not force:
        raise ValueError("File does not exist: {!r}".format(game_file))
    save_assoc(assoc)


def load_assoc():
    if not os.path.exists(ASSOC_FILE):
        return {}
    with open(ASSOC_FILE) as f:
        return json.loads(f.read())


def save_assoc(assoc):
    with open(ASSOC_FILE, 'w') as f:
        f.write(json.dumps(assoc, indent=4) + '\n')


def install(*files):
    assoc = load_assoc()
    game_path = get_game_path()
    if not files:
        files = assoc.keys()
    for path in files:
        dest = os.path.join(game_path, assoc[path])
        print "Installed {} -> {}".format(path, dest)
        shutil.copy2(path, dest)


if __name__ == '__main__':
    argh.dispatch_commands([configure, associate, install])
        logging.debug(unicode(e))


@argh.arg('-d', '--dir', help="target directory where dummy data will be "
          "written into; if skipped, cfg['data_dir'] will be chosen.")
def gen_dummydata(**kwargs):
    td = kwargs['dir']
    if td is None:
        td = cfg['data_dir']
    return _gen_dummydata(td)


@argh.arg('-e', '--type', help="event type")
@argh.arg('-i', '--info', help="event info")
def register_event(**kwargs):
    etype = kwargs['type']
    einfo = kwargs['info']
    print etype, einfo


def run_info():
    for ri in iter_run_info():
        print ri[0]
        print '  ' + str(ri[1])


if __name__ == "__main__":
    argh.dispatch_commands([cache_all, register_cron, run_notebook, gen_dummydata,
                            register_event, check_cache, update_notebooks, run_info])
    A full listing of the directory and all subsequent changes.
    Starts the watch first, then globs.

    $ ./glob_and_watch glob-and-watch ./tmp
    ['2018-07-25T22:57:36', '/path/to/somewhere/tmp/asdf', 'glob']
    ['2018-07-25T22:57:36', '/path/to/somewhere/tmp/jadfs', 'glob']
    ['2018-07-25T22:57:36', '/path/to/somewhere/tmp/jj', 'glob']
    ['2018-07-25T22:57:36', '/path/to/somewhere/tmp/j', 'glob']
    ['2018-07-25T21:57:48', '/path/to/somewhere/tmp/a', ['Created', 'IsFile']]
    ['2018-07-25T21:57:48', '/path/to/somewhere/tmp/b', ['Created', 'IsFile']]
    ['2018-07-25T21:57:52', '/path/to/somewhere/tmp/a', ['Created', 'PlatformSpecific', 'Updated', 'IsFile']]
    ['2018-07-25T21:57:55', '/path/to/somewhere/tmp/a', ['Created', 'PlatformSpecific', 'Updated', 'IsFile']]
    ['2018-07-25T21:57:56', '/path/to/somewhere/tmp/a', ['Created', 'Removed', 'PlatformSpecific', 'Updated', 'IsFile']]
    ['2018-07-25T21:57:56', '/path/to/somewhere/tmp/b', ['Created', 'Removed', 'IsFile']]
    """
    p_watch = watch(dirname)
    p_glob = glob(dirname)
    for x in itertools.chain(p_glob, p_watch):
        assert len(x) == 3, 'bad length {}'.format(x)
        yield x


def watch(dirname):
    """ watches a directory for events """
    for x in Watch(dirname):
        yield x


if __name__ == "__main__":
    argh.dispatch_commands([glob, watch, glob_and_watch])
def display(interface=None):
    for iface, entries in _load_mac_table().items():
        if interface and iface != interface:
            continue
        print 'Interface %s (%i items)' % (iface, len(entries))
        for ip, mac in sorted(entries):
            print '\t %s \t %s' % (ip, mac)
        print


def flush(ip=None, interface=None):
    if not ip:
        table = _load_mac_table()
        entries = table.get(interface, []) if interface else itertools.chain(*table.values())
    else:
        entries = [(ip, None)]
    for ip, _ in entries:
        sh.arp('-d', ip)
        logging.info('Flushed %s' % ip)


if __name__ == '__main__':
    utils.assure_root()
    utils.config_graceful_exit()
    logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
    argh.dispatch_commands([poison, flush, monitor, display])
# -*- coding: utf-8 -*-
import sys
import os

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

import argh

from syne_audio.run import run

argh.dispatch_commands([run])
    unpack = Unpack(opts)
    unpack.unpack()


@argh.arg('src_dir', type=str, help='path to the source data directory')
@argh.arg('min_ind', type=int, help='starting index')
@argh.arg('max_ind', type=int, help='ending index')
def unpack_scan(src_dir, min_ind, max_ind):
    print('start unpacking...')
    opts = Options(UNPACK_OPTS_DICT)
    opts.src_dir = src_dir
    opts.EXP_START = min_ind
    opts.EXP_END = max_ind
    opts.NUM_EXP = -1
    unpack = UnpackScan(opts)
    unpack.unpack()


@argh.arg('folder', type=str, help='folder location to place the option file')
@argh.arg('-f', '--filename', type=str, help='output file name')
def genopts(folder, filename='opts.json'):
    opts = RECON_OPTS_DICT
    with open(os.path.join(folder, filename), 'w') as f:
        f.write(json.dumps(opts, indent=2))
        f.write('\n')


if __name__ == '__main__':
    argh.dispatch_commands((unpack, unpack_scan, reconstruct,
                            reconstruct_gui, screenspeed, genopts))
    ]
    if len(expected) > 0:
        plt.scatter(expected, actual)
        low = min(min(expected), min(actual))
        high = max(max(expected), max(actual))
        plt.plot(
            [low, high],
            [low, high],
            label="Perfect",
        )
        plt.title("Expected vs actual task completion times")
        plt.xlabel("Expected time (s)")
        plt.ylabel("Actual Time (s)")
        plt.show()


def write(tasks: TaskDict, active_task: Optional[Task], taskdir: Path) -> None:
    """Write the todo task list and currently active task to the given task directory."""
    pickle.dump(tasks, open(taskdir / TASKS_FILENAME, "wb"))
    active_task_file = open(taskdir / ACTIVE_TASK_FILENAME, "w")
    active_task_file.writelines(
        [active_task.name if active_task is not None else ""])
    active_task_file.close()


if __name__ == "__main__":
    logging.basicConfig(level="INFO", format="")
    argh.dispatch_commands(
        [add, close, start, stop, status, calibrate, examine])
    df.loc[:, 't'] = (df.date - df.date.min()).dt.days
    df.loc[:, xcols] = df[xcols].diff()  # use the increments? dunno
    df = df.dropna(subset=ycols + xcols)
    yy = df[ycols].values
    xx = df[xcols].values
    # NOTE: YOU SHOULD NOT USE t UNLESS YOU ARE USING THE dt IN THE SIGNATURE
    # OR UNROLLED VERSION OR SOMETHING LIKE THAT
    X = list()
    y = list()
    for i in range(n_steps_back, df.shape[0]):
        X.append(xx[i - n_steps_back:i].flatten())
        y.append(yy[i])
    X = np.array(X)
    y = np.array(y)
    return X, y


# So we want to try various models on the data. Start with one name only; there
# are many train/test/val strategies -- take the simplest first. The main point
# is to make this easily swappable with different models. Some models will
# require data transformations to make them work. Start with the easiest ones
# first that don't require ANY data scaling (trees). Really would be best to
# look at modeldb for this.
def get_sample_data():
    """ reload(do); globals().update(do.get_sample_data()) """
    df = pd.read_parquet('enriched/nrows=all/product=etfs/name=qqq')
    X_train, y_train, X_val, y_val = get_xy_data_plain(df)
    return locals()


if __name__ == '__main__':
    argh.dispatch_commands([run_raw, run_enriched])
subprocess.check_call(["agvtool", "new-marketing-version", marketing_version]) subprocess.check_call(["agvtool", "new-version", "-all", build_version]) def xcodebuild(scheme, workspace, configuration, commands, build_dir): cmd = ["xcrun", "xcodebuild", "-scheme", scheme, "-workspace", workspace, "-configuration", configuration] cmd = cmd + commands cmd.append('CONFIGURATION_BUILD_DIR=%s' % (build_dir)) try: output = check_string_output(cmd) return output except subprocess.CalledProcessError as e: raise BuildError(str(e)) def check_string_output(command): return subprocess.check_output(command).decode().strip() def sign_app(app_path): sign.sign_everything_in_app(app_path, key=signing_key) def package_app(app_path, image_path, image_name): package.package(app_path, image_path, image_name) if __name__ == "__main__": script_dir = os.path.dirname(os.path.abspath(__file__)) os.chdir(script_dir) argh.dispatch_commands([clean, build])
                yield os.path.join(root, f)

    nf = next_file()

    def create_zip(zip_name):
        # Ignore the unlikely empty zip file case
        with zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_DEFLATED) as z:
            print(zip_name)
            size = 0
            for file_count in range(1000):
                try:
                    n = nf.next()
                    z.write(n)
                    # zip members have unix-style slashes
                    n = n.replace('\\', '/')
                    size += z.getinfo(n).compress_size
                    # Keep well under 100MB to avoid hitting github limit
                    if size > 50000000:
                        break
                except StopIteration:
                    return False
        return True

    while True:
        zip_name = '{}{:0>4}{}'.format(d, zip_index, SUFFIX)
        zip_index += 1
        if not create_zip(os.path.join(SRC_DIR, zip_name)):
            break


if __name__ == '__main__':
    argh.dispatch_commands([zip_all, unzip_all])