def main(unparsed_args_list): """Uses parsed_args to run the entirety of the pham align pipeline. :param unparsed_args_list: Input a list of command line args. :type unparsed_args_list: list[str] """ args = parse_pham_align(unparsed_args_list) config = configfile.build_complete_config(args.config_file) alchemist = pipelines_basic.build_alchemist(args.database, config=config) values = pipelines_basic.parse_value_input(args.input) execute_pham_align(alchemist, folder_path=args.folder_path, folder_name=args.folder_name, values=values, filters=args.filters, groups=args.groups, file_type=args.file_type, mat_out=args.distmat_out, tree_out=args.guidetree_out, verbose=args.verbose, dump=args.dump, force=args.force, threads=args.number_threads)
def main(unparsed_args_list):
    """Run the get_db pipeline.

    The database data can be retrieved from three places:

    1. The server, from which the database is downloaded into a new folder.
    2. A local file, which requires no download and no new folder.
    3. The empty schema stored within pdm_utils, which requires no download,
       new folder, or local file.

    :param unparsed_args_list: Input a list of unparsed command line args.
    :type unparsed_args_list: list[str]
    """
    args = parse_args(unparsed_args_list)

    # Set values that are shared between all three options.
    config = configfile.build_complete_config(args.config_file)
    alchemist = pipelines_basic.build_alchemist(None, config=config,
                                                ask_database=False)

    if args.option == "file":
        execute_get_file_db(alchemist, args.database, args.filename,
                            config_file=args.config_file,
                            schema_version=args.schema_version,
                            verbose=args.verbose)
    elif args.option == "new":
        execute_get_new_db(alchemist, args.database, args.schema_version,
                           config_file=args.config_file,
                           verbose=args.verbose)
    else:
        # Server option: resolve the download URL from the command line,
        # then the config file, then the package defaults.
        url = args.url
        if url is None:
            server_creds = config["download_server"]
            url = server_creds.get("url")

            if url is None:
                url = DEFAULT_SETTINGS["url"]

        execute_get_server_db(
                    alchemist, args.database, url,
                    folder_path=args.output_folder, db_name=args.db_name,
                    config_file=args.config_file, verbose=args.verbose,
                    subdirectory=args.remote_directory,
                    download_only=args.download_only,
                    get_version=args.get_version,
                    force_pull=args.force_pull,
                    schema_version=args.schema_version)
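# The server branch above resolves the download URL from the command line
# first, then from a "download_server" section of the config file, and only
# then from DEFAULT_SETTINGS. A minimal sketch of that config section,
# assuming the INI-style file read by configfile.build_complete_config (the
# section and key names come from the lookup above; the URL value is only a
# placeholder):
#
#   [download_server]
#   url = http://databases.example.org/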
def main(unparsed_args_list):
    """Uses parsed args to run the entirety of the make_db pipeline.

    :param unparsed_args_list: Input a list of command line args.
    :type unparsed_args_list: list[str]
    """
    args = parse_make_db(unparsed_args_list)

    alchemist = pipelines_basic.build_alchemist(args.database)

    values = pipelines_basic.parse_value_input(args.input)

    execute_make_db(alchemist, args.db_type, folder_path=args.folder_path,
                    folder_name=args.folder_name, values=values,
                    verbose=args.verbose, filters=args.filters,
                    groups=args.groups, db_name=args.database_name,
                    threads=args.threads, use_mpi=args.use_mpi)
def main(unparsed_args_list): """Run main get_gb_records pipeline.""" # Parse command line arguments args = parse_args(unparsed_args_list) # Create config object with data obtained from file and/or defaults. config = configfile.build_complete_config(args.config_file) alchemist = pipelines_basic.build_alchemist(args.database, config=config) mysqldb.check_schema_compatibility(alchemist.engine, "the get_gb_records pipeline") values = pipelines_basic.parse_value_input(args.input) execute_get_gb_records(alchemist, args.file_type, folder_path=args.folder_path, folder_name=args.folder_name, config=config, values=values, verbose=args.verbose, filters=args.filters, groups=args.groups)
def main(unparsed_args_list): """Uses parsed args to run the entirety of the file export pipeline. :param unparsed_args_list: Input a list of command line args. :type unparsed_args_list: list[str] """ # Returns after printing appropriate error message from parsing/connecting. args = parse_export(unparsed_args_list) config = configfile.build_complete_config(args.config_file) alchemist = pipelines_basic.build_alchemist(args.database, config=config) # Exporting as a SQL file is not constricted by schema version. if args.pipeline != "sql": mysqldb.check_schema_compatibility(alchemist.engine, "export") values = None if args.pipeline in FILTERABLE_PIPELINES: values = pipelines_basic.parse_value_input(args.input) if not values: values = None if args.pipeline not in PIPELINES: print("ABORTED EXPORT: Unknown pipeline option discrepency.\n" "Pipeline parsed from command line args is not supported") sys.exit(1) if args.pipeline != "I": execute_export(alchemist, args.pipeline, folder_path=args.folder_path, folder_name=args.folder_name, table=args.table, values=values, filters=args.filters, groups=args.groups, sort=args.sort, include_columns=args.include_columns, exclude_columns=args.exclude_columns, sequence_columns=args.sequence_columns, raw_bytes=args.raw_bytes, concatenate=args.concatenate, db_name=args.db_name, verbose=args.verbose, dump=args.dump, force=args.force, threads=args.number_processes, phams_out=args.phams_out) else: pass
def main(unparsed_args): """Uses parsed args to run the entirety of the find primers pipeline. :param unparsed_args: Input a list of command line args. :type unparsed_args: list[str] """ args = parse_find_primers(unparsed_args) config = configfile.build_complete_config(args.config_file) alchemist = pipelines_basic.build_alchemist(args.database, config=config) values = pipelines_basic.parse_value_input(args.input) execute_find_primers(alchemist, folder_path=args.folder_path, folder_name=args.folder_name, values=values, filters=args.filters, groups=args.groups, verbose=args.verbose, threads=args.threads, prc=args.prc, minD=args.minD, maxD=args.maxD, hpn_min=args.hpn_min, ho_min=args.ho_min, het_min=args.het_min, GC_max=args.GC, len_oligomer=args.oligomer_length, tm_min=args.tm_min, tm_max=args.tm_max, tm_gap=args.tm_gap, ta_min=args.ta_min, ta_max=args.ta_max, mode=args.mode, soft_cap=args.soft_cap, phams_in=args.phams_in, fwd_in=args.fwd_in, rvs_in=args.rvs_in)
def main(unparsed_args_list): """Uses parsed args to run the entirety of the pham_finder pipeline. :param unparsed_args_list: Input a list of command line args. :type unparsed_args_list: list[str] """ args = parse_pham_finder(unparsed_args_list) config = configfile.build_complete_config(args.config_file) alchemist = pipelines_basic.build_alchemist(None, ask_database=False, config=config) values = None if args.input: values = pipelines_basic.parse_value_input(args.input) execute_pham_finder(alchemist, args.folder_path, args.folder_name, args.adatabase, args.bdatabase, values=values, filters=args.filters, groups=args.groups, sort=args.sort, show_per=args.show_percentages, use_locus=args.use_locus, verbose=args.verbose)
def main(unparsed_args_list):
    """Uses parsed args to run the entirety of the build_pan pipeline.

    :param unparsed_args_list: Input a list of command line args.
    :type unparsed_args_list: list[str]
    """
    args = parse_build_pan(unparsed_args_list)

    config = configfile.build_complete_config(args.config_file)

    alchemist = pipelines_basic.build_alchemist(args.database, config=config)

    values = pipelines_basic.parse_value_input(args.input)

    execute_build_pan(alchemist, hhdb_path=args.hhsuite_database,
                      folder_path=args.folder_path,
                      folder_name=args.folder_name, values=values,
                      verbose=args.verbose, filters=args.filters,
                      groups=args.groups, threads=args.number_threads,
                      M=args.min_percent_gaps, aD=args.avg_distance,
                      mD=args.min_distance, B=args.DB_stiffness,
                      PANgraph_out=args.PANgraph_out)
def main(unparsed_args_list):
    """Uses parsed args to run the entirety of the cluster_db pipeline.

    :param unparsed_args_list: Input a list of command line args.
    :type unparsed_args_list: list[str]
    """
    args = parse_cluster_db(unparsed_args_list)

    config = configfile.build_complete_config(args.config_file)

    alchemist = pipelines_basic.build_alchemist(args.database, config=config)

    values = pipelines_basic.parse_value_input(args.input)

    execute_cluster_db(alchemist, folder_path=args.folder_path,
                       folder_name=args.folder_name, values=values,
                       verbose=args.verbose, filters=args.filters,
                       groups=args.groups, threads=args.number_threads,
                       kmer=args.kmer_size, sketch=args.sketch_size,
                       gcs=args.gene_content_similarity_min,
                       ani=args.average_nucleotide_identity_min,
                       gcsmax=args.gene_content_similarity_max,
                       animax=args.average_nucleotide_identity_max,
                       gcsS=args.gcsS, gcsM=args.gcsM, aniS=args.aniS,
                       aniM=args.aniM, evaluate=args.dump_evaluation,
                       mat_out=args.distmat_out, subcluster=args.subcluster,
                       cluster_prefix=args.cluster_prefix)
def main(unparsed_args_list): """Uses parsed args to run the entirety of the pham_review pipeline. :param unparsed_args_list: Input a list of command line args. :type unparsed_args_list: list[str] """ args = parse_pham_review(unparsed_args_list) config = configfile.build_complete_config(args.config_file) alchemist = pipelines_basic.build_alchemist(args.database, config=config) values = pipelines_basic.parse_value_input(args.input) if not args.all_reports: gr_reports = args.gene_reports s_report = args.summary_report psr_reports = args.pham_summary_reports else: gr_reports = True s_report = True psr_reports = True execute_pham_review(alchemist, folder_path=args.folder_path, folder_name=args.folder_name, no_review=args.no_review, values=values, force=args.force, filters=args.filters, groups=args.groups, sort=args.sort, s_report=s_report, gr_reports=gr_reports, production=args.production, psr_reports=psr_reports, verbose=args.verbose)
def main(unparsed_args_list): """Uses parsed args to run the entirety of the revise pipeline. :param unparsed_args_list: Input a list of command line args. :type unparsed_args_list: list[str] """ args = parse_revise(unparsed_args_list) config = configfile.build_complete_config(args.config_file) alchemist = pipelines_basic.build_alchemist(args.database, config=config) if args.pipeline == "local": execute_local_revise(alchemist, args.revisions_file, folder_path=args.folder_path, folder_name=args.folder_name, config=config, input_type=args.input_type, output_type=args.output_type, filters=args.filters, groups=args.groups, verbose=args.verbose, force=args.force, production=args.production) elif args.pipeline == "remote": values = pipelines_basic.parse_value_input(args.input) execute_remote_revise(alchemist, folder_path=args.folder_path, folder_name=args.folder_name, config=config, values=values, filters=args.filters, groups=args.groups, verbose=args.verbose, output_type=args.output_type, force=args.force)