def build(args):

    welcome = [
        "========= F L O W C R A F T =========",
        "Build mode\n"
        "version: {}".format(__version__),
        "build: {}".format(__build__),
        "====================================="
    ]

    parsed_output_nf = validate_build_arguments(args)

    logger.info(colored_print("\n".join(welcome), "green_bold"))

    # If a recipe is specified, build pipeline based on the
    # appropriate recipe
    if args.recipe:
        pipeline_string, list_processes = brew_recipe(args)
    else:
        pipeline_string = args.tasks
        list_processes = None

    # used for lists print
    proc_collector(process_map, args, list_processes)

    logger.info(colored_print("Resulting pipeline string:\n"))
    logger.info(colored_print(pipeline_string + "\n"))

    try:
        logger.info(colored_print("Checking pipeline for errors..."))
        pipeline_list = parse_pipeline(pipeline_string)
    except SanityError as e:
        logger.error(colored_print(e.value, "red_bold"))
        sys.exit(1)

    logger.debug("Pipeline successfully parsed: {}".format(pipeline_list))

    # Exit if only the pipeline parser needs to be checked
    if args.check_only:
        sys.exit()

    nfg = NextflowGenerator(process_connections=pipeline_list,
                            nextflow_file=parsed_output_nf,
                            pipeline_name=args.pipeline_name,
                            auto_dependency=args.no_dep)

    logger.info(colored_print("Building your awesome pipeline..."))

    # building the actual pipeline nf file
    nfg.build()

    # copy template to cwd, to allow for immediate execution
    if not args.pipeline_only:
        copy_project(parsed_output_nf)

    logger.info(colored_print("DONE!", "green_bold"))
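
# Illustrative sketch, not part of the original module: build() only reads
# attributes from the args object, so an argparse.Namespace is enough to drive
# it programmatically (e.g. from a test). The attribute names below mirror the
# ones this version of build() accesses; the values are assumptions, and
# validate_build_arguments() may require additional attributes not shown here.
def _example_build_invocation():
    from argparse import Namespace

    example_args = Namespace(
        recipe=None,                  # no recipe: use the task string instead
        tasks="fastqc trimmomatic",   # hypothetical task string
        check_only=False,             # do not stop after the sanity check
        pipeline_name="example",      # hypothetical pipeline name
        no_dep=True,                  # forwarded as auto_dependency
        pipeline_only=False,          # also copy the project template to cwd
    )
    build(example_args)
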
def build(args):

    # Disable standard logging for stdout when the following modes are
    # executed:
    if args.export_params or args.export_directives:
        logger.setLevel(logging.ERROR)

    welcome = [
        "========= F L O W C R A F T =========",
        "Build mode\n"
        "version: {}".format(__version__),
        "build: {}".format(__build__),
        "====================================="
    ]

    parsed_output_nf = validate_build_arguments(args)

    logger.info(colored_print("\n".join(welcome), "green_bold"))

    # If a recipe is specified, build pipeline based on the
    # appropriate recipe
    if args.recipe:
        if args.recipe == "innuendo":
            pipeline_string = brew_recipe(args, available_recipes)
        else:
            pipeline_string = available_recipes[args.recipe]
            if args.tasks:
                logger.warning(colored_print(
                    "-t parameter will be ignored for recipe: {}\n"
                    .format(args.recipe), "yellow_bold")
                )

        if args.check_recipe:
            logger.info(colored_print("Pipeline string for recipe: {}"
                                      .format(args.recipe), "purple_bold"))
            logger.info(pipeline_string)
            sys.exit(0)
    else:
        pipeline_string = args.tasks

    # used for lists print
    proc_collector(process_map, args, pipeline_string)

    try:
        logger.info(colored_print("Checking pipeline for errors..."))
        pipeline_list = parse_pipeline(pipeline_string)
    except SanityError as e:
        logger.error(colored_print(e.value, "red_bold"))
        sys.exit(1)

    logger.debug("Pipeline successfully parsed: {}".format(pipeline_list))

    # Exit if only the pipeline parser needs to be checked
    if args.check_only:
        sys.exit()

    nfg = NextflowGenerator(process_connections=pipeline_list,
                            nextflow_file=parsed_output_nf,
                            pipeline_name=args.pipeline_name,
                            auto_dependency=args.no_dep,
                            merge_params=args.merge_params,
                            export_params=args.export_params)

    logger.info(colored_print("Building your awesome pipeline..."))

    if args.export_params:
        nfg.export_params()
        sys.exit(0)
    elif args.export_directives:
        nfg.export_directives()
        sys.exit(0)
    else:
        # building the actual pipeline nf file
        nfg.build()

    # copy template to cwd, to allow for immediate execution
    if not args.pipeline_only:
        copy_project(parsed_output_nf)

    logger.info(colored_print("DONE!", "green_bold"))
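
# Illustrative sketch, not part of the original module: this revision of
# build() adds introspection modes that print information and exit instead of
# writing a pipeline file. The attribute names mirror what this version reads;
# the values are assumptions, and each call below ends in sys.exit(0), so it
# raises SystemExit after printing.
def _example_export_modes():
    from argparse import Namespace

    common = dict(
        recipe=None,
        tasks="fastqc trimmomatic",   # hypothetical task string
        check_recipe=False,
        check_only=False,
        pipeline_name="example",      # hypothetical pipeline name
        no_dep=True,
        merge_params=True,
        pipeline_only=True,
    )

    # Run the export-params mode and exit.
    build(Namespace(**common, export_params=True, export_directives=False))

    # Run the export-directives mode and exit.
    build(Namespace(**common, export_params=False, export_directives=True))
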
def run(args):

    if args.debug:
        logger.setLevel(logging.DEBUG)

        # create formatter
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    else:
        logger.setLevel(logging.INFO)

        # create special formatter for info logs
        formatter = logging.Formatter('%(message)s')

    # create console handler and set level to debug
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)

    # add formatter to ch
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    welcome = [
        "========= A S S E M B L E R F L O W =========",
        "version: {}".format(__version__),
        "build: {}".format(__build__),
        "============================================="
    ]

    logger.info(colored_print("\n".join(welcome), "green_bold"))

    # If a recipe is specified, build pipeline based on the
    # appropriate recipe
    if args.recipe:
        pipeline_string, list_processes = brew_recipe(args)
    else:
        pipeline_string = args.tasks
        list_processes = None

    # used for lists print
    proc_collector(process_map, args, list_processes)

    # Validate arguments. This must be done after the process collector part
    passed = check_arguments(args)
    if not passed:
        return

    logger.info(colored_print("Resulting pipeline string:\n"))
    logger.info(colored_print(pipeline_string + "\n"))

    try:
        logger.info(colored_print("Checking pipeline for errors..."))
        pipeline_list = parse_pipeline(pipeline_string)
    except SanityError as e:
        logger.error(colored_print(e.value, "red_bold"))
        sys.exit(1)

    logger.debug("Pipeline successfully parsed: {}".format(pipeline_list))

    # Exit if only the pipeline parser needs to be checked
    if args.check_only:
        sys.exit()

    nfg = NextflowGenerator(process_connections=pipeline_list,
                            nextflow_file=args.output_nf)

    logger.info(colored_print("Building your awesome pipeline..."))

    # building the actual pipeline nf file
    nfg.build()

    # copy template to cwd, to allow for immediate execution
    if args.include_templates:
        copy_project(args.output_nf)

    logger.info(colored_print("DONE!", "green_bold"))
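
# Illustrative sketch, not part of the original module: the logging pattern
# used at the top of run(), isolated for clarity. The handler stays pinned at
# DEBUG and the logger's own level does the filtering, so the --debug flag only
# switches the logger level and the message format, never the handler.
# "example" is a hypothetical logger name; logging and sys are the same
# module-level imports the functions above already rely on.
def _example_logger_setup(debug=False):
    log = logging.getLogger("example")

    if debug:
        log.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    else:
        log.setLevel(logging.INFO)
        formatter = logging.Formatter('%(message)s')

    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)
    log.addHandler(ch)

    return log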