def main(ctx: click.core.Context, **kwargs: Any) -> None:
    """primer - prime projects for blackening... 🏴"""
    LOG.debug(f"Starting {sys.argv[0]}")
    # TODO: Change to asyncio.run when Black >= 3.7 only
    event_loop = asyncio.get_event_loop()
    try:
        # Run the async entry point to completion and exit with its status.
        exit_code = event_loop.run_until_complete(async_main(**kwargs))
        ctx.exit(exit_code)
    finally:
        # Always release the loop, even when ctx.exit() raises.
        event_loop.close()
def print_version(
    ctx: click.core.Context,
    param: Union[click.core.Option, click.core.Parameter],
    value: Any,
) -> Any:
    """Print version click callback."""
    # Only act on a truthy flag outside of click's resilient-parsing mode.
    if value and not ctx.resilient_parsing:
        click.echo(__package__ + " version: " + __version__)
        ctx.exit()
def main(ctx: click.core.Context, **kwargs) -> None:
    """CLI entry point: run the async main and exit with its return code."""
    exit_code = 0
    LOG.info(f"Starting {sys.argv[0]}")
    event_loop = asyncio.get_event_loop()
    try:
        exit_code = event_loop.run_until_complete(async_main(**kwargs))
    finally:
        # Close the loop before propagating the exit code (or any exception).
        event_loop.close()
    ctx.exit(exit_code)
def access_token_check(
    ctx: click.core.Context,
    _: click.core.Option,
    access_token: Optional[str],
    required: bool,
) -> Union[str, NoReturn]:
    """Check if access token is present."""
    # Happy path: a token was supplied, or none is needed.
    if access_token or not required:
        return access_token
    # A required token is missing: report and abort the command.
    display_message(ERROR_MESSAGES["missing_access_token"], msg_type="error")
    ctx.exit(1)
def cli(ctx: click.core.Context) -> None:
    """
    A Python package for generating static websites using AWS CloudFormation.
    \f
    :type ctx: click.core.Context
    :param ctx: Click context object
    :rtype: None
    :return: None
    """
    # A subcommand was given — nothing to do here.
    if ctx.invoked_subcommand is not None:
        return
    # Invoked bare: show usage and exit with an error status.
    click.echo(ctx.get_help())
    ctx.exit(1)
def print_version(ctx: click.core.Context,
                  param: Union[click.core.Option, click.core.Parameter],
                  value: Union[bool, int, str]):
    """Print version callback method

    Args:
        ctx: click context
        param: click param
        value: click value
    """
    # Skip for the 'test' param, a falsy flag, or click's resilient parsing.
    if param == 'test' or not value or ctx.resilient_parsing:
        return
    click.echo('Putio Aria2c Downloader version {}'.format(__version__))
    ctx.exit()
def convert(ctx: click.core.Context, tar_file: str, script_path: str,
            file: str, merge: bool, debug: bool):
    """Extract the given tar file and convert it according to the conversion rules.

    :param ctx: Click context object
    :param tar_file: path to the tar file to extract and convert
    :param script_path: explicit conversion-rule script; when None, the rule
        is taken from the config returned by ``__get_params(ctx)``
    :param file: single file to convert (forwarded to the converter)
    :param merge: when True, merge the converted output directory afterwards
    :param debug: when True, raise the log level to DEBUG
    """
    if debug:
        log.set_level(log.Level.DEBUG)

    # Resolve the conversion-rule script: CLI option wins over config.
    conf = None
    if script_path is not None:
        s = script_path
    else:
        conf = __get_params(ctx)
        if (conf.convert_rule is not None) and (not os.path.exists(conf.convert_rule)):
            click.echo("convert_rule \"%s\" が存在しません。" % conf.convert_rule)
            ctx.exit(2)
        else:
            s = conf.convert_rule

    p = conv.ConvertParams()
    p.script_path = s
    p.log_path = tar_file
    p.file = file

    ret = None
    try:
        ret, out_dir = conv.Converter(p).exec()
        if merge and out_dir is not None:
            # BUG FIX: `conf` was previously unbound here whenever
            # --script_path was supplied (the original only assigned it in
            # the else-branch above), causing a NameError on conf.merge_dir.
            if conf is None:
                conf = __get_params(ctx)
            ret = mrg.Merge().exec(os.path.join(out_dir, conf.merge_dir))
    except IOError as e:
        click.echo(e.args)
    except Exception as e:
        # Broad catch kept: the command reports failure via the message below
        # instead of crashing the CLI.
        click.echo(e.args)

    # finished
    if ret:
        click.echo("正常に終了しました。")
    else:
        click.echo("失敗しました。")
def main(ctx: click.core.Context, **kwargs: Any) -> None:
    """CLI entry point: run the async main and exit with its status code."""
    LOG.debug(f"Starting {sys.argv[0]}")
    exit_code = asyncio.run(async_main(**kwargs))
    ctx.exit(exit_code)
def start_workflow(  # noqa: C901
    ctx: click.core.Context,
    file_name: str,
    file_description: str,
    folder_name: str,
    report_format: str,
    access_level: str,
    reuse_newest_upload: bool,
    reuse_newest_job: bool,
    dry_run: bool,
):
    """The foss_cli start_workflow command.

    Uploads ``file_name`` (or reuses the newest existing upload of it),
    schedules the jobs described by the module-level JOB_SPEC on that upload
    (or reuses the newest existing job), then generates, downloads, and writes
    a report into ctx.obj["RESULT_DIR"]. Exits with code 1 when no upload can
    be determined or the job is still processing.
    """
    global JOB_SPEC
    logger.debug(f"Try to schedule job for {file_name}")
    foss = ctx.obj["FOSS"]
    # check/set the requested report format
    the_report_format = check_get_report_format(report_format)
    # check/set the requested access level
    the_access_level = check_get_access_level(access_level)
    # check/get the folder to use identified by the provided folder_name
    folder_to_use = check_get_folder(ctx, folder_name)
    # check/get the foss.upload to use
    if reuse_newest_upload:
        the_upload = get_newest_upload_of_file(ctx, file_name, folder_name)
    else:
        if dry_run:
            # Dry run without reuse: deliberately leave the_upload unset so
            # the fatal-exit below triggers instead of uploading anything.
            logger.warning(
                "Skip upload as dry_run is requested without --reuse_newest_upload"
            )
            the_upload = None
        else:
            logger.debug(f"Initiate new upload for {file_name}")
            the_upload = foss.upload_file(
                folder_to_use,
                file=file_name,
                description=file_description,
                access_level=the_access_level,
            )
            logger.debug(f"Finished upload for {file_name}")
    if the_upload is None:
        logger.fatal(f"Unable to find upload for {file_name}.")
        ctx.exit(1)
    # check/get job correlated with the upload
    job = None
    if reuse_newest_job:
        logger.debug(f"Try to find a scheduled job on {the_upload.uploadname}")
        the_jobs, pages = foss.list_jobs(the_upload)
        # Sentinel that compares lexically older than any real queueDate
        # string, so the first listed job always replaces it.
        newest_date = "0000-09-05 13:25:38.079869+00"
        for the_job in the_jobs:
            if the_job.queueDate > newest_date:
                newest_date = the_job.queueDate
                job = the_job
        if job is None:
            logger.info(f"Upload {the_upload.uploadname} never started a job ")
        else:
            logger.debug(
                f"Can reuse old job on Upload {the_upload.uploadname}: Newest Job id {job.id} is from {job.queueDate} "
            )
    if job is None:  # always true if --no_reuse_newest_job
        job = foss.schedule_jobs(
            folder_to_use if folder_to_use else foss.rootFolder,
            the_upload,
            JOB_SPEC,
            wait=True,  # we wait (default 30 sec) for the job to complete
        )
        logger.debug(f"Scheduled new job {job}")
    # check/get state of job correlated with the upload
    logger.debug(f"job {job.id} is in state {job.status} ")
    if job.status == "Processing":
        logger.fatal(
            f"job {job.id} is still in state {job.status}: Please try again later with --reuse_newest_upload --reuse_newest_job "
        )
        ctx.exit(1)
    # NOTE(review): assert is stripped under -O; presumably job.status can
    # only be "Processing" or "Completed" here — confirm against the API.
    assert job.status == "Completed"
    # trigger generation of report
    report_id = foss.generate_report(the_upload, report_format=the_report_format)
    logger.debug(f"Generated report {report_id}")
    # download report
    content, name = foss.download_report(report_id)
    logger.debug(
        f"Report downloaded: {name}: content type: {type(content)} len: {len(content)}."
    )
    destination_file = os.path.join(ctx.obj["RESULT_DIR"], name)
    with open(destination_file, "wb") as fp:
        written = fp.write(content)
    assert written == len(content)
    logger.info(
        f"Report written to file: report_name {name} written to {destination_file}"
    )