def get_collection(foreign_id):
    """Look up a collection by its foreign id, including soft-deleted ones.

    Raises click.BadParameter when no matching collection exists.
    """
    found = Collection.by_foreign_id(foreign_id, deleted=True)
    if found is not None:
        return found
    raise click.BadParameter("No such collection: %r" % foreign_id)
def _validate_repo_name(ctx, param, value): """Callback used to check if repository argument was given.""" if "/" not in value: raise click.BadParameter('Expected format for REPOSITORY is ' '"<org_name>/<project_name>" (e.g "jcfr/sandbox")') return value
def validate_email(ctx, param, value):
    """Reject values that do not look like an email address, then delegate
    to the uniqueness check against the database."""
    if re.match(r'[^@]+@[^@]+\.[^@]+', value) is None:
        raise click.BadParameter('Invalid email')
    # Also validate that the email doesn't exist in the database
    return validate_user_unique_field(ctx, param, value)
def validate_language(ctx, param, value):
    """Pass None through unchanged; otherwise require a supported language."""
    if value is None:
        return value
    if is_valid_language(value):
        return value
    raise click.BadParameter('Unsupported language "%s".' % value)
def validate_ansible_dir(path):
    """Ensure a non-empty ansible path was supplied and return it."""
    if path:
        return path
    raise click.BadParameter('An ansible path must be provided')
def confirm_directory(ctx, parm, value):
    """Accept *value* only when it names an existing directory."""
    if not os.path.isdir(value):
        raise click.BadParameter(f'[{value}] directory does not exist')
    return value
def validate_email(ctx, param, value):
    """Validate *value* against the module-level EMAIL_REGEX pattern."""
    if EMAIL_REGEX.match(value):
        return value
    raise click.BadParameter("Not a valid email address")
def validate_version(ctx, param, value):
    """Ensure *value* parses as a version via decode_version.

    Returns the raw string unchanged on success; raises click.BadParameter
    with the decode error's message otherwise.
    """
    try:
        decode_version(value)
    except ValueError as exc:
        # Chain the original error (`from exc`) so the underlying parse
        # failure stays attached for debugging; str() replaces the dated
        # "{}".format() call.
        raise click.BadParameter(str(exc)) from exc
    return value
def validate_header_size(ctx, param, value):
    """Require -H/--header-size to be at least the image header size."""
    minimum = image.IMAGE_HEADER_SIZE
    if value >= minimum:
        return value
    raise click.BadParameter(
        "Minimum value for -H/--header-size is {}".format(minimum))
def revert(uid):
    """Revert the change identified by *uid*.

    A SolarError from the underlying revert is surfaced to the CLI as
    click.BadParameter.
    """
    try:
        change.revert(uid)
    except errors.SolarError as er:
        # Chain the original error so its traceback remains available.
        raise click.BadParameter(str(er)) from er
def validate_pos_int(ctx, param, value):
    """Reject negative values.

    Note: despite the wording of the error message, zero is accepted.
    """
    if value >= 0:
        return value
    raise click.BadParameter("Value needs to be positive")
def validate(ctx, param, value):
    """Click callback rejecting negative numbers (zero passes)."""
    if not value < 0:
        return value
    raise click.BadParameter("should be positive")
def _pathy_data_path(ctx, param, value):
    """Parameter callback for click to transform str into pathy local or GCS path."""
    try:
        path = pathy.Pathy.fluid(value)
    except (TypeError, ValueError):
        raise click.BadParameter("Data path needs to be a local or GCS file path.")
    return path
def tunnel_validation(ctx, param, value):
    """Require 'localPort:remotePort' (1-5 digits each side).

    Falsy values (None, empty string) pass through untouched.
    """
    if not value:
        return value
    if re.match("^[0-9]{1,5}:[0-9]{1,5}$", value):
        return value
    raise click.BadParameter(
        'Tunnel needs to be in format localPort:remotePort')
def edit(ctx, input, bidx, nodata, unset_nodata, crs, unset_crs, transform,
         units, description, tags, allmd, like):
    """Edit a dataset's metadata: coordinate reference system, affine
    transformation matrix, nodata value, and tags.

    The coordinate reference system may be either a PROJ.4 or EPSG:nnnn
    string,

      --crs 'EPSG:4326'

    or a JSON text-encoded PROJ.4 object.

      --crs '{"proj": "utm", "zone": 18, ...}'

    Transforms are JSON-encoded Affine objects like:

      --transform '[300.038, 0.0, 101985.0, 0.0, -300.042, 2826915.0]'

    Prior to Rasterio 1.0 GDAL geotransforms were supported for --transform,
    but are no longer supported.

    Metadata items may also be read from an existing dataset using a
    combination of the --like option with at least one of --all, `--crs like`,
    `--nodata like`, and `--transform like`.

      rio edit-info example.tif --like template.tif --all

    To get just the transform from the template:

      rio edit-info example.tif --like template.tif --transform like
    """
    import numpy as np

    def in_dtype_range(value, dtype):
        # NaN is representable in any float dtype, so accept it outright.
        kind = np.dtype(dtype).kind
        if kind == 'f' and np.isnan(value):
            return True
        infos = {'c': np.finfo, 'f': np.finfo, 'i': np.iinfo, 'u': np.iinfo}
        rng = infos[kind](dtype)
        return rng.min <= value <= rng.max

    with ctx.obj['env'], rasterio.open(input, 'r+') as dst:
        # --like/--all: pull every editable item from the template dataset.
        if allmd:
            nodata = allmd['nodata']
            crs = allmd['crs']
            transform = allmd['transform']
            tags = allmd['tags']

        if unset_nodata and nodata is not options.IgnoreOption:
            raise click.BadParameter(
                "--unset-nodata and --nodata cannot be used together.")

        if unset_crs and crs:
            raise click.BadParameter(
                "--unset-crs and --crs cannot be used together.")

        if unset_nodata:
            # Setting nodata to None will raise NotImplementedError
            # if GDALDeleteRasterNoDataValue() isn't present in the
            # GDAL library.
            try:
                dst.nodata = None
            except NotImplementedError as exc:  # pragma: no cover
                raise click.ClickException(str(exc))

        elif nodata is not options.IgnoreOption:
            # Only a nodata value that fits the file's dtype is accepted.
            dtype = dst.dtypes[0]
            if nodata is not None and not in_dtype_range(nodata, dtype):
                raise click.BadParameter(
                    "outside the range of the file's "
                    "data type (%s)." % dtype,
                    param=nodata, param_hint='nodata')
            dst.nodata = nodata

        if unset_crs:
            dst.crs = None  # CRS()
        elif crs:
            dst.crs = crs

        if transform:
            dst.transform = transform

        if tags:
            dst.update_tags(**tags)

        if units:
            dst.set_units(bidx, units)

        if description:
            dst.set_description(bidx, description)

    # Post check - ensure that crs was unset properly
    if unset_crs:
        with ctx.obj['env'], rasterio.open(input, 'r') as src:
            if src.crs:
                # Fixed typo in the warning text: "his" -> "this".
                warnings.warn(
                    'CRS was not unset. Availability of this functionality '
                    'differs depending on GDAL version and driver')
def _validate_memory(ctx, param, value): if value and re.match(r"^\d+(Gi|G|Mi|M)$", value) is None: raise click.BadParameter("--memory should be a number then Gi/G/Mi/M e.g 1Gi") return value
def convert(self, value, param, ctx):
    """Return *value* when it fully matches NAME_FORMAT; otherwise fail."""
    if NAME_FORMAT.fullmatch(value) is not None:
        return value
    raise click.BadParameter(
        "must contain only alphanumeric, underscore and dash characters"
    )
def parse_bento_tag_callback(ctx, param, value):  # pylint: disable=unused-argument
    """Validate a required bento tag of the form BentoName:Version.

    Optional (non-required) parameters pass through without validation.
    """
    if not param.required or _is_valid_bento_tag(value):
        return value
    raise click.BadParameter(
        "Bad formatting. Please present in BentoName:Version, for example "
        "iris_classifier:v1.2.0")
def confirm_subs(ctx, parm, value):
    """Require an even number of substitution items (they come in pairs)."""
    if len(value) % 2 != 0:
        raise click.BadParameter(f'[{value}] provide an even number of subs')
    return value
def validate_kSize(ctx, param, value):
    """Accept only odd k-mer sizes."""
    if value % 2 == 1:
        return value
    raise click.BadParameter(
        f"kmer size: {value} is even, please enter an odd value.")
def type_cast_value(self, ctx, value):
    """Evaluate *value* as a Python literal (list, dict, number, ...).

    Raises click.BadParameter when the string is not a valid literal.
    """
    try:
        return ast.literal_eval(value)
    except (ValueError, SyntaxError) as e:
        # ast.literal_eval raises SyntaxError (not ValueError) for malformed
        # input such as "foo bar"; the original only caught ValueError, so
        # such input escaped as a raw traceback instead of a BadParameter.
        logger.exception(e)
        raise click.BadParameter(value) from e
def validate_option(ctx, param, value):
    """Click callback dispatching on param.name.

    For each known parameter it logs the chosen value via output.param,
    provisions the matching Azure resource through azure_cli when it does
    not already exist, and records results in envvars.  Returns the
    (possibly replaced) value.

    NOTE(review): reconstructed from a collapsed single-line source; the
    nesting of the subscription-length re-prompt was inferred — verify
    against the original project before relying on it.
    """
    global default_subscriptionId
    global azure_cli_processing_complete
    if param.name == "credentials":
        # value looks like a (user, password) pair; only act when both parts are set.
        if value and value[0] and value[1]:
            output.param("CREDENTIALS", value, "Setting Credentials...",
                         azure_cli_processing_complete)
            if not azure_cli.login_account(*value):
                sys.exit()
    if param.name == "service_principal":
        # value looks like a 3-tuple; all three parts must be present.
        if value and value[0] and value[1] and value[2]:
            output.param("SERVICE PRINCIPAL", value, "Setting Credentials...",
                         azure_cli_processing_complete)
            if not azure_cli.login_sp(*value):
                sys.exit()
    if param.name == "subscription":
        output.param("SUBSCRIPTION", value,
                     f("Setting Subscription to '{value}'..."),
                     azure_cli_processing_complete)
        # first verify that we have an existing auth token in cache, otherwise login using interactive
        if not default_subscriptionId:
            default_subscriptionId = azure_cli.user_has_logged_in()
            if not default_subscriptionId and not azure_cli.login_interactive():
                sys.exit()
        if default_subscriptionId != value:
            subscription = azure_cli.set_subscription(value)
            if not subscription:
                raise click.BadParameter(
                    f('Please verify that your subscription Id or Name is correct'))
            # NOTE(review): a full subscription id is 36 characters; a shorter
            # result presumably means a name was supplied, so re-prompt and
            # re-validate recursively — confirm against upstream.
            if len(subscription) < 36:
                value = click.prompt(param.prompt, default=default_subscriptionId)
                return validate_option(ctx, param, value)
    if param.name == "resource_group_name":
        output.param("RESOURCE GROUP NAME", value,
                     f("Setting Resource Group Name to '{value}'..."),
                     azure_cli_processing_complete)
        envvars.RESOURCE_GROUP_NAME = value
        if not azure_cli.resource_group_exists(value):
            if not azure_cli.create_resource_group(
                    value, envvars.RESOURCE_GROUP_LOCATION):
                raise click.BadParameter(
                    f('Could not find Resource Group {value}'))
        else:
            # resource group exist, so don't ask for location
            envvars.RESOURCE_GROUP_LOCATION = azure_cli.get_resource_group_location(
                value)
    if param.name == "resource_group_location":
        output.param("RESOURCE GROUP LOCATION", value,
                     f("Setting Resource Group Location to '{value}'..."),
                     azure_cli_processing_complete)
        envvars.RESOURCE_GROUP_LOCATION = value
    if param.name == "iothub_sku":
        output.param("IOT HUB SKU", value,
                     f("Setting IoT Hub SKU to '{value}'..."),
                     azure_cli_processing_complete)
        envvars.IOTHUB_SKU = value
    if param.name == "iothub_name":
        output.param("IOT HUB", value, f("Setting IoT Hub to '{value}'..."),
                     azure_cli_processing_complete)
        envvars.IOTHUB_NAME = value
        # The IoT extension is required for the hub/device commands below.
        if not azure_cli.extension_exists("azure-cli-iot-ext"):
            azure_cli.add_extension("azure-cli-iot-ext")
        if not azure_cli.iothub_exists(value, envvars.RESOURCE_GROUP_NAME):
            # check if the active subscription already contains a free IoT Hub
            # if yes ask if the user wants to create an S1
            # otherwise exit
            if envvars.IOTHUB_SKU == "F1":
                free_iot_name, free_iot_rg = azure_cli.get_free_iothub()
                if free_iot_name:
                    output.info(
                        "You already have a Free IoT Hub SKU in your subscription, "
                        "so you must either use that existing IoT Hub or create a new S1 IoT Hub. "
                        "Enter (F) to use the existing Free IoT Hub or enter (S) to create a new S1 IoT Hub:"
                    )
                    user_response = sys.stdin.readline().strip().upper()
                    if user_response == "S":
                        envvars.IOTHUB_SKU = "S1"
                    elif user_response == "F":
                        # Reuse the existing free hub (and its resource group)
                        # instead of creating a new one.
                        envvars.IOTHUB_NAME = free_iot_name
                        envvars.RESOURCE_GROUP_NAME = free_iot_rg
                        return free_iot_name
                    else:
                        sys.exit()
            if not azure_cli.create_iothub(value, envvars.RESOURCE_GROUP_NAME,
                                           envvars.IOTHUB_SKU):
                raise click.BadParameter(
                    f('Could not create IoT Hub {value} in {envvars.RESOURCE_GROUP_NAME}'))
    if param.name == "edge_device_id":
        output.param("EDGE DEVICE", value,
                     f("Setting Edge Device to '{value}'..."),
                     azure_cli_processing_complete)
        envvars.EDGE_DEVICE_ID = value
        if not azure_cli.edge_device_exists(value, envvars.IOTHUB_NAME,
                                            envvars.RESOURCE_GROUP_NAME):
            if not azure_cli.create_edge_device(value, envvars.IOTHUB_NAME,
                                                envvars.RESOURCE_GROUP_NAME):
                raise click.BadParameter(
                    f('Could not create IoT Edge Device {value} in {envvars.IOTHUB_NAME} in {envvars.RESOURCE_GROUP_NAME}'))
        # With the device in place, fetch and print both connection strings.
        output.header("CONNECTION STRINGS")
        envvars.IOTHUB_CONNECTION_STRING = azure_cli.get_iothub_connection_string(
            envvars.IOTHUB_NAME, envvars.RESOURCE_GROUP_NAME)
        envvars.DEVICE_CONNECTION_STRING = azure_cli.get_device_connection_string(
            envvars.EDGE_DEVICE_ID, envvars.IOTHUB_NAME,
            envvars.RESOURCE_GROUP_NAME)
        if envvars.IOTHUB_CONNECTION_STRING and envvars.DEVICE_CONNECTION_STRING:
            output.info(
                f("IOTHUB_CONNECTION_STRING=\"{envvars.IOTHUB_CONNECTION_STRING}\""))
            output.info(
                f("DEVICE_CONNECTION_STRING=\"{envvars.DEVICE_CONNECTION_STRING}\""))
        # Mark the callback pipeline as finished for subsequent output calls.
        azure_cli_processing_complete = True
        output.line()
    return value
def _validate_plugin_dir(ctx, param, value): if not os.path.exists(os.path.join(value, 'setup.py')): raise click.BadParameter('no setup.py found in {}'.format(value)) return value
def main(action, uninstalled, signed_off, quiet, username, password, package,
         db_path, noconfirm):
    """ Interface with Arch Linux package signoffs. """
    # Default action: sign off when explicit packages were given, else list.
    if action is None:
        if package:
            action = "signoff"
        else:
            action = "list"
    options = Options(action=action,
                      show_uninstalled=uninstalled,
                      show_signed_off=signed_off,
                      quiet=quiet,
                      packages=set(package),
                      db_path=db_path,
                      username=username,
                      noconfirm=noconfirm)
    # initialize alpm handle and signoff session
    alpm_handle = pyalpm.Handle("/", options.db_path)
    session = SignoffSession(options.username, password)
    # fetch and filter signoff packages
    signoffs = list(list_signoffs(session, alpm_handle))
    packages = list(filter_signoffs(signoffs, options))
    pkgbases = set(signoff_pkg["pkgbase"] for signoff_pkg, _ in packages)
    # if packages are supplied as parameters, validate them
    for pkgbase in options.packages:
        if pkgbase not in pkgbases:
            raise click.BadParameter(
                "package base {} not found in signoffs".format(pkgbase))
    if action == "list":
        # output packages and exit
        for signoff_pkg, local_pkg in packages:
            click.echo(format_signoff(signoff_pkg, local_pkg, options))
            if not options.quiet:
                click.echo()  # add a line between packages
    elif action == "signoff":
        # sign-off packages: warn about each first, then confirm once for all.
        for signoff_pkg, local_pkg in packages:
            warn_if_outdated(signoff_pkg, local_pkg)
        if options.noconfirm or confirm("Sign off {}?".format(
                click.style(" ".join(pkgbases), bold=True))):
            for signoff_pkg, local_pkg in packages:
                session.signoff_package(signoff_pkg)
                click.echo("Signed off {}.".format(signoff_pkg["pkgbase"]))
    elif action == "revoke":
        # revoke sign-offs: same warn-all / confirm-once pattern as above.
        for signoff_pkg, local_pkg in packages:
            warn_if_outdated(signoff_pkg, local_pkg)
        if options.noconfirm or confirm("Revoke sign-off for {}?".format(
                click.style(" ".join(pkgbases), bold=True))):
            for signoff_pkg, local_pkg in packages:
                session.revoke_package(signoff_pkg)
                click.echo("Revoked sign-off for {}.".format(
                    signoff_pkg["pkgbase"]))
    elif action == "interactive":
        # interactively sign-off or revoke, one package at a time
        for signoff_pkg, local_pkg in packages:
            click.echo(format_signoff(signoff_pkg, local_pkg, options))
            warn_if_outdated(signoff_pkg, local_pkg)
            if not options.quiet:
                click.echo()
            # check if we're signing off or revoking
            pkgbase = signoff_pkg["pkgbase"]
            signed_off = signoff_status(signoff_pkg,
                                        options.username) == "signed-off"
            if signed_off:
                prompt = "Revoke sign-off for {}?".format(pkgbase)
            else:
                prompt = "Sign off {}?".format(pkgbase)
            # confirm and signoff/revoke
            if confirm(prompt):
                if signed_off:
                    session.revoke_package(signoff_pkg)
                    click.echo("Revoked sign-off for {}.".format(pkgbase))
                else:
                    session.signoff_package(signoff_pkg)
                    click.echo("Signed off {}.".format(pkgbase))
            click.echo()
    # Always close the signoff session before returning.
    session.logout()
def validate_prompt_hostname(hostname):
    """Accept empty input or a syntactically valid hostname.

    Raises click.BadParameter for anything else.  Fixed a garbled error
    message: the concatenated literals previously rendered as
    "...double-check this value iand re-enter it." (stray 'i').
    """
    if '' == hostname or is_valid_hostname(hostname):
        return hostname
    raise click.BadParameter('"{}" appears to be an invalid hostname. '
                             'Please double-check this value '
                             'and re-enter it.'.format(hostname))
def validate_prompt_hostname(hostname):
    """Return *hostname* when blank or valid; otherwise fail the prompt."""
    if hostname != '' and not is_valid_hostname(hostname):
        raise click.BadParameter(
            'Invalid hostname. Please double-check this value and re-enter it.')
    return hostname
def recognizer(model, pad, no_segmentation, bidi_reordering, script_ignore,
               input, output) -> None:
    """Run text recognition over one input and serialize the predictions.

    Reads the base image and line segmentation from the click context's
    ``ctx.meta`` pipeline state (or from *input* directly), runs rpred over
    each line, and writes the results to *output* in ``ctx.meta['mode']``
    format.

    NOTE(review): reconstructed from a collapsed single-line source; the
    logger.warning message near the middle straddled the original line
    break — verify its exact wording upstream.
    """
    import json
    from kraken import rpred
    ctx = click.get_current_context()
    bounds = None
    # First process in the pipeline registers the raw image as base_image.
    if 'base_image' not in ctx.meta:
        ctx.meta['base_image'] = input
    if ctx.meta['first_process']:
        if ctx.meta['input_format_type'] != 'image':
            # Non-image inputs (e.g. ALTO/PageXML) carry their own image path
            # and segmentation.
            doc = get_input_parser(ctx.meta['input_format_type'])(input)
            ctx.meta['base_image'] = doc['image']
            doc['text_direction'] = 'horizontal-lr'
            bounds = doc
    try:
        im = Image.open(ctx.meta['base_image'])
    except IOError as e:
        raise click.BadParameter(str(e))
    if not bounds and ctx.meta['base_image'] != input:
        # input is a JSON segmentation file produced by an earlier stage.
        with open_file(input, 'r') as fp:
            try:
                fp = cast(IO[Any], fp)
                bounds = json.load(fp)
            except ValueError as e:
                raise click.UsageError(f'{input} invalid segmentation: {str(e)}')
    elif not bounds:
        if no_segmentation:
            # Treat the whole page as a single line box.
            bounds = {'script_detection': False,
                      'text_direction': 'horizontal-lr',
                      'boxes': [(0, 0) + im.size]}
        else:
            raise click.UsageError('No line segmentation given. Add one with the input or run `segment` first.')
    elif no_segmentation:
        logger.warning('no_segmentation mode enabled but segmentation defined. Ignoring --no-segmentation option.')
    scripts = set()
    # script detection
    if 'script_detection' in bounds and bounds['script_detection']:
        it = rpred.mm_rpred(model, im, bounds, pad,
                            bidi_reordering=bidi_reordering,
                            script_ignore=script_ignore)
    else:
        it = rpred.rpred(model['default'], im, bounds, pad,
                         bidi_reordering=bidi_reordering)
    preds = []
    with log.progressbar(it, label='Processing') as bar:
        for pred in bar:
            preds.append(pred)
    ctx = click.get_current_context()
    with open_file(output, 'w', encoding='utf-8') as fp:
        fp = cast(IO[Any], fp)
        message(f'Writing recognition results for {ctx.meta["orig_file"]}\t', nl=False)
        logger.info('Serializing as {} into {}'.format(ctx.meta['mode'], output))
        if ctx.meta['mode'] != 'text':
            from kraken import serialization
            fp.write(serialization.serialize(preds, ctx.meta['base_image'],
                                             Image.open(ctx.meta['base_image']).size,
                                             ctx.meta['text_direction'],
                                             scripts,
                                             bounds['regions'] if 'regions' in bounds else None,
                                             ctx.meta['mode']))
        else:
            # Plain text mode: one prediction per line.
            fp.write('\n'.join(s.prediction for s in preds))
        message('\u2713', fg='green')
def launch_workflow(code, structure, pseudo_family, kpoints_distance, ecutwfc,
                    ecutrho, hubbard_u, hubbard_v, hubbard_file_pk,
                    starting_magnetization, smearing,
                    automatic_parallelization, clean_workdir, max_num_machines,
                    max_wallclock_seconds, with_mpi, daemon):
    """Run a `PwBaseWorkChain`."""
    from aiida.orm import Bool, Float, Dict
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options, get_automatic_parallelization_options

    builder = WorkflowFactory('quantumespresso.pw.base').get_builder()

    # Cutoffs given on the command line take precedence over the pseudo
    # family's recommended values (the `or` falls back when None/0).
    cutoff_wfc, cutoff_rho = pseudo_family.get_recommended_cutoffs(
        structure=structure, unit='Ry')

    parameters = {
        'SYSTEM': {
            'ecutwfc': ecutwfc or cutoff_wfc,
            'ecutrho': ecutrho or cutoff_rho,
        },
    }

    # Each validator raises ValueError on bad input, which is surfaced to the
    # CLI as click.BadParameter.  NOTE(review): they presumably also update
    # `parameters` in place — confirm against the validate module.
    try:
        hubbard_file = validate.validate_hubbard_parameters(
            structure, parameters, hubbard_u, hubbard_v, hubbard_file_pk)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    try:
        validate.validate_starting_magnetization(structure, parameters,
                                                 starting_magnetization)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    try:
        validate.validate_smearing(parameters, smearing)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    builder.pw.code = code
    builder.pw.structure = structure
    builder.pw.parameters = Dict(dict=parameters)
    builder.pw.pseudos = pseudo_family.get_pseudos(structure=structure)
    builder.kpoints_distance = Float(kpoints_distance)

    if hubbard_file:
        builder.hubbard_file = hubbard_file

    if automatic_parallelization:
        # Let the workchain determine the parallelization flags itself.
        automatic_parallelization = get_automatic_parallelization_options(
            max_num_machines, max_wallclock_seconds)
        builder.automatic_parallelization = Dict(
            dict=automatic_parallelization)
    else:
        builder.pw.metadata.options = get_default_options(
            max_num_machines, max_wallclock_seconds, with_mpi)

    if clean_workdir:
        builder.clean_workdir = Bool(True)

    launch.launch_process(builder, daemon)
def validate_url(ctx, param, value):
    """Click callback ensuring *value* looks like ``tcp://<ipv4>:<port>``.

    The original implementation was a no-op: the try block only executed
    ``return value``, which can never raise ValueError, so the except branch
    (and the validation it promised) was dead code.  Validate explicitly.
    None passes through so optional parameters keep working.
    """
    if value is None:
        return value
    # `re` is imported at module level (used by the other validators here).
    if re.match(r'^tcp://\d{1,3}(?:\.\d{1,3}){3}:\d{1,5}$', str(value)):
        return value
    raise click.BadParameter('url need to be format: tcp://ipv4:port')
def validate_count(ctx, param, value):
    """Accept only non-negative even integers."""
    invalid = value < 0 or value % 2
    if invalid:
        raise click.BadParameter('Should be a positive, even integer.')
    return value