def __call__(self, parser, namespace, values, option_string=None):
    if values:
        wrong_choices = []
        supported_target_frameworks = \
            TargetFrameworkAction.get_supported_target_frameworks()
        for value in values:
            if value not in supported_target_frameworks:
                wrong_choices.append(value)
        if wrong_choices:
            message = ', '.join(wrong_choices)
            message = 'Invalid choice(s): {}'.format(message)
            raise ArgumentError(self, message)
        setattr(namespace, self.dest, list(set(values)))
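# A minimal usage sketch (not from the original project): a custom action like
# the one above is typically registered with nargs="+" so several framework
# names can be validated in one pass. The option name is an illustrative
# assumption.
#
#   parser.add_argument("--target-framework", nargs="+",
#                       action=TargetFrameworkAction,
#                       help="one or more supported target frameworks")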
def __call__(self, parser, namespace, values, option_string=None):
    parser_name = values[0]

    # this bit is taken directly from argparse:
    try:
        parser = self._name_parser_map[parser_name]
    except KeyError:
        tup = parser_name, ', '.join(self._name_parser_map)
        msg = 'unknown parser %r (choices: %s)' % tup
        raise ArgumentError(self, msg)

    self._setup_subparser(parser_name, parser)
    caller = super(LazySubParsersAction, self).__call__
    return caller(parser, namespace, values, option_string)
def _do_call(
        self, string, delim=",", data_type=None, choices=None,
        min_len=None, max_len=None):
    if isinstance(string, str):
        vals = string.split(delim) if delim else (string,)
    else:
        vals = string
    if vals[0] == "*" and choices is not None:
        vals = choices
    if data_type:
        vals = [data_type(v) for v in vals]
    if min_len and len(vals) < min_len:
        raise ArgumentError(
            self, "there must be at least {} values".format(min_len))
    if max_len and len(vals) > max_len:
        raise ArgumentError(
            self, "there can be at most {} values".format(max_len))
    return vals
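# A short illustration (hypothetical, assuming _do_call lives on a custom
# argparse Action subclass) of the intended behavior of the helper above:
#
#   action._do_call("1,2,3", data_type=int, min_len=1)   # -> [1, 2, 3]
#   action._do_call("*", choices=["a", "b", "c"])         # -> ["a", "b", "c"]
#   action._do_call("x,y", max_len=1)                     # raises ArgumentError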
def _match_argument_wrapper(self, action, arg_strings_pattern) -> int:
    # Wrapper around ArgumentParser._match_argument behavior to support nargs ranges
    nargs_pattern = self._get_nargs_pattern(action)
    match = re.match(nargs_pattern, arg_strings_pattern)

    # raise an exception if we weren't able to find a match
    if match is None:
        nargs_range = getattr(action, ATTR_NARGS_RANGE, None)
        if nargs_range is not None:
            raise ArgumentError(
                action,
                generate_range_error(nargs_range[0], nargs_range[1]))

    return orig_argument_parser_match_argument(self, action, arg_strings_pattern)
def __call__(self, parser, args, values, option_string=None):
    if len(values) < minlen or len(values) > maxlen:
        if minlen == maxlen:
            lentxt = str(minlen)
        else:
            lentxt = 'between %d and %d' % (minlen, maxlen)
        raise ArgumentError(
            self, 'requires %s arguments (got %d)' % (lentxt, len(values)))
    if not hasattr(args, action):
        setattr(args, action, [])
    getattr(args, action).append([self.dest] + values)
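# The __call__ above closes over names (minlen, maxlen, action) that are not
# parameters, so it presumably sits inside a factory that builds the Action
# class. A plausible shape for that enclosing factory, offered purely as an
# assumption about the missing context:
#
#   def required_length(minlen, maxlen, action):
#       class RequiredLength(argparse.Action):
#           __call__ = ...  # the method defined above
#       return RequiredLength
#
#   parser.add_argument("--items", nargs="+",
#                       action=required_length(1, 3, "collected"))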
def main(argv=None):
    """
    CLI main function for cdd_cli_gen

    :param argv: argv, defaults to ```sys.argv```
    :type argv: ```Optional[List[str]]```
    """
    argv = argv or sys.argv
    usage = "Usage: {executable} {script} <module_name>".format(
        executable=sys.executable, script=argv[0])
    if len(argv) != 2:
        raise ArgumentError(None, usage)
    elif len(argv) > 1 and argv[1] in frozenset(("-h", "--help")):
        print(usage)
        exit()
    mod_pl = argv[1]
    mod = (lambda s: mod_pl if s is False else s)(p.singular_noun(mod_pl))
    mod_cap = mod.capitalize()
    tab = " " * 4
    print(
        " ".join(
            "{tab}--{opt} '{val!s}'".format(
                tab=tab, opt=opt.replace("_", "-"), val=val)
            for opt, val in dict(
                name_tpl="{name}Config",
                input_mapping="ml_params_tensorflow.ml_params.type_generators.exposed_{mod_pl}".format(
                    mod_pl=mod_pl),
                prepend='""" Generated {mod_cap} CLI parsers """\\n'
                        "import tensorflow as tf\\n"
                        "from typing import Any, Optional, Union, List, Iterator, Iterable, Generator\\n"
                        "from {typing} import Literal\\n\\n"
                        "from dataclasses import dataclass\\n\\n"
                        "from yaml import safe_load as loads\\n"
                        "\\n".format(
                            mod_cap=mod_cap,
                            typing="typing" if sys.version_info[:2] < (3, 8)
                            else "typing_extensions",
                        ),
                imports_from_file="tf.keras.{mod_pl}.{mod_cap}".format(
                    mod_pl=mod_pl, mod_cap=mod_cap),
                emit="argparse",
                output_filename=path.join(
                    "ml_params_tensorflow",
                    "ml_params",
                    "{mod_pl}.py".format(mod_pl=mod_pl),
                ),
                decorator="dataclass",
            ).items()
        )
    )
def load_mail_credentials(path):
    """
    Loads mail credentials from json file. Check if required fields are defined.

    :param path: location of json file
    :return: dict with provider, username, app_password and message
    """
    with open(path) as f:
        data = json.loads(f.read())
    required = ['provider', 'username', 'app_password']
    for k in required:
        if k not in data:
            raise ArgumentError(
                'Missing required information in mail_credentials json file.')
    return data
def validate_args(args):
    """
    Validates command line input args.
    """
    assert args.rrup.is_file()
    for imcsv in args.imcsv:
        assert Path(imcsv[0]).is_file()

    if args.srf is not None:
        assert args.srf.is_file()
        if args.config is not None:
            assert args.config.is_file()
    elif args.config is not None:
        # srf file required if config given
        raise ArgumentError("SRF required if config given")
def __init__(self, platform=None):
    if platform is None and XilinxILATracer.platform is None:
        raise ArgumentError("Platform is not specified.")
    self.platform = platform

    # Quick and dirty hack
    # Parse XDCs as TCL script
    platform.toolchain.pre_synthesis_commands += [
        "add_files -fileset constrs_1 -norecurse gen_ila.tcl",
        "set_property FILE_TYPE TCL [get_files *.tcl -filter \"IS_GENERATED == false\"]",
    ]
    platform.toolchain.additional_commands += [
        "write_debug_probe -force {build_name}.ltx"
    ]
def handle(self, options, args):
    self.log.debug('handle: options=%s', options)
    if options.target is None and options.measurement_id is None:
        raise ArgumentError(
            None, 'either --target or --measurement-id '
                  'is required')
    func = {
        'ping': self._ping,
        'sslcert': self._sslcert,
    }[options.command]
    self._run_in_background(func, options)
    self.write_echo()
def __call__(self, parser, cli_args, value, option_string=None):
    if value is None:
        f = sys.stdin

        # No stdin was piped and we have a "dummy" csv file to build
        if self.dummy_csv_column is not None and sys.stdin.isatty():
            value = getattr(cli_args, self.column_dest)

            if self.dummy_csv_guard is not None and not self.dummy_csv_guard(value):
                raise ArgumentError(
                    self, self.dummy_csv_error + (' Got "%s"' % value))

            f = CsvCellIO(self.dummy_csv_column, value)
            setattr(cli_args, self.column_dest, self.dummy_csv_column)
    else:
        try:
            f = open(value, 'r', encoding='utf-8')
        except OSError as e:
            args = {'filename': value, 'error': e}
            message = gettext('can\'t open \'%(filename)s\': %(error)s')
            raise ArgumentError(self, message % args)

    setattr(cli_args, self.dest, f)
def __init__(
    self,
    name: str,
    date: str,
    nightlies: List[str],
    package_owner: str,
    config: Dict[str, Any],
    working_dir: str,
    jira_username: str,
    jira_password: str,
    default_advisories: bool = False,
    dry_run: bool = False,
) -> None:
    _LOGGER.info("Initializing and verifying parameters...")
    self.config = config
    self.release_name = name
    self.release_date = date
    self.package_owner = package_owner or self.config["advisory"]["package_owner"]
    self.working_dir = Path(working_dir).absolute()
    self.default_advisories = default_advisories
    self.dry_run = dry_run
    self.release_version = tuple(map(int, self.release_name.split(".", 2)))
    self.group_name = (
        f"openshift-{self.release_version[0]}.{self.release_version[1]}"
    )
    self.candidate_nightlies = {}
    if self.release_version[0] < 4 and nightlies:
        _LOGGER.warn("No nightly needed for OCP3 releases")
    if self.release_version[0] >= 4 and not nightlies:
        raise ArgumentError("You need to specify at least one nightly.")
    for nightly in nightlies:
        if "s390x" in nightly:
            arch = "s390x"
        elif "ppc64le" in nightly:
            arch = "ppc64le"
        elif "arm64" in nightly:
            arch = "aarch64"
        else:
            arch = "x86_64"
        if ":" not in nightly:
            # prepend pullspec URL to nightly name
            # TODO: proper translation facility between brew and go arch nomenclature
            arch_suffix = "" if arch == "x86_64" else "-arm64" if arch == "aarch64" else "-" + arch
            nightly = f"registry.ci.openshift.org/ocp{arch_suffix}/release{arch_suffix}:{nightly}"
        self.candidate_nightlies[arch] = nightly
    self.elliott_working_dir = self.working_dir / "elliott-working"
    self._jira_client = JIRAClient.from_url(
        config["jira"]["url"], (jira_username, jira_password))
    self.mail = MailService.from_config(config)
def makeDataRefList(self, namespace):
    """!Make self.refList from self.idList

    It's difficult to make a data reference that merely points to an entire
    tract: there is no data product solely at the tract level. Instead, we
    generate a list of data references for patches within the tract.

    @param namespace namespace object that is the result of an argument parser
    """
    datasetType = namespace.config.coaddName + "Coadd_calexp"

    def getPatchRefList(tract):
        return [
            namespace.butler.dataRef(datasetType=datasetType,
                                     tract=tract.getId(),
                                     filter=dataId["filter"],
                                     patch="%d,%d" % patch.getIndex())
            for patch in tract
        ]

    tractRefs = {}  # Data references for each tract
    for dataId in self.idList:
        # There's no registry of coadds by filter, so we need to be given
        # the filter
        if "filter" not in dataId:
            raise ArgumentError(None, "--id must include 'filter'")

        skymap = self.getSkymap(namespace, datasetType)

        if "tract" in dataId:
            tractId = dataId["tract"]
            if tractId not in tractRefs:
                tractRefs[tractId] = []
            if "patch" in dataId:
                tractRefs[tractId].append(
                    namespace.butler.dataRef(datasetType=datasetType,
                                             tract=tractId,
                                             filter=dataId['filter'],
                                             patch=dataId['patch']))
            else:
                tractRefs[tractId] += getPatchRefList(skymap[tractId])
        else:
            tractRefs = dict(
                (tract.getId(),
                 tractRefs.get(tract.getId(), []) + getPatchRefList(tract))
                for tract in skymap)
    self.refList = list(tractRefs.values())
def _check_value(self, action, value):
    # converted value must be one of the choices (if specified)
    if action.choices is not None and value not in action.choices:
        args = {
            'value': value,
            'choices': ', '.join(
                map(repr,
                    filter(lambda act: act != DEFAULT_COMMAND, action.choices)))
        }
        msg = gettext('invalid choice: %(value)r (choose from %(choices)s)')
        raise ArgumentError(action, msg % args)
def __init__(self, call_cost, is_raise=False, min_raise=None, max_raise=None):
    """
    User can check if call_cost is 0. Otherwise, cost to call is call_cost.
    User can raise if min_raise and max_raise aren't None. If so, user can
    raise to between min_raise and max_raise. Note that each of these
    represents a raise total, not a contribution and not the amount by which
    the player is raising.
    """
    self.call_cost = call_cost
    self.is_raise = is_raise
    if (min_raise is None) != (max_raise is None):
        raise ArgumentError(
            "specify both min_raise and max_raise, or neither")
    self.min_raise = min_raise
    self.max_raise = max_raise
def matsub(A, B):
    """
    returns matrix difference C = A - B.
    """
    try:
        m, n = matdim(A)
        if m != len(B) or n != len(B[0]):
            raise ArgumentError("Incompatible matrices for matsub")
        C = matzero(m, n)
        for i in range(m):
            for j in range(n):
                C[i][j] = A[i][j] - B[i][j]
        return C
    except:
        return None
def get_env_vars():
    dotenv_path = os.path.join(get_root_path(), '.env')
    if os.path.exists(dotenv_path):
        with open(dotenv_path, 'r') as f:
            dotenv_ls = [
                l.strip().replace(' ', '')
                for l in f
                if l.strip() and not l.startswith('#')
            ]
            dotenv_dict = dict(
                [l.split('=', 1) for l in dotenv_ls if '=' in l])
    else:
        raise ArgumentError(
            'A valid .env file was not found in your uwbionlp-parser directory. Exiting...'
        )
    return dotenv_dict
def test_doesnt_modify_error_if_unknown_reason():
    def my_fn(log=True):
        pass

    parser = CustomParser()
    arg = Mock()
    arg.options_strings = ['a', 'b']
    parser.add_argument = Mock(side_effect=ArgumentError(None, 'message'))

    with parser:
        pass

    with pytest.raises(ArgumentError) as excinfo:
        _add_args_from_callable(parser, my_fn)

    assert 'message' == str(excinfo.value)
def matadd(A, B):
    """
    Returns matrix sum C = A + B.
    """
    try:
        m, n = matdim(A)
        if m != len(B) or n != len(B[0]):
            raise ArgumentError("Incompatible matrices in matadd")
        C = matzero(m, n)
        for i in range(m):
            for j in range(n):
                C[i][j] = A[i][j] + B[i][j]
        return C
    except:
        return None
def cli():
    args = get_cli_args()
    import sardem.dem

    if args.left_lon and args.geojson or args.left_lon and args.bbox:
        raise ArgumentError(
            args.geojson,
            "Can only use one of positional arguments (left_lon top_lat dlon dlat), "
            "--geojson, or --bbox",
        )
    # Need all 4 positionals, or the --geojson
    elif (any(a is None for a in (args.left_lon, args.top_lat, args.dlon, args.dlat))
          and not args.geojson and not args.bbox and not args.wkt_file):
        raise ValueError("Need --bbox, --geojson, or --wkt-file")

    geojson_dict = json.load(args.geojson) if args.geojson else None
    if args.bbox:
        left, bot, right, top = args.bbox
        left_lon, top_lat = left, top
        dlon = right - left
        dlat = top - bot
    elif args.wkt_file:
        left_lon, top_lat, dlon, dlat = None, None, None, None
    elif args.left_lon:
        left_lon, top_lat = args.left_lon, args.top_lat
        dlon, dlat = args.dlon, args.dlat

    if not args.output:
        output = ("watermask.wbd" if args.data_source == "NASA_WATER"
                  else "elevation.dem")
    else:
        output = args.output

    sardem.dem.main(
        left_lon,
        top_lat,
        dlon,
        dlat,
        geojson_dict,
        args.wkt_file,
        args.data_source,
        args.xrate,
        args.yrate,
        args.keep_egm96,
        output,
    )
def inner_circle(_r: str) -> str:
    try:
        r = int(_r)
        if r > 0:
            d = 2 * r + 1
            rx, ry = d / 2, d / 2
            x, y = np.indices((d + 1, d + 1))
            arr_0 = np.abs(np.hypot(rx - x, ry - y) - r)
            # print('x =', len(arr_0[0]), ', y =', len(arr_0))
            arr = (arr_0 < 0.54).astype(int)
            borders_top(r)
            start_inner_circle = False
            for row in arr:
                arr_2 = '#' + ' ' * r
                for i in row:
                    if i:
                        arr_2 += '*'
                        if not start_inner_circle:
                            start_inner_circle = True
                        else:
                            start_inner_circle = False
                    else:
                        if start_inner_circle:
                            arr_2 += 'o'
                        elif arr_2[:2 * r + 3] == (
                                '#' + ' ' * 2 * r + '**'):  # circle top and bottom points
                            arr_2 += ' '
                        elif arr_2[-3:] == ' **' or arr_2[-1:] == 'o' or arr_2[:2] == '**':
                            arr_2 += 'o'
                        else:
                            arr_2 += ' '
                arr_2 += ' ' * r + '#'
                print(arr_2)
            borders_bottom(r)
        else:
            raise ValueError("Number must be positive integer")
    except Exception as ex:
        raise ArgumentError(
            None, "The input N shall be a positive integer number!\n%s" % ex)
def bind_list(self, interface, scope=None):
    """bind decorator"""
    if not isinstance(interface, list):
        raise ArgumentError(interface, "Interface type must be a subtype of list")

    def decorator(target):
        if isinstance(target, FunctionType):
            self.injector.binder.multibind(
                interface, [self.injector.call_with_injection(target)], scope)
        else:
            self.injector.binder.multibind(interface, [target], scope)
        return target

    logger.debug(f"inject bind_list:interface:{interface}, scope:{scope}")
    return decorator
def validate_ip_arg(ip_or_hostname: str) -> str:
    """
    Validate that an IP address or hostname is valid
    Raise ArgumentError if the address is invalid

    :param ip_or_hostname: ip address or hostname
    :return: The same IP address
    """
    try:
        inet_aton(ip_or_hostname)
        return ip_or_hostname
    except socket_error:
        try:
            return gethostbyname(ip_or_hostname)
        except socket_error:
            raise ArgumentError(
                None,
                "Invalid IP Address or hostname: {}".format(ip_or_hostname))
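# A minimal sketch of wiring validate_ip_arg into argparse as a type callback,
# so bad addresses are rejected while arguments are parsed. The parser and
# option name are illustrative assumptions, not taken from the original code.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--host", type=validate_ip_arg,
                    help="IP address or resolvable hostname")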
def export(self, svc, export_props=None):
    '''
    Export the given svc via the given export_props. Note that the
    export_props must have an ENDPOINT_ID and contain the other standard
    Endpoint Description service properties as described by the OSGi R5+
    Chapter 122 (Remote Service Admin) in the enterprise specification.

    :param svc: The Python service to export to java service registry
    :param export_props: An optional dictionary of Python properties. Note
    these properties must contain all the properties describing the service
    as required by the OSGi Endpoint Description.
    :return: The ENDPOINT_ID value from the export_props. This value may be
    used to get subsequent access to the svc and/or the export_properties
    via get_export_endpoint
    '''
    with self._lock:
        self._raise_not_connected()
    # The Java class attribute must be present
    java = getattr(svc, PY4J_JAVA_ATTRIBUTE)
    # The Java.implements must be present
    objClass = getattr(java, PY4J_JAVA_IMPLEMENTS_ATTRIBUTE)
    if isinstance(objClass, str):
        objClass = [objClass]
    props = {}
    # The export_properties attribute does not need to be present
    j_props = getattr(java, EXPORT_PROPERTIES_NAME, None)
    if j_props:
        props = osgiservicebridge.merge_dicts(props, j_props)
    if export_props:
        props = osgiservicebridge.merge_dicts(props, export_props)
    sec = props.get(osgiservicebridge.REMOTE_CONFIGS_SUPPORTED, None)
    if not sec:
        sec = [PY4J_EXPORTED_CONFIG]
    # check that OBJECTCLASS is set, if not
    props1 = get_edef_props(object_class=objClass, exported_cfgs=sec,
                            ep_namespace=None, ecf_ep_id=self.get_id(),
                            ep_rsvc_id=None, ep_ts=None)
    export_props = merge_dicts(props1, props)
    try:
        endpointid = export_props[ENDPOINT_ID]
    except KeyError:
        raise ArgumentError(
            'Cannot export service since no ENDPOINT_ID present in export_props')
    with self._exported_endpoints_lock:
        self._exported_endpoints[endpointid] = (svc, export_props)
    # without holding lock, call __export
    try:
        self.__export(svc, export_props)
    except Exception as e:
        # if it fails, remove from exported endpoints
        self.remove_export_endpoint(endpointid)
        raise e
    return endpointid
def write(self, arg, move=False, align="left", font=("Arial", 8, "normal")):
    if move:
        raise ArgumentError('move', 'Parameter is not supported.')
    if align == 'left':
        anchor = 'sw'
    elif align == 'center':
        anchor = 's'
    elif align == 'right':
        anchor = 'se'
    kwargs = dict(text=str(arg), anchor=anchor, font=font)
    if self._pencolor:
        kwargs['fill'] = self._pencolor
    self.screen.cv.create_text(
        self.xcor() + self.__xoff, -self.ycor() + self.__yoff, **kwargs)
def _as_settings(args):
    libvirt_xml = args.LIBVIRT_XML
    output_xml = args.output
    profile = Profile.from_str(args.profile)
    in_place = args.in_place
    uri = args.uri
    multithreaded_pinning = args.multithreaded_pinning

    if in_place and not libvirt_xml:
        raise ArgumentError(None, message="no LIBVIRT_XML specified")

    return Settings(libvirt_xml=libvirt_xml,
                    output_xml=output_xml,
                    in_place=in_place,
                    profile=profile,
                    force_multithreaded_pinning=multithreaded_pinning,
                    connection_uri=uri)
def main():
    logging.getLogger().setLevel(logging.INFO)

    # Setup Selenium web driver
    parser = get_arg_parser()
    args = parser.parse_args()
    if args.browser == "docker":
        driver = webdriver.Remote(
            command_executor="http://127.0.0.1:4444/wd/hub",
            desired_capabilities=DesiredCapabilities.FIREFOX,
        )
    elif args.browser == "firefox":
        firefox_profile = webdriver.FirefoxProfile()
        firefox_profile.set_preference("intl.accept_languages", "en-us")
        firefox_profile.update_preferences()
        driver = webdriver.Firefox(firefox_profile)
    elif args.browser == "chrome":
        driver = webdriver.Chrome()
    else:
        raise ArgumentError(message="Unknown driver.")
    driver.set_window_size(1920, 1080)

    login_using_cookie_file(driver, cookie_file=args.login_cookies)
    driver.get("https://www.youtube.com")
    assert "YouTube" in driver.title

    try:
        confirm_logged_in(driver)
        driver.get("https://studio.youtube.com")
        assert "Channel dashboard" in driver.title

        driver.file_detector = LocalFileDetector()
        upload_file(
            driver,
            video_path=args.video_path,
            title=args.title,
            thumbnail_path=args.thumbnail,
            description=args.description,
            game=args.game,
            kids=args.kids,
            upload_time=args.upload_time,
        )
    except:
        driver.close()
        raise
def get_latest_commit_from_base_url(self):
    '''
    Traverse the provided URL, find the latest commit, match the correct
    distro, release, etc.
    '''
    if not re.search('commit.{0,1}$', self.args.base_url):
        raise ArgumentError('Expected baseurl to end with commit dir')
    # Find latest commit...
    base_url = self.args.base_url
    latest = {'timestamp': 0, 'count': 0, 'commit': None}
    for commit in self._get_dirs_from_body(base_url):
        y, m, d, c = commit.strip('/').split('-')[1:]
        self.debug('commit:{0}, y{1}, m{2}, d{3}, c{4}'.format(
            str(commit), y, m, d, c))
        date = "{0}/{1}/{2}".format(d, m, y)
        c = int(c)
        timestamp = time.mktime(
            datetime.datetime.strptime(date, "%d/%m/%Y").timetuple())
        if timestamp >= latest['timestamp']:
            if timestamp == latest['timestamp'] and c <= latest['count']:
                continue
            latest = {'timestamp': timestamp, 'count': c, 'commit': commit}
    base_url = base_url + latest['commit']
    # Get the distro
    if (self.args.distro + "/") not in self._get_dirs_from_body(base_url):
        raise ValueError('Distro "' + str(self.args.distro) +
                         '" not found at path:' + str(base_url))
    base_url += self.args.distro + "/"
    # Get the release num
    new_base_url = None
    base_release = self.clc.distro_ver.split('.')[0]
    rel_dirs = self._get_dirs_from_body(base_url)
    for dir in rel_dirs:
        if re.search('^' + base_release, dir):
            new_base_url = base_url + dir
            break
    if not new_base_url:
        raise RuntimeError('Could not find matching release for:' +
                           str(base_release))
    for dir in self._get_dirs_from_body(new_base_url):
        if re.search('x86_64.{0,1}$', dir):
            self.args.base_url = new_base_url + dir
            return self.args.base_url
    raise RuntimeError('Could not find baseurl for image repo from:' +
                       str(self.args.base_url))
def _parse_args(parser):
    parsed_args = parser.parse_args()
    args = {}
    args['in_fpath'] = parsed_args.input[0]
    args['out_fpath'] = parsed_args.output
    if parsed_args.ignore_alt:
        if parsed_args.alt_gt_num is None:
            raise ArgumentError('alt_gt_num is required when ignore_alt')
        else:
            args['alt_gt_num'] = parsed_args.alt_gt_num
    else:
        args['alt_gt_num'] = None
    args['pre_read_max_size'] = parsed_args.pre_read_max_size
    args['ignore_alt'] = parsed_args.ignore_alt
    args['kept_fields'] = parsed_args.kept_fields
    args['ignored_fields'] = parsed_args.ignored_fields
    return args
def __call__(self, args):
    super(DocumentsUpdateCommand, self).__call__(args)
    a = ['description', 'meta_data', 'name', 'new_file']
    one_set = False
    for i in a:
        if getattr(args, i, None) is not None:
            one_set = True
            break
    if not one_set:
        raise ArgumentError(
            None,
            "You need to choose --file or at least one of the three options: "
            "--name, --meta-data or --description")
    cli = self.ls.documents
    try:
        document = cli.get(args.uuid)
        if args.new_file:
            json_obj = cli.update_file(args.uuid, args.new_file)
            if json_obj:
                json_obj['time'] = self.ls.last_req_time
                json_obj['new'] = args.new_file
                self.log.info(
                    "The file '%(name)s' (%(uuid)s) was updated with %(new)s. (%(time)ss)",
                    json_obj)
            else:
                return False
        else:
            original_name = document.get('name')
            rbu = cli.get_rbu()
            rbu.copy(document)
            rbu.load_from_args(args)
            json_obj = cli.update(rbu.to_resource())
            if json_obj.get('name') == original_name:
                message_ok = "The following document '%(name)s' was successfully updated"
                self.pprint(message_ok, json_obj)
            else:
                json_obj['original_name'] = original_name
                message_ok = ("The former document '%(original_name)s' (renamed to "
                              "'%(name)s') was successfully updated")
                self.pprint(message_ok, json_obj)
        if self.verbose or self.debug:
            self.pretty_json(json_obj)
        return True
    except LinShareException as ex:
        self.pprint_error(ex.args[1] + " : " + args.uuid)
        return False