def subparser_call(self, parser, namespace, values, option_string=None):
    """Dispatch to the sub-parser named by the first positional value.

    Mirrors argparse._SubParsersAction.__call__: records the chosen command
    on the namespace, parses the remaining strings with the selected
    sub-parser, and stashes anything unrecognized for the parent parser.
    """
    from argparse import ArgumentError, SUPPRESS, _UNRECOGNIZED_ARGS_ATTR

    chosen, remainder = values[0], values[1:]

    # Record which sub-command was selected, unless suppressed.
    if self.dest is not SUPPRESS:
        setattr(namespace, self.dest, chosen)

    # Look up the sub-parser registered under that name.
    try:
        parser = self._name_parser_map[chosen]
    except KeyError:
        choices = ', '.join(self._name_parser_map)
        raise ArgumentError(
            self,
            _('unknown parser {!r} (choices: {})').format(chosen, choices))

    # Parse what is left with the sub-parser; anything it does not recognize
    # is queued on the namespace so the top-level parser can decide its fate.
    namespace, leftovers = parser.parse_known_args(remainder, namespace)
    if leftovers:
        vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
        getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(leftovers)
def take_action(action, argument_strings, option_string=None):  # pragma: no cover
    """Run *action* after mutual-exclusion checks (argparse internals)."""
    seen_actions.add(action)
    argument_values = self._get_values(action, argument_strings)

    # Only actions whose value differs from their default count as "present";
    # with that notion, reject combinations of mutually exclusive actions.
    if argument_values is not action.default:
        seen_non_default_actions.add(action)
        for conflicting in action_conflicts.get(action, []):
            if conflicting in seen_non_default_actions:
                raise ArgumentError(
                    action,
                    gettext('not allowed with argument %s')
                    % _get_action_name(conflicting))

    # A SUPPRESS value (e.g. coming from a default) means: do not invoke.
    if argument_values is not SUPPRESS:
        action(self, namespace, argument_values, option_string)
def run_cleanup(args, parser):
    """Remove unused software versions from a repository file.

    :param args: parsed CLI namespace (check, stdout, repository, releases,
        output attributes are read).
    :param parser: the argparse parser, used only to report usage errors.
    """
    if args.check and args.stdout:
        # parser.error() expects a message string. The previous code built an
        # argparse.ArgumentError with a bool as the "argument", which crashed
        # inside _get_action_name when the error was formatted.
        parser.error("Only check, stdout can not be used together!")
    repository = load_yaml(args.repository)
    # args.releases is a list of lists (one per --releases flag); flatten it.
    release_files = [
        filename for sublist in args.releases for filename in sublist
    ]
    used_versions = load_all_releases(release_files)
    unused_versions = find_unused_versions(used_versions, repository)
    if args.check:
        # Report-only mode: print findings, change nothing.
        if not unused_versions:
            print("No unused software versions found!")
        else:
            print("The following software version are not in use:")
            for lib, versions in unused_versions.items():
                print(lib, versions)
        return
    remove_unused_versions(repository=repository,
                           unused_versions=unused_versions)
    if args.stdout:
        print(prettier(repository))
        return
    # Default to overwriting the input file unless --output was given.
    output_file = args.repository
    if args.output:
        output_file = args.output
    write_to_file(repository, output_file)
    if unused_versions:
        print("Success! New repository file written to {}".format(output_file))
        print("The following software version are not in use:")
        for lib, versions in unused_versions.items():
            print(lib, versions)
def __init__(self, list_options):
    """Parse --list-options values into {option_name: negated} and validate.

    Each raw value may contain several options separated by commas or spaces;
    a 'no-' prefix marks the option as negated.

    :raises ArgumentError: if any option name is not in VALID_LIST_OPTIONS.
    """
    # The original built the inner list [re.split(...) for option in ...] and
    # then iterated over the *lists* themselves, so .startswith() was called
    # on a list and raised AttributeError; flatten with a double loop instead.
    # Also, stripping the 'no-' prefix used [2:], which left a stray '-' on
    # the option name; [3:] removes the full prefix.
    self.list_options = {
        (sub_option[3:] if sub_option.startswith('no-') else sub_option):
            sub_option.startswith('no-')
        for option in list_options
        for sub_option in re.split(r'[, ]', option)
    }
    invalid_list_options = (
        set(self.list_options.keys()) - VALID_LIST_OPTIONS
    )
    if invalid_list_options:
        raise ArgumentError(
            u'--list-options',
            u'Invalid list options: {0}'.format(
                u', '.join(list(map(repr, invalid_list_options)))))
def vln():
    """Create a VOSpace LinkNode: link *target* to point at *source*."""
    parser = CommonParser(description=DESCRIPTION)
    parser.add_argument('source', help="location that link will point to.")
    parser.add_argument('target', help="location of the LinkNode")
    try:
        opt = parser.parse_args()
        set_logging_level_from_args(opt)
        # Both endpoints must be remote resources.
        if not (vos.is_remote_file(opt.source)
                and vos.is_remote_file(opt.target)):
            raise ArgumentError(
                None,
                "source must be vos node or http url, target must be vos node")
        client = vos.Client(vospace_certfile=opt.certfile,
                            vospace_token=opt.token)
        client.link(opt.source, opt.target)
    except ArgumentError as ex:
        # Bad command line: show usage before bailing out.
        parser.print_usage()
        exit_on_exception(ex)
    except Exception as ex:
        exit_on_exception(ex)
def __init__(self, first_layer=None, layers=None, layers_sizes=None):
    """Build a network from explicit layers, a first layer, or layer sizes."""

    def link(chain):
        # Doubly-link consecutive layers; return (input layer, output layer).
        for left, right in zip(chain, chain[1:]):
            left.next_layer = right
            right.previous_layer = left
        return chain[0], chain[-1]

    if layers is not None:
        # All the layers were supplied: connect them in the given order.
        self.first_layer, self.output_layer = link(layers)
    elif first_layer is not None:
        # Only the first layer of an already built network was supplied:
        # walk the chain forward to find the output layer.
        self.first_layer = first_layer
        cursor = first_layer
        while cursor.next_layer is not None:
            cursor = cursor.next_layer
        self.output_layer = cursor
    elif layers_sizes is None:
        raise ArgumentError("No layers specified")
    else:
        # Randomly initialise a fresh network from the size specification:
        # each consecutive (input_size, layer_size) pair becomes one layer.
        built = []
        for input_size, layer_size in zip(layers_sizes, layers_sizes[1:]):
            neurons = []
            for _ in range(layer_size):
                weights = [random.uniform(-2, 2) for _ in range(input_size)]
                bias = random.uniform(-2, 2)
                neurons.append(SigmoidNeuron(weights, bias))
            built.append(NeuralLayer(neurons))
        self.first_layer, self.output_layer = link(built)
def execute(args: List[str]):
    """Run a packaging version bump in the current working directory.

    :param args: CLI arguments; exactly one entry, the bump kind.
    :raises ArgumentError: if the bump argument is missing or extra args given.
    """
    if len(args) == 1:
        bump = args[0]
    else:
        raise ArgumentError(args, "Must provide a packaging bump.")

    package: Package
    working_directory = os.getcwd()
    note(f"Working Directory: {Fore.YELLOW}[{working_directory}]")

    # Path fragments that identify the project technology.
    path_markers = {
        "node": ["node"],
        "vscode": ["vscode"],
        "unity3d": ["Assets", "mdabros"],
    }

    # Pick the FIRST matching technology. The original `break` only exited
    # the inner loop, so a later marker could silently override an earlier
    # match (last-match-wins instead of the intended first-match).
    tech = "generic"
    for marker, fragments in path_markers.items():
        if any(fragment in working_directory for fragment in fragments):
            tech = marker
            break

    if tech == "generic":
        package = Generic(working_directory, bump)
    elif tech == "node":
        package = Node(working_directory, bump)
    elif tech == "vscode":
        package = VSCode(working_directory, bump)
    elif tech == "unity3d":
        package = Unity3D(working_directory, bump)

    package.execute_version()
    celebrate(f"{tech} {bump} versioning successful!")
def run_duration_string_to_seconds(s: str) -> Optional[int]:
    """
    Parse a string that represents a timespan, and returns it converted into seconds.
    The string is expected to be a floating point number with a single character suffix
    s, m, h, d for seconds, minutes, hours, day. Examples: '3.5h', '2d'.
    If the argument is an empty string, None is returned.
    :param s: The string to parse.
    :return: The timespan represented in the string converted to seconds.
    :raises ArgumentError: If the suffix is not one of 's', 'm', 'h', 'd'.
    :raises ValueError: If the numeric part cannot be parsed as a float.
    """
    s = s.strip()
    if not s:
        return None
    # Seconds per unit, keyed by the accepted suffix characters.
    multipliers = {"s": 1, "m": 60, "h": 60 * 60, "d": 24 * 60 * 60}
    suffix = s[-1]
    if suffix not in multipliers:
        # Pass None as the "argument": argparse.ArgumentError accesses
        # `argument.option_strings` on any non-None first argument, so the
        # previous ArgumentError("s", ...) raised AttributeError instead of
        # the intended error message.
        raise ArgumentError(None, f"Invalid suffix: Must be one of 's', 'm', 'h', 'd', but got: {s}")
    return int(float(s[:-1]) * multipliers[suffix])
def _read_config_file(self):
    """Read option tokens from the workspace config file into
    self.config_values_from_file.

    Tokens are scanned pairwise: any token found in self.config_names_dict is
    treated as an option name and consumes the following token as its value.
    The first value found for a name wins; later duplicates are ignored with
    a warning.
    """
    all_tokens_in_file = []
    config_file_abspath = get_config_path(self.workspace_root)
    try:
        all_tokens_in_file = my_utils.tokens_from_file(config_file_abspath)
    except FileNotFoundError:
        pass  # no config file is perfectly ok
    config_values = {}
    iter_tokens = iter(all_tokens_in_file)
    for token in iter_tokens:
        name = self.config_names_dict.get(token)
        if name is not None:
            value = next(iter_tokens, None)
            if not value:
                raise ArgumentError(name, f'in {config_file_abspath} expected one argument after {token}')
            existing_value = config_values.get(name)
            if existing_value is not None:
                # The old message printed existing_value twice; it is the NEW
                # value that gets ignored, so report that one.
                my_utils.builder_print_warning(f'Ignoring value {value} for option {name}, as another value has already been found ({existing_value}).')
            else:
                config_values[name] = value
    self.config_values_from_file = config_values
def main():
    """Fetch content from a URL or a local file, convert it, and print it.

    :raises ArgumentError: if the target is neither a URL nor an existing path.
    :raises ValueError: if the target yields no content.
    """
    args = parse_args()
    content = ""
    if args.target.startswith(("http://", "https://")):
        resp = requests.get(args.target)
        resp.raise_for_status()
        content = resp.text
    elif os.path.exists(args.target):
        with open(args.target) as f:
            content = f.read()
    else:
        # argparse.ArgumentError requires (argument, message); the previous
        # single-argument call raised TypeError instead of the intended error.
        raise ArgumentError(None, f"Invalid target: {args.target}")
    if not content:
        raise ValueError("No content found")
    logger = logging.getLogger(__name__)
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    converter = load_converter(args, logger=logger)
    result = converter.convert(content)
    print(result, file=args.outfile)
def get_ecf_props(ep_id, ep_id_ns=None, rsvc_id=None, ep_ts=None):
    '''
    Build the dictionary of ECF RS-required properties.

    :param ep_id: a String used for the ECF_ENDPOINT_ID value; must be truthy.
    :param ep_id_ns: optional ECF_ENDPOINT_CONTAINERID_NAMESPACE value;
        defaults to PY4J_NAMESPACE.
    :param rsvc_id: optional integer for ECF_RSVC_ID; defaults to
        get_next_rsid().
    :param ep_ts: optional integer timestamp; defaults to time_since_epoch().
    :return: a single dictionary containing all the ECF RS-required properties.
    '''
    if not ep_id:
        raise ArgumentError('ep_id must be a valid endpoint id')
    # Falsy arguments fall back to their computed defaults.
    return {
        ECF_ENDPOINT_ID: ep_id,
        ECF_ENDPOINT_CONTAINERID_NAMESPACE: ep_id_ns or PY4J_NAMESPACE,
        ECF_RSVC_ID: rsvc_id or get_next_rsid(),
        ECF_ENDPOINT_TIMESTAMP: ep_ts or time_since_epoch(),
    }
def csv_str(vstr, sep=","):
    """
    Parse comma-separated value string for "nodes" option.

    Args:
        vstr (str): Value string from CLI, including commas.
        sep (str): String separator.

    Returns:
        values (list): List of nodes, each stripped of spaces and commas.
    """
    # str() applied to the results of str.split() can never raise ValueError,
    # so the original try/except (raising ArgumentError) was dead code.
    return [v.strip(" ,") for v in vstr.split(sep)]
def _select_probes(self, options):
    """Choose a probe-selection strategy from the CLI options.

    Precedence: explicit IP address, then lat/lon, then country.
    """
    # TODO: make sure probes match address family
    kwargs = {}
    # Radius may be given in miles; normalise to kilometres.
    if options.radius_miles is not None:
        radius = options.radius_miles * 1.60934
    else:
        radius = options.radius
    # ASN filter (v4 takes precedence over v6).
    if options.asn_v4 is not None:
        kwargs['asn_v4'] = options.asn_v4
    elif options.asn_v6 is not None:
        kwargs['asn_v6'] = options.asn_v6
    if options.ip_address is not None:
        kwargs['radius'] = radius
        return self._select_probes_ip_address(options, kwargs)
    if options.lat is not None or options.lon is not None:
        kwargs['radius'] = radius
        return self._select_probes_lat_lon(options, kwargs)
    if options.country is not None:
        return self._select_probes_country(options, kwargs)
    raise ArgumentError(None, 'Missing probe selection parameters')
def query(query, url=DEFAULT_URL):
    """
    Method for sending a query to a server under the specified URL. In case
    no URL was given, the DEFAULT_URL is used. The query must contain
    "OFFSET ?offset" and "LIMIT 10000"; this is ruled by the fact that the
    query is written especially for dbpedia, and results are paged 10000 at
    a time until a short page signals the end.
    """
    if ("OFFSET ?offset" not in query) or ("LIMIT 10000" not in query):
        raise ArgumentError(
            "Please add 'LIMIT 10000' and 'OFFSET ?offset' to the query.")
    connection = SPARQLWrapper(url)
    connection.setReturnFormat(JSON)
    offset = 0
    result = []
    finished = False
    while not finished:
        tempQuery = query.replace("?offset", str(offset))
        connection.setQuery(tempQuery)
        res = []
        while res == []:
            try:
                res = connection.query().convert()
            # Was a bare `except:`, which also swallowed KeyboardInterrupt /
            # SystemExit and made the retry loop impossible to break out of.
            except Exception:
                print("Error! Sleeping ...")
                sleep(10)
                print("... next try.")
        result.extend(res["results"]["bindings"])
        # A page shorter than 10000 rows means we have exhausted the data.
        finished = len(res["results"]["bindings"]) != 10000
        offset += 10000
    return result
def _update_func_policies(self, args, cli, uuid, position=None, count=None):
    """Update a functionality's delegation/activation policy from CLI args."""
    # pylint: disable=too-many-arguments
    # pylint: disable=unused-argument
    resource = cli.get(uuid, args.domain)
    # Default to the activation policy unless a specific type was requested.
    policy = resource.get('activationPolicy')
    if args.policy_type is not None:
        policy = resource.get(args.policy_type)
    if policy is None:
        raise ArgumentError(None,
                            "No delegation policy for this functionality")
    policy['policy'] = args.status
    # ENABLE/DISABLE map to a boolean status with policy fixed to ALLOWED;
    # any other status is stored verbatim in 'policy' above.
    if args.status in ("ENABLE", "DISABLE"):
        policy['status'] = args.status == "ENABLE"
        policy['policy'] = "ALLOWED"
    return self._update(args, cli, resource)
def _get_distance_matrix(metric, data, matching_ddi, data_ddi_match_idx):
    """
    Return a square distance matrix for the requested metric.

    For "synergy" the entries are raw synergy scores; for the other metrics
    they are pairwise distances computed over the morgan fingerprints.
    """
    metric = metric.lower()
    valid_metrics = {"synergy", "tanimoto", "jaccard", "l2"}
    if metric not in valid_metrics:
        raise ArgumentError("Metric '%s' is invalid. Must be in %s" % (metric, valid_metrics))
    if metric == "synergy":
        return _get_synergy_matrix(data.ddi_edge_response, matching_ddi,
                                   data_ddi_match_idx)
    if metric in ("tanimoto", "jaccard"):
        # Only use fingerprint to compute the distance
        fingerprints = data.x_drugs[:, :data.fp_bits].type(torch.bool)
        return _get_pairwise_distance_matrix(fingerprints, matching_ddi,
                                             "jaccard")
    return _get_pairwise_distance_matrix(data.x_drugs[:, :data.fp_bits],
                                         matching_ddi, "l2")
def error(self, message):
    """Override of ArgumentParser.error: raise instead of exiting.

    Raising argparse.ArgumentError (with no owning action, hence None) lets
    the caller handle parse failures programmatically, instead of argparse's
    default behavior of printing usage and calling sys.exit(2).
    """
    raise ArgumentError(None, message)
def vls():
    """List VOSpace nodes, roughly mimicking `ls`: optional long/group
    columns, human-readable sizes, and sorting by name, size or date."""
    parser = CommonParser(description=DESCRIPTION, add_help=False)
    parser.add_argument('node', nargs="+", help="VOSpace Node to list.")
    # NOTE(review): add_argument above but add_option below — confirm
    # CommonParser really exposes an optparse-style add_option alias.
    parser.add_option("--help", action="help", default='==SUPPRESS==',
                      help='show this help message and exit')
    parser.add_option("-l", "--long", action="store_true",
                      help="verbose listing sorted by name")
    parser.add_option("-g", "--group", action="store_true",
                      help="display group read/write information")
    parser.add_option("-h", "--human", action="store_true",
                      help="make sizes human readable", default=False)
    parser.add_option("-S", "--Size", action="store_true",
                      help="sort files by size", default=False)
    parser.add_option("-r", "--reverse", action="store_true",
                      help="reverse the sort order", default=False)
    parser.add_option("-t", "--time", action="store_true",
                      help="sort by time copied to VOSpace")
    try:
        opt = parser.parse_args()
        this.human = opt.human
        set_logging_level_from_args(opt)

        # set which columns will be printed
        columns = []
        if opt.long or opt.group:
            columns = ['permissions']
            if opt.long:
                columns.extend(['creator'])
            columns.extend(
                ['readGroup', 'writeGroup', 'isLocked', 'size', 'date'])

        # create a client to send VOSpace command
        client = vos.Client(vospace_certfile=opt.certfile,
                            vospace_token=opt.token)

        files = []
        dirs = []

        # determine if there is a sorting order
        if opt.Size:
            sort = vos.SortNodeProperty.LENGTH
        elif opt.time:
            sort = vos.SortNodeProperty.DATE
        else:
            sort = None

        # Server-side sort defaults differ, so the asc/desc flag depends on
        # whether a sort property was chosen at all.
        if opt.reverse:
            order = 'asc' if sort else 'desc'
        else:
            order = 'desc' if sort else 'asc'

        for node in opt.node:
            if not node.startswith('vos:'):
                raise ArgumentError(opt.node,
                                    "Invalid node name: {}".format(node))
            logging.debug("getting listing of: %s" % str(node))
            targets = client.glob(node)

            # segregate files from directories
            for target in targets:
                target_node = client.get_node(target)
                if target_node.isdir():
                    dirs.append((_get_sort_key(target_node, sort),
                                 target_node, target))
                else:
                    files.append((_get_sort_key(target_node, sort),
                                  target_node))

        # Files first, then each directory with its children listed.
        for f in sorted(files, key=lambda ff: ff[0],
                        reverse=(order == 'desc')):
            _display_target(columns, f[1])

        for d in sorted(dirs, key=lambda dd: dd[0],
                        reverse=(order == 'desc')):
            n = d[1]
            if (len(dirs) + len(files)) > 1:
                sys.stdout.write('\n{}:\n'.format(n.name))
            if opt.long:
                sys.stdout.write('total: {}\n'.format(
                    int(n.get_info()['size'])))
            for row in client.get_children_info(d[2], sort, order):
                _display_target(columns, row)
    except Exception as ex:
        exit_on_exception(ex)
def integer_at_least_one(param: str) -> int:
    """Argparse `type=` helper: parse *param* as an int and require it >= 1.

    :param param: The raw command-line string.
    :return: The parsed integer.
    :raises ArgumentError: If the value is smaller than 1.
    :raises ValueError: If *param* is not a valid integer literal.
    """
    val = int(param)
    if val < 1:
        # argparse.ArgumentError expects an Action (or None) as its first
        # argument; passing the raw string crashed with AttributeError in
        # _get_action_name before the intended error could be constructed.
        raise ArgumentError(None, "Number of instances should be at least 1")
    return val
def error(self, message: str) -> None:
    """Override of ArgumentParser.error: raise instead of exiting.

    Raising argparse.ArgumentError (with no owning action, hence None) lets
    the caller handle parse failures programmatically, instead of argparse's
    default behavior of printing usage and calling sys.exit(2).
    """
    raise ArgumentError(None, message)
def __call__(self, value):
    """Validate a session identifier; return it unchanged when acceptable."""
    # Session name can be a path or just a name.
    looks_like_path = os.path.sep in value
    if not looks_like_path and not VALID_SESSION_NAME_PATTERN.search(value):
        raise ArgumentError(None, self.error_message)
    return value
def check_legacy_command(action, value):
    """Checks command value and raise error if value is in removed command"""
    new_command = COMMAND_MAP.get(value)
    if new_command is None:
        return
    raise ArgumentError(
        action,
        f"`airflow {value}` command, has been removed, please use `airflow {new_command}`",
    )
def run(self, args):
    """Reads in a CSV, performs augmentation, and outputs an augmented CSV.

    Preserves all columns except for the input (augmented) column. With
    --interactive, instead runs a REPL that augments typed sentences.
    """
    if args.interactive:
        print("\nRunning in interactive mode...\n")
        # NOTE(review): eval() over AUGMENTATION_RECIPE_NAMES values executes
        # arbitrary strings — safe only while that map stays fully trusted.
        augmenter = eval(AUGMENTATION_RECIPE_NAMES[args.recipe])(
            pct_words_to_swap=args.pct_words_to_swap,
            transformations_per_example=args.transformations_per_example,
        )
        print("--------------------------------------------------------")
        while True:
            print(
                '\nEnter a sentence to augment, "q" to quit, "c" to change arguments:\n'
            )
            text = input()
            if text == "q":
                break
            elif text == "c":
                # Rebuild the augmenter from freshly prompted arguments.
                print("\nChanging augmenter arguments...\n")
                recipe = input(
                    "\tAugmentation recipe name ('r' to see available recipes): "
                )
                if recipe == "r":
                    print("\n\twordnet, embedding, charswap, eda\n")
                    args.recipe = input("\tAugmentation recipe name: ")
                else:
                    args.recipe = recipe
                args.pct_words_to_swap = float(
                    input("\tPercentage of words to swap (0.0 ~ 1.0): "))
                args.transformations_per_example = int(
                    input("\tTransformations per input example: "))
                print("\nGenerating new augmenter...\n")
                augmenter = eval(AUGMENTATION_RECIPE_NAMES[args.recipe])(
                    pct_words_to_swap=args.pct_words_to_swap,
                    transformations_per_example=args.
                    transformations_per_example,
                )
                print(
                    "--------------------------------------------------------"
                )
                continue
            elif not text:
                continue
            print("\nAugmenting...\n")
            print("--------------------------------------------------------")
            for augmentation in augmenter.augment(text):
                print(augmentation, "\n")
            print("--------------------------------------------------------")
    else:
        textattack.shared.utils.set_seed(args.random_seed)
        start_time = time.time()
        if not (args.csv and args.input_column):
            raise ArgumentError(
                "The following arguments are required: --csv, --input-column/--i"
            )
        # Validate input/output paths.
        if not os.path.exists(args.csv):
            raise FileNotFoundError(f"Can't find CSV at location {args.csv}")
        if os.path.exists(args.outfile):
            if args.overwrite:
                textattack.shared.logger.info(
                    f"Preparing to overwrite {args.outfile}.")
            else:
                raise OSError(
                    f"Outfile {args.outfile} exists and --overwrite not set."
                )
        # Read in CSV file as a list of dictionaries. Use the CSV sniffer to
        # try and automatically infer the correct CSV format. The context
        # manager closes the handle (the original leaked the open file).
        with open(args.csv, "r") as csv_file:
            dialect = csv.Sniffer().sniff(csv_file.readline(),
                                          delimiters=";,")
            csv_file.seek(0)
            rows = list(
                csv.DictReader(csv_file, dialect=dialect,
                               skipinitialspace=True))
        # Validate input column.
        row_keys = set(rows[0].keys())
        if args.input_column not in row_keys:
            raise ValueError(
                f"Could not find input column {args.input_column} in CSV. Found keys: {row_keys}"
            )
        textattack.shared.logger.info(
            f"Read {len(rows)} rows from {args.csv}. Found columns {row_keys}."
        )
        augmenter = eval(AUGMENTATION_RECIPE_NAMES[args.recipe])(
            pct_words_to_swap=args.pct_words_to_swap,
            transformations_per_example=args.transformations_per_example,
        )
        output_rows = []
        for row in tqdm.tqdm(rows, desc="Augmenting rows"):
            text_input = row[args.input_column]
            if not args.exclude_original:
                output_rows.append(row)
            for augmentation in augmenter.augment(text_input):
                augmented_row = row.copy()
                augmented_row[args.input_column] = augmentation
                output_rows.append(augmented_row)
        # Print to file.
        with open(args.outfile, "w") as outfile:
            csv_writer = csv.writer(outfile,
                                    delimiter=",",
                                    quotechar='"',
                                    quoting=csv.QUOTE_MINIMAL)
            # Write header.
            csv_writer.writerow(output_rows[0].keys())
            # Write rows.
            for row in output_rows:
                csv_writer.writerow(row.values())
        textattack.shared.logger.info(
            f"Wrote {len(output_rows)} augmentations to {args.outfile} in {time.time() - start_time}s."
        )
def vchmod():
    """Set read/write/public permissions on a VOSpace node, chmod-style:
    translate the parsed mode spec into node properties and push the update."""
    # TODO: seperate the sys.argv parsing from the actual command.
    parser = CommonParser(description=DESCRIPTION)
    parser.add_argument(
        'mode', type=__mode__,
        help=r'permission setting accepted modes: (og|go|o|g)[+-=](rw|wr|r\w)')
    parser.add_argument(
        "node",
        help="node to set mode on, eg: vos:Root/Container/file.txt")
    parser.add_argument(
        'groups', nargs="*",
        help="name of group(s) to assign read/write permission to")
    # NOTE(review): add_argument above but add_option here — confirm
    # CommonParser really exposes an optparse-style add_option alias.
    parser.add_option("-R", "--recursive", action='store_const', const=True,
                      help="Recursive set read/write properties")

    opt = parser.parse_args()
    set_logging_level_from_args(opt)
    group_names = opt.groups
    mode = opt.mode

    # Translate the parsed mode into node property assignments.
    props = {}
    try:
        if 'o' in mode['who']:
            # 'o' (other) toggles public visibility.
            if mode['op'] == '-':
                props['ispublic'] = 'false'
            else:
                props['ispublic'] = 'true'
        if 'g' in mode['who']:
            if '-' == mode['op']:
                # Removing group permissions takes no group names.
                if not len(group_names) == 0:
                    raise ArgumentError(
                        opt.groups,
                        "Names of groups not valid with remove permission")
                if 'r' in mode['what']:
                    props['readgroup'] = None
                if "w" in mode['what']:
                    props['writegroup'] = None
            else:
                # Granting: one group name per permission letter requested.
                if not len(group_names) == len(mode['what']):
                    name = len(mode['what']) > 1 and "names" or "name"
                    raise ArgumentError(
                        None,
                        "{} group {} required for {}".format(
                            len(mode['what']), name, mode['what']))
                if mode['what'].find('r') > -1:
                    # remove duplicate whitespaces
                    read_groups = " ".join(
                        group_names[mode['what'].find('r')].split())
                    props['readgroup'] = \
                        (CADC_GMS_PREFIX +
                         read_groups.replace(" ", " " + CADC_GMS_PREFIX))
                if mode['what'].find('w') > -1:
                    wgroups = " ".join(
                        group_names[mode['what'].find('w')].split())
                    props['writegroup'] = \
                        (CADC_GMS_PREFIX +
                         wgroups.replace(" ", " " + CADC_GMS_PREFIX))
    except ArgumentError as er:
        parser.print_usage()
        logging.error(str(er))
        sys.exit(er)

    logging.debug("Setting {} on {}".format(props, opt.node))

    try:
        client = Client(vospace_certfile=opt.certfile,
                        vospace_token=opt.token)
        node = client.get_node(opt.node)
        if opt.recursive:
            # Clear existing properties before the recursive update.
            node.props.clear()
            node.clear_properties()
            # del node.node.findall(vos.Node.PROPERTIES)[0:]
        if 'readgroup' in props:
            node.chrgrp(props['readgroup'])
        if 'writegroup' in props:
            node.chwgrp(props['writegroup'])
        if 'ispublic' in props:
            node.set_public(props['ispublic'])
        logging.debug("Node: {0}".format(node))
        status = client.update(node, opt.recursive)
        if status:
            sys.exit(status)
    except Exception as ex:
        exit_on_exception(ex)
def check_indice_args(self, start, end):
    """Validate that (start, end) are usable indices into self.points.

    :raises ArgumentError: if start is outside [0, len(points) - 2] or end is
        not in (start, len(points) - 1].
    """
    last = len(self.points) - 1
    # The original guard read `start > start > len(self.points) - 2` — a
    # chained comparison that is always False, so out-of-range start indices
    # were never rejected.
    if start < 0 or start > last - 1:
        raise ArgumentError("invalid start index")
    elif end <= start or end > last:
        raise ArgumentError("invalid end index")
def __call__(self, parser, args, values, option_string=None):
    """Argparse action hook enforcing an argument count within [nmin, nmax]."""
    count = len(values)
    if count < nmin or count > nmax:
        raise ArgumentError(
            self, 'requires between %s and %s arguments' % (nmin, nmax))
    setattr(args, self.dest, values)
def vecsub(X, Y):
    """Element-wise difference X - Y; both vectors must have equal length."""
    if len(X) != len(Y):
        raise ArgumentError("incompatible vector lengths in vecsub().")
    return [a - b for a, b in zip(X, Y)]
def __call__(self, parser, namespace, value, option_string=None):
    """Argparse action: parse a 'name=integer' pair into the dest dict.

    :raises ArgumentError: if *value* is not of the form 'name=integer'.
    """
    # Anchor the pattern with \s*$: previously 'nloc=20junk' passed the
    # unanchored regex and then crashed in int() with a raw ValueError
    # instead of a clean parser error.
    if not re.match(r"\s*\w+\s*=\s*\d+\s*$", value):
        raise ArgumentError(self, "should be like nloc=20")
    # maxsplit=1 (was 2): with the old value a stray second '=' produced a
    # three-item tuple and an unpack ValueError.
    k, val = value.split("=", 1)
    getattr(namespace, self.dest)[k.strip()] = int(val.strip())
def consume_optional(start_index):
    """Consume one optional (and any bundled short options) starting at
    arg_strings[start_index]; argparse internals patched by argcomplete to
    track which actions are still able to consume arguments."""
    # get the optional identified at this index
    option_tuple = option_string_indices[start_index]
    action, option_string, explicit_arg = option_tuple

    # identify additional optionals in the same arg string
    # (e.g. -xyz is the same as -x -y -z if no args are required)
    match_argument = self._match_argument
    action_tuples = []
    while True:

        # if we found no optional action, skip it
        if action is None:
            extras.append(arg_strings[start_index])
            return start_index + 1

        # if there is an explicit argument, try to match the
        # optional's string arguments to only this
        if explicit_arg is not None:
            arg_count = match_argument(action, 'A')

            # if the action is a single-dash option and takes no
            # arguments, try to parse more single-dash options out
            # of the tail of the option string
            chars = self.prefix_chars
            if arg_count == 0 and option_string[1] not in chars:
                action_tuples.append((action, [], option_string))
                char = option_string[0]
                option_string = char + explicit_arg[0]
                new_explicit_arg = explicit_arg[1:] or None
                optionals_map = self._option_string_actions
                if option_string in optionals_map:
                    action = optionals_map[option_string]
                    explicit_arg = new_explicit_arg
                else:
                    msg = _('ignored explicit argument %r')
                    raise ArgumentError(action, msg % explicit_arg)

            # if the action expect exactly one argument, we've
            # successfully matched the option; exit the loop
            elif arg_count == 1:
                stop = start_index + 1
                args = [explicit_arg]
                action_tuples.append((action, args, option_string))
                break

            # error if a double-dash option did not use the
            # explicit argument
            else:
                msg = _('ignored explicit argument %r')
                raise ArgumentError(action, msg % explicit_arg)

        # if there is no explicit argument, try to match the
        # optional's string arguments with the following strings
        # if successful, exit the loop
        else:
            start = start_index + 1
            selected_patterns = arg_strings_pattern[start:]
            self.active_actions = [action]  # Added by argcomplete
            action.num_consumed_args = 0  # Added by argcomplete
            arg_count = match_argument(action, selected_patterns)
            stop = start + arg_count
            args = arg_strings[start:stop]

            # Begin added by argcomplete
            # If the pattern is not open (e.g. no + at the end), remove
            # the action from active actions (since it wouldn't be able
            # to consume any more args)
            if action.nargs not in [
                    ZERO_OR_MORE, ONE_OR_MORE, PARSER, REMAINDER
            ]:
                self.active_actions.remove(action)
            elif action.nargs == OPTIONAL and len(args) == 1:
                self.active_actions.remove(action)
            action.num_consumed_args = len(args)
            # End added by argcomplete

            action_tuples.append((action, args, option_string))
            break

    # add the Optional to the list and return the index at which
    # the Optional's string args stopped
    assert action_tuples
    for action, args, option_string in action_tuples:
        take_action(action, args, option_string)
    return stop
action='store_true', default=False, help='Delete output XPI if it already exists.') p.add_option('-v', '--verbose', dest='verbose', action='store_true', default=False, help='Be more noisy') return p if __name__ == '__main__': options, args = create_option_parser().parse_args() if len(args) < 1: from argparse import ArgumentError raise ArgumentError(None, 'You need to specify at least a language!') if options.verbose: logging.basicConfig(level=logging.DEBUG) build_xpi(l10nbase=os.path.abspath(options.l10nbase), srcdir=os.path.abspath(options.srcdir), outputdir=os.path.abspath(options.outputdir), lang=args[0], product=options.mozproduct, delete_dest=options.delete_dest)