def _parse_without_formatting(string, *, recursion_depth=2, recursive=False):
    """Parse a color-markup format string without applying formatting.

    Walks the string with ``string.Formatter.parse()``, feeding literal text
    and reconstructed replacement fields into an ``AnsiParser``, and recursing
    into format specs (which may themselves contain color markup).

    Returns a tuple ``(parsed, messages_color_tokens)`` where the second item
    collects the color tokens in effect at each ``{message}`` field.

    Raises:
        ValueError: when nested format specs exceed ``recursion_depth``.
    """
    if recursion_depth < 0:
        raise ValueError("Max string recursion exceeded")
    formatter = Formatter()
    parser = AnsiParser()
    messages_color_tokens = []
    for literal_text, field_name, format_spec, conversion in formatter.parse(string):
        # Formatter.parse() un-escapes "{{"/"}}" to a single brace; re-double
        # the trailing brace so the reconstructed text stays escaped.
        if literal_text and literal_text[-1] in "{}":
            literal_text += literal_text[-1]
        parser.feed(literal_text, raw=recursive)
        if field_name is not None:
            if field_name == "message":
                if recursive:
                    # Nested inside a format spec: color context is the
                    # caller's responsibility, record a placeholder.
                    messages_color_tokens.append(None)
                else:
                    color_tokens = parser.current_color_tokens()
                    messages_color_tokens.append(color_tokens)
            # Rebuild the replacement field verbatim: {name!conv:spec}.
            field = "{%s" % field_name
            if conversion:
                field += "!%s" % conversion
            if format_spec:
                field += ":%s" % format_spec
            field += "}"
            parser.feed(field, raw=True)
            # Format specs may themselves contain color markup; recurse with
            # a decremented depth budget to bound the nesting.
            _, color_tokens = Colorizer._parse_without_formatting(
                format_spec, recursion_depth=recursion_depth - 1, recursive=True)
            messages_color_tokens.extend(color_tokens)
    return parser.done(), messages_color_tokens
def on_query_completions(self, view, prefix, locations):
    """Offer citation completions when autocompleting inside a LaTeX scope."""
    point = locations[0]
    # Only trigger within LaTeX source.
    if view.score_selector(point, "text.tex.latex") == 0:
        return []

    try:
        completions, prefix, post_brace, new_point_a, new_point_b = \
            get_cite_completions(view, point, autocompleting=True)
    except UnrecognizedCiteFormatError:
        return []
    except NoBibFilesError:
        sublime.status_message("No bib files found!")
        return []
    except BibParsingError as e:
        sublime.status_message("Bibliography " + e.filename + " is broken!")
        return []

    # Narrow the candidates by matching the typed prefix against each
    # entry's keyword and title.
    if prefix:
        needle = prefix.lower()
        completions = [
            entry for entry in completions
            if needle in "%s %s" % (entry['keyword'].lower(),
                                    entry['title'].lower())
        ]
        prefix += " "

    # Honour the user's preferred display format for completion entries.
    cite_autocomplete_format = get_setting('cite_autocomplete_format',
                                           "{keyword}: {title}")
    formatter = Formatter()
    results = []
    for entry in completions:
        display = prefix + formatter.vformat(cite_autocomplete_format, (), entry)
        results.append((display, entry['keyword'] + post_brace))
    return results
def _write_to_file(table_a_dest, genome_ids_a, genome_ids_b, common_prefix_a,
                   common_prefix_b, calculations):
    """Append the strain-comparison table to table_a_dest.

    :param table_a_dest: path of the output file (opened in append mode)
    :type table_a_dest: filename
    :param genome_ids_a: genome ids of the first strain group
    :type genome_ids_a: list of genome ids
    :param common_prefix_a: label for the first strain group
    :type common_prefix_a: string
    :param common_prefix_b: label for the second strain group
    :type common_prefix_b: string
    :param calculations: row values per clade
    :type calculations: list of clade_calcs instances
    """
    from string import Formatter

    with open(table_a_dest, 'a') as write_handle:
        # Introductory line describing which strains are being compared.
        write_handle.write(
            '#{} {} strains compared with {} {} strains\n'.format(
                len(genome_ids_a), common_prefix_a,
                len(genome_ids_b), common_prefix_b))

        # The genome IDs that make up each strain group.
        write_handle.write('#IDs {}: {}\n'.format(common_prefix_a,
                                                  ', '.join(genome_ids_a)))
        write_handle.write('#IDs {}: {}\n'.format(common_prefix_b,
                                                  ', '.join(genome_ids_b)))

        # Column headers for the data rows below.
        max_nton = len(genome_ids_a) // 2
        headers = _get_column_headers(max_nton)
        write_handle.write('#' + '\t'.join(headers))
        write_handle.write('\n')

        # One tab-separated row per clade; build the format string once
        # from the header names ('{{{}}}' renders as '{header}').
        format_str = '\t'.join('{{{}}}'.format(key) for key in headers)
        formatter = Formatter()
        for clade_calcs in calculations:
            write_handle.write(
                formatter.vformat(format_str, None, clade_calcs.values))
            write_handle.write('\n')
def _resolveTemplate(template, model, obj):
    """Resolve a format template against a model name and an object.

    Fields named ``_name`` are looked up as ConfigVars; other fields are
    resolved via ``getattr_path`` on *obj*, taking the first value of the
    resulting iterable. ``model`` is always available as ``{model}``.

    Raises:
        Exception: if a referenced ConfigVar is unset.
        KeyError: from ``str.format`` if a non-config field cannot be
            resolved (``slug`` falls back to ``obj.id``).
    """
    vals = {'model': model}
    for (literal, param, repval, conv) in Formatter().parse(template):
        if param and param != 'model':
            if param[0] == '_':
                # A leading underscore marks a configuration-variable field.
                val = ConfigVar.getval(param[1:])
                if val:
                    vals[param] = val
                else:
                    raise Exception(
                        "template references unset ConfigVariable %s"
                        % param[1:])
            else:
                try:
                    # Bug fix: the original called iter(...).next(), which
                    # only exists in Python 2; under Python 3 the resulting
                    # AttributeError was silently swallowed by a bare
                    # `except:`. Use the builtin next() and catch only the
                    # failures a lookup can realistically produce.
                    vals[param] = next(iter(getattr_path(obj, param)))
                except (AttributeError, KeyError, StopIteration, TypeError):
                    # 'slug' has a documented fallback; other unresolved
                    # fields surface as a KeyError from template.format.
                    if param == 'slug':
                        vals[param] = obj.id
    return template.format(**vals)
def generate_folder_name(metadata):
    """
    Fill in the values from the folder template using the metadata, then
    strip away the template keys that have no value.
    """
    metadata = dict(metadata)
    metadata["artists"] = _compile_artist_str(metadata["artists"])
    template = config.FOLDER_TEMPLATE
    keys = [field for _, field, _, _ in Formatter().parse(template) if field]
    # Drop template keys whose metadata value is missing or empty.
    for key in list(keys):
        if not metadata.get(key):
            template = strip_template_keys(template, key)
            keys.remove(key)
    sub_metadata = _fix_format(metadata, keys)
    substitutions = {
        key: _sub_illegal_characters(sub_metadata[key]) for key in keys
    }
    return template.format(**substitutions)
def generate_expression(template, relations):
    """Generate a human-readable phrase that expresses the relation.

    Each field in ``template.expression`` (underscores stripped) encodes a
    relation index in its first character and, presumably, a predicate code
    in its second character (mapped through ``PRED_MAP``) — confirm against
    the template definitions.
    """
    expression_keys = [
        k[1].replace('_', '') for k in Formatter().parse(template.expression)
        if k[1] is not None
    ]
    expression_data = {}
    for key in expression_keys:
        # Bug fix: attr_name must be pre-bound. If the lookups below raised
        # before it was assigned (e.g. int(key[0]) raising ValueError), the
        # original hit an UnboundLocalError at the 'if not attr_name' check.
        attr_name = None
        try:
            relation = relations[int(key[0])]
            attr_name = PRED_MAP.get(key[1])
            value = expression_partial(getattr(relation, attr_name))
        except ValueError:
            value = '[missing]'
        except KeyError:
            value = '[missing]'
        if not attr_name:
            continue
        expression_data[key] = value
    return template.expression.replace('_', '').format(**expression_data)
def get_symbol_from_number(num, forecast=None):
    """Map an OpenWeatherMap condition code to a display symbol.

    See http://openweathermap.org/weather-conditions
    """
    if isinstance(num, int):
        num = str(num)
    if forecast is not None:
        # Local hour at the forecast location, wrapped into [0, 24).
        localtime = ((datetime.fromtimestamp(forecast.time).hour +
                      forecast.location.timezone) % 24)
        if not 6 < localtime < 20:
            # print('location is nighttime')
            # Something is wrong with this line
            return 'o'
    try:
        return weather_icon_lookup[int(num)]
    except KeyError:
        pass
    # Unknown exact code: fall back to the condition-code family
    # (its first digit).
    try:
        return weather_icon_lookup[int(num[0])]
    except KeyError:
        text = Formatter().format(
            'The weather symbol could not be found. Symbol: {num}', num=num)
        Logger.warn(text)
        return 'l'  # N/A
def clean_format(formatter: str, format_info):
    """Formats track or folder names sanitizing every formatter key.

    :param formatter: format template string
    :type formatter: str
    :param format_info: mapping of template keys to raw values
    """
    fmt_keys = [field for _, field, _, _ in Formatter().parse(formatter)
                if field is not None]
    logger.debug("Formatter keys: %s", fmt_keys)

    clean_dict = {}
    for key in fmt_keys:
        value = format_info.get(key)
        if isinstance(value, (str, float)):
            clean_dict[key] = sanitize_filename(str(value))
        elif isinstance(value, int):  # track/discnumber
            # Zero-pad numeric values (track/disc numbers) to two digits.
            clean_dict[key] = f"{value:02}"
        else:
            clean_dict[key] = "Unknown"
    return formatter.format(**clean_dict)
def content(self):
    """Render self.html with required/optional attributes substituted.

    Required attributes must be present and truthy; optional attributes
    default to 'Unknown' when absent.

    Raises:
        AttributeError: if a required attribute is missing or falsy.
        NotImplementedError: if content is disabled for this object.
    """
    if self.content_enabled:
        key_dict = {key: getattr(self, key)
                    for key in self.required_attrs if hasattr(self, key)}
        # Validate: every required attribute must be present and truthy.
        for key in self.required_attrs:
            if not key_dict.get(key, False):
                raise AttributeError(
                    'Missing required attribute "{0}"'.format(key))
        key_dict.update({key: getattr(self, key)
                         for key in self.optional_attrs if hasattr(self, key)})
        # Bug fix: Formatter.parse yields field_name=None for literal-only
        # segments (e.g. trailing text); the original passed that None
        # through as a ** keyword, making str.format raise TypeError.
        # Skip non-field segments.
        return self.html.format(
            **{field: key_dict.get(field, 'Unknown')
               for _, field, _, _ in Formatter().parse(self.html)
               if field is not None})
    raise NotImplementedError
def __init__(
    self,
    *filenames: str,
    tee: arg.switch('-t', help='Forward all inputs to STDOUT.') = False,
    stream: arg.switch(
        '-s', help='Dump all incoming data to the same file.') = False,
    plain: arg.switch(
        '-p', help='Never apply any formatting to file names.') = False,
):
    """Initialize the unit with output file names and mode switches."""
    # Streaming dumps everything into one file, so exactly one name is valid.
    if stream and len(filenames) != 1:
        raise ValueError('Can only use exactly one file in stream mode.')
    super().__init__(filenames=filenames, tee=tee, stream=stream)
    if plain:
        # Plain mode: file names are used verbatim, never as templates.
        self.formatted = False
    else:
        from string import Formatter
        nf = Formatter()
        # Treat file names as format templates only if at least one name
        # contains a replacement field with an alphanumeric character in it.
        self.formatted = any(
            any(t.isalnum() for t in fields)
            for f in filenames
            for _, fields, *__ in nf.parse(f)
            if fields)
    self._reset()
def substitute(self, string):
    """Substitute template variables in `string` with this Item's values."""
    # Colons are not valid in Template identifiers; encode them first and
    # decode again when looking the key up on the item itself.
    string = string.replace(':', '_colon_')
    field_names = [
        field for _, field, _, _ in Formatter().parse(string.rstrip('/'))
        if field is not None
    ]
    subs = {}
    for key in field_names:
        if key == 'id':
            subs[key] = self.id
        elif key in ('date', 'year', 'month', 'day'):
            date = self.date
            date_parts = {
                'date': date,
                'year': date.year,
                'month': date.month,
                'day': date.day,
            }
            subs[key] = date_parts[key]
        else:
            subs[key] = self[key.replace('_colon_', ':')]
    return Template(string).substitute(**subs)
def _parse_request(self, request, base_url):
    """Recursively fill {placeholder} fields in a request structure.

    Strings are formatted with values resolved via ``_populate_parameter``;
    lists and dicts are processed element by element.
    """
    if isinstance(request, str):
        field_names = [
            field for _, field, _, _ in Formatter().parse(request) if field
        ]
        values = {}
        for field in field_names:
            # '[' marks an indexed field ({name[0]}): resolve the base name.
            base_name = field.split('[')[0]
            values[base_name] = self._populate_parameter(base_name, base_url)
        return request.format(**values)

    if isinstance(request, list):
        return [self._parse_request(item, base_url) for item in request]

    # Anything else is treated as a mapping of attribute -> sub-request.
    return {attribute: self._parse_request(value, base_url)
            for attribute, value in request.items()}
def _render(self, nonterminal, params={}):
    """Render a nonterminal, recursively expanding every {field} reference.

    Note: the mutable default for ``params`` is kept for interface
    compatibility; it is only read here, never mutated.

    Raises:
        KeyError: if a format key in the rendered template cannot be
            resolved to a symbol.
    """
    curr = self._nonterminals[nonterminal]
    curr._setParams(params)
    curr._incrementCount()
    curr.registerChoices()
    curr.updateRubric()
    render = curr.render()
    # Every replacement field in the rendered template names another symbol.
    to_generate = [field for _, field, _, _ in Formatter().parse(render)
                   if field is not None]
    formatter = dict()
    for format_key in to_generate:
        try:
            symbol_to_gen = self._symbol_from_key(format_key)
        except Exception as exc:
            # Bug fix: the original dropped into pdb here (a debugging
            # leftover behind a bare except). Raise an informative error
            # instead of hanging in an interactive debugger.
            raise KeyError(
                'Could not resolve symbol for format key {!r}'.format(
                    format_key)) from exc
        formatter[format_key] = self._render(symbol_to_gen)
    curr._setParams({})  # clear params
    return render.format(**formatter)
def format_from_obj(obj, format_string, client):
    """Generates a human-readable string from an object.

    :param JSONModelObject or dict: an ArchivesSpace object.
    :returns: a string in the chosen format.
    :rtype: str
    """
    obj = resolve_to_json(obj, client)
    if not format_string:
        raise Exception("No format string provided.")
    try:
        # Collect every field the template references, then pull those
        # values from the resolved object.
        field_names = [field for _, field, _, _ in
                       Formatter().parse(format_string) if field]
        substitutions = {name: obj[name] for name in field_names}
        return format_string.format(**substitutions)
    except KeyError as e:
        raise KeyError("The field {} was not found in this object".format(
            str(e)))
def reagex(pattern, **group_patterns):
    """Build a regular expression from a readable ``str.format``-style template.

    Each ``{group_name}`` placeholder in *pattern* is replaced by the pattern
    given as the keyword argument of the same name. Groups become named
    capturing groups ``(?P<name>...)`` unless the name starts with an
    underscore, in which case a non-capturing group ``(?:...)`` is emitted.

    A minimal example::

        pattern = reagex(
            '{name} "{nickname}" {surname}',
            name='[A-Z][a-z]+',
            nickname='[a-z]+',
            surname='[A-Z][a-z]+'
        )

    Args:
        pattern (str): template using ``str.format`` syntax for groups
            (``{group_name}``).
        **group_patterns (str): sub-pattern for each group referenced in
            ``pattern``.

    Returns:
        a pattern you can pass to ``re`` functions
    """
    pieces = []
    for literal, group_name, _spec, _conv in Formatter().parse(pattern):
        if literal:
            pieces.append(literal)
        if group_name:
            sub_pattern = group_patterns[group_name]
            if group_name.startswith('_'):
                pieces.append('(?:%s)' % sub_pattern)
            else:
                pieces.append('(?P<%s>%s)' % (group_name, sub_pattern))
    return ''.join(pieces)
def generator(ir, parameters):
    """Generate UFC code for a coordinate mapping."""
    logger.info("Generating code for coordinate mapping:")
    logger.info(f"--- cell shape: {ir.cell_shape}")
    logger.info(f"--- gdim: {ir.geometric_dimension}")
    logger.info(f"--- tdim: {ir.topological_dimension}")
    logger.info(f"--- name: {ir.name}")
    logger.info(f"--- scalar dofmap name: {ir.scalar_dofmap_name}")

    # Template substitution values (attributes of the intermediate repr).
    d = {
        "factory_name": ir.name,
        "prefix": ir.prefix,
        "signature": f"\"{ir.signature}\"",
        "geometric_dimension": ir.geometric_dimension,
        "topological_dimension": ir.topological_dimension,
        "is_affine": 1 if ir.is_affine else 0,
        "cell_shape": ir.cell_shape,
        "scalar_dofmap_name": ir.scalar_dofmap_name,
        "family": f"\"{ir.coordinate_element_family}\"",
        "degree": ir.coordinate_element_degree,
    }

    # Check that no keys are redundant or have been missed
    from string import Formatter
    fields = [name for _, name, _, _
              in Formatter().parse(ufc_coordinate_mapping.factory) if name]
    assert set(fields) == set(
        d.keys()), "Mismatch between keys in template and in formattting dict."

    # Format implementation code
    implementation = ufc_coordinate_mapping.factory.format_map(d)

    # Format declaration
    declaration = ufc_coordinate_mapping.declaration.format(
        factory_name=ir.name, prefix=ir.prefix)

    return declaration, implementation
def invoked(self, ctx):
    """Handle the 'list' command invocation.

    For the 'all-jobs' group an optional --format template is applied to
    every job ('?' lists the available fields); for any other group the
    objects are simply printed.
    """
    if ctx.args.GROUP == 'all-jobs':
        if ctx.args.attrs:
            print_objs('job', ctx.sa, True)

            def filter_fun(u):
                return u.attrs['template_unit'] == 'job'
            print_objs('template', ctx.sa, True, filter_fun)
        jobs = get_all_jobs(ctx.sa)
        if ctx.args.format == '?':
            # '?' means: list every field available across all jobs.
            all_keys = set()
            for job in jobs:
                all_keys.update(job.keys())
            print(_('Available fields are:'))
            print(', '.join(sorted(list(all_keys))))
            return
        if not ctx.args.format:
            # setting default in parser.add_argument would apply to all
            # the list invocations. We want default to be present only for
            # the 'all-jobs' group.
            ctx.args.format = 'id: {full_id}\n{_summary}\n'

        class DefaultKeyedDict(defaultdict):
            # Render a placeholder for fields a job does not define.
            def __missing__(self, key):
                return _('<missing {}>').format(key)

        # Perf fix: both the class above and the unescaped template are
        # loop-invariant; the original rebuilt them for every job.
        unescaped = ctx.args.format.replace('\\n', '\n').replace('\\t', '\t')
        for job in jobs:
            # formatters are allowed to use special field 'unit_type' so
            # let's add it to the job representation
            assert 'unit_type' not in job.keys()
            if job.get('template_unit') == 'job':
                job['unit_type'] = 'template_job'
            else:
                job['unit_type'] = 'job'
            print(Formatter().vformat(
                unescaped, (), DefaultKeyedDict(None, job)), end='')
        return
    elif ctx.args.format:
        print(_("--format applies only to 'all-jobs' group. Ignoring..."))
    print_objs(ctx.args.GROUP, ctx.sa, ctx.args.attrs)
def generator(ir, parameters):
    """Generate UFC code for a dofmap."""
    logger.info("Generating code for dofmap:")
    logger.info(f"--- num element support dofs: {ir.num_element_support_dofs}")
    logger.info(f"--- name: {ir.name}")

    import ffcx.codegeneration.C.cnodes as L

    # Template substitution values: plain attributes plus generated
    # function bodies.
    d = {
        "factory_name": ir.name,
        "signature": f"\"{ir.signature}\"",
        "num_global_support_dofs": ir.num_global_support_dofs,
        "num_element_support_dofs": ir.num_element_support_dofs,
        "num_sub_dofmaps": ir.num_sub_dofmaps,
        "num_entity_dofs": ir.num_entity_dofs + [0, 0, 0, 0],
        "block_size": ir.block_size,
        "tabulate_entity_dofs": tabulate_entity_dofs(L, ir),
        "sub_dofmap_declaration": sub_dofmap_declaration(L, ir),
        "create_sub_dofmap": create_sub_dofmap(L, ir),
    }

    # Check that no keys are redundant or have been missed
    from string import Formatter
    fields = [name for _, name, _, _
              in Formatter().parse(ufc_dofmap.factory) if name]
    # Remove square brackets from any field names
    fields = [f.split("[")[0] for f in fields]
    assert set(fields) == set(
        d.keys()), "Mismatch between keys in template and in formattting dict."

    # Format implementation code
    implementation = ufc_dofmap.factory.format_map(d)

    # Format declaration
    declaration = ufc_dofmap.declaration.format(factory_name=ir.name)

    return declaration, implementation
def get_format_args(fstr):
    """
    Turn a format string into two lists of arguments referenced by the
    format string. One is positional arguments, and the other is named
    arguments. Each element of the list includes the name and the nominal
    type of the field.

    >>> get_format_args("{noun} is {1:d} years old{punct}")
    ([(1, <type 'int'>)], [('noun', <type 'str'>), ('punct', <type 'str'>)])
    """
    # TODO: memoize
    formatter = Formatter()
    fargs, fkwargs, _dedup = [], [], set()

    def _add_arg(argname, type_char='s'):
        # Record each argument once; integer names are positional,
        # everything else is a keyword argument.
        if argname not in _dedup:
            _dedup.add(argname)
            argtype = _TYPE_MAP.get(type_char, str)  # TODO: unicode
            try:
                fargs.append((int(argname), argtype))
            except ValueError:
                fkwargs.append((argname, argtype))

    for _lit, fname, fspec, _conv in formatter.parse(fstr):
        if fname is None:
            continue
        type_char = fspec[-1:]
        fname_list = re.split('[.[]', fname)
        if len(fname_list) > 1:
            raise ValueError('encountered compound format arg: %r' % fname)
        try:
            base_fname = fname_list[0]
            assert base_fname
        except (IndexError, AssertionError):
            raise ValueError('encountered anonymous positional argument')
        _add_arg(fname, type_char)
        # Format specs may themselves contain nested replacement fields.
        for _sublit, subfname, _, _ in formatter.parse(fspec):
            # TODO: positional and anon args not allowed here.
            if subfname is not None:
                _add_arg(subfname)
    return fargs, fkwargs
def evaluate_components(queries, annotation):
    """Run each annotated question through the QA pipeline and check the
    stored SPARQL queries against the triplestore.

    ``queries`` maps a component key to a SPARQL query template; the special
    'query_header' entry holds the shared prefix prepended to every other
    query.
    """
    # Bug fix: the original resolved 'query_header' lazily inside the key
    # loop, which crashed with `None + str` whenever 'query_header' was not
    # the first key in `queries`. Resolve it up front instead.
    query_header = queries.get("query_header")
    for question in annotation:
        question_text = question["question"]
        response = requests.post(
            url="http://localhost:8080/startquestionansweringwithtextquestion",
            params={
                "question": question_text,
                "componentlist[]": [
                    "template_classifier", "relation_classifier",
                    "dbpedia_spotlight_annotator", "question_validator",
                    "sparql_builder", "dbpedia_sparql_worker",
                    "template_generator"]
            }).json()
        endpoint = response['endpoint']
        in_graph = response['inGraph']
        graph = in_graph
        for key in list(queries.keys()):
            if key == "query_header":
                continue
            SPARQLquery = query_header + queries[key]
            # Fill the query template from the question's annotation data.
            field_names = [fname for _, fname, _, _
                           in Formatter().parse(SPARQLquery) if fname]
            question_annotation = question[key]
            question_annotation["graph_urn"] = graph
            data_dict = dict()
            for name in field_names:
                data_dict[name] = question_annotation[name]
            SPARQLquery = SPARQLquery.format(**data_dict)
            result = queryTriplestore(endpoint + "/query", graph, SPARQLquery)
            print("Q:{0}, C:{1}, R:{2}".format(question_text, key,
                                               result['boolean']))
def get_rendered_string(area_code, area, date_to_display, object_name,
                        duration, purpose):
    """Expand an area's rendering string into literal and Highlight elements.

    Raises:
        ValueError: if the rendering string references an unknown field.
    """
    fields = {
        "area": area_code,
        "area_name": area["name"],
        "object": object_name,
        "date": date_to_display,
        "duration": duration,
        "purpose": purpose,
    }
    elements = []
    for literal, field, _, _ in Formatter().parse(area["rendering_string"]):
        if literal:
            elements.append(literal)
        if field is None:
            continue
        if field not in fields:
            raise ValueError(
                f"{area['rendering_string']} is an invalid rendering string. Reason: '{field}' is an invalid field."
            )
        elements.append(Highlight(fields.get(field)))
    return elements
def _format_regex(regex, items):
    """
    Format derived source with object attributes

    :param str regex: string to format, e.g. {identifier}{file_id}_data.txt
    :param Iterable[Iterable[Iterable | str]] items: items to format the
        string with
    :raise InvalidSampleTableFileException: if after merging subannotations
        the lengths of multi-value attrs are not even
    :return Iterable | str: formatted regex string(s)
    """
    keys = [field for _, field, _, _ in Formatter().parse(regex)
            if field is not None]
    if not keys:
        # Nothing to substitute; return the template unchanged.
        return [regex]
    if "$" in regex:
        _LOGGER.warning(
            "Not all environment variables were populated "
            "in derived attribute source: {}".format(regex))
    # Lengths of every referenced multi-value (list) attribute.
    attr_lens = [len(value) for key, value in items.items()
                 if isinstance(value, list) and key in keys]
    if not attr_lens:
        # Only scalar attributes are referenced: single formatted result.
        return [_safe_format(regex, items)]
    if len(set(attr_lens)) != 1:
        msg = "All attributes to format the {} ({}) have to be the " \
              "same length, got: {}. Correct your {}".\
            format(DERIVED_SOURCES_KEY, regex, attr_lens, SUBSAMPLE_SHEET_KEY)
        raise InvalidSampleTableFileException(msg)
    # One formatted string per element of the (equal-length) list attrs.
    results = []
    for idx in range(attr_lens[0]):
        per_item = cp(items)
        for key in keys:
            if isinstance(per_item[key], list):
                per_item[key] = per_item[key][idx]
        results.append(_safe_format(regex, per_item))
    return results
def gen_num_args_from_url(url: str) -> int:
    """Parse number of position arguments from URL format string.

    Uses the built-in ``string.Formatter`` class to count positional
    (empty-name) replacement fields. Keyword arguments are not supported.

    Raises:
        ValueError: if URL contains keyword arguments.
    """
    parsed = list(Formatter().parse(url))
    # A single parse entry with field_name None means the URL contains no
    # format args at all. Each entry is a 4-tuple
    # (literal_text, field_name, format_spec, conversion); see
    # https://docs.python.org/3.6/library/string.html#string.Formatter.parse
    if len(parsed) == 1 and parsed[0][1] is None:
        return 0
    count = 0
    for _, field_name, _, _ in parsed:
        if field_name:
            raise ValueError(
                f'must not have keyword arguments ("{field_name}")')
        if field_name == "":
            count += 1
    return count
def end(self, tag):
    """SAX-style end-of-element handler for the sheet template parser."""
    if tag == 'sheet':
        pass
    elif tag == 'row':
        # Flush the buffered cells into the worksheet and advance the row
        # cursor; the 'row' ref is kept 1-based for template expressions.
        self._current_ws.append(self._row_buf)
        self._row_buf = []
        self._row += 1
        self._refs['row'] = self._row + 1
    elif tag == 'cell':
        if self._cell.value:
            # Collect the {field} names referenced by the cell's template.
            keys = [
                e[1] for e in Formatter().parse(self._cell.value)
                if e[1] != None
            ]
            # Stringify each referenced value; iterables are joined with
            # ', ' so list-valued refs render as comma-separated text.
            stringified = {
                k: ', '.join(text_type(e) for e in self._refs.get(k, ''))
                if hasattr(self._refs.get(k, ''), '__iter__') else
                text_type(self._refs.get(k, ''))
                for k in keys or []
            }
            self._cell.value = self._cell.value.format(**stringified)
        if self._cell_type == 'number':
            if self._cell.value:
                try:
                    self._cell.value = Decimal(self._cell.value)
                except InvalidOperation:
                    # Non-numeric text: keep the formatted string as-is.
                    pass
        elif self._cell_type == 'date':
            if self._cell.value:
                try:
                    self._cell.value = datetime.strptime(
                        self._cell.value, self._cell_date_format).date()
                except TypeError:
                    # Value was not a string (already converted); keep it.
                    pass
        # Commit the finished cell and advance the column cursor.
        self._row_buf.append(self._cell)
        self._cell = None
        self._col += 1
        self._refs['col'] = self._col + 1
def _compose_filename(self, scene): """Compose output filename.""" # Dynamic fields if self.debug: # In debug mode, use a constant creation time to prevent a different filename in each # run creation_time = datetime(2020, 1, 1) else: creation_time = datetime.now() start_time, end_time = self._get_temp_cov(scene) platform = get_platform_short_name(scene["4"].attrs["platform_name"]) try: version = self.global_attrs["product_version"] except KeyError: version = "0.0.0" msg = "No product_version set in global attributes. Falling back to 0.0.0" LOG.warning(msg) warnings.warn(msg) version_int = self._get_integer_version(version) fields = { "start_time": start_time.strftime(self.time_fmt).data, "end_time": end_time.strftime(self.time_fmt).data, "platform": platform, "version": version, "version_int": version_int, "creation_time": creation_time.strftime(self.time_fmt), } # Search for additional static fields in global attributes for _, field, _, _ in Formatter().parse(self.fname_fmt): if field and field not in fields: try: fields[field] = self.global_attrs[field] except KeyError: raise KeyError( "Cannot find filename component {} in global attributes" .format(field)) return self.fname_fmt.format(**fields)
def completeDict(self, record):
    """Complete the formatting dict with the fields the pattern requires.

    Starts from a copy of ``self.dict_format`` and pulls every field that
    ``self.pattern`` references but ``dict_format`` lacks out of *record*.

    :param record: mapping providing the missing field values
    :return: the completed dictionary (also stored on ``self.complete_dict``)
    :raises KeyError: if the pattern references a field *record* does not
        provide
    """
    self.complete_dict = self.dict_format.copy()
    pattern_fields = [
        fn for _, fn, _, _ in Formatter().parse(self.pattern)
        if fn is not None
    ]
    for missing_field in [f for f in pattern_fields
                          if f not in self.dict_format]:
        try:
            self.complete_dict[missing_field] = record[missing_field]
        except KeyError:
            raise KeyError(
                'Vous tentez d\'acceder a un champ non-existant')
    # Bug fix: the original returned inside the loop, so only the first
    # missing field was ever resolved, and None was returned when nothing
    # was missing. Always return the completed dict.
    return self.complete_dict
def __init__(self, host_url: str, op_spec: Operation):
    """Build a request wrapper for one OpenAPI operation on ``host_url``."""
    self.spec = op_spec
    self._url_parts = urlsplit(host_url)
    formatter = Formatter()
    # Path template variables, e.g. '/pets/{petId}' -> ['petId'].
    self.url_vars = [
        var for _, var, _, _ in formatter.parse(op_spec.path_name)
        if var is not None
    ]
    self._path_pattern = self._url_parts.path + op_spec.path_name
    self.full_url_pattern = urljoin(host_url, self._path_pattern)
    self.method = op_spec.http_method.lower()
    self.body = None
    # Parameter containers, pre-filled with the host URL's query string.
    self.parameters = RequestParameters(
        path={},
        query=parse_qs(self._url_parts.query),
        header={},
        cookie={},
    )
    # First declared content type of the request body, if any.
    self.mimetype = list(
        op_spec.request_body.content)[0] if op_spec.request_body else None
def _prepare_output_vals(output_vals, spec_params_dict, batch_size): """ output_vals - dict with outputs set up from user input spec_params - dict with info about all parameters in the spec This validates output values that are present. * If it's a list, make sure it's the right length * If it's a template, make sure the keys are present * If we're missing an output value, make sure there's a default in the spec, and templatize it If anything fails, raises a ValueError. """ parsed_out_vals = deepcopy(output_vals) # avoid side effects for p_id, p in spec_params_dict.items(): val = output_vals.get(p_id) if val: if isinstance(val, list): if len(val) != batch_size: raise ValueError( "The output parameter {} must have {} values if it's a list" .format(p_id, batch_size)) elif val is not None: # check keys in the string for i in Formatter().parse(val): field = i[1] if (field and field not in spec_params_dict and field != "run_number"): raise ValueError( "Output template field {} doesn't match a parameter id or 'run_number'" .format(field)) else: if p.get("is_output"): if not p["default"]: raise ValueError( 'No output template provided for parameter "{}" and no default value found!' .format(p_id)) else: parsed_out_vals[p_id] = p["default"] + "${run_number}" return parsed_out_vals
def check_and_insert(input_, sample_record):
    """Find {key} placeholders in input_ and replace them with values.

    The input is converted to a ``string.Template`` (``{key}`` becomes
    ``${key}``), its placeholders are validated — both for balanced braces
    and for existence in ``sample_record`` — and the substituted string is
    returned.

    Args:
        input_ (string): a string that a user has entered.
        sample_record (dict): key-value pairs to be inserted.

    Returns:
        Either a string with inserted values or a string with a error
        message.
    """
    # replace the braces to make it suitable for being a Template
    input_ = input_.replace('{', '${')
    # make our input an input_template
    input_template = Template(input_)
    try:
        # parse all the keys from input_ like {name} => ['name'].
        # Bug fix: Formatter.parse yields field_name=None for literal-only
        # segments (e.g. text after the last key); the original kept those
        # None entries in input_keys, so any template with trailing literal
        # text was wrongly rejected by the subset check below.
        input_keys = [i[1] for i in Formatter().parse(input_)
                      if i[1] is not None]
    # ValueError occurs for unbalanced braces like {name{, }name} etc
    except ValueError as e:
        print(e)
        return 'Your template is invalid'
    # check if keys that a user typed are the same as in our record
    if not set(input_keys).issubset(set(sample_record.keys())):
        # report each key that is in input_keys but not in sample keys
        for each in set(input_keys).difference(set(sample_record.keys())):
            print(f"There is no '{each}' key")
        return 'Your template is invalid'
    return input_template.substitute(sample_record)
def bundle(
    self,
    start_index: int = 1,
    bundled: t.Optional[t.List[Fragment]] = None,
    combined_args: t.Optional[t.List] = None,
):
    """Flatten this template and any nested templates into fragments.

    Walks the template's '{}' placeholders in step with ``self.args``,
    assigning each plain argument the next placeholder index (1-based) and
    recursing into nested instances of this class so their fragments and
    args are spliced in at the right position.

    Returns a ``(next_index, bundled, combined_args)`` tuple where
    ``bundled`` is the flat fragment list and ``combined_args`` the
    flattened argument list.
    """
    # Split up the string, separating by {}.
    fragments = [
        Fragment(prefix=i[0]) for i in Formatter().parse(self.template)
    ]
    bundled = [] if bundled is None else bundled
    combined_args = [] if combined_args is None else combined_args

    for index, fragment in enumerate(fragments):
        try:
            value = self.args[index]
        except IndexError:
            # trailing element: literal text after the final placeholder
            # has no corresponding argument.
            fragment.no_arg = True
            bundled.append(fragment)
        else:
            if type(value) == self.__class__:
                # Nested template: recurse, sharing the accumulators so its
                # fragments and args land at this position in the output.
                fragment.no_arg = True
                bundled.append(fragment)
                start_index, _, _ = value.bundle(
                    start_index=start_index,
                    bundled=bundled,
                    combined_args=combined_args,
                )
            else:
                # Plain argument: give it the next placeholder index.
                fragment.index = start_index
                bundled.append(fragment)
                start_index += 1
                combined_args.append(value)
    return (start_index, bundled, combined_args)