def rewrite_header_access(tokens: list[Token], i: int, *, meta_name: str) -> None:
    """Rewrite a ``request.META["HTTP_<NAME>"]`` access to ``request.headers[...]``.

    ``meta_name`` is the original META key (e.g. ``"HTTP_X_FORWARDED_FOR"``);
    the subscript string token is replaced with the dashed, title-cased
    header name (e.g. ``'X-Forwarded-For'``).
    """
    # Swap the attribute name META -> headers.
    meta_token_idx = find(tokens, i, name=NAME, src="META")
    replace(tokens, meta_token_idx, src="headers")
    # Then rewrite the string key that follows it.
    key_token_idx = find(tokens, meta_token_idx, name=STRING)
    stripped = meta_name[len("HTTP_"):]
    header_parts = [word.title() for word in stripped.split("_")]
    replace(tokens, key_token_idx, src=repr("-".join(header_parts)))
def remove_providing_args(tokens: list[Token], i: int, *, node: ast.Call) -> None:
    """Remove the deprecated ``providing_args`` argument from a call.

    ``i`` indexes the token where the call starts; ``node`` is the matching
    AST call, used to locate the argument positionally or by keyword.
    """
    j = find(tokens, i, name=OP, src="(")
    func_args, _ = parse_call_args(tokens, j)
    if len(node.args):
        # Passed positionally: it is the first argument.
        start_idx, end_idx = func_args[0]
        if len(node.args) == 1:
            # Sole positional argument: drop it entirely.
            del tokens[start_idx:end_idx]
        else:
            # Have to replace with None so later positional args keep their slot.
            tokens[start_idx:end_idx] = [Token(name=CODE, src="None")]
    else:
        for n, keyword in enumerate(node.keywords):
            if keyword.arg == "providing_args":
                # No positional args in this branch, so func_args indices line
                # up with node.keywords indices.
                start_idx, end_idx = func_args[n]
                # Grow the deletion range backwards over leading whitespace /
                # indentation, plus the separating comma when this is not the
                # first argument.
                start_idx = reverse_consume(tokens, start_idx, name=UNIMPORTANT_WS)
                start_idx = reverse_consume(tokens, start_idx, name=INDENT)
                if n > 0:
                    start_idx = reverse_consume(tokens, start_idx, name=OP, src=",")
                # When another keyword follows, also swallow the trailing
                # comma (and the whitespace before it).
                if n < len(node.keywords) - 1:
                    end_idx = consume(tokens, end_idx, name=UNIMPORTANT_WS)
                    end_idx = consume(tokens, end_idx, name=OP, src=",")
                # Swallow any trailing whitespace and same-line comment, then
                # make the slice end exclusive.
                end_idx = consume(tokens, end_idx, name=UNIMPORTANT_WS)
                end_idx = consume(tokens, end_idx, name=COMMENT)
                end_idx += 1
                del tokens[start_idx:end_idx]
def fix_url_call(
    tokens: list[Token], i: int, *, regex_path: str | None, state: State
) -> None:
    """Replace a ``url(...)`` call with ``re_path(...)`` — or with ``path(...)``
    when the regex converts cleanly to path syntax.
    """
    replacement = "re_path"
    if regex_path is not None:
        converted = convert_path_syntax(regex_path)
        if converted is not None:
            # Regex translates to path syntax: rewrite the pattern string
            # and downgrade to plain path().
            pattern_idx = find(tokens, i, name=STRING)
            replace(tokens, pattern_idx, src=repr(converted))
            replacement = "path"
    # Record the chosen name as used for this state.
    state_used_names.setdefault(state, set()).add(replacement)
    replace(tokens, i, src=replacement)
def fix_offset_arg(tokens: list[Token], i: int, *, node: ast.Call) -> None:
    """Wrap the offset argument in ``timedelta(minutes=...)`` and rename the
    call at ``i`` to ``timezone`` when a rewrite happened.
    """
    paren_idx = find(tokens, i, name=OP, src="(")
    func_args, _ = parse_call_args(tokens, paren_idx)

    wrapped = False
    if node.args:
        # A *args splat cannot be wrapped safely; leave those calls alone.
        if not isinstance(node.args[0], ast.Starred):
            arg_start, arg_end = func_args[0]
            # Insert the closing paren first so arg_start stays valid.
            insert(tokens, arg_end, new_src=")")
            insert(tokens, arg_start, new_src="timedelta(minutes=")
            wrapped = True
    else:
        for idx, kw in enumerate(node.keywords):
            if kw.arg != "offset":
                continue
            arg_start, arg_end = func_args[idx]
            insert(tokens, arg_end, new_src=")")
            # Wrap only the value, keeping the "offset=" prefix intact.
            eq_idx = find(tokens, arg_start, name=OP, src="=")
            insert(tokens, eq_idx + 1, new_src="timedelta(minutes=")
            wrapped = True

    if wrapped:
        replace(tokens, i, src="timezone")
def fix_null_boolean_field(tokens: list[Token], i: int, *, node: ast.Call) -> None:
    """Rewrite ``NullBooleanField(...)`` to ``BooleanField(..., null=True)``.

    ``i`` indexes the field-name token; ``node`` is not read beyond matching
    the common fixer signature.
    """
    j = find(tokens, i, name=OP, src="(")
    func_args, j = parse_call_args(tokens, j)
    new_src = "null=True"
    if len(func_args) > 0:
        # Existing arguments: separate the new keyword with a space, plus a
        # comma unless one already trails the final argument.
        new_src = " " + new_src
        final_start_idx, final_end_idx = func_args[-1]
        # NOTE(review): this scans the whole final-argument span, so a comma
        # nested inside that argument (e.g. a multi-arg call) would also
        # suppress the separator — confirm parse_call_args span semantics
        # make this safe.
        final_has_comma = any(t.name == OP and t.src == "," for t in tokens[final_start_idx:final_end_idx + 1])
        if not final_has_comma:
            new_src = "," + new_src
    # Insert just inside the call's closing paren (j comes from parse_call_args).
    tokens.insert(j - 1, Token(name=CODE, src=new_src))
    find_and_replace_name(tokens, i, name="NullBooleanField", new="BooleanField")
def add_on_delete_keyword(tokens: list[Token], i: int, *, num_pos_args: int) -> None:
    """Insert ``on_delete=models.CASCADE`` into the call starting at ``i``.

    ``num_pos_args`` is how many positional arguments the call has; the new
    keyword goes right after them, before any existing keyword arguments.
    """
    open_idx = find(tokens, i, name=OP, src="(")
    func_args, close_idx = parse_call_args(tokens, open_idx)
    new_src = "on_delete=models.CASCADE"
    if num_pos_args < len(func_args):
        # Keyword arguments follow the insertion point: terminate with ", ".
        new_src += ", "
    if num_pos_args == 0:
        # No positional args: insert immediately after the opening paren.
        insert_idx = open_idx + 1
    else:
        # Insert after the last positional argument, space-separated.
        new_src = " " + new_src
        pos_start_idx, pos_end_idx = func_args[num_pos_args - 1]
        insert_idx = pos_end_idx + 1
        # If that argument has no trailing comma token, supply one and insert
        # one position earlier (before where the comma would have been).
        arg_has_comma = (tokens[pos_end_idx].name == OP and tokens[pos_end_idx].src == ",")
        if not arg_has_comma:
            new_src = "," + new_src
            insert_idx -= 1
    insert(tokens, insert_idx, new_src=new_src)
def remove_assignment(tokens: list[Token], i: int, *, node: ast.Assign) -> None:
    """Delete the assignment statement starting at token ``i``.

    Removes every token up to and including the logical newline that
    terminates the statement; ``node`` only serves the common fixer
    signature.
    """
    newline_idx = find(tokens, i, name=LOGICAL_NEWLINE)
    del tokens[i : newline_idx + 1]
def rewrite_setting(tokens: list[Token], i: int, *, node: ast.Assign) -> None:
    """Rename the setting token at ``i`` to ``NEW_NAME`` and prefix its value
    with ``60 * 60 * 24 *`` (seconds per day).
    """
    tokens[i] = tokens[i]._replace(name=CODE, src=NEW_NAME)
    equals_idx = find(tokens, i, name=OP, src="=")
    tokens.insert(equals_idx + 1, Token(name=CODE, src=" 60 * 60 * 24 *"))