def read_input(data: Dict[str, Any], *, nest: int = 1) -> str:
    """Generate the code section that reads the problem input.

    Falls back to a commented TODO stub when the input format could not be
    analyzed. Otherwise builds a syntax tree from the analyzed format,
    tries the token-free realization first, and retries with an explicit
    token buffer when that is not possible.
    """
    analyzed = utils.get_analyzed(data)
    if analyzed.input_format is None or analyzed.input_variables is None:
        fallback = [
            '# failed to analyze input format',
            'n = parse(Int, readline()) # TODO: edit here',
            'a = parse.(Int, split(readline())) # TODO: edit here',
        ]
        return _join_with_indent(fallback, nest=nest, data=data)

    tree = _read_input_dfs(analyzed.input_format, decls=analyzed.input_variables, data=data)
    tree = _optimize_syntax_tree(tree, data=data)
    try:
        # Preferred path: read values directly, without a token buffer.
        tree = _realize_input_nodes_without_tokens(tree, declared=set(), initialized=set(), decls=analyzed.input_variables, data=data)
    except TokenizedInputRequiredError:
        # Some formats require reading all tokens up front.
        tree = _realize_input_nodes_with_tokens(tree, 'tokens', decls=analyzed.input_variables, data=data)
    tree = _optimize_syntax_tree(tree, data=data)
    rendered = list(_serialize_syntax_tree(tree, data=data))
    return _join_with_indent(rendered, nest=nest, data=data)
def actual_arguments(data: Dict[str, Any]) -> str:
    """Build the comma-separated argument list for the generated solve() call.

    Ignored variables are filtered out; falls back to 'n, a' when the input
    format could not be analyzed.
    """
    analyzed = utils.get_analyzed(data)
    if analyzed.input_format is None or analyzed.input_variables is None:
        return 'n, a'
    kept = utils._filter_ignored_variables(analyzed.input_variables, data=data)
    return ', '.join(kept.keys())
def write_output(data: Dict[str, Any], *, nest: int = 1) -> str:
    """Generate the code section that prints the answer.

    Falls back to a TODO stub when the output format could not be analyzed.
    """
    analyzed = utils.get_analyzed(data)
    if analyzed.output_format is None or analyzed.output_variables is None:
        stub = ['print(ans) # TODO: edit here']
        return _join_with_indent(stub, nest=nest, data=data)
    tree = _write_output_dfs(analyzed.output_format, decls=analyzed.output_variables, data=data)
    tree = _optimize_syntax_tree(tree, data=data)
    rendered = list(_serialize_syntax_tree(tree, data=data))
    return _join_with_indent(rendered, nest=nest, data=data)
def generate_input(data: Dict[str, Any], *, nest: int = 1) -> str:
    """Generate the code section that produces a random test input.

    Falls back to a commented TODO stub when the input format could not be
    analyzed.
    """
    analyzed = utils.get_analyzed(data)
    if analyzed.input_format is None or analyzed.input_variables is None:
        fallback = [
            '# failed to analyze input format',
            'n = random.randint(1, 10 ** 9) # TODO: edit here',
            'a = [random.randint(1, 10 ** 9) for _ in range(n)] # TODO: edit here',
        ]
        return _join_with_indent(fallback, nest=nest, data=data)
    tree = _generate_input_dfs(analyzed.input_format, declared=set(), initialized=set(), decls=analyzed.input_variables, data=data)
    tree = _optimize_syntax_tree(tree, data=data)
    rendered = list(_serialize_syntax_tree(tree, data=data))
    return _join_with_indent(rendered, nest=nest, data=data)
def formal_arguments(data: Dict[str, Any], *, typed: bool = True) -> str:
    """Build the (optionally typed) parameter list of the generated solve().

    When ``typed`` is false this delegates to :func:`actual_arguments`.
    Falls back to ``'n: int, a: List[int]'`` when the input format could
    not be analyzed.
    """
    if not typed:
        return actual_arguments(data=data)
    analyzed = utils.get_analyzed(data)
    if analyzed.input_format is None or analyzed.input_variables is None:
        return 'n: int, a: List[int]'
    decls = analyzed.input_variables
    # BUGFIX: filter ignored variables so the typed signature agrees with
    # the untyped path (actual_arguments filters them); previously the two
    # paths could produce argument lists of different arity.
    decls = utils._filter_ignored_variables(decls, data=data)
    args: List[str] = []
    for name, decl in decls.items():
        annotation = _get_python_type(decl.type)
        # Wrap in one List[...] per declared dimension.
        for _ in decl.dims:
            annotation = f"""List[{annotation}]"""
        args.append(f"""{name}: {annotation}""")
    return ', '.join(args)
def return_value(data: Dict[str, Any]) -> str:
    """Render the variable name(s) that receive solve()'s result.

    The TwoOutputType case renders a bracketed pair — presumably consumed
    as a C++17 structured binding (``auto [a, b] = ...``); confirm against
    the template that embeds it.
    """
    output_type = utils.get_analyzed(data).output_type
    if output_type is None:
        return "ans"
    if isinstance(output_type, TwoOutputType):
        return f"""[{output_type.name1}, {output_type.name2}]"""
    if isinstance(output_type, (OneOutputType, YesNoOutputType, VectorOutputType)):
        return output_type.name
    assert False
def return_type(data: Dict[str, Any]) -> str:
    """Render the C++ return type of solve() for the analyzed output type.

    Returns 'auto' when the output type could not be recognized.
    """
    output_type = utils.get_analyzed(data).output_type
    if output_type is None:
        return "auto"
    if isinstance(output_type, YesNoOutputType):
        return "bool"
    if isinstance(output_type, OneOutputType):
        return _get_base_type(output_type.type, data=data)
    if isinstance(output_type, TwoOutputType):
        std = _get_std(data=data)
        first = _get_base_type(output_type.type1, data=data)
        second = _get_base_type(output_type.type2, data=data)
        return f"""{std}pair<{first}, {second}>"""
    if isinstance(output_type, VectorOutputType):
        return f"""{_get_std(data=data)}vector<{_get_base_type(output_type.type, data=data)}>"""
    assert False
def return_type(data: Dict[str, Any]) -> str:
    """Render the Python type annotation of solve()'s return value.

    Single output → its type; several outputs → a Tuple of them; no
    outputs → 'None'; unanalyzable format → 'Any'.
    """
    analyzed = utils.get_analyzed(data)
    if analyzed.output_format is None or analyzed.output_variables is None:
        return 'Any'
    annotations: List[str] = []
    for decl in analyzed.output_variables.values():
        annotation = _get_python_type(decl.type)
        # One List[...] wrapper per declared dimension.
        for _ in decl.dims:
            annotation = f"""List[{annotation}]"""
        annotations.append(annotation)
    if not annotations:
        return "None"
    if len(annotations) == 1:
        return annotations[0]
    return f"""Tuple[{", ".join(annotations)}]"""
def formal_arguments(data: Dict[str, Any]) -> str:
    """Render the C++ parameter list of the generated solve().

    Scalars are passed by value; anything with dimensions becomes a
    (possibly nested) vector passed as const reference. Ignored variables
    are filtered out.
    """
    analyzed = utils.get_analyzed(data)
    if analyzed.input_format is None or analyzed.input_variables is None:
        return f"""int n, const {_get_std(data=data)}vector<int64_t> & a"""
    kept = utils._filter_ignored_variables(analyzed.input_variables, data=data)
    params = []
    for name, decl in kept.items():
        cpp_type = _get_base_type(decl.type, data=data)
        for _ in reversed(decl.dims):
            # Insert a space before '>' when nesting, so the rendered type
            # never contains the '>>' token sequence.
            space = ' ' if cpp_type.endswith('>') else ''
            cpp_type = f"""{_get_std(data=data)}vector<{cpp_type}{space}>"""
        if decl.dims:
            cpp_type = f"""const {cpp_type} &"""
        params.append(f"""{cpp_type} {name}""")
    return ', '.join(params)
def write_input(data: Dict[str, Any], *, nest: int = 1) -> str:
    """Generate C++ code that writes the problem input back out.

    Reuses the output-serialization DFS (`_write_output_dfs`) on the
    *input* format. Falls back to a stub when analysis or generation fails.
    """
    analyzed = utils.get_analyzed(data)
    if analyzed.input_format is None or analyzed.input_variables is None:
        return _write_input_fallback(message="failed to analyze input format", data=data, nest=nest)
    try:
        tree = _write_output_dfs(analyzed.input_format, decls=analyzed.input_variables, data=data)
    except CPlusPlusGeneratorError as e:
        return _write_input_fallback(message="failed to generate input part: " + str(e), data=data, nest=nest)
    tree = _optimize_syntax_tree(tree, data=data)
    rendered = list(_serialize_syntax_tree(tree, data=data))
    return _join_with_indent(iter(rendered), nest=nest, data=data)
def generate_input(data: Dict[str, Any], *, nest: int = 1) -> str:
    """Generate C++ code that produces a random test input.

    Reuses the read-input DFS but plugs in a factory that emits
    GenerateNode's instead of read nodes. Falls back to a stub when
    analysis or generation fails.
    """
    analyzed = utils.get_analyzed(data)
    if analyzed.input_format is None or analyzed.input_variables is None:
        return _generate_input_fallback(message="failed to analyze input format", data=data, nest=nest)
    try:
        # Factory swapping read nodes for generate nodes in the DFS.
        def make_node(var, typ):
            return GenerateNode(expr=(var, typ))

        node = _read_input_dfs(analyzed.input_format, declared=set(), initialized=set(), decls=analyzed.input_variables, data=data, make_node=make_node)
    except CPlusPlusGeneratorError as e:
        # BUGFIX: this branch previously called _read_input_fallback,
        # emitting a read-input stub for a *generator* failure; use the
        # generator fallback for consistency with the analysis-failure
        # branch above (the message already says "generate input part").
        return _generate_input_fallback(message="failed to generate input part: " + str(e), data=data, nest=nest)
    node = _optimize_syntax_tree(node, data=data)
    lines = list(_serialize_syntax_tree(node, data=data))
    return _join_with_indent(iter(lines), nest=nest, data=data)
def actual_arguments(data: Dict[str, Any]) -> str:
    """Build the comma-separated argument list for the generated solve() call.

    Falls back to 'n, a' when the input format could not be analyzed.
    """
    analyzed = utils.get_analyzed(data)
    if analyzed.input_format is None or analyzed.input_variables is None:
        return 'n, a'
    # BUGFIX: filter ignored variables so the call site stays in sync with
    # formal_arguments(), which filters them; previously the generated call
    # could pass more arguments than the signature declares.
    decls = utils._filter_ignored_variables(analyzed.input_variables, data=data)
    return ', '.join(decls.keys())
def is_topcoder(data: Dict[str, Any]) -> bool:
    """Report whether the analyzer found a Topcoder-style class definition."""
    return get_analyzed(data).topcoder_class_definition is not None
def method_name(data: Dict[str, Any]) -> str:
    """Return the Topcoder method name, or a placeholder when unknown."""
    definition = get_analyzed(data).topcoder_class_definition
    return 'theMethodName' if definition is None else definition.method_name
def write_output(data: Dict[str, Any], *, nest: int = 1) -> str:
    """Generate C++ code that prints the answer.

    Dispatches on the recognized output type (single value, pair, yes/no,
    vector). When no structured output type was recognized, falls back to
    the DFS over the raw output format, or to a stub on failure.
    """
    analyzed = utils.get_analyzed(data)
    output_type = analyzed.output_type
    if isinstance(output_type, OneOutputType):
        node: CPlusPlusNode = OutputNewlineNode(exprs=[(output_type.name, output_type.type)])
    elif isinstance(output_type, TwoOutputType):
        sentences: List[CPlusPlusNode] = []
        sentences.append(OutputTokensNode(exprs=[(output_type.name1, output_type.type1)]))
        if output_type.print_newline_after_item:
            sentences.append(OutputNewlineNode(exprs=[]))
        sentences.append(OutputNewlineNode(exprs=[(output_type.name2, output_type.type2)]))
        node = SentencesNode(sentences=sentences)
    elif isinstance(output_type, YesNoOutputType):
        expr = f"""({output_type.name} ? {output_type.yes} : {output_type.no})"""
        node = OutputNewlineNode(exprs=[(expr, VarType.String)])
    elif isinstance(output_type, VectorOutputType):
        # Loop body: print one item, plus a newline when each item gets its
        # own line.
        inner_sentences: List[CPlusPlusNode] = []
        inner_sentences.append(OutputTokensNode(exprs=[(output_type.subscripted_name, output_type.type)]))
        if output_type.print_newline_after_item:
            inner_sentences.append(OutputNewlineNode(exprs=[]))
        sentences = []
        size = f"""({_get_base_type(VarType.IndexInt, data=data)}){output_type.name}.size()"""
        if output_type.print_size:
            sentences.append(OutputTokensNode(exprs=[(size, VarType.IndexInt)]))
        if output_type.print_newline_after_size:
            sentences.append(OutputNewlineNode(exprs=[]))
        sentences.append(RepeatNode(name=output_type.counter_name, size=size, body=SentencesNode(sentences=inner_sentences)))
        if not output_type.print_newline_after_item:
            # BUGFIX: the trailing newline belongs after the whole loop, so
            # append it to the outer `sentences`. Previously it was appended
            # to `inner_sentences` (already captured by the RepeatNode's
            # body), which printed a newline after every item even though
            # print_newline_after_item is false.
            sentences.append(OutputNewlineNode(exprs=[]))
        node = SentencesNode(sentences=sentences)
    elif output_type is None:
        if analyzed.output_format is None or analyzed.output_variables is None:
            return _write_output_fallback(message="failed to analyze output format", data=data, nest=nest)
        try:
            node = _write_output_dfs(analyzed.output_format, decls=analyzed.output_variables, data=data)
        except CPlusPlusGeneratorError as e:
            return _write_output_fallback(message="failed to generate output part: " + str(e), data=data, nest=nest)
    else:
        assert False
    node = _optimize_syntax_tree(node, data=data)
    lines = list(_serialize_syntax_tree(node, data=data))
    return _join_with_indent(iter(lines), nest=nest, data=data)
def return_value(data: Dict[str, Any]) -> str:
    """Comma-separated names of the output variables, or 'res' as fallback."""
    analyzed = utils.get_analyzed(data)
    if analyzed.output_format is None or analyzed.output_variables is None:
        return 'res'
    return ', '.join(analyzed.output_variables.keys())
def declare_constants(data: Dict[str, Any], *, nest: int = 0) -> str:
    """Render declarations for every constant found by the analyzer."""
    analyzed = utils.get_analyzed(data)
    rendered = [_declare_constant(decl, data=data) for decl in analyzed.constants.values()]
    return _join_with_indent(rendered, nest=nest, data=data)
def class_name(data: Dict[str, Any]) -> str:
    """Return the Topcoder class name, or a placeholder when unknown."""
    definition = get_analyzed(data).topcoder_class_definition
    return 'theClassName' if definition is None else definition.class_name