def sign_module_binary(x, size):
    """Encode ``x`` as a sign-and-magnitude binary string of exactly ``size`` bits.

    The first bit is the sign ('0' for non-negative, '1' for negative); the
    remaining ``size - 1`` bits hold the magnitude via ``padbin``.

    Raises:
        error.ParserError: if ``size <= 1`` (no room for magnitude bits) or
            the magnitude does not fit in ``size - 1`` bits.
    """
    if size <= 1:
        # One bit is consumed by the sign, so at least 2 bits are required.
        # BUGFIX: corrected the "lenght" typo in the user-facing message.
        raise error.ParserError(None, "Cannot use value of length 1 for sign module.")
    output = f"{'0' if x >= 0 else '1'}{padbin(abs(x), size - 1, False)}"
    if len(output) != size:
        raise error.ParserError(None, f"Argument of value: {x} ({len(output)} bits) cannot be parsed with {size} bits.")
    return output
def unsigned_binary(x, size):
    """Encode a non-negative integer as an unsigned binary string of exactly
    ``size`` bits.

    Raises:
        error.ParserError: if ``x`` is negative or does not fit in ``size`` bits.
    """
    if x < 0:
        raise error.ParserError(None, f"Cannot parse value: {x} as unsigned.")
    encoded = padbin(x, size, False)
    if len(encoded) == size:
        return encoded
    raise error.ParserError(None, f"Argument of value: {x} ({len(encoded)} bits) cannot be parsed with {size} bits.")
def get_value(strage_format: str) -> int:
    """Return the integer value of ``strage_format``.

    Accepts anything ``int(s, base=0)`` understands (decimal, 0x…, 0b…, 0o…),
    with an explicit '0x' (hex) / '0b' (binary) fallback for forms that
    ``base=0`` rejects.

    Raises:
        error.ParserError: if the string cannot be parsed as a number.
    """
    strage_format = strage_format.strip()
    try:
        # base=0 lets int() pick the radix from the literal's prefix.
        return int(strage_format, base=0)
    except ValueError:  # BUGFIX: was a bare `except:` swallowing everything
        pass
    prefix, digits = strage_format[:2], strage_format[2:]
    try:
        if digits:
            if prefix == "0x":
                return int(digits, base=16)
            if prefix == "0b":
                return int(digits, base=2)
    except ValueError:
        # BUGFIX: invalid digits after a valid prefix previously escaped as a
        # raw ValueError instead of the parser's own error type.
        pass
    raise error.ParserError(-1, f"Value: '{strage_format}' cannot be parsed.")
def find_labels(program, context):
    """Strip label definitions (``name:``) from ``program`` and record them.

    Returns the program without label lines, and the context with
    ``context['labels']`` mapping each label name to the 1-based index of the
    following program line.

    Raises:
        error.ParserError: on a malformed label match or a duplicate label.
    """
    # BUGFIX: the pattern variable was named `find_labels`, shadowing this
    # very function inside its own body.
    label_pattern = patterns.Pattern("{label:any_str}:")
    labels = {}
    output = []
    for line_obj in program:
        label = match_expr.match_expr(label_pattern, line_obj, None)
        if label is None:
            output.append(line_obj)
            continue
        if 'label' not in label:
            raise error.ParserError(line_obj.line_index_in_file, f"Cannot find label '{label}'")
        if label['label'] in labels:
            raise error.ParserError(
                line_obj.line_index_in_file, f"Label '{label['label']}' is not unique")
        # The label points at the next emitted line (1-based).
        labels[label['label']] = len(output) + 1
    context['labels'] = labels
    return output, context
def check_keywords_exists(entry, context):
    """Compare ``entry`` keywords against the profile's declared keywords.

    Keywords used by the entry but missing from the profile, and profile
    keywords never used by the entry, either raise (depending on the
    ``config.rise_on_missing_*`` flags) or are appended to
    ``context['warnings']``.
    """
    profile: Profile = context['profile']
    profile_keywords = set(profile.keywords)
    entry_keywords = set(entry.keys())
    missing_entrypoints = profile_keywords - entry_keywords  # declared, unused
    missing_keywords = entry_keywords - profile_keywords     # used, undeclared
    if missing_keywords:
        # BUGFIX: the singular/plural choice previously keyed on
        # len(missing_entrypoints) instead of len(missing_keywords).
        msg = (f"Keywords: {missing_keywords} are not defined in profile"
               if len(missing_keywords) != 1
               else f"Keyword: {missing_keywords} is not defined in profile")
        if config.rise_on_missing_keyword:
            raise error.ParserError(None, msg)
        context['warnings'].append(msg)
    if missing_entrypoints:
        msg = (f"Keywords: {missing_entrypoints} are defined in profile but not used"
               if len(missing_entrypoints) != 1
               else f"Keyword: {missing_entrypoints} is defined in profile but never used")
        if config.rise_on_missing_entrypoint:
            raise error.ParserError(None, msg)
        context['warnings'].append(msg)
def format_output(program, context):
    """Render every program line with the formatter selected by ``config.save``.

    Sets ``line.formatted`` on each line and ``context['tabulate']`` to
    whether the chosen output format requires tabulation.

    Raises:
        error.ParserError: on an unknown save format, or re-raised from a
            formatter with the offending line index attached.
    """
    if config.save == 'py':
        formatter_function, req_tabulate = get_py, False
    elif config.save == 'bin':
        formatter_function, req_tabulate = get_bin, True
    elif config.save == 'pad' or config.save == "schem":
        formatter_function, req_tabulate = get_pad, True
    elif config.save == 'raw':
        formatter_function, req_tabulate = get_raw, True
    else:
        # BUGFIX: a bare `raise` here had no active exception and produced an
        # unhelpful RuntimeError; report the unsupported format instead.
        raise error.ParserError(None, f"Unknown save format: {config.save}")
    for _, program_lines in program.items():
        for line in program_lines:
            try:
                line.formatted = formatter_function(line, context)
            except error.ParserError as err:
                # Re-raise with the line number attached for diagnostics,
                # chaining the original cause.
                raise error.ParserError(line.line_index_in_file, err.info) from err
    context['tabulate'] = req_tabulate
    return program, context
def join_quote_str(line):
    """Join tokens between quotes:
    `['a', 'b', '"', ' ', 'c', '"'] => ['a', 'b', '" c"']`

    Mutates and returns ``line`` with its ``tokenized`` list rewritten so that
    each quoted run becomes a single token (quotes included).

    Raises:
        error.ParserError: if a quote is left unclosed.
    """
    output = []
    joined = ""
    in_quote = False
    for token in line.tokenized:
        if token == '"':
            if in_quote:
                # Closing quote: emit the accumulated run as one token.
                output.append(f'"{joined}"')
                joined = ""
            in_quote = not in_quote
        elif in_quote:
            joined += token
        else:
            output.append(token)
    if in_quote:
        raise error.ParserError(line.line_index_in_file, 'Expected \'"\'')
    line.tokenized = output
    return line
def encode_argument(layout, name, val):
    """Encode ``val`` using the encoding and size declared for argument
    ``name`` in ``layout``, returning the formatted string.

    Raises:
        error.ParserError: re-raised from the encoder with the encoding name
            and argument name appended to the message.
    """
    spec = layout[name]
    encoding = get_encoding(spec)
    try:
        encoded = encoding(val, spec["size"])
    except error.ParserError as err:
        raise error.ParserError(None, f"{err.info}, Encoding: {encoding.__name__}, Argument: {name}")
    return "{}".format(encoded)
def u2_module_binary(x, size):
    """Encode ``x`` as a ``size``-bit two's-complement (U2) binary string.

    Raises:
        error.ParserError: if ``x`` is outside the representable range
            ``[-(2**(size-1)), 2**(size-1) - 1]``.
    """
    # BUGFIX: the previous range check (`x > 2**size or x <= -(2**size)`)
    # accepted values that cannot be represented in `size` bits of two's
    # complement; the correct bound is 2**(size-1).
    half = 1 << (size - 1)
    if x >= half or x < -half:
        raise error.ParserError(None, f"Argument of value: {x} cannot be parsed as {size} bit u2 number")
    # NOTE(review): assumes padbin emits the two's-complement bit pattern for
    # negative x — confirm against padbin's implementation.
    return padbin(x, size, False)
def binary(x, size):
    """Encode ``x`` as a binary string of exactly ``size`` bits.

    Raises:
        error.ParserError: if the encoded value does not fit in ``size`` bits.
    """
    result = padbin(x, size, False)
    if len(result) == size:
        return result
    raise error.ParserError(None, f"Argument of value: {x} ({len(result)} bits) cannot be parsed with {size} bits.")
def split_into_chunks(program, context):
    """Split the program into chunks, one per entrypoint, assigning each line
    a physical address.

    Each entry maps a chunk name to ``(label_name, offset)``; the label's
    position in ``context['labels']`` decides where the chunk starts. Fills
    ``context['physical_adresses']``, ``context['chunk_adreses']``,
    ``context['namespace']`` and ``context['use_phisical_adresses']``.

    Raises:
        error.ParserError: if a global label is unknown or no entrypoint
            is defined.
    """
    entry: dict = context['entry']
    labels: dict = context['labels']
    profile: Profile = context['profile']
    physical_adresses = dict()
    chunk_labels = dict()
    namespace = dict()
    check_keywords_exists(entry, context)
    try:
        # Extend each entry with the resolved label position:
        # chunk_name -> (label_name, offset, label_line_index)
        entry = {
            chunk_name: (label_name, offset, labels[label_name])
            for chunk_name, (label_name, offset) in entry.items()
        }
    except KeyError as err:
        # BUGFIX: repaired the garbled message ("founded in skope").
        raise error.ParserError(
            None, f"Label marked as global: {err} cannot be found in scope")
    if len(entry) == 0:
        # BUGFIX: repaired the "Avalible" typo in the message.
        raise error.ParserError(
            None,
            f"No entrypoint is defined (Consider adding #global to file. Available are {profile.keywords})"
        )
    chunks = {chunk_name: list() for chunk_name in entry.keys()}
    chunk_name, current_chunk = get_next_chunk(entry, 0)
    chunk_name_next, current_chunk_next = get_next_chunk(
        entry, current_chunk[2])
    current_chunk_physical_adress = 0
    current_chunk_adress = 1
    # Lines are numbered from 1 (labels in `labels` are 1-based).
    for i, program_line in enumerate(program, start=1):
        if i >= current_chunk_next[2]:
            # Crossed into the next chunk; stop if there is none.
            if chunk_name_next is None:
                break
            chunk_name, current_chunk = get_next_chunk(entry, current_chunk[2])
            chunk_name_next, current_chunk_next = get_next_chunk(
                entry, current_chunk[2])
            current_chunk_physical_adress = 0
            current_chunk_adress = 1
        # All labels pointing at this line get the current addresses.
        label = find_key_by_value(labels, i)
        if len(label) != 0:
            for lab in label:
                physical_adresses[
                    lab] = current_chunk_physical_adress // profile.adressing.bin_len + profile.adressing.offset + current_chunk[
                        1]
                chunk_labels[lab] = current_chunk_adress
        program_line.physical_adress = current_chunk_physical_adress // profile.adressing.bin_len + profile.adressing.offset + current_chunk[
            1]
        chunks[chunk_name].append(program_line)
        current_chunk_physical_adress += get_adress_offset(
            program_line, profile)
        current_chunk_adress += 1
    context['physical_adresses'] = physical_adresses
    context['chunk_adreses'] = chunk_labels
    context['namespace'] = namespace
    context['use_phisical_adresses'] = True
    return chunks, context