Example #1
def run(self) -> object:
    # Process parameters
    if len(sys.argv) < 4:
        usage()
        return
    filename = sys.argv[1]
    output = sys.argv[2]
    coded_filename = sys.argv[3]
    tokeniser = find_tokeniser(sys.argv[4:])
    self.verbosity = find_verbosity(sys.argv[4:])
    load_address = find_base_load_address(sys.argv[4:], LOAD_ADDRESS)
    header_length = find_header_length(sys.argv[4:], HEADER_LENGTH)
    # Read the BASIC source file
    with open(filename, "rb") as sourceFile:
        file_data = sourceFile.read().decode()
    self.report(1, f"Located program {filename}")
    # Tokenise the program; a result of 0 indicates success
    result, token_stream = tokeniser.parse_program(file_data, load_address)
    if result == 0:
        self.report(1, "file successfully encoded")
        # Wrap the token stream in a CAS cassette image and write it out
        formatter = formats.Cas_Format.CasFormat([], tokeniser,
                                                 self.verbosity)
        output_data = formatter.build_file(coded_filename, token_stream,
                                           header_length)
        with open(output, "wb") as f:
            f.write(output_data)
        self.report(1, f"cas file written as {output}")
    else:
        self.report(2, "file processing failed")
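The run() method above is tied to sys.argv. Based only on the calls it makes (parse_program, CasFormat and build_file), the same encode path can be written as a plain function; the sketch below is an assumption about how those pieces compose, not code from the project. The formats.Cas_Format import path comes from the excerpt, and the load_address/header_length values that run() defaults from the LOAD_ADDRESS and HEADER_LENGTH constants are left as required parameters here.

# Sketch only: drives the Example #1 pipeline without sys.argv.
# Assumes the formats.Cas_Format import path seen in the excerpt;
# nothing here is verified against the real project.
import formats.Cas_Format


def encode_to_cas(source_path, cas_path, coded_name, tokeniser,
                  load_address, header_length, verbosity=1):
    # Read the BASIC source as text
    with open(source_path, "rb") as source_file:
        text = source_file.read().decode()
    # Tokenise; a result of 0 means the program was encoded successfully
    result, token_stream = tokeniser.parse_program(text, load_address)
    if result != 0:
        raise ValueError(f"failed to tokenise {source_path}")
    # Build the CAS cassette image and write it out
    formatter = formats.Cas_Format.CasFormat([], tokeniser, verbosity)
    output_data = formatter.build_file(coded_name, token_stream, header_length)
    with open(cas_path, "wb") as cas_file:
        cas_file.write(output_data)
    return cas_path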
Example #2
def run(self) -> object:
    # Process parameters
    if len(sys.argv) < 3:
        usage()
        return
    self.filename = sys.argv[1]
    self.output = sys.argv[2]
    tokeniser = find_tokeniser(sys.argv[3:])
    self.verbosity = find_verbosity(sys.argv[3:])
    self.report(0, f"Using {tokeniser.name}")
    formatter = initialise_formatter(self.filename, tokeniser,
                                     self.verbosity)
    self.process_cas(formatter)

def test_given_no_options_find_tokeniser_returns_the_default_dragon_tokens():
    opts = []
    actual = find_tokeniser(opts)
    assert isinstance(actual, DragonToken)


def test_given_conflicting_options_returns_the_highest_priority_tokens(
        test_input, expected):
    actual = find_tokeniser(test_input)
    assert isinstance(actual, expected)


def test_given_a_rsdos_option_find_tokeniser_returns_the_coco_rsdos_tokens(
        test_input):
    opts = [test_input]
    actual = find_tokeniser(opts)
    assert isinstance(actual, RsDosToken)


def test_given_a_dragondos_option_find_tokeniser_returns_the_dragondos_tokens(
        test_input):
    opts = [test_input]
    actual = find_tokeniser(opts)
    assert isinstance(actual, DragonDosToken)


def test_given_an_invalid_option_find_tokeniser_ignores_it():
    opts = ["--rubbish"]
    actual = find_tokeniser(opts)
    assert isinstance(actual, DragonToken)
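The tests that accept test_input (and expected) as arguments rely on pytest parametrization; the @pytest.mark.parametrize decorators and the imports for find_tokeniser and the tokeniser classes are not part of this excerpt. The sketch below shows how one of them would be wired up, with a module path and option strings that are placeholders rather than the project's real flags.

# Sketch only: the module path and option strings are assumptions, not the
# project's actual import path or command-line flags.
import pytest
from dragon_tokens import find_tokeniser, DragonDosToken  # hypothetical module


@pytest.mark.parametrize("test_input", ["--dragondos", "--dd"])  # placeholder spellings
def test_given_a_dragondos_option_find_tokeniser_returns_the_dragondos_tokens(
        test_input):
    opts = [test_input]
    actual = find_tokeniser(opts)
    assert isinstance(actual, DragonDosToken)

The two-argument test follows the same pattern, with parametrize supplying pairs of an option list and the tokeniser class expected to take priority.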