Example #1
0
 def test_column_type_checking(self):
     """Valid float/duration/datetime columns must produce no column or row type errors."""
     model = MappingPreviewModel()
     model.reset_model([["1", "0h", "2018-01-01 00:00"], ["2", "1h", "2018-01-01 00:00"]])
     # Same call sequence as setting each column type individually:
     # column 0 -> float, column 1 -> duration, column 2 -> datetime.
     for column, type_name in enumerate(('float', 'duration', 'datetime')):
         model.set_type(column, value_to_convert_spec(type_name))
         self.assertEqual(model._column_type_errors, {})
         self.assertEqual(model._row_type_errors, {})
    def test_row_type_checking_produces_error(self):
        """A value that fails row type conversion is recorded, but "Error" is shown
        only after a pivoted ("row") mapping actually reads that row."""
        model = MappingPreviewModel()
        model.reset_model([["1", "2.4"], ["Not a valid number", "3"]])
        model.set_type(1, value_to_convert_spec('float'), orientation=Qt.Vertical)
        bad_cell = (1, 0)
        self.assertEqual(len(model._row_type_errors), 1)
        self.assertEqual(model._column_type_errors, {})
        self.assertTrue(bad_cell in model._row_type_errors)
        # Without a pivot mapping on that row the raw value is displayed.
        self.assertEqual(model.data(model.index(*bad_cell)),
                         "Not a valid number")

        # Once a pivoted mapping references the row, the error is displayed.
        pivoted = dict_to_map({
            "map_type": "ObjectClass",
            "name": {"map_type": "row", "value_reference": 1},
        })
        model.set_mapping(MappingSpecModel(pivoted))
        self.assertEqual(model.data(model.index(*bad_cell)), "Error")
    def test_column_type_checking_produces_error(self):
        """A failed column conversion shows "Error", unless the failing row is
        pivoted away or reading starts below it."""
        model = MappingPreviewModel()
        model.reset_model([["Not a valid number", "2.4"], ["1", "3"]])
        model.set_type(0, value_to_convert_spec('float'))
        bad_cell = (0, 0)
        self.assertEqual(len(model._column_type_errors), 1)
        self.assertEqual(model._row_type_errors, {})
        self.assertTrue(bad_cell in model._column_type_errors)
        self.assertEqual(model.data(model.index(*bad_cell)), "Error")

        # A pivoted ("row") mapping on the failing row hides the error again.
        pivoted = dict_to_map({
            "map_type": "ObjectClass",
            "name": {"map_type": "row", "value_reference": 0},
        })
        model.set_mapping(MappingSpecModel(pivoted))
        self.assertEqual(model.data(model.index(*bad_cell)),
                         "Not a valid number")

        # Likewise when reading starts from a row below the error.
        skipping = dict_to_map({
            "map_type": "ObjectClass",
            "read_start_row": 1,
        })
        model.set_mapping(MappingSpecModel(skipping))
        self.assertEqual(model.data(model.index(*bad_cell)),
                         "Not a valid number")
Example #4
0
 def test_interger_sequence_datetime(self):
     """An integer-sequence datetime spec dict yields IntegerSequenceDateTimeConvertSpec."""
     # NOTE(review): "interger" in the method name is a typo for "integer";
     # renaming would change the test id, so it is kept as-is.
     spec_dict = {
         "start_datetime": "2019-01-01T00:00",
         "start_int": 0,
         "duration": "1h",
     }
     spec = value_to_convert_spec(spec_dict)
     self.assertIsInstance(spec, IntegerSequenceDateTimeConvertSpec)
Example #5
0
 def test_Duration(self):
     """The string "duration" maps to a DurationConvertSpec instance."""
     spec = value_to_convert_spec("duration")
     self.assertIsInstance(spec, DurationConvertSpec)
Example #6
0
 def test_DateTime(self):
     """The string "datetime" maps to a DateTimeConvertSpec instance."""
     spec = value_to_convert_spec("datetime")
     self.assertIsInstance(spec, DateTimeConvertSpec)
Example #7
0
 def test_float(self):
     """The string "float" maps to a FloatConvertSpec instance."""
     spec = value_to_convert_spec("float")
     self.assertIsInstance(spec, FloatConvertSpec)
Example #8
0
 def test_string(self):
     """The string "string" maps to a StringConvertSpec instance."""
     spec = value_to_convert_spec("string")
     self.assertIsInstance(spec, StringConvertSpec)
def run(checked_files, all_import_settings, all_source_settings, urls_downstream, logs_dir, cancel_on_error):
    """Import data from the checked source files and push the results downstream.

    Args:
        checked_files (list): paths of the source files selected for import
        all_import_settings (dict): maps a source path to its import settings,
            or to the string "deselected" to skip that source
        all_source_settings (dict): maps a connector type name to that connector's settings
        urls_downstream (list): database URLs that receive the imported data
        logs_dir (str): directory where the timestamped error log is written
        cancel_on_error (bool): when True, terminate the process on the first import failure
    """
    print("starting importer program")
    all_data = []
    all_errors = []
    for source in checked_files:
        settings = all_import_settings.get(source, None)
        if settings == "deselected":
            continue
        if not settings:
            # `not settings` already covers None, so the former
            # `settings is None or not settings` was redundant.
            print(f"There are no mappings defined for {source}, moving on...")
            continue
        source_type = settings["source_type"]
        source_settings = all_source_settings.get(source_type)
        # Pick the connector class matching the stored source type; an unknown
        # type raises KeyError, which indicates corrupted settings.
        connector = {
            "CSVConnector": CSVConnector,
            "ExcelConnector": ExcelConnector,
            "GdxConnector": GdxConnector,
            "JSONConnector": JSONConnector,
        }[source_type](source_settings)
        try:
            connector.connect_to_source(source)
        except IOError as error:
            print(f"Failed to connect to source: {error}", file=sys.stderr)
            sys.exit(1)
        # Only keep mappings/options for tables the user actually selected.
        selected_tables = settings["selected_tables"]
        table_mappings = {
            name: mapping
            for name, mapping in settings.get("table_mappings", {}).items()
            if name in selected_tables
        }
        table_options = {
            name: options
            for name, options in settings.get("table_options", {}).items()
            if name in selected_tables
        }
        # Column indexes are stored as strings in the settings; convert to int
        # and turn the serialized specs into ConvertSpec objects.
        table_types = {
            tn: {int(col): value_to_convert_spec(spec) for col, spec in cols.items()}
            for tn, cols in settings.get("table_types", {}).items()
        }
        table_row_types = {
            tn: {int(col): value_to_convert_spec(spec) for col, spec in cols.items()}
            for tn, cols in settings.get("table_row_types", {}).items()
        }
        try:
            data, errors = connector.get_mapped_data(
                table_mappings, table_options, table_types, table_row_types, max_rows=-1
            )
        except spinedb_api.InvalidMapping as error:
            # Fixed typo in the user-facing message: "imoport" -> "import".
            print(f"Failed to import '{source}': {error}", file=sys.stderr)
            if cancel_on_error:
                sys.exit(1)
            continue
        print(f"Read {sum(len(d) for d in data.values())} data from {source} with {len(errors)} errors")
        all_data.append(data)
        all_errors.extend(errors)
    if all_errors:
        # Log errors in a time stamped file in the logs directory.
        timestamp = _create_log_file_timestamp()
        logfilepath = os.path.abspath(os.path.join(logs_dir, timestamp + "_error.log"))
        with open(logfilepath, 'w') as f:
            f.writelines(f"{err}\n" for err in all_errors)
        # Make an error log file anchor with the path as tooltip.
        logfile_anchor = (
            "<a style='color:#BB99FF;' title='" + logfilepath + "' href='file:///" + logfilepath + "'>error log</a>"
        )
        print("Import errors. Logfile: {0}".format(logfile_anchor), file=sys.stderr)
        if cancel_on_error:
            # NOTE(review): -1 becomes exit status 255 on POSIX, unlike the
            # exit(1) used above — kept as-is since callers may depend on it.
            sys.exit(-1)
    if all_data:
        for url in urls_downstream:
            _import(all_data, url, logs_dir, cancel_on_error)
def run(checked_files, all_settings, urls_downstream, logs_dir,
        cancel_on_error):
    """Read mapped data from every checked source file and import it downstream.

    Args:
        checked_files (list): source file paths to process
        all_settings (dict): per-source import settings
        urls_downstream (list): database URLs that receive the imported data
        logs_dir (str): directory where the timestamped error log is written
        cancel_on_error (bool): when True, exit the process if any import error occurred
    """
    collected_data = []
    collected_errors = []
    for source in checked_files:
        source_settings = all_settings.get(source, None)
        if not source_settings:
            # Both a missing (None) and an empty settings dict mean "skip".
            print("There are no mappings defined for {0}, moving on...".format(
                source))
            continue
        source_type = source_settings["source_type"]
        connector_class = {
            "CSVConnector": CSVConnector,
            "ExcelConnector": ExcelConnector,
            "GdxConnector": GdxConnector
        }[source_type]
        connector = connector_class()
        connector.connect_to_source(source)
        selected = source_settings["selected_tables"]
        # Keep only the mappings/options of tables the user selected.
        table_mappings = {}
        for name, mapping in source_settings.get("table_mappings", {}).items():
            if name in selected:
                table_mappings[name] = mapping
        table_options = {}
        for name, options in source_settings.get("table_options", {}).items():
            if name in selected:
                table_options[name] = options

        def _to_specs(section):
            # Column keys arrive as strings; convert to int and build
            # ConvertSpec objects from the serialized specs.
            return {
                table: {
                    int(col): value_to_convert_spec(spec)
                    for col, spec in cols.items()
                }
                for table, cols in source_settings.get(section, {}).items()
            }

        table_types = _to_specs("table_types")
        table_row_types = _to_specs("table_row_types")
        data, errors = connector.get_mapped_data(table_mappings,
                                                 table_options,
                                                 table_types,
                                                 table_row_types,
                                                 max_rows=-1)
        print("Read {0} data from {1} with {2} errors".format(
            sum(len(d) for d in data.values()), source, len(errors)))
        collected_data.append(data)
        collected_errors.extend(errors)
    if collected_errors:
        # Log errors in a time stamped file into the logs directory
        timestamp = _create_log_file_timestamp()
        logfilepath = os.path.abspath(
            os.path.join(logs_dir, timestamp + "_error.log"))
        with open(logfilepath, 'w') as log_file:
            for err in collected_errors:
                log_file.write("{0}\n".format(err))
        # Make error log file anchor with path as tooltip
        logfile_anchor = ("<a style='color:#BB99FF;' title='" + logfilepath +
                          "' href='file:///" + logfilepath + "'>error log</a>")

        print("Import errors. Logfile: {0}".format(logfile_anchor),
              file=sys.stderr)
        if cancel_on_error:
            sys.exit(-1)
    if collected_data:
        for url in urls_downstream:
            _import(collected_data, url, logs_dir, cancel_on_error)