Example #1
    def __get_git_hashes(self):
        import subprocess, os
        from main import get_root_path
        git_hashes = {}
        root = get_root_path()
        for name in os.listdir(root):
            repo_path = os.path.join(root, name)
            # Only look at sub-directories that contain a .git folder.
            if os.path.isdir(repo_path) and '.git' in os.listdir(repo_path):
                # Run git inside the repository instead of changing the working directory.
                git_commit_number = subprocess.check_output(
                    ['git', 'rev-parse', 'HEAD'], cwd=repo_path).decode('ascii').strip()
                git_hashes[name] = git_commit_number
        # Swap single quotes for double quotes so the dict repr reads as JSON.
        return str(git_hashes).replace("'", '"')
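Nearly every example below calls get_root_path() from main, but its implementation is not part of any snippet. A minimal sketch of such a helper, assuming it simply returns the directory that contains main.py without a trailing slash (the callers all append paths starting with '/'):

import os

def get_root_path():
    # Assumed implementation: absolute path of the directory holding main.py.
    return os.path.dirname(os.path.abspath(__file__))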
Example #2
    def test03_dv_update(self):
        print("test_run2:\n")
        self.pipe.mappings[0].file_name = get_root_path() + '/PYELT/tests/data/zorgverleners2_rob.csv'

        self.pipeline.run()

        test_row_count(self, 'sor_test_system.zorgverlener_hstage', 8)
        test_row_count(self, 'dv.zorgverlener_hub', 4)
        test_row_count(self, 'dv.zorgverlener_sat', 5)
        test_row_count(self, 'dv.zorgverlener_sat_personalia', 5)
        test_row_count(self, 'dv.zorgverlener_adres_link', 5)
Example #3
    def __create_path_and_filename_by_type(logger_type: str = LoggerTypes.MAIN, runid=0.00, configs={}, filename_args=''):
        import os
        from datetime import datetime
        from main import get_root_path
        # Pick the log directory for this logger type; fall back to the generic log_path.
        path = get_root_path() + configs['log_path']
        if logger_type == LoggerTypes.SQL and 'sql_log_path' in configs:
            path = get_root_path() + configs['sql_log_path']
        if logger_type == LoggerTypes.DDL and 'ddl_log_path' in configs:
            path = get_root_path() + configs['ddl_log_path']
        if not os.path.exists(path):
            os.makedirs(path)
        # Filename carries a timestamp, optionally the run id, and the logger type.
        filename = 'LOG {0:%Y-%m-%d %H.%M.%S} {1}.log'.format(datetime.now(), logger_type)
        if runid:
            filename = 'LOG {1:%Y-%m-%d %H.%M.%S} RUN{0:07.2f} {2}.log'.format(runid, datetime.now(), logger_type)
        if filename_args:
            filename = filename.replace('.log', '_' + filename_args + '.log')

        return path, filename
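The two format strings above can be checked in isolation. A small sketch with an arbitrary timestamp and run id; the 'sql' string stands in for a LoggerTypes value and is not taken from the source:

from datetime import datetime

ts = datetime(2016, 5, 1, 9, 30, 0)  # arbitrary example timestamp
# -> 'LOG 2016-05-01 09.30.00 sql.log'
print('LOG {0:%Y-%m-%d %H.%M.%S} {1}.log'.format(ts, 'sql'))
# -> 'LOG 2016-05-01 09.30.00 RUN0012.50 sql.log'
print('LOG {1:%Y-%m-%d %H.%M.%S} RUN{0:07.2f} {2}.log'.format(12.5, ts, 'sql'))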
Example #4
    def send_log_mail(self):
        if 'email_settings' not in self.config:
            return
        elif 'send_log_mail_after_run' in self.config['email_settings'] and self.config['email_settings']['send_log_mail_after_run']:
            params = self.config['email_settings']
            params['to'] = params['to'].replace(';', ',')
            from main import get_root_path
            params['attachments_command'] = ' -a "' + get_root_path() + self.config['log_path'] + self.logger.filename + '"'

            if self.logger.errors:
                # Dutch: "SOMETHING WENT WRONG"
                params['subject'] += ' ER IS IETS FOUT GEGAAN '
                params['attachments_command'] += ' -a "' + get_root_path() + self.config['log_path'] + self.error_logger.filename + '"'

            from sys import platform
            if platform in ("linux", "linux2"):
                linux_cmd = """echo -e "{msg}" | mail {attachments_command} -s "{subject}" -r "{from}" "{to}" """.format(**params)
                import os
                os.system(linux_cmd)
Example #5
 def test_run2_from_source_to_sor(self):
     self.pipe.mappings[0].file_name = get_root_path() + '/PYELT/tests/data/patienten2.csv'
     self.pipe.mappings[0].delimiter = ';'
     self.pipe.mappings[1].file_name = get_root_path() + '/PYELT/tests/data/zorgtrajecten4.csv'
     self.pipeline.run()
     test_row_count(self, 'sor_test_system.patient_hstage', 7)
     test_row_count(self, 'dv.patient_hub', 5)
     test_row_count(self, 'dv.patient_sat', 5)
     test_row_count(self, 'dv.patient_sat_personalia', 5)
     test_row_count(self, 'dv.patient_sat_adres', 7)
     test_row_count(self, 'dv.patient_sat_inschrijving', 6)
     test_row_count(self, 'dv.patient_sat_contactgegevens', 11)
     test_row_count(self, 'dv.patient_traject_link', 5)
     test_row_count(self, 'dv.traject_sat_record_status', 2,
                    'deleted is not null')
     test_row_count(self, 'dv.patient_traject_link_sat_record_status', 2,
                    'deleted is not null')
Example #6
 def get_or_create_datatransfer_path(self):
     # path = pyelt_config['datatransfer_path']
     path = self.temp_datatransfer_path
     if not path:
         from main import get_root_path
         path = get_root_path() + '/data/transfer/'
     path += '/' + self.name
     if not os.path.exists(path):
         os.makedirs(path)
     return path
Example #7
    def test03_dv_updates(self):
        self.pipe.mappings[0].file_name = get_root_path() + '/PYELT/tests/data/zorgverlenersB_rob.csv'
        self.pipeline.run()

        test_row_count(self, 'sor_test_system.zorgverlener_hstage', 7)
        test_row_count(self, 'dv.zorgverlener_hub', 4)
        test_row_count(self, 'dv.zorgverlener_sat', 4)
        test_row_count(self, 'dv.zorgverlener_sat_personalia', 4)
Example #8
 def test08_postcode_correct(self):
     print("test_run4a:\n")
     self.pipe.mappings[0].file_name = get_root_path() + '/PYELT/tests/data/zorgverleners4_rob.csv'
     self.pipeline.run()
     result = get_field_value_from_table('postcode',
                                         'pyelt_unittests.dv.adres_sat',
                                         """char_length(postcode) >7""")
     # check that no postcode strings longer than 7 characters slipped through
     self.assertEqual(len(result), 0)
Example #9
 def test_run2_from_source_to_sor(self):
     self.pipe.mappings[0].file_name = get_root_path() + '/PYELT/tests/data/patienten2.csv'
     self.pipe.mappings[0].delimiter = ';'
     self.pipeline.run()
     get_row_count(self, 'sor_test_system.patient_hstage', 7)
     get_row_count(self, 'dv.patient_hub', 5)
     get_row_count(self, 'dv.patient_sat', 5)
     get_row_count(self, 'dv.patient_sat_personalia', 5)
     get_row_count(self, 'dv.patient_sat_adres', 7)
     get_row_count(self, 'dv.patient_sat_inschrijving', 6)
     get_row_count(self, 'dv.patient_sat_contactgegevens', 11)
Example #10
def run_staging():

    pipeline = Pipeline(config)
    pipe = pipeline.get_or_create_pipe('test_source', source_config)

    source_file = CsvFile(get_root_path() + '/sample_data/patienten1.csv',
                          delimiter=';')
    source_file.reflect()
    source_file.set_primary_key(['patientnummer'])
    mapping = SourceToSorMapping(source_file, 'persoon_hstage', auto_map=True)
    pipe.mappings.append(mapping)

    pipeline.run()
Example #11
def init_source_to_sor_mappings():
    mappings = []
    source_file = CsvFile(get_root_path() +
                          '/PYELT/tests/data/zorgverlenersA_rob.csv',
                          delimiter=';')
    source_file.reflect()
    source_file.set_primary_key(['zorgverlenernummer'])
    sor_mapping = SourceToSorMapping(source_file,
                                     'zorgverlener_hstage',
                                     auto_map=True)
    mappings.append(sor_mapping)

    source_file = CsvFile(get_root_path() +
                          '/PYELT/tests/data/zorginstelling_rob.csv',
                          delimiter=';')
    source_file.reflect()
    source_file.set_primary_key(['zorginstellings_nummer'])
    sor_mapping = SourceToSorMapping(source_file,
                                     'zorginstelling_hstage',
                                     auto_map=True)
    mappings.append(sor_mapping)

    return mappings
Example #12
    def test06_invalid_data_entry(self):
        # house number entered as a letter instead of a number:
        print("test_run3a:\n")

        expected_error = None
        try:
            self.pipe.mappings[0].file_name = get_root_path() + '/tests/data/zorgverleners3_rob.csv'
            self.pipeline.run()

        except Exception as err:
            expected_error = err

        self.assertIsNotNone(expected_error, 'ik verwachte een error')
Example #13
    def get_or_create_datatransfer_path(self):
        path = pyelt_config['datatransfer_path']

        if not path:
            from main import get_root_path
            path = get_root_path() + '/data/transfer/'
        path += '/' + self.name
        if not os.path.exists(path):
            os.makedirs(path)
        return path

        # NOTE: the code below is unreachable (it follows the return); it is a
        # leftover that deletes the /tmp datatransfer path.
        if self.source_db:
            path = self.source_db.get_or_create_datatransfer_path()
            import shutil
            shutil.rmtree(path)
Example #14
    def test_source_to_sor_mappings(self):

        source_file = CsvFile(get_root_path() +
                              '/PYELT/tests/data/patienten1.csv',
                              delimiter=';')
        source_file.reflect()
        self.assertEqual(len(source_file.columns), 16)
        self.assertEqual(len(source_file.primary_keys()), 0)
        source_file.set_primary_key(['patientnummer'])
        self.assertEqual(len(source_file.primary_keys()), 1)

        sor_mapping = SourceToSorMapping(source_file,
                                         'persoon_hstage',
                                         auto_map=True)
        self.assertEqual(sor_mapping.name, 'patienten1.csv -> persoon_hstage')
        self.assertEqual(len(sor_mapping.field_mappings), 16)
        self.pipe.mappings.append(sor_mapping)
Example #15
    def test09_null_and_hybridsat_update(self):
        # this unit test checks two things: is a Null field updated and is a hybrid sat updated
        print("test_run5:\n")
        self.pipe.mappings[0].file_name = get_root_path() + '/PYELT/tests/data/zorgverleners4_rob.csv'
        self.pipeline.run()

        result = get_field_value_from_dv_table(
            'telnummer', 'zorgverlener', 'contactgegevens', '448',
            ["""type = 'mobiel2'""", """_active = True"""])

        if len(result) > 0:
            result = result[0][0]
        else:
            result = None
        self.assertIsNotNone(
            result,
            'Ik verwacht dat een verandering in een oorspronkelijk Null veld wel in de DV laag terecht zou komen; bestaat de gebruikte bk wel?'
        )
Example #16
def delete_all_logs(folder):
    # Reconstructed head: only the trailing "print(e)" of this function survived
    # in the snippet; assumed to mirror delete_all_ddl_logs below, minus the
    # 'DDL' filename filter.
    for the_file in os.listdir(folder):
        file_path = os.path.join(folder, the_file)
        try:
            if os.path.isfile(file_path):
                os.unlink(file_path)
        except Exception as e:
            print(e)


def delete_all_ddl_logs(folder):
    for the_file in os.listdir(folder):
        file_path = os.path.join(folder, the_file)
        if 'DDL' in the_file:
            try:
                if os.path.isfile(file_path):
                    os.unlink(file_path)
            except Exception as e:
                print(e)


def delete_logs_older_than_today(folder):
    today = datetime.now()
    today = today.replace(hour=0, minute=0, second=0, microsecond=0)
    for the_file in os.listdir(folder):
        file_path = os.path.join(folder, the_file)
        # Filenames are expected to carry a 19-character '%Y-%m-%d %H.%M.%S'
        # timestamp starting at character 11.
        datetime_string = the_file[11:30]
        datetime_date = datetime.strptime(datetime_string, '%Y-%m-%d %H.%M.%S')
        if datetime_date < today:
            try:
                if os.path.isfile(file_path):
                    os.unlink(file_path)
            except Exception as e:
                print(e)


delete_all_logs(get_root_path() + '/logs/')
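The other two helpers above take the same folder argument; a short usage sketch mirroring the call on the previous line (not part of the source):

log_folder = get_root_path() + '/logs/'
delete_all_ddl_logs(log_folder)           # remove only the DDL logs
delete_logs_older_than_today(log_folder)  # remove logs dated before today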