Example #1
0
def start_opus_diff(ad_reader=None):
    """
    Start an opus update, use the oldest available dump that has not
    already been imported.

    Repeatedly asks opus_helpers for the next unimported XML file and
    imports it until no files remain.

    Args:
        ad_reader: Optional AD reader passed through to import_one.

    Raises:
        RunDBInitException: If the local run-db file does not exist.
    """
    SETTINGS = load_settings()

    dumps = opus_helpers.read_available_dumps()
    run_db = Path(SETTINGS["integrations.opus.import.run_db"])
    filter_ids = SETTINGS.get("integrations.opus.units.filter_ids", [])
    # NOTE(review): dropped unused local `skip_employees` — it was read from
    # settings but never used; confirm import_one does not need it.

    if not run_db.is_file():
        logger.error("Local base not correctly initialized")
        raise RunDBInitException("Local base not correctly initialized")
    xml_date, latest_date = opus_helpers.next_xml_file(run_db, dumps)

    while xml_date:
        import_one(ad_reader,
                   xml_date,
                   latest_date,
                   dumps,
                   filter_ids,
                   opus_id=None)
        # Check if there are more files to import
        xml_date, latest_date = opus_helpers.next_xml_file(run_db, dumps)
    # Fix: log once after the loop finishes, not once per imported file.
    logger.info("Ended update")
Example #2
0
def import_opus(
    ad_reader=None,
    import_all: bool = False,
    import_last=False,
    opus_id=None,
    rundb_write: bool = True,
) -> None:
    """Import one or all files from opus even if no previous files have been imported.

    Args:
        ad_reader: Optional AD reader passed through to import_one.
        import_all: Import every available dump in chronological order.
        import_last: Import only the newest dump; takes precedence over
            import_all.
        opus_id: Optional opus id passed through to import_one.
        rundb_write: Whether import_one should record progress in the run-db.
    """
    settings = load_settings()
    filter_ids = settings.get("integrations.opus.units.filter_ids", [])
    # NOTE(review): dropped unused local `skip_employees` — it was read from
    # settings but never used; confirm import_one does not need it.
    dumps = opus_helpers.read_available_dumps()

    all_dates = dumps.keys()
    # Default is read first file only
    export_dates = [min(all_dates)]
    if import_last:
        export_dates = [max(all_dates)]
    elif import_all:
        export_dates = sorted(all_dates)

    # Pair each date with its predecessor (None for the first) so import_one
    # can diff consecutive files.
    export_dates = prepend(None, export_dates)
    date_pairs = pairwise(export_dates)
    for date1, date2 in date_pairs:
        import_one(
            ad_reader,
            date2,
            date1,
            dumps,
            filter_ids,
            opus_id=opus_id,
            rundb_write=rundb_write,
        )
Example #3
0
def find_cancelled(dry_run):
    """Find cancelled data and delete it.

    Walks all consecutive pairs of dump files, diffs them, and for every
    cancelled unit/employee found either reports it (dry_run) or terminates
    it in MO via OpusDiffImport.

    Args:
        dry_run: If True, only report what would be terminated.
    """
    settings = load_settings()
    filter_ids = settings.get("integrations.opus.units.filter_ids", [])
    dumps = opus_helpers.read_available_dumps()
    for date1, date2 in tqdm(pairwise(sorted(dumps)),
                             total=len(dumps) - 1,
                             unit="file-pairs"):
        file_diffs = opus_helpers.file_diff(dumps[date1],
                                            dumps[date2],
                                            disable_tqdm=True)
        units = file_diffs["units"]
        employees, _ = opus_helpers.split_employees_leaves(
            file_diffs["cancelled_employees"])
        # set enddate to filedate for cancelled employees
        employees = opus_helpers.include_cancelled(dumps[date2], [],
                                                   list(employees))
        if units or employees:
            # Fix: "wich" -> "which" in the user-facing message.
            msg = f"Found {len(units)} units and {len(employees)} employees which were cancelled on {date2}"
            if dry_run:
                # Fix: separate the message and the "(Dry-run)" suffix with a space.
                click.echo(msg + " (Dry-run)")
                continue
            click.echo(msg + ". Terminating now.")
            diff = OpusDiffImport(date2, None, {}, filter_ids=filter_ids)
            for employee in employees:
                # Updates each employee with their leave-date overwritten, so that their engagement will be terminated.
                try:
                    diff.update_employee(employee)
                except UnknownOpusUnit:
                    # The unit might be terminated by now, since we're looking through older files. No problem, carry on.
                    continue
            # Handles cancelled units as filtered, which means terminates them from the date of the file.
            mo_units = diff.find_unterminated_filtered_units(units)
            diff.handle_filtered_units(mo_units)
Example #4
0
def find_opus_name() -> str:
    """Generates uuid for opus root.

    Reads the first available opus file and generates the uuid for the first unit in the file.
    Assumes this is the root organisation of opus.

    Returns:
        The calculated uuid as a string.
    """
    dumps = opus_helpers.read_available_dumps()

    # min() already finds the earliest date; sorting first was redundant.
    first_date = min(dumps)
    units, _ = opus_helpers.parser(dumps[first_date])
    main_unit = first(units)
    calculated_uuid = opus_helpers.generate_uuid(main_unit["@id"])
    return str(calculated_uuid)
Example #5
0
 def _check_xml_files_availability(self):
     """Verify that at least one opus XML dump exists; exit(1) if none found."""
     print('Tjeck der findes xml-filer at importere')
     # Deferred import: this check must work even before bare_minimum_check
     # has validated the settings keys that opus_helpers relies on.
     from integrations.opus import opus_helpers
     dumps = opus_helpers.read_available_dumps()
     if dumps:
         newest = max(dumps)
         print(' * Fandt {}-xml filer. Nyeste fil fra {}'.format(
             len(dumps), newest))
         print()
     else:
         print(' * Fandt ingen xml-filer i {}'.format(
             self.settings['integrations.opus.import.xml_path']))
         exit(1)
Example #6
0
def start_opus_import(importer, ad_reader=None, force=False):
    """
    Start an opus import, run the oldest available dump that
    has not already been imported.

    Args:
        importer: Importer utility handed to OpusImport.
        ad_reader: Optional AD reader for enriching employee data.
        force: Initialize the run-db if it is missing; must not be used
            when the run-db already exists.

    Raises:
        RunDBInitException: Run-db is missing and force was not given.
        RedundantForceException: Force was given although the run-db exists.
    """
    SETTINGS = load_settings()
    dumps = opus_helpers.read_available_dumps()

    run_db = Path(SETTINGS['integrations.opus.import.run_db'])
    if not run_db.is_file():
        logger.error('Local base not correctly initialized')
        if not force:
            raise RunDBInitException('Local base not correctly initialized')
        else:
            opus_helpers.initialize_db(run_db)
        # Fresh db: start from the oldest dump. min() avoids sorting all keys.
        xml_date = min(dumps.keys())
    else:
        if force:
            raise RedundantForceException('Used force on existing db')
        xml_date = opus_helpers.next_xml_file(run_db, dumps)

    xml_file = dumps[xml_date]
    # Mark the file as in-progress before starting the import.
    opus_helpers.local_db_insert((xml_date, 'Running since {}'))

    employee_mapping = opus_helpers.read_cpr_mapping()

    opus_importer = OpusImport(importer,
                               org_name=SETTINGS['municipality.name'],
                               xml_data=str(xml_file),
                               ad_reader=ad_reader,
                               import_first=True,
                               employee_mapping=employee_mapping)
    logger.info('Start import')
    opus_importer.insert_org_units()
    opus_importer.insert_employees()
    opus_importer.add_addresses_to_employees()
    opus_importer.importer.import_all()
    logger.info('Ended import')

    opus_helpers.local_db_insert((xml_date, 'Import ended: {}'))