Code example #1
def find_cancelled(dry_run):
    """Find cancelled data and delete it"""
    settings = load_settings()
    filter_ids = settings.get("integrations.opus.units.filter_ids", [])
    dumps = opus_helpers.read_available_dumps()
    for date1, date2 in tqdm(pairwise(sorted(dumps)),
                             total=len(dumps) - 1,
                             unit="file-pairs"):
        file_diffs = opus_helpers.file_diff(dumps[date1],
                                            dumps[date2],
                                            disable_tqdm=True)
        units = file_diffs["units"]
        employees, _ = opus_helpers.split_employees_leaves(
            file_diffs["cancelled_employees"])
        # set enddate to filedate for cancelled employees
        employees = opus_helpers.include_cancelled(dumps[date2], [],
                                                   list(employees))
        if units or employees:
            msg = f"Found {len(units)} units and {len(employees)} employees wich were cancelled on {date2}"
            if dry_run:
                click.echo(msg + "(Dry-run)")
                continue
            click.echo(msg + ". Terminating now.")
            diff = OpusDiffImport(date2, None, {}, filter_ids=filter_ids)
            for employee in employees:
                # Updates each employee with their leave-date overwritten, so that their engagement will be terminated.
                try:
                    diff.update_employee(employee)
                except UnknownOpusUnit:
                    # The unit might be terminated by now, since we're looking through older files. No problem, carry on.
                    continue
            # Handle cancelled units as filtered, which means they are terminated from the date of the file.
            mo_units = diff.find_unterminated_filtered_units(units)
            diff.handle_filtered_units(mo_units)
Code example #2
def start_opus_diff(ad_reader=None):
    """
    Start an opus update, use the oldest available dump that has not
    already been imported.
    """
    SETTINGS = load_settings()

    dumps = opus_helpers.read_available_dumps()
    run_db = Path(SETTINGS["integrations.opus.import.run_db"])
    filter_ids = SETTINGS.get("integrations.opus.units.filter_ids", [])
    skip_employees = SETTINGS.get("integrations.opus.skip_employees", False)

    if not run_db.is_file():
        logger.error("Local base not correctly initialized")
        raise RunDBInitException("Local base not correctly initialized")
    xml_date, latest_date = opus_helpers.next_xml_file(run_db, dumps)

    while xml_date:
        import_one(ad_reader,
                   xml_date,
                   latest_date,
                   dumps,
                   filter_ids,
                   opus_id=None)
        # Check if there are more files to import
        xml_date, latest_date = opus_helpers.next_xml_file(run_db, dumps)
    logger.info("Ended update")
Code example #3
def cli(**args):
    SETTINGS = load_settings()

    ad_reader = ADParameterReader()

    if args['update']:
        try:
            start_opus_diff(ad_reader=ad_reader)
        except RunDBInitException:
            print('RunDB not initialized')

    if args['import']:
        importer = ImportHelper(
            create_defaults=True,
            mox_base=SETTINGS['mox.base'],
            mora_base=SETTINGS['mora.base'],
            store_integration_data=False,
            seperate_names=True,
            demand_consistent_uuids=False
        )
        try:
            start_opus_import(importer, ad_reader=ad_reader, force=True)
        except RunDBInitException:
            print('RunDB not initialized')

    if args['update_single_user']:
        employment_number = args['update_single_user']
        days = args['days']
        update_employee(employment_number, days)
Code example #4
def json_config_settings_source(settings: BaseSettings) -> Dict[str, Any]:
    """
    Read config from settings.json.

    Reads all keys starting with 'os2sync.' and a few common settings into Settings.
    """
    try:
        all_settings = load_settings()
    except FileNotFoundError:
        # No settings file found; fall back to environment variables
        return {}
    # Read os2sync-specific settings
    os2sync_settings = {
        key: value
        for key, value in all_settings.items()
        if key.startswith("os2sync")
    }

    # Replace dots with underscores, e.g. os2sync.ignored.unit_levels -> os2sync_ignored_unit_levels
    final_settings = {
        key.replace(".", "_"): val for key, val in os2sync_settings.items()
    }

    # Add needed common settings
    municipality = all_settings.get("municipality.cvr")
    if municipality:
        final_settings["municipality"] = municipality
    mora_base = all_settings.get("mora.base")
    if mora_base:
        final_settings["mora_base"] = mora_base

    return final_settings
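
A hedged sketch of the resulting mapping (the settings.json content below is hypothetical):

# Given a settings.json such as
#   {"os2sync.ignored.unit_levels": ["..."], "municipality.cvr": "12345678", "mora.base": "http://localhost:5000"}
# json_config_settings_source would return
#   {"os2sync_ignored_unit_levels": ["..."], "municipality": "12345678", "mora_base": "http://localhost:5000"}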
Code example #5
def main() -> None:
    # Settings
    settings = load_settings()
    host = settings["mora.base"]
    org = settings["reports.org_name"]
    pay_org = settings.get("reports.pay_org_name", org)
    outdir = Path(settings["mora.folder.query_export"])

    # Reports
    reports = CustomerReports(host, org)
    sd_reports = CustomerReports(host, pay_org)

    report_to_csv(reports.employees(), outdir / "Alle Stillinger OS2mo.csv")
    report_to_csv(reports.managers(),
                  outdir / "Alle Lederfunktioner OS2mo.csv")
    report_to_csv(
        reports.organisation_employees(),
        outdir / "Organisationsstruktur og Stillinger OS2mo.csv",
    )
    report_to_csv(reports.organisation_units(),
                  outdir / "Organisationsenheder OS2mo.csv")
    report_to_csv(
        sd_reports.organisation_overview(),
        outdir / "SDLønorganisation og P-Nummer OS2mo.csv",
    )
Code example #6
def create_new_root_and_it(settings=None) -> None:
    """Setup all necessary classes etc to perform opus-import."""
    logger.warning(
        "'create_new_root_and_it' is deprecated. Use os2mo-init instead.")
    settings = settings or load_settings()
    mox_base = settings.get("mox.base", "http://localhost:8080")
    mora_base = settings.get("mora.base", "http://localhost:5000")

    # Init
    os2mo = ImportHelper(
        create_defaults=True,
        store_integration_data=True,
        mox_base=mox_base,
        mora_base=mora_base,
    )

    # The Organisation class is the main entry point.
    # It exposes the related subclasses, such as:
    # Facet, Klasse, Itsystem, OrganisationUnit, Employee
    main_name = settings.get("municipality.name", "Magenta ApS")
    main_uuid = opus_helpers.generate_uuid(main_name)
    os2mo.add_organisation(
        identifier=main_name,
        uuid=str(main_uuid),
        user_key=main_name,
        municipality_code=settings.get("municipality.code", 1234),
    )
    os2mo.new_itsystem(identifier=constants.Opus_it_system,
                       system_name=constants.Opus_it_system)
    os2mo.new_itsystem(identifier=constants.AD_it_system,
                       system_name=constants.AD_it_system)

    # Perform setup of the root unit and IT systems.
    os2mo.import_all()
Code example #7
    def __init__(self,
                 xml_date,
                 ad_reader,
                 employee_mapping=None,
                 filter_ids=None):
        logger.info("Opus diff importer __init__ started")
        self.xml_date = xml_date
        self.ad_reader = ad_reader
        self.employee_forced_uuids = (
            employee_mapping or opus_helpers.read_cpr_mapping())

        self.settings = load_settings()
        self.filter_ids = filter_ids or self.settings.get(
            "integrations.opus.units.filter_ids", [])

        self.session = Session()
        self.helper = self._get_mora_helper(
            hostname=self.settings["mora.base"], use_cache=False)
        try:
            self.org_uuid = self.helper.read_organisation()
        except KeyError:
            msg = "No root organisation in MO"
            logger.warning(msg)
            print(msg)
            return
        except requests.exceptions.RequestException as e:
            logger.error(e)
            print(e)
            exit()
        self.updater = OPUSPrimaryEngagementUpdater()

        it_systems = self.helper.read_it_systems()
        self.it_systems = dict(map(itemgetter("name", "uuid"), it_systems))

        logger.info("__init__ done, now ready for import")
Code example #8
def generate_connection_url(
    database_function: DatabaseFunction,
    force_sqlite: bool = False,
    settings: Optional[Dict] = None,
) -> str:
    """Utilize settings or settings from disk to derive database connection url."""
    settings = settings or load_settings()

    db_type, db_name = generate_db_type_and_name(database_function,
                                                 force_sqlite, settings)
    user = get_db_username(database_function, settings)
    db_host = get_db_host(database_function, settings)
    pw_raw = get_db_password(database_function, settings)
    pw_raw = pw_raw or ""
    pw = urllib.parse.quote_plus(pw_raw)

    if db_type == "Memory":
        return "sqlite://"
    if db_type == "SQLite":
        return "sqlite:///{}.db".format(db_name)
    if db_type == "MS-SQL":
        return "mssql+pymssql://{}:{}@{}/{}".format(user, pw, db_host, db_name)
    if db_type == "MS-SQL-ODBC":
        quoted = urllib.parse.quote_plus(
            ("DRIVER=libtdsodbc.so;Server={};Database={};UID={};" +
             "PWD={};TDS_Version=8.0;Port=1433;").format(
                 db_host, db_name, user, pw_raw))
        return "mssql+pyodbc:///?odbc_connect={}".format(quoted)
    if db_type == "Mysql":
        return "mysql+mysqldb://{}:{}@{}/{}".format(user, pw, db_host, db_name)
    if db_type == "Postgres":
        return "postgresql://{}:{}@{}/{}".format(user, pw, db_host, db_name)
    raise Exception("Unknown DB type")
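
For illustration, a hedged sketch of a call; the settings key is borrowed from code example #30, and the DatabaseFunction.ActualState member is assumed here:

url = generate_connection_url(
    DatabaseFunction.ActualState,  # assumed member, for illustration only
    force_sqlite=True,
    settings={"exporters.actual_state.db_name": "ActualState"},
)
# With the SQLite db type this yields something like "sqlite:///ActualState.db"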
Code example #9
def import_opus(
    ad_reader=None,
    import_all: bool = False,
    import_last=False,
    opus_id=None,
    rundb_write: bool = True,
) -> None:
    """Import one or all files from opus even if no previous files have been imported"""
    settings = load_settings()
    filter_ids = settings.get("integrations.opus.units.filter_ids", [])
    skip_employees = settings.get("integrations.opus.skip_employees", False)
    dumps = opus_helpers.read_available_dumps()

    all_dates = dumps.keys()
    # Default is read first file only
    export_dates = [min(all_dates)]
    if import_last:
        export_dates = [max(all_dates)]
    elif import_all:
        export_dates = sorted(all_dates)

    export_dates = prepend(None, export_dates)
    date_pairs = pairwise(export_dates)
    for date1, date2 in date_pairs:
        import_one(
            ad_reader,
            date2,
            date1,
            dumps,
            filter_ids,
            opus_id=opus_id,
            rundb_write=rundb_write,
        )
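
A brief usage sketch, following the flags in the signature (rundb_write=False presumably skips writing to the run-db):

import_opus()                   # default: import only the oldest available dump
import_opus(import_last=True)   # import only the newest dump
import_opus(import_all=True, rundb_write=False)  # replay every dump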
Code example #10
def prepare_re_import(
    settings: Optional[Dict] = None,
    opus_uuid: Optional[str] = None,
    truncate: Optional[bool] = None,
    connections: int = 4,
) -> None:
    """Create a MO setup with necessary classes.

    Clear MO database, or only the opus-unit with the given uuid.
    Ensure necessary classes exists.
    """
    settings = settings or load_settings()
    mox_base = settings.get("mox.base")
    if truncate:
        truncate_db(mox_base)
        # Create root org and it systems
        create_new_root_and_it()
    elif opus_uuid:
        session = requests.session()
        dub = find_duplicates_classes(session=session, mox_base=mox_base)
        if dub:
            raise Exception(
                "There are duplicate classes, remove them with tools/data_fixers/remove_duplicate_classes.py --delete"
            )
        subtreedeleter_helper(
            opus_uuid,
            delete_functions=True,
            keep_functions=["KLE", "Relateret Enhed"],
            connections=connections,
        )
    ensure_default_classes()
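
A hedged usage sketch (the uuid is a placeholder):

prepare_re_import(truncate=True)  # wipe the MO database, then recreate root org and IT systems
prepare_re_import(opus_uuid="0a1b2c3d-0000-0000-0000-000000000000")  # clear only one opus subtree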
Code example #11
def get_opus_filereader(
        settings: Optional[Dict] = None) -> OpusReaderInterface:
    """Get the correct opus reader interface based on values from settings."""
    settings = settings or load_settings()
    if settings.get("integrations.opus.gcloud_bucket_name"):
        return GcloudOpusReader(settings)
    if settings.get("integrations.opus.smb_host"):
        return SMBOpusReader(settings)
    return LocalOpusReader(settings)
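
A minimal sketch of how the dispatch can be steered by the settings passed in (the host value is hypothetical):

reader = get_opus_filereader({"integrations.opus.smb_host": "smb.example.org"})
# -> SMBOpusReader; with neither key present, a LocalOpusReader is returned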
Code example #12
def main() -> None:
    settings = load_settings()
    query_path = settings["mora.folder.query_export"]
    run_report(
        list_org_units,
        "Organsiationsenheder",
        "Svendborg Kommune",
        query_path + "/Organisationsenheder.xlsx",
    )
Code example #13
def terminate_filtered_units(dry_run):
    settings = load_settings()
    filter_ids = settings.get("integrations.opus.units.filter_ids", [])
    latest_date, opus_dump = opus_helpers.get_latest_dump()
    file_diffs = opus_helpers.file_diff(None, opus_dump)
    filtered_units, _ = opus_helpers.filter_units(file_diffs["units"],
                                                  filter_ids)
    diff = OpusDiffImport(latest_date, ad_reader=None, employee_mapping={})
    mo_units = list(diff.find_unterminated_filtered_units(filtered_units))
    diff.handle_filtered_units(mo_units, dry_run=dry_run)
    return mo_units
Code example #14
def get_engine(dbpath=None):
    if dbpath is None:
        settings = load_settings()
        dbpath = settings.get("lc-for-jobs.actual_db_name",
                              lc_for_jobs_actual_db_name)

    dbpath = str(dbpath)
    if dbpath != ":memory:":
        dbpath += ".db"
    db_string = "sqlite:///{}".format(dbpath)
    return create_engine(db_string)
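
Usage follows directly from the code above:

engine = get_engine()            # resolves "lc-for-jobs.actual_db_name" from settings and appends ".db"
engine = get_engine(":memory:")  # special-cased: in-memory SQLite, no ".db" suffix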
Code example #15
    def __init__(self, settings=None, dry_run=False):
        self.settings = settings or load_settings()
        self.dry_run = dry_run

        self.helper = self._get_mora_helper(self.settings["mora.base"])

        # List of engagement filters to apply to check / recalculate respectively
        # NOTE: Should be overridden by subclasses
        self.check_filters = []
        self.calculate_filters = []

        self.primary_types, self.primary = self._find_primary_types()
Code example #16
def generate_engine_settings(
    database_function: DatabaseFunction,
    force_sqlite: bool = False,
    settings: Optional[Dict] = None,
) -> Dict[str, Any]:
    settings = settings or load_settings()

    db_type = get_db_type(database_function, force_sqlite, settings)
    engine_settings: Dict = {"pool_pre_ping": True}
    if db_type == "Mysql":
        engine_settings.update({"pool_recycle": 3600})
    return engine_settings
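
Presumably these settings feed SQLAlchemy's create_engine together with the URL from code example #8; a hedged sketch:

url = generate_connection_url(database_function, settings=settings)
engine_settings = generate_engine_settings(database_function, settings=settings)
engine = create_engine(url, **engine_settings)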
Code example #17
async def subtreedeleter_helper(
    org_unit_uuid: str,
    delete_functions: bool = False,
    keep_functions: Optional[List[str]] = None,
    connections: int = 4,
) -> None:
    keep_functions = keep_functions or []
    settings = load_settings()
    api_token = settings.get("crontab.SAML_TOKEN")
    timeout = aiohttp.ClientTimeout(total=None)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        session.headers.update({"session": api_token})
        deleter = SubtreeDeleter(session, org_unit_uuid, connections=connections)
        await deleter.run(
            org_unit_uuid, delete_functions, keep_functions=keep_functions
        )
Code example #18
def read_settings(top_settings=None, index=0):
    if top_settings is None:
        top_settings = load_settings()

    settings = {}
    settings["global"] = _read_global_settings(top_settings)
    settings["primary"] = _read_primary_ad_settings(top_settings, index)
    # TODO: better check for AD-writer.
    if "integrations.ad.write.level2orgunit_field" in top_settings:
        settings["primary_write"] = _read_primary_write_information(
            top_settings)
    return settings
Code example #19
    def __init__(self):
        super().__init__()
        self.settings = load_settings()
        self._check_ad_uuid_field_is_configured()
        self.helper = self._get_mora_helper()
        self.org_uuid = self.helper.read_organisation()
        self.reader = ADParameterReader()
        self.stats = {
            "attempted_users": 0,
            "user_not_in_mo": 0,
            "already_ok": 0,
            "updated": 0,
        }
Code example #20
    def __init__(self):

        self.settings = load_settings()
        self.root_ou_uuid = self.settings["integrations.ad.import_ou.mo_unit_uuid"]
        self.helper = MoraHelper(hostname=self.settings["mora.base"], use_cache=False)
        self.org_uuid = self.helper.read_organisation()

        self.ad_reader = ADParameterReader()
        self.ad_reader.cache_all(print_progress=True)

        its = self.helper.read_it_systems()
        AD_its = only(filter(lambda x: x["name"] == constants.AD_it_system, its))
        self.AD_it_system_uuid = AD_its["uuid"]
Code example #21
    def __init__(self, all_settings=None):
        logger.info("AD Sync Started")

        self.settings = all_settings
        if self.settings is None:
            self.settings = load_settings()

        self.helper = self._setup_mora_helper()
        self.org = self.helper.read_organisation()

        # Possibly get IT-system directly from LoRa for better performance.
        self.lc = self._setup_lora_cache()

        self._setup_visibilities()
Code example #22
def cli(**args):
    SETTINGS = load_settings()

    ad_reader = ADParameterReader()

    if args['update']:
        try:
            start_opus_diff(ad_reader=ad_reader)
        except RunDBInitException:
            print('RunDB not initialized')

    if args['import']:
        importer = ImportHelper(
            create_defaults=True,
            mox_base=SETTINGS['mox.base'],
            mora_base=SETTINGS['mora.base'],
            store_integration_data=False,
            seperate_names=True,
            demand_consistent_uuids=False
        )

        med_name = 'MED Organisation'
        importer.add_klasse(
            identifier=med_name,
            facet_type_ref='org_unit_type',
            user_key=med_name,
            scope='TEXT',
            title=med_name
        )

        importer.add_organisation_unit(
            identifier=med_name,
            name=med_name,
            user_key=med_name,
            type_ref=med_name,
            date_from='1930-01-01',
            date_to=None,
            parent_ref=None
        )

        try:
            start_opus_import(importer, ad_reader=ad_reader, force=True)
        except RunDBInitException:
            print('RunDB not initialized')

    if args['update_single_user']:
        employment_number = args['update_single_user']
        days = args['days']
        update_employee(employment_number, days)
Code example #23
def main() -> None:
    # Settings
    settings = load_settings()
    host = settings["mora.base"]
    org = settings["reports.org_name"]
    outdir = Path(settings["mora.folder.query_export"])

    # Survey
    survey = Survey(host, org)

    with pd.ExcelWriter(outdir / "Datasæt til trivselsundersøgelse.xlsx") as writer:
        survey.org_unit_overview().to_excel(
            writer, sheet_name="Organisation", index=False
        )
        survey.employees().to_excel(writer, sheet_name="Medarbejdere", index=False)
Code example #24
def terminate_filtered_employees(dry_run):
    settings = load_settings()
    filter_ids = settings.get("integrations.opus.units.filter_ids", [])
    mox_base = settings.get("mox.base", "localhost:8080")
    latest_date, opus_dump = opus_helpers.get_latest_dump()
    file_diffs = opus_helpers.file_diff(None, opus_dump)
    # Get every id of filtered units
    all_ids = opus_helpers.find_all_filtered_ids(opus_dump, filter_ids)
    # Find all engagements attached to a filtered unit in the latest opus file
    filtered_employees = list(
        filter(lambda emp: emp.get("orgUnit") in all_ids,
               file_diffs["employees"]))
    diff = OpusDiffImport(latest_date, ad_reader=None, employee_mapping={})
    # Check if any engagements exist that should have been filtered
    eng_info = [
        diff._find_engagement(e["@id"], "Engagement", present=False)
        for e in filtered_employees
    ]
    eng_info = list(filter(lambda x: x is not None, eng_info))
    if dry_run:
        print(
            f"There are {len(eng_info)} engagements that should have been terminated."
        )
        return eng_info

    for eng_uuid in tqdm(eng_info, desc="Deleting filtered engagements"):
        r = httpx.delete(
            f"{mox_base}/organisation/organisationfunktion/{eng_uuid}")
        r.raise_for_status()

    # Check users in MO - if no engagements are left, delete the user and all details attached to it.
    user_cprs = set(map(opus_helpers.read_cpr, filtered_employees))
    users = [diff.helper.read_user(user_cpr=cpr) for cpr in user_cprs]
    users = filter(lambda x: x, users)
    user_uuids = set(map(itemgetter("uuid"), users))

    user_engagements = {
        user_uuid: diff.helper.read_user_engagements(user_uuid, read_all=True)
        for user_uuid in user_uuids
    }

    delete_users = dict(filter(lambda x: x[1] == [], user_engagements.items()))
    for user_uuid in tqdm(delete_users,
                          desc="Deleting users with no other engagements"):
        delete_object_and_orgfuncs(uuid=user_uuid,
                                   mox_base=mox_base,
                                   object_type="bruger",
                                   dry_run=dry_run)
Code example #25
def sd_lookup_settings():
    settings = load_settings()

    institution_identifier = settings["integrations.SD_Lon.institution_identifier"]
    if not institution_identifier:
        raise ValueError("Missing setting, institution_identifier")

    sd_user = settings["integrations.SD_Lon.sd_user"]
    if not sd_user:
        raise ValueError("Missing setting, sd_user")

    sd_password = settings["integrations.SD_Lon.sd_password"]
    if not sd_password:
        raise ValueError("Missing setting, sd_password")

    return institution_identifier, sd_user, sd_password
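
The return value unpacks directly at the call site:

institution_identifier, sd_user, sd_password = sd_lookup_settings()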
Code example #26
def json_config_settings_source(settings: BaseSettings) -> Dict[str, Any]:
    settings_json = load_settings() or {}
    prefix = "integrations.aarhus_los"

    def _get_setting_value(key: str, default: Any = None):
        return settings_json.get(f"{prefix}.{key}", default)

    return dict(
        ftp_url=_get_setting_value("ftp_url", "ftp.aarhuskommune.dk"),
        ftp_user=_get_setting_value("ftp_user"),
        ftp_pass=_get_setting_value("ftp_pass"),
        ftp_folder=_get_setting_value("ftp_folder", "TEST"),
        import_state_file=_get_setting_value("state_file"),
        import_csv_folder=_get_setting_value("import_csv_folder"),
        azid_it_system_uuid=_get_setting_value("azid_it_system_uuid",
                                               uuids.AZID_SYSTEM),
    )
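
A hedged sketch of the resulting mapping (the settings.json content is hypothetical):

# Given a settings.json containing
#   {"integrations.aarhus_los.ftp_user": "alice"}
# this source yields
#   {"ftp_url": "ftp.aarhuskommune.dk", "ftp_user": "alice", "ftp_folder": "TEST", ...}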
Code example #27
def main(filename):
    logger.info("reading top_settings")
    top_settings = load_settings()
    SFTP_USER = top_settings["emus.sftp_user"]
    SFTP_HOST = top_settings["emus.sftp_host"]
    SFTP_KEY_PATH = top_settings["emus.sftp_key_path"]
    SFTP_KEY_PASSPHRASE = top_settings["emus.sftp_key_passphrase"]
    MUSSKEMA_RECIPIENT = top_settings["emus.recipient"]
    QUERY_EXPORT_DIR = top_settings["mora.folder.query_export"]
    EMUS_FILENAME = top_settings.get("emus.outfile_name", "emus_filename.xml")

    generated_file = io.StringIO()
    logger.info("encoding file for transfer")
    with open(filename, "r", encoding="utf-8") as source_file:
        generated_file.write(source_file.read())
    filetosend = io.BytesIO(generated_file.getvalue().encode("utf-8"))

    logger.info("connecting sftp")
    try:
        sp = SpSftp({
            "user": SFTP_USER,
            "host": SFTP_HOST,
            "ssh_key_path": SFTP_KEY_PATH,
            "ssh_key_passphrase": SFTP_KEY_PASSPHRASE,
        })
    except Exception:
        logger.exception("error in sftp connection")
        raise
    sp.connect()

    output_filename = datetime.datetime.now().strftime(
        "%Y%m%d_%H%M%S_os2mo2musskema.xml")
    logger.info("sending %s to %s", output_filename, MUSSKEMA_RECIPIENT)
    sp.send(filetosend, output_filename, MUSSKEMA_RECIPIENT)
    sp.disconnect()

    # Also write the file that was sent into the query export dir
    if QUERY_EXPORT_DIR and EMUS_FILENAME:
        filepath = os.path.join(QUERY_EXPORT_DIR,
                                os.path.basename(EMUS_FILENAME))
        with open(filepath, "w", encoding="utf-8") as f:
            f.write(generated_file.getvalue())

    logger.info("program ended")
Code example #28
def cli(**args):
    """
    Command line interface.
    """
    logger.info("Command line args: %r", args)

    settings = load_settings()

    sql_export = SqlExport(
        force_sqlite=args["force_sqlite"],
        historic=args["historic"],
        settings=settings,
    )
    sql_export.perform_export(
        resolve_dar=args["resolve_dar"],
        use_pickle=args["read_from_cache"],
    )
    sql_export.swap_tables()
    logger.info("*SQL export ended*")
Code example #29
async def ensure_class_in_lora(facet: str, klasse: str,
                               **kwargs) -> Tuple[str, bool]:
    """Ensures class exists in lora.

    Returns the uuid of the existing class or creates it and returns uuid of the new class.
    Uses mox_utils ensure_class_exists but caches results, so subsequent calls with same parameters will return the correct uuid without any calls to lora.
    Returns a tuple contaning a uuid of the class and a boolean of wether it was created or not.
    Remember that the 'created' boolean is also cached so it will only show if it was created the first time this was called.
    Example:
        uuid, _ = ensure_class_in_lora('org_unit_type', 'Enhed')
        uuid, _ = ensure_class_in_lora('employee_address_type', 'Email', scope = 'EMAIL')
    """
    settings = load_settings()
    mox_base = settings.get("mox.base")
    response = await ensure_class_exists_helper(bvn=klasse,
                                                facet_bvn=facet,
                                                mox_base=mox_base,
                                                **kwargs)
    return response
Code example #30
def sql_export(resolve_dar):
    # Load settings file
    org_settings = load_settings()

    # Override settings
    settings = {
        "exporters.actual_state.type": "SQLite",
        "exporters.actual_state_historic.type": "SQLite",
        "exporters.actual_state.db_name":
            org_settings.get("lc-for-jobs.actual_db_name", "ActualState"),
        "exporters.actual_state.manager_responsibility_class":
            org_settings["exporters.actual_state.manager_responsibility_class"],
    }

    sql_export = SqlExport(force_sqlite=True,
                           historic=False,
                           settings=settings)
    sql_export.perform_export(resolve_dar=resolve_dar)