Example #1
    def __init__(self, system_name, end_marker, mox_base, mora_base,
                 demand_consistent_uuids, store_integration_data=False,
                 dry_run=False):

        # Import Params
        self.demand_consistent_uuids = demand_consistent_uuids
        self.store_integration_data = store_integration_data
        if store_integration_data:
            self.ia = IntegrationAbstraction(mox_base, system_name, end_marker)

        # Service endpoint base
        self.mox_base = mox_base
        self.mora_base = mora_base

        # Session
        self.mh = MoraHelper(self.mora_base, use_cache=False)
        self.session = Session()

        # Placeholder for UUID import
        self.organisation_uuid = None

        # Existing UUIDS
        # TODO: More elegant version of this please
        self.existing_uuids = []

        # UUID map
        self.inserted_organisation = {}
        self.inserted_facet_map = {}
        self.inserted_klasse_map = {}
        self.inserted_itsystem_map = {}
        self.inserted_org_unit_map = {}
        self.inserted_employee_map = {}

        # Deprecated
        self.dry_run = dry_run
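
Note: the snippets on this page omit their imports. Judging from the names used, something like the following is assumed; the module paths are a guess based on the usual OS2MO helper packages, not taken from the snippets themselves.

from requests import Session
from os2mo_helpers.mora_helpers import MoraHelper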
Example #2
        def get_manager(org_unit_uuid, mh: MoraHelper):

            present = mh._mo_lookup(org_unit_uuid,
                                    "ou/{}/details/manager?validity=present")
            future = mh._mo_lookup(org_unit_uuid,
                                   "ou/{}/details/manager?validity=future")
            managers = present + future

            if not managers:
                return None
            if len(managers) > 1:
                logger.warning("More than one manager exists for {}".format(
                    org_unit_uuid))
            manager = managers[0]

            person = manager.get("person")
            if not person:
                return None

            ad_guid, sam_account_name = get_employee_from_map(
                person["uuid"], mapping_file_path)
            # Only import users who are in AD
            if not ad_guid or not sam_account_name:
                return {}

            return {"uuid": person["uuid"], "userId": sam_account_name}
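
Note: get_employee_from_map and mapping_file_path are defined outside this snippet. A minimal sketch of what the helper is assumed to do, reading the cpr_mo_ad_map.csv mapping written in Example #8 below; the column names and the lookup key are assumptions.

import csv

def get_employee_from_map(mo_uuid, mapping_file_path):
    """Return (ad_guid, sam_account_name) for a MO person UUID, or (None, None)."""
    with open(mapping_file_path) as mapping_file:
        for row in csv.DictReader(mapping_file):
            if row["mo_uuid"] == mo_uuid:
                return row["ad_guid"], row["sam_account_name"]
    return None, None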
Example #3
def cli(mox_base, mora_base, delete, full_history, opus_id, use_ad, dry_run):
    """Reimport object from opus with given opus-ID to MO
    Optionally deletes the object and all related orgfuncs directly from Lora.
    Defaults to reading latest file only, but supports reading full history
    """
    helper = MoraHelper(hostname=mora_base)
    object_type, obj = find_type(opus_id, full_history)
    if object_type == "bruger":
        cpr = opus_helpers.read_cpr(obj)
        user = helper.read_user(user_cpr=cpr)
        uuid = user["uuid"] if user else None
    else:
        uuid = opus_helpers.generate_uuid(obj["@id"])

    if delete and uuid and not dry_run:
        delete_object_and_orgfuncs(uuid, mox_base, object_type)
    if dry_run:
        click.echo(
            f"Dry-run: {'Delete and reimport' if delete else 'Reimport'} '{object_type}' with {uuid=}"
        )
    else:
        AD = ad_reader.ADParameterReader() if use_ad else None
        import_opus(
            ad_reader=AD,
            import_all=full_history,
            import_last=not full_history,
            opus_id=opus_id,
            rundb_write=False,
        )
Example #4
def cli(mox_base: str, mora_base: str, dry_run: bool):

    helper = MoraHelper(hostname=mora_base)

    org_unit_types, _ = helper.read_classes_in_facet("org_unit_type")
    org_unit_types = sorted(org_unit_types, key=lambda x: x["name"])
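    # Note: itertools.groupby only groups consecutive items, which is why the
    # list is sorted by the same key right above.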
    groups = groupby(org_unit_types, key=lambda x: x["name"])

    split_classes = map(split, groups)
    split_classes = filter(is_duplicate, split_classes)
    if dry_run:
        click.echo(
            f"Dry-run: Found {len(list(split_classes))} duplicated classes to fix."
        )
        return

    session = requests.session()

    for no_scope, scope in tqdm(split_classes, desc="Moving relations to one class"):
        old_uuid = no_scope["uuid"]
        move_class_helper(
            old_uuid=old_uuid,
            new_uuid=scope["uuid"],
            copy=False,
            mox_base=mox_base,
            relation_type="organisation/organisationenhed",
        )

        delete_class(session=session, base=mox_base, uuid=old_uuid)
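
Note: split and is_duplicate are defined outside this snippet. A rough, hypothetical sketch of what they are assumed to do, based on how their results are unpacked in the loop: each group of equally named org_unit_type classes is split into the class without a scope and the class with one, and only groups containing both are kept.

def split(group):
    """Split one (name, classes) group into (class_without_scope, class_with_scope)."""
    _, classes = group
    classes = list(classes)
    without_scope = next((c for c in classes if not c.get("scope")), None)
    with_scope = next((c for c in classes if c.get("scope")), None)
    return without_scope, with_scope

def is_duplicate(pair):
    """Keep only groups where both variants actually exist."""
    without_scope, with_scope = pair
    return without_scope is not None and with_scope is not None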
Example #5
def full_import(org_only: bool, mora_base: str, mox_base: str):
    """Tool to do an initial full import."""
    # Check connection to MO before we fire requests against SD
    mh = MoraHelper(mora_base)
    if not mh.check_connection():
        raise click.ClickException("No MO reply, aborting.")

    importer = ImportHelper(
        create_defaults=True,
        mox_base=mox_base,
        mora_base=mora_base,
        store_integration_data=False,
        seperate_names=True,
    )
    sd = SdImport(importer,
                  settings=get_importer_settings(),
                  org_only=org_only,
                  ad_info=None)

    sd.create_ou_tree(create_orphan_container=False,
                      sub_tree=None,
                      super_unit=None)
    if not org_only:
        sd.create_employees()

    importer.import_all()
    print("IMPORT DONE")
Example #6
def fixup_single_user(
    mora_base: AnyHttpUrl,
    person_uuid: UUID,
    engagement_uuid: UUID,
    dry_run: bool = False,
) -> Tuple[Dict[str, Any], Any]:
    """Fixup the end-date of a single engagement for a single user."""
    helper = MoraHelper(hostname=mora_base, use_cache=False)
    # Fetch all present engagements for the user
    engagements: Iterator[Dict[str, Any]] = helper._mo_lookup(
        person_uuid,
        "e/{}/details/engagement",
        validity="present",
        only_primary=False,
        use_cache=False,
        calculate_primary=False,
    )
    # Find the engagement we are looking for in the list
    engagements = filter(
        lambda engagement: engagement["uuid"] == str(engagement_uuid),
        engagements)
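    # one() (presumably from more_itertools) returns the single remaining
    # engagement and raises if there is not exactly one match.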
    engagement: Dict[str, Any] = one(engagements)

    # Construct data-part of our payload using current data.
    uuid_keys = [
        "engagement_type",
        "job_function",
        "org_unit",
        "person",
        "primary",
    ]
    direct_keys = ["extension_" + str(i) for i in range(1, 11)] + [
        "fraction",
        "is_primary",
        "user_key",
        "uuid",
    ]
    data: Dict[str, Any] = {}
    data.update({key: {"uuid": engagement[key]["uuid"]} for key in uuid_keys})
    data.update({key: engagement[key] for key in direct_keys})
    data.update(
        {"validity": {
            "from": engagement["validity"]["from"],
            "to": None,
        }})

    # Construct entire payload
    payload: Dict[str, Any] = {
        "type": "engagement",
        "uuid": str(engagement_uuid),
        "data": data,
        "person": {
            "uuid": str(person_uuid, )
        },
    }
    if dry_run:
        return payload, AttrDict({"status_code": 200, "text": "Dry-run"})
    response = helper._mo_post("details/edit", payload)
    return payload, response
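
Note: AttrDict is only used here to fake a requests-style response in dry-run mode. A minimal sketch, assuming it is simply a dict that also exposes its keys as attributes so that .status_code and .text work.

class AttrDict(dict):
    """Dict whose keys can also be read as attributes."""

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError as err:
            raise AttributeError(name) from err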
Example #7
    def __init__(self, enddate_field, uuid_field, settings=None):
        super().__init__(all_settings=settings)
        self.helper = MoraHelper(
            hostname=self.all_settings["global"]["mora.base"], use_cache=False
        )
        self.enddate_field = enddate_field
        self.uuid_field = uuid_field
        self.cpr_field = self.all_settings["primary"]["cpr_field"]
Example #8
def main(mora_base: str, use_ad: bool, output_file_path: str) -> None:
    mh = MoraHelper(hostname=mora_base, export_ansi=True)

    employees: List[ExportUser] = create_mapping(mh, use_ad)
    employee_dicts: List[Dict] = list(map(methodcaller("dict"), employees))

    fields = ["cpr", "mo_uuid", "ad_guid", "sam_account_name"]
    mh._write_csv(fields, employee_dicts, output_file_path)
Example #9
    def __init__(self, session):
        self.session = session
        self.top_per_unit = {}
        self.helper = MoraHelper(hostname=settings["mora.base"],
                                 use_cache=False)
        self.unit_types, self.unit_type_facet = self._find_classes(
            "org_unit_type")
        self.unit_levels, self.unit_level_facet = self._find_classes(
            "org_unit_level")
Example #10
    def setUp(self):
        super().setUp()
        util.amqp.publish_message = lambda a, b, c, d, e: None
        self.mh = MoraHelper()
        if not getattr(requests, "_orgget", False):
            requests._orgget = requests.get
            requests.get = self.get
        self._test_data_result = str(
            testdata /
            (pathlib.Path(__file__).stem + "_" + self._testMethodName + "_result.json"))
Example #11
    def _read_classes(self):
        """Read engagement_types and job_function types from MO."""
        mora_base = self.settings.mora_base
        helper = MoraHelper(hostname=mora_base, use_cache=False)

        self.engagement_types = helper.read_classes_in_facet("engagement_type")
        if self.update_job_functions:
            self.job_function_types = helper.read_classes_in_facet(
                "engagement_job_function"
            )
Example #12
    def __init__(self,
                 system_name="Import",
                 end_marker="_|-STOP",
                 mox_base="http://localhost:8080",
                 mora_base="http://localhost:5000",
                 store_integration_data=False,
                 create_defaults=True,
                 seperate_names=False,
                 demand_consistent_uuids=True,
                 ImportUtility=ImportUtility):

        self.seperate_names = seperate_names
        mora_type_config(mox_base=mox_base,
                         system_name=system_name,
                         end_marker=end_marker)
        self.mox_base = mox_base
        # Import Utility
        self.store = ImportUtility(
            mox_base=mox_base,
            mora_base=mora_base,
            system_name=system_name,
            end_marker=end_marker,
            demand_consistent_uuids=demand_consistent_uuids,
            store_integration_data=store_integration_data)
        # TODO: store_integration_data could be passed to ImportUtility by passing
        # the actual self.ia object
        if store_integration_data:
            self.morah = MoraHelper(use_cache=False)
            self.ia = IntegrationAbstraction(mox_base, system_name, end_marker)

        self.organisation = None
        self.klassifikation = None

        self.klasse_objects = {}
        self.facet_objects = {}
        self.addresses = []
        self.itsystems = {}

        self.organisation_units = {}
        self.employees = {}

        # Compatibility map
        self.available_types = {
            "klasse": "klasse_objects",
            "facet": "facet_objects",
            "organisation_unit": "organisation_units",
            "employee": "employees"
        }

        self.organisation_unit_details = {}
        self.employee_details = {}

        # Create default facet and klasse
        if create_defaults:
            self.create_default_facet_types()
Example #13
def find_bad_engagements(
        mora_base: AnyHttpUrl) -> Iterator[Tuple[UUID, List[UUID]]]:
    """Find users with engagements that ends after 9999-01-01."""
    def enrich_user_uuid(user_uuid: UUID) -> Tuple[UUID, List[UUID]]:
        """Enrich each user_uuid with engagements that end after 9999-01-01."""
        # Fetch all engagements for the user
        mo_engagements: List[Dict] = helper.read_user_engagement(
            user=str(user_uuid),
            only_primary=False,
            read_all=True,
            skip_past=True)
        # Extract uuid and end-date, filter out infinity end-dates.
        mo_engagement_tuples_str: Iterator[Tuple[str, str]] = map(
            lambda mo_engagement: (
                mo_engagement["uuid"],
                mo_engagement["validity"]["to"],
            ),
            mo_engagements,
        )
        mo_engagement_tuples_str = filter(
            apply(lambda mo_uuid, end_date: end_date is not None),
            mo_engagement_tuples_str,
        )
        # Convert end-date to datetime.date, and filter out dates before 9999-01-01
        mo_engagement_tuples: Iterator[Tuple[str, date]] = map(
            apply(lambda mo_uuid, end_date: (
                mo_uuid,
                datetime.strptime(end_date, "%Y-%m-%d").date(),
            )),
            mo_engagement_tuples_str,
        )
        mo_engagement_tuples = filter(
            apply(lambda mo_uuid, end_date: end_date >= date(9999, 1, 1)),
            mo_engagement_tuples,
        )
        # Extract and convert resulting engagement uuids
        mo_engagement_uuid_strings: Iterator[str] = map(
            itemgetter(0), mo_engagement_tuples)
        mo_engagement_uuids: Iterator[UUID] = map(UUID,
                                                  mo_engagement_uuid_strings)
        return user_uuid, list(mo_engagement_uuids)

    helper = MoraHelper(hostname=mora_base, use_cache=False)
    # Read all users and map to just their UUIDs
    users: Iterator[Dict] = tqdm(helper.read_all_users())
    user_uuid_strings: Iterator[str] = map(itemgetter("uuid"), users)
    user_uuids: Iterator[UUID] = map(UUID, user_uuid_strings)
    # Enrich each user_uuid with a list of UUIDs from engagements that have a
    # bad end-date, and filter out users with an empty list
    user_tuples: Iterator[Tuple[UUID, List[UUID]]] = map(
        enrich_user_uuid, user_uuids)
    user_tuples = filter(
        apply(lambda user_uuid, engagement_uuids: bool(engagement_uuids)),
        user_tuples)
    return user_tuples
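
Note: the apply helper used in the map/filter pipeline above is defined outside this snippet. A minimal sketch, assuming it simply unpacks each (uuid, end_date) tuple into positional arguments.

def apply(func):
    """Turn func(a, b, ...) into a function that accepts a single tuple (a, b, ...)."""
    def wrapper(args):
        return func(*args)
    return wrapper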
Example #14
    def __init__(self):
        self.helper = MoraHelper(hostname=MORA_BASE, use_cache=False)
        self.org_uuid = self.helper.read_organisation()

        self.mo_person = None

        # Currently primary is set first by engagement type (order given in
        # settings) and secondly by job_id. self.primary is an ordered list of
        # classes that can be considered to be primary. self.primary_types is
        # a dict with all classes in the primary facet.
        self.eng_types_order = SETTINGS[
            'integrations.opus.eng_types_primary_order']
        self.primary_types, self.primary = self._find_primary_types()
Example #15
    def __init__(self):

        self.settings = load_settings()
        self.root_ou_uuid = self.settings["integrations.ad.import_ou.mo_unit_uuid"]
        self.helper = MoraHelper(hostname=self.settings["mora.base"], use_cache=False)
        self.org_uuid = self.helper.read_organisation()

        self.ad_reader = ADParameterReader()
        self.ad_reader.cache_all(print_progress=True)

        its = self.helper.read_it_systems()
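        # only() (presumably from more_itertools) returns the single matching
        # IT system, None if there is no match, and raises on more than one.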
        AD_its = only(filter(lambda x: x["name"] == constants.AD_it_system, its))
        self.AD_it_system_uuid = AD_its["uuid"]
Example #16
    def _find_manager(self, org_unit_uuid, mora_helper: MoraHelper):
        url = "ou/{}/details/manager"
        managers = mora_helper._mo_lookup(org_unit_uuid, url)
        responsibility_class = self.settings[
            "exporters.viborg.primary_manager_responsibility"]

        for manager in managers:
            if responsibility_class in map(lambda x: x.get("uuid"),
                                           manager["responsibility"]):
                return manager

        parent = mora_helper.read_ou(org_unit_uuid).get("parent")
        if not parent:
            return {}
        return self._find_manager(parent["uuid"], mora_helper)
Example #17
    def setUpClass(self):
        self.morah = MoraHelper()
        org = self.morah.read_organisation()
        # This assumes a single top-unit. Tests will fail if we have more.
        roots = self.morah.read_top_units(org)
        self.nodes = self.morah.read_ou_tree(roots[0]['uuid'])
        self.counts = self.morah._mo_lookup(org, 'o/{}/')

        cq.export_orgs(self.morah, self.nodes, 'all_employees.csv')
        cq.export_orgs(self.morah,
                       self.nodes,
                       'all_orgs.csv',
                       include_employees=False)
        cq.export_managers(self.morah, self.nodes, 'all_managers.csv')
        cq.export_adm_org(self.morah, self.nodes, 'adm_org.csv')
Example #18
        def get_employee_positions(employee_uuid, mh: MoraHelper):
            present = mh._mo_lookup(
                employee_uuid, "e/{}/details/engagement?validity=present")
            future = mh._mo_lookup(employee_uuid,
                                   "e/{}/details/engagement?validity=future")
            engagements = present + future

            converted_positions = []
            for engagement in engagements:
                converted_positions.append({
                    "name": engagement["job_function"]["name"],
                    "orgUnitUuid": engagement["org_unit"]["uuid"],
                })
            return converted_positions
Example #19
def main(
        emus_xml_file,
        root_org_unit_uuid=MORA_ROOT_ORG_UNIT_UUID,
        mh=MoraHelper(),
        t=time.time(),
):
    if not root_org_unit_uuid:
        logger.error("root_org_unit_uuid must be specified")
        exit(1)

    logger.warning("caching all ou's,"
                   " so program may seem unresponsive temporarily")

    nodes = mh.read_ou_tree(root_org_unit_uuid)

    # Write the xml file
    emus_xml_file.write('<?xml version="1.0" encoding="utf-8"?>\n')
    emus_xml_file.write("<OS2MO>\n")

    # The sequence had to be switched: write employees to a temporary buffer
    # first, then append them after the org units
    temp_file = io.StringIO()
    export_e_emus(mh, nodes, temp_file)
    export_ou_emus(mh, nodes, emus_xml_file)

    emus_xml_file.write(temp_file.getvalue())

    emus_xml_file.write("</OS2MO>")
Example #20
def main(read_from_cache):
    # The past has already been terminated
    # ad_sync cannot create future entries, hence no history in the cache
    mh = MoraHelper(hostname=SETTINGS["mora.base"])
    lc = LoraCache(resolve_dar=False, full_history=False)
    lc.populate_cache(dry_run=read_from_cache, skip_associations=True)
    kill_addresses(mh, lc)
    kill_it_connections(mh, lc)
Example #21
def main(**args):
    user_file_path = args['user_file_path']
    mapping_file_path = "cpr_mo_ad_map.csv"

    settings = load_settings()
    mora_helper = MoraHelper(settings['mora.base'])

    comparison = UserComparison(settings, mora_helper, mapping_file_path,
                                user_file_path)
    comparison.run()
Example #22
        def get_employee_email(employee_uuid, mh: MoraHelper):
            present = mh._mo_lookup(employee_uuid,
                                    "e/{}/details/address?validity=present")
            future = mh._mo_lookup(employee_uuid,
                                   "e/{}/details/address?validity=future")
            addresses = present + future

            emails = list(
                filter(
                    lambda address: address["address_type"]["scope"] == "EMAIL",
                    addresses,
                )
            )

            if emails:
                if len(emails) > 1:
                    logger.warning(
                        "More than one email exists for user {}".format(
                            employee_uuid))
                return emails[0]["value"]
            return None
Example #23
    def __init__(self, lc=None, lc_historic=None, **kwargs):
        super().__init__(**kwargs)
        self.settings = self.all_settings
        self.skip_occupied_names = kwargs.get("skip_occupied_names", False)

        # Setup datasource for getting MO data.
        # TODO: Create a factory instead of this hackery?
        # Default to using MORESTSource as data source
        self.datasource = MORESTSource(self.settings)
        # Use LoraCacheSource if LoraCache is provided
        if lc and lc_historic:
            self.datasource = LoraCacheSource(lc, lc_historic, self.datasource)
        # NOTE: These should be eliminated when all uses are gone
        # NOTE: Once fully utilized, tests should be able to just implement a
        #       MODataSource for all their mocking needs.
        self.lc = lc
        self.lc_historic = lc_historic
        self.helper = MoraHelper(hostname=self.settings["global"]["mora.base"],
                                 use_cache=False)

        self._init_name_creator()
Example #24
        def get_kle(org_unit_uuid: str,
                    mh: MoraHelper) -> Tuple[List[str], List[str]]:
            present = mh._mo_lookup(org_unit_uuid,
                                    "ou/{}/details/kle?validity=present")
            future = mh._mo_lookup(org_unit_uuid,
                                   "ou/{}/details/kle?validity=future")
            kles = present + future

            def get_kle_tuples(
                kles: List[dict], ) -> Generator[Tuple[str, str], None, None]:
                for kle in kles:
                    number = kle["kle_number"]["user_key"]
                    for aspect in kle["kle_aspect"]:
                        yield number, aspect["scope"]

            kle_tuples = get_kle_tuples(kles)
            buckets = bucket(kle_tuples, key=itemgetter(1))
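            # bucket() (presumably from more_itertools) lazily splits the
            # (number, scope) tuples into groups addressable by their scope.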

            interest = map(itemgetter(0), buckets["INDSIGT"])
            performing = map(itemgetter(0), buckets["UDFOERENDE"])

            return list(interest), list(performing)
Example #25
def ensure_class_in_facet(mora_base, bvn, facet, title, uuid, scope):
    """Creates a class if it doesn't allready exist

    Example:
        metacli ensure_class_exists --bvn=Orlov --facet=leave_type

    Returns the uuid of the created/existing class.
    """
    helper = MoraHelper(hostname=mora_base, use_cache=False)
    title = title or bvn
    assert all(arg != ""
               for arg in (bvn, title, facet, scope)), "Inputs can't be empty"
    class_uuid = helper.ensure_class_in_facet(facet=facet,
                                              bvn=bvn,
                                              title=title,
                                              uuid=uuid,
                                              scope=scope)
    if uuid:
        assert (
            class_uuid == uuid
        ), f"This class allready existed with another uuid {class_uuid}"
    click.echo(class_uuid)
Example #26
    def export_engagement(self, mh: MoraHelper, filename, lc, lc_historic):
        rows = []

        logger.info("Reading users")
        if lc:
            employees = list(map(lambda x: x[0], lc.users.values()))
        else:
            employees = mh.read_all_users()

        logger.info("Reading engagements")
        # TODO: This is O(#employees x #engagements); pre-sorting the engagements
        # would make it O(#employees + #engagements) - consider whether this is
        # worth the effort
        for employee in employees:
            logger.info("employee: %r", employee)
            if lc:
                for row in self._gen_from_loracache(employee, lc, lc_historic):
                    rows.append(row)
            else:
                for row in self._gen_from_mo(employee, mh):
                    rows.append(row)

        mh._write_csv(self.fieldnames, rows, filename)
Example #27
def main(
    report_outfile,
    root_org_unit_name=MORA_ROOT_ORG_UNIT_NAME,
    mh=MoraHelper(),
):
    root_org_unit_uuid = get_root_org_unit_uuid(mh, root_org_unit_name)

    if not root_org_unit_uuid:
        logger.error("%s not found in root-ous", root_org_unit_name)
        exit(1)

    logger.warning("caching all ou's,"
                   " so program may seem unresponsive temporarily")
    nodes = mh.read_ou_tree(root_org_unit_uuid)
    find_people(mh, nodes)
    fieldnames, rows = prepare_report(mh, nodes)
    rows = collapse_same_manager_more_departments(rows)
    mh._write_csv(fieldnames, rows, report_outfile)
Example #28
class SnurreBasse:
    def __init__(self, session):
        self.session = session
        self.helper = MoraHelper(hostname=settings["mora.base"],
                                 use_cache=False)

    def terminate(self, typ, uuid):
        response = self.helper._mo_post(
            "details/terminate",
            {
                "type": typ,
                "uuid": uuid,
                "validity": {
                    "to": "2020-10-01"
                }
            },
        )
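        # A 400 whose body mentions "raise to a new registration" is taken to
        # mean the change would be a no-op; that case is tolerated below,
        # anything else is raised.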
        if response.status_code == 400:
            assert response.text.find("raise to a new registration") > 0
        else:
            response.raise_for_status()

    def run_it(self):
        for i in (session.query(ItForbindelse.uuid).filter(
                and_(ItForbindelse.bruger_uuid != None)).all()):
            try:
                print(i)
                self.terminate("it", i[0])
            except:
                pass

    def run_adresse(self):
        for i in (session.query(Adresse.uuid).filter(
                and_(Adresse.adressetype_scope == "E-mail",
                     Adresse.bruger_uuid != None)).all()):
            try:
                print(i)
                self.terminate("address", i[0])
            except:
                pass
Example #29
    def run(self, speedup=False, dry_run=True):
        mora_base = self.settings["mora.base"]
        query_exports_dir = pathlib.Path(
            self.settings["mora.folder.query_export"])
        if "exports_viborg_eksterne.outfile_basename" not in self.settings:
            print(
                "Missing key in settings: exports_viborg_eksterne.outfile_basename"
            )
            exit(1)
        outfile_name = (
            query_exports_dir /
            self.settings["exports_viborg_eksterne.outfile_basename"])
        logger.info("writing to file %s", outfile_name)

        t = time.time()
        mh = MoraHelper(hostname=mora_base, export_ansi=False)

        if speedup:
            # Here we should activate read-only mode; the actual-state and
            # full-history dumps need to be in sync.

            # Full history does not calculate derived data, we must
            # fetch both kinds.
            lc = LoraCache(resolve_dar=True, full_history=False)
            lc.populate_cache(dry_run=dry_run, skip_associations=True)
            lc.calculate_derived_unit_data()

            lc_historic = LoraCache(resolve_dar=False,
                                    full_history=True,
                                    skip_past=True)
            lc_historic.populate_cache(dry_run=dry_run, skip_associations=True)
            # Here we should de-activate read-only mode
        else:
            lc = None
            lc_historic = None

        self.export_engagement(mh, str(outfile_name), lc, lc_historic)
        logger.info("Time: {}s".format(time.time() - t))

        logger.info("Export completed")
Example #30
def export_from_mo(root, threaded_speedup, hostname):
    t = time.time()

    mh = MoraHelper(hostname=hostname, export_ansi=False)

    org = mh.read_organisation()
    roots = mh.read_top_units(org)

    if root is None:
        trees = {}
        max_height = 0
        main_root = None
        for root in roots:
            name = root['name']
            uuid = root['uuid']
            trees[name] = mh.read_ou_tree(uuid)
            if trees[name]['root'].height > max_height:
                max_height = trees[name]['root'].height
                main_root = name
        nodes = trees[main_root]
    else:
        nodes = mh.read_ou_tree(root)
    print('Find main tree: {}'.format(time.time() - t))

    if threaded_speedup:
        cq.pre_cache_users(mh)
        print('Build cache: {}'.format(time.time() - t))

    filename = 'alle_lederfunktioner_os2mo.csv'
    cq.export_managers(mh, nodes, filename)
    print('Alle ledere: {}s'.format(time.time() - t))

    filename = 'alle-medarbejdere-stilling-email_os2mo.csv'
    cq.export_all_employees(mh, nodes, filename)
    print('alle-medarbejdere-stilling-email_os2mo.csv: {}s'.format(time.time() - t))

    filename = 'org_incl-medarbejdere.csv'
    cq.export_orgs(mh, nodes, filename)
    print('org_incl-medarbejdere.csv: {}s'.format(time.time() - t))

    filename = 'adm-org-incl-start-og-stopdata-og-enhedstyper-os2mo.csv'
    cq.export_adm_org(mh, nodes, filename)
    print('adm-org-incl-start-stop: {}s'.format(time.time() - t))

    filename = 'tilknytninger.csv'
    cq.export_all_teams(mh, nodes, filename)
    print('tilknytninger: {}s'.format(time.time() - t))