Example #1
    async def handle_edit(self, filename: str, filedate: datetime):
        """
        Handle changes to existing org units and details
        We are guaranteed to only have one row per org unit

        New details on an existing org unit will show up in this file, rather than the
        'nye' file. So we have to potentially perform inserts of new data.

        As a row contains information about the org unit as well as its details,
        we do not know what has been changed. However, all information is managed
        by the external system so we can safely reimport the "same" data, as opposed to
        trying to compare the existing objects in OS2mo
        """
        org_units = los_files.read_csv(filename, OrgUnit)
        org_unit_payloads = self.create_unit_payloads(org_units)
        detail_payloads = await self.create_detail_payloads(org_units)

        orgfunk_uuids = set(await util.lookup_organisationfunktion())
        detail_creates, detail_edits = partition(
            lambda payload: payload["uuid"] in orgfunk_uuids, detail_payloads)
        converter = partial(mo_payloads.convert_create_to_edit,
                            from_date=filedate.date().isoformat())
        edits = map(converter, chain(org_unit_payloads, detail_edits))

        async with util.get_client_session() as session:
            await util.create_details(session, detail_creates)
            await util.edit_details(session, edits)
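The create-versus-edit split above relies on more_itertools.partition, which returns the items failing the predicate first and the items satisfying it second: payloads whose UUID is not yet known to MO become creates, the rest become edits. A minimal, runnable sketch with made-up UUIDs and payloads:

from more_itertools import partition

# Made-up data: UUIDs already registered in MO, plus freshly built payloads.
orgfunk_uuids = {"0001", "0002"}
detail_payloads = [
    {"uuid": "0001", "type": "address"},  # known to MO -> edit
    {"uuid": "9999", "type": "address"},  # unknown to MO -> create
]

# partition() yields (predicate-false items, predicate-true items).
detail_creates, detail_edits = partition(
    lambda payload: payload["uuid"] in orgfunk_uuids, detail_payloads
)
print(list(detail_creates))  # [{'uuid': '9999', 'type': 'address'}]
print(list(detail_edits))    # [{'uuid': '0001', 'type': 'address'}]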
Example #2
    async def handle_create(self, filename: str, filedate: datetime):
        """
        Handle creating new manager functions
        """
        managers = los_files.read_csv(filename, ManagerCreate)
        manager_payloads = self.create_manager_payloads(managers)  # type: ignore
        async with util.get_client_session() as session:
            await util.create_details(session, manager_payloads)
Example #3
    async def handle_terminate(self, filename: str, filedate: datetime):
        """
        Handle termination of engagements. We are guaranteed one row per engagement.
        """
        persons = los_files.read_csv(filename, PersonTermination)
        termination_fn = partial(self.generate_engagement_termination_payload,
                                 to_date=filedate)
        payloads = map(termination_fn, persons)

        async with util.get_client_session() as session:
            await util.terminate_details(session, payloads)
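generate_engagement_termination_payload is not shown in this excerpt. A minimal sketch of what such a builder could look like, assuming MO's usual detail-termination shape; the PersonTermination field and the payload layout are assumptions, not code from the importer:

from dataclasses import dataclass
from datetime import datetime
from functools import partial

@dataclass
class PersonTermination:      # simplified stand-in for the CSV row model
    engagement_uuid: str      # hypothetical field name

def generate_engagement_termination_payload(
        person: PersonTermination, to_date: datetime) -> dict:
    # Assumed shape: identify the engagement by UUID and close its validity.
    return {
        "type": "engagement",
        "uuid": person.engagement_uuid,
        "validity": {"to": to_date.date().isoformat()},
    }

# As in the example: bind the file date once, then map over all rows.
termination_fn = partial(generate_engagement_termination_payload,
                         to_date=datetime(2021, 1, 31))
print(list(map(termination_fn, [PersonTermination("abc-123")])))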
Example #4
    async def handle_create(self, filename: str, filedate: datetime):
        """
        Handle creating new persons and details
        We are guaranteed to only have one row per person
        """
        persons = los_files.read_csv(filename, Person)
        employee_payloads = self.create_employee_payloads(persons)
        detail_payloads = self.create_detail_payloads(
            persons,
            filedate.date().isoformat())

        async with util.get_client_session() as session:
            await util.create_details(
                session, chain(employee_payloads, detail_payloads))
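The two payload streams are lazily concatenated with itertools.chain, so a single create call covers both the employees and their details. A toy illustration:

from itertools import chain

employee_payloads = iter([{"uuid": "p1", "type": "employee"}])
detail_payloads = iter([{"uuid": "d1", "type": "address"}])

# chain() yields every employee payload, then every detail payload, without
# materializing either stream up front.
for payload in chain(employee_payloads, detail_payloads):
    print(payload)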
Example #5
    async def handle_create(self, filename: str):
        """
        Handle creating new org units and details
        We are guaranteed to only have one row per org unit
        """
        org_units = los_files.read_csv(filename, OrgUnit)

        await self.handle_addresses(org_units, filename)

        org_unit_payloads = self.create_unit_payloads(org_units)
        detail_payloads = await self.create_detail_payloads(org_units)

        async with util.get_client_session() as session:
            await util.create_details(
                session, chain(org_unit_payloads, detail_payloads))
Example #6
    def _load_csv_if_newer(
            cls, csv_class: StamCSVType,
            last_import: datetime) -> Union[List[StamCSVType], None]:
        filename = csv_class.get_filename()
        fileset = los_files.get_fileset_implementation()
        try:
            modified_datetime = fileset.get_modified_datetime(filename)
        except ValueError:
            # Raised by `fileset.get_modified_datetime` if the file could not
            # be found.
            return None
        else:
            if modified_datetime <= last_import:
                return None
            return los_files.read_csv(filename, csv_class)
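The try/except/else construction exists because get_modified_datetime signals a missing file by raising ValueError, and the else block runs only when no exception was raised. A runnable toy that mimics that contract with a fake fileset; all names below are stand-ins, not the real los_files API:

from datetime import datetime

class FakeFileset:
    # Stand-in for whatever los_files.get_fileset_implementation() returns.
    def get_modified_datetime(self, filename: str) -> datetime:
        if filename != "known.csv":
            raise ValueError(f"{filename} not found")
        return datetime(2021, 6, 1, 12, 0)

def load_if_newer(fileset, filename: str, last_import: datetime):
    try:
        modified = fileset.get_modified_datetime(filename)
    except ValueError:
        # A missing file and an unchanged file look the same to the caller:
        # both yield None, so one check suffices at the call site.
        return None
    else:
        if modified <= last_import:
            return None
        return f"parsed rows of {filename}"

fs = FakeFileset()
print(load_if_newer(fs, "known.csv", datetime(2021, 1, 1)))    # parsed rows
print(load_if_newer(fs, "known.csv", datetime(2022, 1, 1)))    # None: stale
print(load_if_newer(fs, "missing.csv", datetime(2021, 1, 1)))  # None: absent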
Example #7
    async def handle_edit(self, filename: str, filedate: datetime):
        """
        Handle changes to managers
        """
        managers = los_files.read_csv(filename, ManagerEdit)
        manager_payloads = self.create_manager_payloads(managers)  # type: ignore
        orgfunk_uuids = set(await util.lookup_organisationfunktion())
        detail_creates, detail_edits = partition(
            lambda payload: payload["uuid"] in orgfunk_uuids, manager_payloads)
        converter = partial(mo_payloads.convert_create_to_edit,
                            from_date=filedate.date().isoformat())
        # `partition` has already consumed `manager_payloads` to build the two
        # streams, so only `detail_edits` must be converted here; chaining in
        # `manager_payloads` again would duplicate the edit payloads.
        edits = map(converter, detail_edits)
        async with util.get_client_session() as session:
            await util.create_details(session, detail_creates)
            await util.edit_details(session, edits)
Example #8
    async def handle_terminate(self, filename: str, filedate: datetime):
        """
        Handle termination of managers
        """
        managers = los_files.read_csv(filename, ManagerTerminate)
        payloads = [
            mo_payloads.terminate_detail(
                "manager",
                self._generate_rel_uuid(manager),
                filedate,
            ) for manager in managers
        ]
        async with util.get_client_session() as session:
            await util.terminate_details(session,
                                         payloads,
                                         ignored_http_statuses=None)
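mo_payloads.terminate_detail is not reproduced in this excerpt. A plausible sketch, assuming it emits MO's usual detail-termination shape; the exact field layout is an assumption:

from datetime import datetime

def terminate_detail(detail_type: str, uuid: str, to_date: datetime) -> dict:
    # Assumed shape: identify the detail by type and UUID, and close its
    # validity at the given date.
    return {
        "type": detail_type,
        "uuid": uuid,
        "validity": {"to": to_date.date().isoformat()},
    }

print(terminate_detail("manager", "abc-123", datetime(2021, 1, 31)))
# {'type': 'manager', 'uuid': 'abc-123', 'validity': {'to': '2021-01-31'}}

Passing ignored_http_statuses=None presumably disables any default list of HTTP statuses that would otherwise be swallowed, so a failed manager termination surfaces as an error instead of being silently ignored.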
Example #9
    async def handle_initial(self, filename: str):
        """
        Handle reading the special 'initial' file.

        The file contains org unit data as well as data on the associated
        details.

        The initial org unit file contains historical data, so a minimal set of
        create/edit payloads is generated accordingly.
        """
        org_units = los_files.read_csv(filename, OrgUnit)

        await self.handle_addresses(org_units, filename)

        unit_payloads = self.create_unit_payloads(org_units)
        detail_payloads = await self.create_detail_payloads(org_units)
        payloads = list(unit_payloads) + list(detail_payloads)

        # Bucket all payloads referring to the same object
        uuid_buckets = bucket(payloads, key=lambda payload: payload["uuid"])
        sorted_buckets = map(
            lambda uuid_key: sorted(uuid_buckets[uuid_key],
                                    key=lambda x: x["validity"]["from"]),
            uuid_buckets,
        )
        consolidated_buckets = list(
            map(self.consolidate_payloads, sorted_buckets))

        split_lists = map(lambda x: (x[0], x[1:]), consolidated_buckets)
        heads, tails = unzip(split_lists)

        # OS2mo reads an object before performing an edit to it, so we must
        # ensure that we never perform multiple edits to the same object in
        # parallel, as one edit could otherwise overwrite another. We therefore
        # build layers containing at most one edit request per org unit UUID
        # and execute the layers sequentially, while letting the importer
        # submit the individual requests within a layer in parallel.
        edit_payloads = map(partial(map, mo_payloads.convert_create_to_edit),
                            tails)
        edit_layers = zip_longest(*edit_payloads)
        edit_layers_filtered = map(partial(filter, None.__ne__), edit_layers)

        async with util.get_client_session() as session:
            await util.create_details(session, heads)
            for edit_layer in edit_layers_filtered:
                await util.edit_details(session, edit_layer)
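The bucketing and layering pipeline above is dense, so here is a runnable miniature with made-up payloads showing how bucket, unzip and zip_longest cooperate: the earliest payload per UUID becomes the create, the rest become edits, and each layer contains at most one edit per UUID:

from itertools import zip_longest
from more_itertools import bucket, unzip

payloads = [  # made-up payloads: two validities for "a", one for "b"
    {"uuid": "a", "validity": {"from": "2020-01-01"}},
    {"uuid": "b", "validity": {"from": "2020-06-01"}},
    {"uuid": "a", "validity": {"from": "2021-01-01"}},
]

uuid_buckets = bucket(payloads, key=lambda p: p["uuid"])
sorted_buckets = [
    sorted(uuid_buckets[key], key=lambda p: p["validity"]["from"])
    for key in uuid_buckets
]

# The first payload per UUID is the create; the rest are edits.
heads, tails = unzip((b[0], b[1:]) for b in sorted_buckets)
print(list(heads))  # one create per UUID

# zip_longest pads the shorter tails with None, producing layers in which
# every UUID appears at most once; the Nones are filtered out before the
# layer is submitted.
for layer in zip_longest(*tails):
    print([p for p in layer if p is not None])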