Example #1
def run_report(reporttype, sheetname: str, org_name: str, xlsx_file: str):

    # Make a SQLAlchemy session - the database name is read from settings
    session = sessionmaker(bind=get_engine(), autoflush=False)()

    # Make the query
    data = reporttype(session, org_name)

    # Write the data as an Excel file
    workbook = xlsxwriter.Workbook(xlsx_file)
    excel = XLSXExporter(xlsx_file)
    excel.add_sheet(workbook, sheetname, data)
    workbook.close()
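
The report type is passed in as a callable, which lets the same Excel-writing code serve several different reports. A minimal usage sketch (not from the source), assuming a hypothetical org_unit_overview report function that returns the rows XLSXExporter writes:

def org_unit_overview(session, org_name):
    # Hypothetical report: return whatever rows the sheet should contain
    return [["Unit", "Employees"], [org_name, 42]]

run_report(org_unit_overview, "Overview", "Some Org", "/tmp/overview.xlsx")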
Example #2
 def test_get_engine(self):
     testnumber = random.randint(1, 1000)
     with tempfile.TemporaryDirectory() as d:
         pd = pathlib.Path(d)
         dbpath = pd / "testdb"
         myengine = sqlalchemy.create_engine(
             "sqlite:///{}.db".format(dbpath))
         myengine.execute("create table x(x integer);")
         myengine.execute("insert into x values(%d)" % testnumber)
         myengine.dispose()
         yourengine = get_engine(dbpath)
         result = yourengine.execute("select x from x")
         self.assertEqual(result.first(), (testnumber, ))
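
The test only exercises get_engine from the outside. A minimal sketch of what a helper along these lines might look like, assuming it simply wraps sqlalchemy.create_engine around a SQLite path (the real helper reads its default path from settings; this sketch is not the project's actual implementation):

import sqlalchemy

def get_engine(dbpath):
    # Assumption: ":memory:" (as used in Example #3) maps to an in-memory SQLite database,
    # everything else is treated as a file path with a ".db" suffix appended.
    if dbpath == ":memory:":
        return sqlalchemy.create_engine("sqlite://")
    return sqlalchemy.create_engine("sqlite:///{}.db".format(dbpath))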
Example #3
 def setUp(self):
     """
     setup db and populate with quite minimal data
     """
     self.engine = get_engine(dbpath=":memory:")
     self.session = sessionmaker(bind=self.engine, autoflush=False)()
     # Create tables via the table definitions from LoraCache and fill in the data
     Base.metadata.create_all(self.engine)
     bruger = Bruger(
         fornavn="fornavn",
         efternavn="efternavn",
         uuid="b1",
         bvn="b1bvn",
         cpr="cpr1",
     )
     self.session.add(bruger)
     it = ItSystem(navn=AD_it_system, uuid="ItSystem1")
     self.session.add(it)
     it = ItSystem(navn="it_navn2", uuid="ItSystem2")
     self.session.add(it)
     it = ItForbindelse(
         id=1,
         uuid="if1",
         it_system_uuid="ItSystem1",
         bruger_uuid="b1",
         enhed_uuid="e1",
         brugernavn="AD-logon",
         startdato="0",
         slutdato="1",
         primær_boolean=True,
     )
     self.session.add(it)
     it = ItForbindelse(
         id=2,
         uuid="if2",
         it_system_uuid="ItSystem2",
         bruger_uuid="b1",
         enhed_uuid="e1",
         brugernavn="if_bvn2",
         startdato="0",
         slutdato="1",
         primær_boolean=True,
     )
     self.session.add(it)
     self.session.commit()
Example #4
def main(delete):
    engine = get_engine()

    # Prepare session
    Session = sessionmaker(bind=engine, autoflush=False)
    session = Session()

    # List of tuples: (it_sys_uuid, bruger_uuid, enhed_uuid, brugernavn, count)
    duplicates = find_duplicate_it_connections(session)
    # List of dicts from id --> uuid (for rows to be deleted)
    duplicate_maps = map(partial(construct_duplicate_dict, session), duplicates)
    # One combined dict from id --> uuid (for rows to be deleted)
    output = dict(ChainMap(*duplicate_maps))

    if delete:
        settings = get_settings()
        delete_from_lora(settings["mox.base"], output.values())
    else:
        # Output delete-map
        print(json.dumps(output, indent=4, sort_keys=True))
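
The map / ChainMap pipeline above produces one small dict per duplicate group and then folds them into a single id --> uuid mapping. A self-contained toy version of the same pattern, with made-up data instead of database rows:

from collections import ChainMap
from functools import partial

def construct_dict(prefix, pair):
    # Toy stand-in for construct_duplicate_dict: build one small id --> uuid dict
    row_id, uuid = pair
    return {row_id: prefix + uuid}

duplicates = [(1, "a"), (2, "b"), (3, "c")]
duplicate_maps = map(partial(construct_dict, "uuid-"), duplicates)
output = dict(ChainMap(*duplicate_maps))
# output == {1: "uuid-a", 2: "uuid-b", 3: "uuid-c"}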
Example #5
def main(emus_xml_file, settings):
    session = get_session(get_engine())
    nodes = read_ou_tree(session, settings["MORA_ROOT_ORG_UNIT_UUID"])

    # Write employees to a temp file, counting them to determine whether each org unit is included
    temp_file = io.StringIO()
    export_e_emus(session, settings, nodes, temp_file)

    # Begin the xml file
    emus_xml_file.write('<?xml version="1.0" encoding="utf-8"?>\n')
    emus_xml_file.write("<OS2MO>\n")

    # Write the included units to the XML file
    export_ou_emus(session, nodes, temp_file)

    # Write employees to xml file
    emus_xml_file.write(temp_file.getvalue())

    # End xml file
    emus_xml_file.write("</OS2MO>")
Example #6
        if response.status_code == 400:
            assert response.text.find("raise to a new registration") > 0
        else:
            response.raise_for_status()

    def run_it(self):
        for i in (session.query(ItForbindelse.uuid).filter(
                and_(ItForbindelse.bruger_uuid != None)).all()):
            try:
                print(i)
                self.terminate("it", i[0])
            except Exception:
                # Ignore failures for individual rows and continue
                pass

    def run_adresse(self):
        for i in (session.query(Adresse.uuid).filter(
                and_(Adresse.adressetype_scope == "E-mail",
                     Adresse.bruger_uuid != None)).all()):
            try:
                print(i)
                self.terminate("address", i[0])
            except Exception:
                # Ignore failures for individual rows and continue
                pass


if __name__ == "__main__":

    session = get_session(get_engine())
    SnurreBasse(session).run_adresse()
    SnurreBasse(session).run_it()
Example #7
def generate_json():
    # TODO: Async database access
    engine = get_engine()
    # Prepare session
    Session = sessionmaker(bind=engine, autoflush=False)
    session = Session()

    # Count number of queries
    def query_counter(*_):
        query_counter.count += 1

    query_counter.count = 0
    event.listen(engine, "before_cursor_execute", query_counter)

    # Print number of employees
    total_number_of_employees = session.query(Bruger).count()
    print("Total employees:", total_number_of_employees)

    def filter_missing_entry(entry_map, entry_type, unit_uuid, entry):
        if unit_uuid not in entry_map:
            logger.error(entry_type + " not found in map: " + str(unit_uuid))
            return False
        return True

    def enrich_org_unit_with_x(org_unit_map, entry_type, entry_gen, entries):
        def gen_entry(x, bruger):
            return x.enhed_uuid, entry_gen(x, bruger)

        # Bind two arguments so the function only takes (unit_uuid, entry),
        # then apply_tuple so it accepts a single tuple (unit_uuid, entry).
        missing_entry_filter = apply_tuple(
            partial(filter_missing_entry, org_unit_map,
                    entry_type.capitalize()))

        entries = starmap(gen_entry, entries)
        entries = filter(missing_entry_filter, entries)
        for unit_uuid, entry in entries:
            org_unit_map[unit_uuid][entry_type].append(entry)
        return org_unit_map

    def enrich_employees_with_x(employee_map, entry_type, entry_gen, entries):
        def gen_entry(x, enhed):
            return x.bruger_uuid, entry_gen(x, enhed)

        # Bind two arguments so the function only takes (bruger_uuid, entry),
        # then apply_tuple so it accepts a single tuple (bruger_uuid, entry).
        missing_entry_filter = apply_tuple(
            partial(filter_missing_entry, employee_map,
                    entry_type.capitalize()))

        # Add org-units to queue as side-effect
        entries = side_effect(lambda x_enhed: add_org_unit(x_enhed[1]),
                              entries)
        entries = starmap(gen_entry, entries)
        entries = filter(missing_entry_filter, entries)
        for bruger_uuid, entry in entries:
            employee_map[bruger_uuid][entry_type].append(entry)
        return employee_map

    def enrich_org_units_with_engagements(org_unit_map):
        def gen_engagement(engagement, bruger):
            return {
                "title": engagement.stillingsbetegnelse_titel,
                "name": bruger.fornavn + " " + bruger.efternavn,
                "uuid": bruger.uuid,
            }

        engagements = session.query(
            Engagement,
            Bruger).filter(Engagement.bruger_uuid == Bruger.uuid).all()
        return enrich_org_unit_with_x(org_unit_map, "engagements",
                                      gen_engagement, engagements)

    def enrich_org_units_with_associations(org_unit_map):
        def gen_association(tilknytning, bruger):
            return {
                "title": tilknytning.tilknytningstype_titel,
                "name": bruger.fornavn + " " + bruger.efternavn,
                "uuid": bruger.uuid,
            }

        associations = session.query(
            Tilknytning,
            Bruger).filter(Tilknytning.bruger_uuid == Bruger.uuid).all()
        return enrich_org_unit_with_x(org_unit_map, "associations",
                                      gen_association, associations)

    def enrich_org_units_with_management(org_unit_map):
        def gen_management(leder, bruger):
            return {
                "title": leder.ledertype_titel,
                "name": bruger.fornavn + " " + bruger.efternavn,
                "uuid": bruger.uuid,
            }

        managements = session.query(
            Leder, Bruger).filter(Leder.bruger_uuid == Bruger.uuid).all()
        return enrich_org_unit_with_x(org_unit_map, "management",
                                      gen_management, managements)

    def enrich_org_units_with_kles(org_unit_map):
        def gen_kle(kle):
            return kle.enhed_uuid, {
                "title": kle.kle_nummer_titel,
                # "name": kle.kle_aspekt_titel,
                "uuid": kle.uuid,
            }

        # Bind two arguments so the function only takes (unit_uuid, entry),
        # then apply_tuple so it accepts a single tuple (unit_uuid, entry).
        missing_entry_filter = apply_tuple(
            partial(filter_missing_entry, org_unit_map, "KLE"))

        kles = session.query(KLE).all()
        kles = filter(lambda kle: kle.kle_aspekt_titel == 'Udførende', kles)
        kles = map(gen_kle, kles)
        kles = filter(missing_entry_filter, kles)
        for unit_uuid, kle in kles:
            org_unit_map[unit_uuid]["kles"].append(kle)
        return org_unit_map

    org_unit_map = {}
    org_unit_queue = set()

    def queue_org_unit(uuid=None):
        if uuid is None:
            return
        org_unit_queue.add(uuid)

    def fetch_parent_org_units():
        # We assume that hierarchies are fairly shallow, so one query per layer is acceptable.
        while org_unit_queue:
            query_queue = list(org_unit_queue)
            org_unit_queue.clear()
            queryset = session.query(Enhed).filter(
                Enhed.uuid.in_(query_queue)).all()
            for enhed in queryset:
                add_org_unit(enhed)

    def add_org_unit(enhed):
        # If the unit has already been added, do nothing
        if enhed.uuid in org_unit_map:
            return

        unit = {
            "uuid": enhed.uuid,
            "name": enhed.navn,
            "parent": enhed.forældreenhed_uuid,
            "engagements": [],
            "associations": [],
            "management": [],
            "kles": [],
            "addresses": {
                "DAR": [],
                "PHONE": [],
                "EMAIL": [],
                "EAN": [],
                "PNUMBER": [],
                "WWW": [],
            },
        }
        org_unit_map[enhed.uuid] = unit

        # Add parent to queue for bulk fetching later (if any)
        queue_org_unit(enhed.forældreenhed_uuid)

    def fetch_employees():
        def employee_to_dict(employee):
            return {
                "uuid": employee.uuid,
                "surname": employee.efternavn,
                "givenname": employee.fornavn,
                "name": employee.fornavn + " " + employee.efternavn,
                "engagements": [],
                "associations": [],
                "management": [],
                "addresses": {
                    "DAR": [],
                    "PHONE": [],
                    "EMAIL": [],
                    "EAN": [],
                    "PNUMBER": [],
                    "WWW": []
                }
            }

        def create_uuid_tuple(entry):
            return entry["uuid"], entry

        employees = map(employee_to_dict, session.query(Bruger).all())
        employee_map = dict(map(create_uuid_tuple, employees))
        return employee_map

    def enrich_employees_with_engagements(employee_map):
        def gen_engagement(engagement, enhed):
            return {
                "title": engagement.stillingsbetegnelse_titel,
                "name": enhed.navn,
                "uuid": enhed.uuid,
            }

        engagements = session.query(
            Engagement,
            Enhed).filter(Engagement.enhed_uuid == Enhed.uuid).all()
        return enrich_employees_with_x(employee_map, "engagements",
                                       gen_engagement, engagements)

    def enrich_employees_with_associations(employee_map):
        def gen_association(tilknytning, enhed):
            return {
                "title": tilknytning.tilknytningstype_titel,
                "name": enhed.navn,
                "uuid": enhed.uuid,
            }

        associations = session.query(
            Tilknytning,
            Enhed).filter(Tilknytning.enhed_uuid == Enhed.uuid).all()
        return enrich_employees_with_x(employee_map, "associations",
                                       gen_association, associations)

    def enrich_employees_with_management(employee_map):
        def gen_management(leder, enhed):
            return {
                "title": leder.ledertype_titel,
                "name": enhed.navn,
                "uuid": enhed.uuid,
            }

        managements = session.query(
            Leder, Enhed).filter(Leder.enhed_uuid == Enhed.uuid).filter(
                # Filter out vacant manager positions
                Leder.bruger_uuid != None).all()
        return enrich_employees_with_x(employee_map, "management",
                                       gen_management, managements)

    def filter_employees(employee_map):
        def filter_function(phonebook_entry):
            # Do NOT import employees without an engagement or association
            # https://redmine.magenta-aps.dk/issues/34812

            # We do, however, want to import employees with management roles,
            # as an external employee may be a manager for an organisation unit.
            if (not phonebook_entry["associations"]
                    and not phonebook_entry["engagements"]
                    and not phonebook_entry["management"]):
                logger.info(
                    "OS2MO_IMPORT_ROUTINE Skip employee due to missing engagements, associations, management"
                )

                # Log a reference to the skipped employee at debug level
                logger.debug(
                    f"OS2MO_IMPORT_ROUTINE - NO_RELATIONS_TO_ORG_UNIT employee={phonebook_entry['uuid']}"
                )
                return False
            return True

        filtered_map = {
            uuid: entry
            for uuid, entry in employee_map.items() if filter_function(entry)
        }
        return filtered_map

    def enrich_org_units_with_addresses(org_unit_map):
        # Enrich with addresses
        queryset = session.query(Adresse).filter(Adresse.enhed_uuid != None)

        return address_helper(queryset, org_unit_map,
                              lambda address: address.enhed_uuid)

    def enrich_employees_with_addresses(employee_map):
        # Enrich with addresses
        queryset = session.query(Adresse).filter(Adresse.bruger_uuid != None)

        return address_helper(queryset, employee_map,
                              lambda address: address.bruger_uuid)

    def address_helper(queryset, entry_map, address_to_uuid):
        da_address_types = {
            "DAR": "DAR",
            "Telefon": "PHONE",
            "E-mail": "EMAIL",
            "EAN": "EAN",
            "P-nummer": "PNUMBER",
            "Url": "WWW",
        }

        dawa_queue = {}

        def process_address(address):
            entry_uuid = address_to_uuid(address)
            if entry_uuid not in entry_map:
                return

            atype = da_address_types[address.adressetype_scope]

            if address.værdi:
                value = address.værdi
            elif address.dar_uuid is not None:
                dawa_queue[address.dar_uuid] = dawa_queue.get(
                    address.dar_uuid, [])
                dawa_queue[address.dar_uuid].append(address)
                return
            else:
                logger.warning("Address: {address.uuid} does not have a value")
                return

            formatted_address = {
                "description": address.adressetype_titel,
                "value": value,
            }

            entry_map[entry_uuid]["addresses"][atype].append(formatted_address)

        queryset = queryset.filter(
            # Only include address types we care about
            Adresse.adressetype_scope.in_(da_address_types.keys())).filter(
                # Do not include secret addresses
                or_(Adresse.synlighed_titel == None,
                    Adresse.synlighed_titel != "Hemmelig"))
        for address in queryset.all():
            process_address(address)

        uuids = set(dawa_queue.keys())
        queryset = session.query(DARAdresse).filter(DARAdresse.uuid.in_(uuids))
        # Resolve the queued DAR addresses and attach their display names
        dar_addresses = queryset.all()
        for dar_address in dar_addresses:
            if dar_address.betegnelse is None:
                continue
            for address in dawa_queue[dar_address.uuid]:
                entry_uuid = address_to_uuid(address)
                atype = da_address_types[address.adressetype_scope]

                formatted_address = {
                    "description": address.adressetype_titel,
                    "value": dar_address.betegnelse,
                }

                entry_map[entry_uuid]["addresses"][atype].append(
                    formatted_address)

        found = set(map(attrgetter('uuid'), dar_addresses))
        missing = uuids - found
        if missing:
            print(missing, "not found in DAWA")

        return entry_map

    # Employees
    # ----------
    employee_map = None
    with elapsedtime("fetch_employees"):
        employee_map = fetch_employees()
    # NOTE: These 3 queries can run in parallel
    with elapsedtime("enrich_employees_with_engagements"):
        employee_map = enrich_employees_with_engagements(employee_map)
    with elapsedtime("enrich_employees_with_associations"):
        employee_map = enrich_employees_with_associations(employee_map)
    with elapsedtime("enrich_employees_with_management"):
        employee_map = enrich_employees_with_management(employee_map)
    # Filter out employees without engagements, associations and management
    with elapsedtime("filter_employees"):
        employee_map = filter_employees(employee_map)
    with elapsedtime("enrich_employees_with_addresses"):
        employee_map = enrich_employees_with_addresses(employee_map)

    # Org Units
    # ----------
    with elapsedtime("fetch_parent_org_units"):
        fetch_parent_org_units()
    # NOTE: These 3 queries can run in parallel
    with elapsedtime("enrich_org_units_with_engagements"):
        org_unit_map = enrich_org_units_with_engagements(org_unit_map)
    with elapsedtime("enrich_org_units_with_associations"):
        org_unit_map = enrich_org_units_with_associations(org_unit_map)
    with elapsedtime("enrich_org_units_with_management"):
        org_unit_map = enrich_org_units_with_management(org_unit_map)
    with elapsedtime("enrich_org_units_with_kles"):
        org_unit_map = enrich_org_units_with_kles(org_unit_map)
    with elapsedtime("enrich_org_units_with_addresses"):
        org_unit_map = enrich_org_units_with_addresses(org_unit_map)

    print("Processing took", query_counter.count, "queries")

    # Write files
    # ------------
    # TODO: Asyncio to write both files at once?
    with open("tmp/employees.json", "w") as employees_out:
        json.dump(employee_map, employees_out)

    with open("tmp/org_units.json", "w") as org_units_out:
        json.dump(org_unit_map, org_units_out)
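
The enrich_*_with_x helpers in Example #7 all use the same starmap / filter pipeline, built around an apply_tuple helper that is defined elsewhere in the module. A stripped-down sketch of that pipeline with in-memory data; the apply_tuple shown here is an assumption about what the real helper does:

from functools import partial
from itertools import starmap

def apply_tuple(func):
    # Assumed behaviour of the module's apply_tuple helper: turn f(a, b) into f((a, b))
    return lambda pair: func(*pair)

def filter_missing_entry(entry_map, entry_type, unit_uuid, entry):
    # Keep only pairs whose unit is already known (the real helper also logs an error)
    return unit_uuid in entry_map

def gen_entry(unit_uuid, name, title):
    return unit_uuid, {"title": title, "name": name}

# Toy stand-ins for the (Engagement, Bruger) rows returned by the queries
rows = [("unit-1", "Alice", "Developer"), ("unit-2", "Bob", "Orphan")]

org_unit_map = {"unit-1": {"engagements": []}}
entries = starmap(gen_entry, rows)
entries = filter(apply_tuple(partial(filter_missing_entry, org_unit_map, "Engagement")), entries)
for unit_uuid, entry in entries:
    org_unit_map[unit_uuid]["engagements"].append(entry)
# org_unit_map == {"unit-1": {"engagements": [{"title": "Developer", "name": "Alice"}]}}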