Example #1
def process_employments(employment_file, use_fok, people_to_ignore=None):
    "Synchronise the data in person_employment based on the latest SAP file."

    logger.debug("processing employments")
    employment_cache = cache_db_employments()
    for tpl in make_employment_iterator(
            file(employment_file), use_fok, logger):
        if not tpl.valid():
            logger.debug("Ignored invalid entry for person while "
                         "processing employment: «%s»",
                         tpl.sap_ansattnr)
            continue

        if people_to_ignore and tpl.sap_ansattnr in people_to_ignore:
            # e.g. those with wrong MG/MU
            logger.debug("Invalid person with sap_id=%s", tpl.sap_ansattnr)
            continue

        # just like process_affiliations
        ou_id = get_ou_id(tpl.sap_ou_id)
        if ou_id is None:
            logger.debug("No OU registered for SAP ou_id=%s", tpl.sap_ou_id)
            continue

        person = get_person(tpl.sap_ansattnr)
        if person is None:
            logger.debug("No person is registered for SAP ansatt# %s",
                         tpl.sap_ansattnr)
            continue

        synchronise_employment(employment_cache, tpl, person, ou_id)
        # Add person to employee-set, which is later used by
        # populate_work_titles()
        if person not in employees:
            employees.add(person)

    remove_db_employments(employment_cache)
    logger.debug("done with employments")
Example #2
def process_employments(employment_file, use_fok, people_to_ignore=None):
    "Synchronise the data in person_employment based on the latest SAP file."

    logger.debug("processing employments")
    employment_cache = cache_db_employments()
    for tpl in make_employment_iterator(file(employment_file), use_fok,
                                        logger):
        if not tpl.valid():
            logger.debug(
                "Ignored invalid entry for person while "
                "processing employment: «%s»", tpl.sap_ansattnr)
            continue

        if people_to_ignore and tpl.sap_ansattnr in people_to_ignore:
            # e.g. those with wrong MG/MU
            logger.debug("Invalid person with sap_id=%s", tpl.sap_ansattnr)
            continue

        # just like process_affiliations
        ou_id = get_ou_id(tpl.sap_ou_id)
        if ou_id is None:
            logger.debug("No OU registered for SAP ou_id=%s", tpl.sap_ou_id)
            continue

        person = get_person(tpl.sap_ansattnr)
        if person is None:
            logger.debug("No person is registered for SAP ansatt# %s",
                         tpl.sap_ansattnr)
            continue

        synchronise_employment(employment_cache, tpl, person, ou_id)
        # Add person to employee-set, which is later used by
        # populate_work_titles()
        if person not in employees:
            employees.add(person)

    remove_db_employments(employment_cache)
    logger.debug("done with employments")
Example #3
def process_affiliations(employment_file, person_file, use_fok,
                         people_to_ignore=None):
    """Parse employment_file and determine all affiliations.

    There are roughly 3 distinct parts:

    #. Cache all the affiliations in Cerebrum
    #. Scan the file and compare the file data with the cache. When there is a
       match, remove the entry from the cache.
    #. Remove from Cerebrum whatever is left in the cache (once we are done
       with the file, the cache contains those entries that were in Cerebrum
       but not in the file).
    """

    expired = load_expired_employees(file(person_file), use_fok, logger)

    # First we cache all existing affiliations. It's a mapping person-id =>
    # mapping (ou-id, affiliation) => status.
    affiliation_cache = cache_db_affiliations()
    person_cache = dict()

    def person_cacher(empid):
        ret = person_cache.get(empid, NotSet)
        if ret is NotSet:
            ret = person_cache[empid] = get_person(empid)
        return ret

    for tpl in make_employment_iterator(
            file(employment_file), use_fok, logger):
        if not tpl.valid():
            logger.debug("Ignored invalid entry for person while "
                         "processing affiliation: «%s»",
                         tpl.sap_ansattnr)
            continue

        if people_to_ignore and tpl.sap_ansattnr in people_to_ignore:
            logger.debug("Invalid person with sap_id=%s", tpl.sap_ansattnr)
            continue

        if tpl.sap_ansattnr in expired:
            logger.debug("Person sap_id=%s is no longer an employee; "
                         "all employment info will be ignored",
                         tpl.sap_ansattnr)
            continue

        # is the entry within a valid time frame?
    # The shift by 180 days was requested by UiA around 2007-03-27
        if not (tpl.start_date - DateTimeDelta(180) <= today() <=
                tpl.end_date):
            logger.debug("Entry %s has wrong timeframe (start: %s, end: %s)",
                         tpl, tpl.start_date, tpl.end_date)
            continue

        ou_id = get_ou_id(tpl.sap_ou_id)
        if ou_id is None:
            logger.warn("Cannot map SAP OU %s to Cerebrum ou_id (employment "
                        "for person sap_id=%s).",
                        tpl.sap_ou_id, tpl.sap_ansattnr)
            continue

        person = person_cacher(tpl.sap_ansattnr)
        if person is None:
            logger.warn("Cannot map SAP ansattnr %s to cerebrum person_id",
                        tpl.sap_ansattnr)
            continue

        (affiliation,
         affiliation_status) = sap_employment2affiliation(tpl.lonnstittel)

        synchronize_affiliations(affiliation_cache,
                                 person,
                                 ou_id, affiliation,
                                 affiliation_status)

    # We are done with fetching updates from file.
    # Need to write persons
    for p in person_cache.values():
        if p is None:
            continue
        logger.info("Writing cached affs for person id:%s", p.entity_id)
        p.write_db()

    # All the affiliations left in the cache exist in Cerebrum, but NOT in the
    # datafile. Thus delete them!
    remove_affiliations(affiliation_cache)
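
Note the NotSet sentinel in person_cacher above: a plain None default would re-run get_person for employee ids that legitimately resolve to None, whereas a distinct sentinel lets negative results be cached too. A standalone sketch of that memoisation (lookup is a hypothetical stand-in for get_person):

_NOT_SET = object()   # sentinel distinct from any real lookup result
_person_cache = {}

def cached_lookup(empid, lookup):
    value = _person_cache.get(empid, _NOT_SET)
    if value is _NOT_SET:
        # first time this id is seen; cache the result even when it is None
        value = _person_cache[empid] = lookup(empid)
    return value
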
Example #4
def process_affiliations(employment_file,
                         person_file,
                         use_fok,
                         people_to_ignore=None):
    """Parse employment_file and determine all affiliations.

    There are roughly 3 distinct parts:

    #. Cache all the affiliations in Cerebrum
    #. Scan the file and compare the file data with the cache. When there is a
       match, remove the entry from the cache.
    #. Remove from Cerebrum whatever is left in the cache (once we are done
       with the file, the cache contains those entries that were in Cerebrum
       but not in the file).
    """

    expired = load_expired_employees(file(person_file), use_fok, logger)

    # First we cache all existing affiliations. It's a mapping person-id =>
    # mapping (ou-id, affiliation) => status.
    affiliation_cache = cache_db_affiliations()
    person_cache = dict()

    def person_cacher(empid):
        ret = person_cache.get(empid, NotSet)
        if ret is NotSet:
            ret = person_cache[empid] = get_person(empid)
        return ret

    for tpl in make_employment_iterator(file(employment_file), use_fok,
                                        logger):
        if not tpl.valid():
            logger.debug(
                "Ignored invalid entry for person while "
                "processing affiliation: «%s»", tpl.sap_ansattnr)
            continue

        if people_to_ignore and tpl.sap_ansattnr in people_to_ignore:
            logger.debug("Invalid person with sap_id=%s", tpl.sap_ansattnr)
            continue

        if tpl.sap_ansattnr in expired:
            logger.debug(
                "Person sap_id=%s is no longer an employee; "
                "all employment info will be ignored", tpl.sap_ansattnr)
            continue

        # is the entry within a valid time frame?
    # The shift by 180 days was requested by UiA around 2007-03-27
        if not (tpl.start_date - DateTimeDelta(180) <= today() <=
                tpl.end_date):
            logger.debug("Entry %s has wrong timeframe (start: %s, end: %s)",
                         tpl, tpl.start_date, tpl.end_date)
            continue

        ou_id = get_ou_id(tpl.sap_ou_id)
        if ou_id is None:
            logger.warn(
                "Cannot map SAP OU %s to Cerebrum ou_id (employment "
                "for person sap_id=%s).", tpl.sap_ou_id, tpl.sap_ansattnr)
            continue

        person = person_cacher(tpl.sap_ansattnr)
        if person is None:
            logger.warn("Cannot map SAP ansattnr %s to cerebrum person_id",
                        tpl.sap_ansattnr)
            continue

        (affiliation,
         affiliation_status) = sap_employment2affiliation(tpl.lonnstittel)

        synchronize_affiliations(affiliation_cache, person, ou_id, affiliation,
                                 affiliation_status)

    # We are done with fetching updates from file.
    # Need to write persons
    for p in person_cache.values():
        if p is None:
            continue
        logger.info("Writing cached affs for person id:%s", p.entity_id)
        p.write_db()

    # All the affiliations left in the cache exist in Cerebrum, but NOT in the
    # datafile. Thus delete them!
    remove_affiliations(affiliation_cache)
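
The time-frame test above treats an entry as current if today falls between (start_date - 180 days) and end_date, inclusive; the 180-day slack is the shift requested by UiA. A rough equivalent using the standard library's datetime (an assumption; the original code uses mx.DateTime's DateTimeDelta):

import datetime

def within_window(start_date, end_date, slack_days=180):
    # start_date/end_date are datetime.date values from the employment entry
    today = datetime.date.today()
    return start_date - datetime.timedelta(days=slack_days) <= today <= end_date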