Example #1
0
    def __autorefresh(self):
        """Refresh project and identity fields in the enriched index.

        Queries SortingHat for identities modified since the last refresh,
        re-enriches the affected items by ``author_uuid`` and ``author_id``,
        and only then advances ``self.last_autorefresh``.
        """
        enrich_backend = self._get_enrich_backend()
        field_id = enrich_backend.get_field_unique_id()

        if False:
            # TODO: Waiting that the project info is loaded from yaml files
            logger.info("Refreshing project field in enriched index")
            eitems = refresh_projects(enrich_backend)
            enrich_backend.elastic.bulk_upload(eitems, field_id)

        # Refresh identities
        logger.info("Refreshing identities fields in enriched index %s",
                    self.backend_section)

        after = self.last_autorefresh
        # Snapshot the time BEFORE querying SortingHat so identities modified
        # while the query/upload runs are not skipped on the next iteration;
        # commit it to self.last_autorefresh only after the uploads below.
        next_autorefresh = datetime.utcnow()

        logger.debug('Getting last modified identities from SH since %s for %s',
                     after, self.backend_section)
        (uuids_refresh, ids_refresh) = api.search_last_modified_identities(self.db, after)

        if uuids_refresh:
            logger.debug("Refreshing for %s uuids %s", self.backend_section, uuids_refresh)
            eitems = refresh_identities(enrich_backend,
                                        {"name": "author_uuid",
                                         "value": uuids_refresh})
            enrich_backend.elastic.bulk_upload(eitems, field_id)
        else:
            logger.debug("No uuids to be refreshed found")
        if ids_refresh:
            logger.debug("Refreshing for %s ids %s", self.backend_section, ids_refresh)
            eitems = refresh_identities(enrich_backend,
                                        {"name": "author_id",
                                         "value": ids_refresh})
            enrich_backend.elastic.bulk_upload(eitems, field_id)
        else:
            logger.debug("No ids to be refreshed found")

        # Commit the refresh timestamp now that the updates have been issued
        self.last_autorefresh = next_autorefresh
    def __autorefresh(self, enrich_backend, studies=False):
        """Refresh project and identity fields in the enriched index.

        :param enrich_backend: enrichment backend used to re-enrich and
            bulk-upload the affected items
        :param studies: when True, read/update the studies-specific
            autorefresh timestamp instead of the regular one
        """
        # Refresh projects
        field_id = enrich_backend.get_field_unique_id()

        if False:
            # TODO: Waiting that the project info is loaded from yaml files
            logger.info("Refreshing project field in enriched index")
            field_id = enrich_backend.get_field_unique_id()
            eitems = refresh_projects(enrich_backend)
            enrich_backend.elastic.bulk_upload(eitems, field_id)

        # Refresh identities
        logger.info("Refreshing identities fields in enriched index %s",
                    self.backend_section)

        after = self.last_autorefresh_studies if studies else self.last_autorefresh

        # Snapshot the time before recovering modified identities so no
        # modification made during this run is lost next iteration; the
        # corresponding attribute is only updated once the ElasticSearch
        # upload below has been issued.
        next_autorefresh = self.__update_last_autorefresh()

        logger.debug(
            'Getting last modified identities from SH since %s for %s', after,
            self.backend_section)
        uuids_refresh, ids_refresh = api.search_last_modified_identities(self.db, after)

        # One pass per identity filter: (filter field, modified values,
        # hit log format, miss log message)
        refresh_plan = (
            ("author_uuid", uuids_refresh,
             "Refreshing for %s uuids %s", "No uuids to be refreshed found"),
            ("author_id", ids_refresh,
             "Refreshing for %s ids %s", "No ids to be refreshed found"),
        )
        for filter_name, modified, hit_msg, miss_msg in refresh_plan:
            if not modified:
                logger.debug(miss_msg)
                continue
            logger.debug(hit_msg, self.backend_section, modified)
            enriched_items = refresh_identities(
                enrich_backend, {"name": filter_name, "value": modified})
            enrich_backend.elastic.bulk_upload(enriched_items, field_id)

        # Commit the corresponding autorefresh date
        if studies:
            self.last_autorefresh_studies = next_autorefresh
        else:
            self.last_autorefresh = next_autorefresh
    def __autorefresh(self):
        """Refresh project and identity fields in the enriched index.

        Recovers identities modified in SortingHat since the last refresh
        and re-enriches the items matching them by ``author_uuid`` and
        ``author_id``.
        """
        # Refresh projects
        enrich_backend = self._get_enrich_backend()
        field_id = enrich_backend.get_field_unique_id()

        if False:
            # TODO: Waiting that the project info is loaded from yaml files
            logger.info("Refreshing project field in enriched index")
            field_id = enrich_backend.get_field_unique_id()
            eitems = refresh_projects(enrich_backend)
            enrich_backend.elastic.bulk_upload(eitems, field_id)

        # Refresh identities
        logger.info("Refreshing identities fields in enriched index %s",
                    self.backend_section)
        uuids_refresh = []
        since = self.last_autorefresh
        logger.debug(
            'Getting last modified identities from SH since %s for %s', since,
            self.backend_section)
        uuids_refresh, ids_refresh = api.search_last_modified_identities(self.db, since)
        self.last_autorefresh = datetime.utcnow()

        # One pass per identity filter: (filter field, modified values,
        # hit log format, miss log message)
        for attr, modified, hit_msg, miss_msg in (
                ("author_uuid", uuids_refresh,
                 "Refreshing for %s uuids %s", "No uuids to be refreshed found"),
                ("author_id", ids_refresh,
                 "Refreshing for %s ids %s", "No ids to be refreshed found")):
            if modified:
                logger.debug(hit_msg, self.backend_section, modified)
                eitems = refresh_identities(enrich_backend,
                                            {"name": attr, "value": modified})
                enrich_backend.elastic.bulk_upload(eitems, field_id)
            else:
                logger.debug(miss_msg)