def __autorefresh(self):
    """Refresh the projects and identities fields of all items in the enriched index."""
    logging.info("[%s] Refreshing project and identities "
                 "fields for all items", self.backend_name)

    # Refresh projects
    if False:
        # TODO: Waiting that the project info is loaded from yaml files
        logging.info("Refreshing project field in enriched index")
        enrich_backend = self.get_enrich_backend()
        field_id = enrich_backend.get_field_unique_id()
        eitems = refresh_projects(enrich_backend)
        enrich_backend.elastic.bulk_upload_sync(eitems, field_id)

    # Refresh identities
    logging.info("Refreshing identities fields in enriched index")
    enrich_backend = self.get_enrich_backend()
    field_id = enrich_backend.get_field_unique_id()
    eitems = refresh_identities(enrich_backend)
    enrich_backend.elastic.bulk_upload_sync(eitems, field_id)
def __autorefresh(self):
    """Refresh the identities fields of the items whose identities were modified
    in SortingHat since the last autorefresh."""
    # Refresh projects
    enrich_backend = self._get_enrich_backend()
    field_id = enrich_backend.get_field_unique_id()

    if False:
        # TODO: Waiting that the project info is loaded from yaml files
        logger.info("Refreshing project field in enriched index")
        field_id = enrich_backend.get_field_unique_id()
        eitems = refresh_projects(enrich_backend)
        enrich_backend.elastic.bulk_upload_sync(eitems, field_id)

    # Refresh identities
    logger.info("Refreshing identities fields in enriched index %s",
                self.backend_section)

    uuids_refresh = []

    after = self.last_autorefresh
    logger.debug("Getting last modified identities from SH since %s for %s",
                 after, self.backend_section)
    (uuids_refresh, ids_refresh) = api.search_last_modified_identities(self.db, after)
    self.last_autorefresh = datetime.utcnow()

    if uuids_refresh:
        logger.debug("Refreshing for %s uuids %s", self.backend_section, uuids_refresh)
        eitems = refresh_identities(enrich_backend,
                                    {"name": "author_uuid",
                                     "value": uuids_refresh})
        enrich_backend.elastic.bulk_upload_sync(eitems, field_id)
    else:
        logger.debug("No uuids to be refreshed found")

    if ids_refresh:
        logger.debug("Refreshing for %s ids %s", self.backend_section, ids_refresh)
        eitems = refresh_identities(enrich_backend,
                                    {"name": "author_id",
                                     "value": ids_refresh})
        enrich_backend.elastic.bulk_upload_sync(eitems, field_id)
    else:
        logger.debug("No ids to be refreshed found")
def __autorefresh(self):
    """Refresh the identities fields of the items whose uuids are queued in
    TasksManager.UPDATED_UUIDS_QUEUE."""
    logger.info("[%s] Refreshing project and identities "
                "fields for updated uuids", self.backend_section)

    # Refresh projects
    if False:
        # TODO: Waiting that the project info is loaded from yaml files
        logger.info("Refreshing project field in enriched index")
        enrich_backend = self._get_enrich_backend()
        field_id = enrich_backend.get_field_unique_id()
        eitems = refresh_projects(enrich_backend)
        enrich_backend.elastic.bulk_upload_sync(eitems, field_id)

    # Refresh identities
    logger.info("Refreshing identities fields in enriched index")
    enrich_backend = self._get_enrich_backend()
    field_id = enrich_backend.get_field_unique_id()

    # Now we need to get the uuids to be refreshed
    logger.debug("Checking if there are uuids to refresh in %s", self.backend_section)
    backends_uuids = TasksManager.UPDATED_UUIDS_QUEUE.get()
    if backends_uuids:
        logger.debug("Doing autorefresh for %s (%s uuids)",
                     self.backend_section, backends_uuids)
        if backends_uuids[self.backend_section]:
            uuids_refresh = backends_uuids[self.backend_section]
            # Empty the uuids for this backend and put the dict back in the queue
            backends_uuids[self.backend_section] = []
            logger.debug("New uuids data: %s", backends_uuids)
            TasksManager.UPDATED_UUIDS_QUEUE.put(backends_uuids)

            logger.debug("Refreshing uuids %s", uuids_refresh)
            eitems = refresh_identities(enrich_backend,
                                        {"name": "author_uuid",
                                         "value": uuids_refresh})
            enrich_backend.elastic.bulk_upload_sync(eitems, field_id)
        else:
            TasksManager.UPDATED_UUIDS_QUEUE.put(backends_uuids)
    else:
        TasksManager.UPDATED_UUIDS_QUEUE.put(backends_uuids)
        logger.warning("No dict with uuids per backend to be refreshed")