def handle(self, *args, **options):
    load_essays(settings.ESSAYS_FEED, index=True)
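# --- A minimal sketch of the management-command wrapper assumed around the
# handle() above. The class layout follows Django's management-command
# convention; the load_essays import path is a placeholder, since the
# original excerpt does not show it.
from django.conf import settings
from django.core.management.base import BaseCommand

from core.essay_loader import load_essays  # import path assumed


class Command(BaseCommand):
    help = "Load essays from settings.ESSAYS_FEED and index them."

    def handle(self, *args, **options):
        # index=True asks the loader to push the essays into the search
        # index as part of the same run.
        load_essays(settings.ESSAYS_FEED, index=True)

# Invocation would then be the usual "python manage.py <command_name>", or
# call_command('<command_name>') from other code; the command name depends
# on the module's filename, which the excerpt does not show.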
def handle(self, *args, **options):
    start = datetime.now()
    _logger.info("Starting title sync process.")

    # Only load titles if BIB_STORAGE is configured and present on disk;
    # that is not always the case for folks in the open-source world.
    bib_hasattr = hasattr(settings, "BIB_STORAGE")
    bib_isdir = bib_hasattr and os.path.isdir(settings.BIB_STORAGE)
    bib_settings = bool(bib_hasattr and bib_isdir)
    if bib_settings:
        bib_storage = settings.BIB_STORAGE
        worldcat_dir = bib_storage + '/worldcat_titles/'

        pull_titles = bool(options['pull_title_updates'] and
                           hasattr(settings, "WORLDCAT_KEY"))
        if pull_titles:
            call_command('pull_titles')

        _logger.info("Starting load of OCLC titles.")
        bulk_dir = worldcat_dir + 'bulk'
        if os.path.isdir(bulk_dir):
            call_command('load_titles', bulk_dir, skip_index=True)

        tnu = self.find_titles_not_updated()

        # Only update by individual lccn if there are records that need updating.
        if pull_titles and len(tnu):
            _logger.info("Pulling titles from OCLC by individual lccn & oclc num.")
            self.pull_lccn_updates(tnu)

            _logger.info("Loading titles from second title pull.")
            lccn_dir = worldcat_dir + 'lccn'
            if os.path.isdir(lccn_dir):
                call_command('load_titles', lccn_dir, skip_index=True)

        tnu = self.find_titles_not_updated(limited=False)
        _logger.info("Running pre-deletion checks for these titles.")

    # Make sure that our essays are up to date.
    if not options['skip_essays']:
        load_essays(settings.ESSAYS_FEED)

    if bib_settings:
        if len(tnu):
            # Delete titles that have not been updated and that have
            # neither essays nor issues attached.
            for title in tnu:
                essays = title.essays.all()
                issues = title.issues.all()
                error = "DELETION ERROR: Title %s has " % title
                error_end = " It will not be deleted."
                if not essays and not issues:
                    delete_txt = (title.name, title.lccn, title.oclc)
                    _logger.info('TITLE DELETED: %s, lccn: %s, oclc: %s' % delete_txt)
                    title.delete()
                elif essays:
                    _logger.warning(error + 'essays.' + error_end)
                elif issues:
                    _logger.warning(error + 'issues.' + error_end)

        # Load holdings for all remaining titles.
        holdings_dir = settings.BIB_STORAGE + '/holdings'
        call_command('load_holdings', holdings_dir)

    # Overlay place info harvested from dbpedia onto the places table.
    try:
        self.load_place_links()
    except Exception as e:
        _logger.exception(e)
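# --- A hypothetical sketch of the find_titles_not_updated() helper called
# above; the original is not shown. The Title model fields used here
# ("version" as a last-modified timestamp, plus lccn/oclc) are assumptions
# inferred from how handle() uses the results, and the sketch assumes
# handle() stashes its start timestamp on the instance (self.start), which
# the excerpt above does not show.
def find_titles_not_updated(self, limited=True):
    _logger.info("Looking for titles not yet updated.")
    # Titles whose version timestamp predates this sync run were not
    # touched by the bulk OCLC load above.
    titles = Title.objects.filter(version__lt=self.start)
    if limited:
        # The per-lccn pull only needs the record identifiers.
        titles = titles.values('lccn', 'oclc')
    return titles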
def handle(self, *args, **options):
    start = datetime.now()
    LOGGER.info("Starting title sync process.")

    # Only load titles if BIB_STORAGE is configured and present on disk;
    # that is not always the case for folks in the open-source world.
    bib_in_settings = validate_bib_dir()

    if bib_in_settings:
        worldcat_dir = bib_in_settings + '/worldcat_titles/'

        pull_titles = bool(options['pull_title_updates'] and
                           hasattr(settings, "WORLDCAT_KEY"))
        if pull_titles:
            call_command('pull_titles')

        LOGGER.info("Starting load of OCLC titles.")
        bulk_dir = worldcat_dir + 'bulk'
        if os.path.isdir(bulk_dir):
            call_command('load_titles', bulk_dir, skip_index=True)

        tnu = self.find_titles_not_updated()

        # Only update by individual lccn if there are records that need updating.
        if pull_titles and len(tnu):
            LOGGER.info("Pulling titles from OCLC by individual lccn & oclc num.")
            self.pull_lccn_updates(tnu)

            LOGGER.info("Loading titles from second title pull.")
            lccn_dir = worldcat_dir + 'lccn'
            if os.path.isdir(lccn_dir):
                call_command('load_titles', lccn_dir, skip_index=True)

        tnu = self.find_titles_not_updated(limited=False)
        LOGGER.info("Running pre-deletion checks for these titles.")

    # Make sure that our essays are up to date.
    if not options['skip_essays']:
        load_essays(settings.ESSAYS_FEED)

    if bib_in_settings:
        if len(tnu):
            # Delete titles that have not been updated and that have
            # neither essays nor issues attached.
            for title in tnu:
                essays = title.essays.all()
                issues = title.issues.all()
                error = "DELETION ERROR: Title %s has " % title
                error_end = " It will not be deleted."
                if not essays and not issues:
                    delete_txt = (title.name, title.lccn, title.oclc)
                    LOGGER.info('TITLE DELETED: %s, lccn: %s, oclc: %s' % delete_txt)
                    title.delete()
                elif essays:
                    LOGGER.warning(error + 'essays.' + error_end)
                elif issues:
                    LOGGER.warning(error + 'issues.' + error_end)

        # Load holdings for all remaining titles.
        call_command('load_holdings')

    # Overlay place info harvested from dbpedia onto the places table.
    try:
        self.load_place_links()
    except Exception as e:
        LOGGER.exception(e)

    index.index_titles()

    # Report timing for the full process run.
    end = datetime.now()
    total_time = end - start
    LOGGER.info('start time: %s' % start)
    LOGGER.info('end time: %s' % end)
    LOGGER.info('total time: %s' % total_time)
    LOGGER.info("title_sync done.")
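# --- A sketch of the validate_bib_dir() helper used above, reconstructed
# from the inline hasattr/isdir checks in the previous version of handle();
# the real helper may differ (it might log a warning, for instance), but
# the contract implied here is "return a usable BIB_STORAGE path, or None."
import os

from django.conf import settings


def validate_bib_dir():
    # BIB_STORAGE is optional for open-source deployments, so require both
    # that the setting exists and that the directory is present on disk.
    bib_dir = getattr(settings, 'BIB_STORAGE', None)
    if bib_dir and os.path.isdir(bib_dir):
        return bib_dir
    return None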