Example #1
    def load(cache, trials, sleep, timeout):
        act = Action.GETCOMMPEERS
        log.info("Start %s:..." % (act))
        success = False
        try:
            # retry the load up to `trials` times, reconnecting to the cache
            # after each HTTP failure before giving up
            for _ in range(trials):
                try:
                    CacheCommPeer.__loadTry(cache.ui, cache)
                    success = True
                    break
                except urllib2.HTTPError:
                    cache.reconnect(sleep, timeout)
            if not success:
                log.error("exhausted trials; could not %s %s" % (act, cache))
        except Exception as inst:
            log.error("could not digest comm peer status %s (%s)" % (str(inst), traceback.format_exc()))
        finally:
            log.debug("Stop %s: Success %s..." % (act, success))
        return success
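
Example #1 is essentially a bounded retry loop around a flaky network call: attempt the operation, reconnect on an HTTP error, and give up after a fixed number of trials. The following minimal sketch shows the same pattern in isolation; it does not use the original code base, and the names TransientError, operation, and reconnect are placeholders for whatever call and recovery step you are actually retrying.

import logging
import time

log = logging.getLogger(__name__)


class TransientError(Exception):
    """Placeholder for whatever recoverable error the call can raise."""


def retry(operation, reconnect, trials, sleep):
    """Run `operation` up to `trials` times, calling `reconnect` and
    sleeping between attempts; return True on the first success."""
    for attempt in range(1, trials + 1):
        try:
            operation()
            return True
        except TransientError as inst:
            log.warning("attempt %d/%d failed: %s", attempt, trials, inst)
            reconnect()
            time.sleep(sleep)
    log.error("exhausted %d trials", trials)
    return False


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    state = {"calls": 0}

    def flaky():
        # fail twice, then succeed
        state["calls"] += 1
        if state["calls"] < 3:
            raise TransientError("not ready yet")

    print(retry(flaky, reconnect=lambda: None, trials=5, sleep=0.1))
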
Example #2
    def __loadTry(ui, auId):
        '''
        update an existing LockssCrawlStatus or create a new one by reading
        crawl status info from the cache UI
        '''
        log.debug("try %s" % (auId))
        st = { 'auId' : auId }
        status = ui.getCrawlStatus(auId.masterAuId.getLockssAu())
        reportDate = datetime.utcnow().replace(tzinfo=pytz.utc)

        if not status:
            log.debug2("No CrawlStatus Info for %s %s" % (auId.cache, auId.auId))
        else:
            for s in status:
                # work on status info
                if not s:
                    raise LockssError("CrawlStatusTable returned empty info")
                try:
                    st['reportDate'] = reportDate
                    st['type'] = s['crawl_type']
                    st['startTime'] = datetime.strptime(s['start'], utils.UI_STRFTIME)
                    st['nBytesFetched'] = s['content_bytes_fetched'].replace(",", "")
                    st['status'] = s['crawl_status']['value']
                    st['nMimeTypes'] = str(s['num_of_mime_types']['value'].replace(",", ""))
                    st['duration'] = str(s['dur'])
                    # integer fields may arrive either as plain comma-grouped
                    # strings or as {'value': ...} dicts; normalize both forms
                    for f in LockssCrawlStatus.INTFIELDMAP:
                        val = s[LockssCrawlStatus.INTFIELDMAP[f]]
                        if isinstance(val, unicode):
                            st[f] = int(val.replace(",", ""))
                        else:
                            st[f] = str(val['value'].replace(",", ""))
                except KeyError:
                    raise LockssError("CrawlStatusTable returned faulty info for %s: %s" % (auId.auId, s))

                try:
                    # update the existing crawl status for this AU and start time
                    crawl = LockssCrawlStatus.objects.get(auId = auId, startTime = st['startTime'])
                    crawl.__dict__.update(st)
                    log.debug("LockssCrawlStatus UPD %s %s" % (crawl.startTime, str(crawl.auId)))
                except ObjectDoesNotExist:
                    # create a new crawl status
                    crawl = LockssCrawlStatus.objects.create(**st)
                    log.debug("LockssCrawlStatus NEW %s %s" % (crawl.startTime, str(crawl.auId)))
                crawl.save()
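
The second half of Example #2 is a hand-rolled upsert: look the record up by its natural key (auId, startTime), update it if it exists, otherwise create it. Below is a minimal, self-contained sketch of the same idea with a plain in-memory dict standing in for the ORM table; parse_count mirrors the comma-stripping the example applies to fields such as content_bytes_fetched. All names here are illustrative, not the original project's. In a Django code base, Model.objects.update_or_create() could replace the get/except/create sequence, assuming a reasonably recent Django version.

from datetime import datetime

# in-memory stand-in for the LockssCrawlStatus table, keyed by natural key
_crawl_table = {}


def parse_count(raw):
    """Turn a comma-grouped count such as '1,234' into an int."""
    return int(str(raw).replace(",", ""))


def upsert_crawl_status(au_id, start_time, fields):
    """Update the record for (au_id, start_time) if it exists, else create it.
    Returns (record, created)."""
    key = (au_id, start_time)
    created = key not in _crawl_table
    record = _crawl_table.setdefault(key, {"auId": au_id, "startTime": start_time})
    record.update(fields)
    return record, created


if __name__ == "__main__":
    started = datetime(2015, 6, 1, 12, 0)
    rec, created = upsert_crawl_status(
        "org|example|au1", started,
        {"status": "Successful", "nBytesFetched": parse_count("1,234,567")},
    )
    print(created, rec["nBytesFetched"])   # True 1234567
    rec, created = upsert_crawl_status(
        "org|example|au1", started, {"status": "Active"},
    )
    print(created, rec["status"])          # False Active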