Example #1
 def load_keystrings(keystrings):
     # create a MasterAuId entry for every keystring not yet in the database
     for auid in keystrings:
         if (not MasterAuId.objects.filter(auId=auid).exists()):
             try:
                 MasterAuId(auId=auid).save()
             except LockssError as err:
                 log.warn("could not create: %s" % (err))
Example #2
 def load(cache, auId, doUrls, expire, trials, sleep, timeout): 
     act = Action.GETAUSUMMARY
     if (doUrls): 
         act = act + "," + Action.GETURLLIST
     log.info('Start %s: %s expire=%s ...' % (act, auId, str(expire)))
     success = False
     try: 
         for _ in range(trials):
             try: 
                 if (doUrls):
                     UrlReport.loadTry(cache.ui, auId, expire)
                 else:
                     LockssCacheAuSummary.__loadTry(cache.ui, auId, expire)
                 success = True
                 break
             except urllib2.HTTPError as inst:
                 cache.reconnect(sleep, timeout)
             except ExpatError:
                 log.error("XML Parser error; could not %s %s" % (act, auId))
                 success = False  # try again
         if (not success):                  
             log.error("exhausted trials for %s; could not load %s" % (auId, act))
     except LockssError as inst:
         log.warn("LockssException: %s" % inst)  # output is scanned for the ERROR string 
         log.warn("could not digest %s for %s" % (act, auId.auId)) 
     finally: 
         log.debug2('Stop %s: %s Success = %s ...' % (act, auId, success))
     return success
Example #3
 def printConfigFile(fname, action, auIds, cache):
     # write a config file listing the given action and auids
     log.warn("printing ids for %s on %s to %s" % (action, cache, fname))
     with open(fname, 'w') as f:
         f.write('[%s]\n' % LockssScript.PARAMSECTION)
         f.write('actionlist = %s\n' % action)
         f.write('auidlist =\n')
         for au in auIds:
             # continuation lines stay indented so ConfigParser reads them
             # as part of the auidlist value
             f.write('\t' + au.auId + "\n ")
         f.write('\n')
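For reference, a minimal standalone sketch of the file this produces and of reading it back with ConfigParser. The section name 'Params', the action string, and the two auids are made-up placeholders (the real section name comes from LockssScript.PARAMSECTION), and the Au namedtuple only stands in for the real au objects:

 from collections import namedtuple
 from ConfigParser import RawConfigParser       # Python 2 module name

 Au = namedtuple('Au', 'auId')                  # stand-in for the real au objects
 aus = [Au('org|example|PluginOne&volume~1'),   # placeholder auids
        Au('org|example|PluginOne&volume~2')]

 with open('/tmp/sample.conf', 'w') as f:
     f.write('[Params]\n')                      # LockssScript.PARAMSECTION in the real code
     f.write('actionlist = printurllist\n')     # placeholder action
     f.write('auidlist =\n')
     for au in aus:
         f.write('\t' + au.auId + '\n ')        # indented continuation lines
     f.write('\n')

 cfg = RawConfigParser()
 cfg.read('/tmp/sample.conf')
 print cfg.get('Params', 'auidlist').split()    # the two placeholder auids, as a list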
Example #4
 def load(cache, auId, trials, sleep,timeout):
     success = False
     log.debug2('Start %s: %s ...' % (Action.GETCRAWLSTATUS, auId))
     try: 
         log.info('get %s: %s ...' % (Action.GETCRAWLSTATUS, auId))
         for _ in range(trials):
             try:
                 LockssCrawlStatus.__loadTry(cache.ui, auId)
                 success = True
                 break
             except urllib2.HTTPError as inst:
                 cache.reconnect(sleep, timeout)
         if (not success):
             log.error("exhausted trials for %s, could not load crawlstatus" % (auId))
     except LockssError as inst:
         log.warn("LockssException: %s" % inst)  # output is scanned for the ERROR string 
         log.warn("could not digest %s for %s" % (Action.GETCRAWLSTATUS, auId.auId)) 
     finally: 
         log.debug2('Stop %s: %s Success = %s ...' % (Action.GETCRAWLSTATUS, auId, success))
     return  success
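Both load() variants above share the same retry shape; here is a minimal generic sketch of that pattern (the names retry, operation and reconnect are illustrative and not part of the original code):

 import urllib2

 def retry(operation, reconnect, trials, sleep, timeout):
     # call operation() up to 'trials' times; after an HTTPError, reconnect
     # (as cache.reconnect(sleep, timeout) does above) and try again
     for _ in range(trials):
         try:
             operation()
             return True
         except urllib2.HTTPError:
             reconnect(sleep, timeout)
     return False  # failure is reported only after all trials are exhausted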
Example #5
 def printcsv(folder, auids, orderby, hdrs, sep, minrev = 1):
     '''
     print url reports for all given auids including urls that have a 
     version at least as great as minrev, which defaults to 1
     '''       
     if (not auids): 
         log.info('NOOP %s: No auids to print to %s' % (Action.PRTURLLIST, folder) )
         return
     
     for auid in auids:
         urls = []
         try: 
             if (orderby == 'minversion' or orderby == 'replication'): 
                 urls = auid.urlreport.url_set.filter(version__gte=minrev).all()
             else: 
                 urls = auid.urlreport.url_set.filter(version__gte=minrev).order_by(orderby).all()
             ext = ".tsv"
             if (sep == ","): 
                 ext = ".csv"
             f = open(folder + "/" + auid.auId + ext, 'w')
             if (urls.count() == 0):
                 log.info("NOOP %s: file %s No Urls with version >= %s" % (Action.PRTURLLIST, f.name, minrev))
             log.info('Start %s: file %s version %s' % (Action.PRTURLLIST, f.name, minrev))
             try:
                 reportDate = auid.urlreport.reportDate 
                 f.write("ReportDate\t%s\nIncluding Urls with version >= %s\n\n" % (str(reportDate), minrev))
                 f.write(sep.join(hdrs) + "\n")
                 for url in urls:
                     f.write(url.csv(hdrs, sep) + "\n")
                 log.debug2('Stop %s: file %s version %s' % (Action.PRTURLLIST, f.name, minrev))
             except IndexError:
                 log.info("NOOP %s: file %s No Urls at all" % (Action.PRTURLLIST, f.name))
             finally:
                 f.close()  # close the file even when nothing could be written
         
         except ObjectDoesNotExist: 
             log.warn('Start %s: No UrlReport for %s at %s' % 
                      (Action.PRTURLLIST, auid, auid.cache.name))
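A minimal standalone sketch (placeholder headers and rows, no Django models) of the file layout printcsv writes: the ReportDate line and the minrev note, a blank line, the header row, and one separator-joined line per url:

 hdrs = ['urlname', 'version', 'size']             # hypothetical header list
 rows = [('http://example.org/a', 2, 1024),        # hypothetical url rows
         ('http://example.org/b', 1, 2048)]
 sep = '\t'                                        # ".tsv"; a ',' would give ".csv"
 with open('/tmp/sample.tsv', 'w') as f:
     f.write("ReportDate\t2013-01-01 00:00:00\nIncluding Urls with version >= 1\n\n")
     f.write(sep.join(hdrs) + "\n")
     for r in rows:
         f.write(sep.join(str(v) for v in r) + "\n")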
Example #6
    def process(self):
        log.info("---") 
        log.info("Start Processing") 
    
        if (self.options.dryrun):
            return
        opts = self.options.__dict__
        print "# COMMAND", self.options._COMMAND; 
        for key in ['ausetname', 'auseturl']: 
            if (opts.has_key(key)): 
                print "#", key, opts.get(key); 
        print "# "; 
        
        caches = self.get_caches();
        print "# SERVER\t", "\n# SERVER\t".join([str(c) for c in caches]);
        print "# ";
                
        # TODO deal with serverlist - aka restrict to aus on given servers 
        fields = ["auid", "repl", "reportDate", "cache", "agree", 
                  "sizeMB", "diskMB", "repository", 
                  "#urls", "avg_version", "min_version", "max_version", 
                  "best_copy"]
        print "\t".join(fields); 
        for auid in self.options.auids: 
            # TODO restrict to servers in options.serverlist 
            auids = LockssCacheAuId.objects.filter(Q(auId=auid) ,self.get_Q_caches() )
            log.info("AUID: %s" % "\t".join([auid, "#matches=%d" % auids.count()]));

            profiles = []
            for au in auids: 
                prof = {
                        'auid' : auid,
                        'repl' : au.replication(),
                        'reportDate' : "",
                        'cache' : au.cache.name,
                        'agree' : "",
                        'repository' : "",
                        '#urls' : "",
                        'avg_version' : "",
                        'min_version' : "",
                        'max_version' : "", 
                        'sizeMB': "", 
                        'diskMB': "",
                        "best_copy" : False
                }
                lausum = LockssCacheAuSummary.objects.filter(auId = au)
                if (not lausum.exists()):
                    log.warn("No AuSummary Info for " + str(au))
                else:
                    lausum = lausum[0]
                    if (lausum.agreement is not None): prof['agree'] = lausum.agreement
                    prof['repository'] = lausum.repository
                    prof['sizeMB'] = lausum.contentSizeMB()
                    prof['diskMB'] = lausum.diskUsageMB
                    prof['reportDate'] = lausum.reportDate
                    
                    urlReport = UrlReport.objects.filter(auId = au)

                    if (not urlReport.exists()):
                        log.warn("No Url Info for " + str(au))
                        prof['#urls'] = 0  # key must match the '#urls' entry in fields
                    else:
                        report = urlReport[0]
                        urls = report.url_set

                        version_info = urls.aggregate(Avg('version'), Min('version'), Max('version'))
                        prof['avg_version'] = version_info['version__avg']
                        prof['min_version'] = version_info['version__min']
                        prof['max_version'] = version_info['version__max']
                profiles.append(prof) 
                    
            
            # find the au that has the max agreement and if there are multiple 
            # the one/one of the ones with the max avg_version number 
            # and designate as best_copy
            #
            # find max avg_version number 
            
            if (profiles):
                max_agree = max(v['agree'] for v in profiles)
                # find all that have that max_agree value
                candidates = []
                for prof in profiles:
                    if (prof['agree'] == max_agree):
                        candidates.append(prof)
                max_version = max(v['avg_version'] for v in candidates)
                # among those candidates, mark the one with the highest avg_version
                for candidate in candidates:
                    if (candidate['avg_version'] == max_version):
                        candidate['best_copy'] = True
                        break
                assert(candidate)
                
                for prof in profiles:
                    vals = []
                    for f in fields:
                        v = prof[f]
                        if (isinstance(v, float)):
                            vals.append("%.2f" % v)
                        else:
                            vals.append(str(v))
                    print "\t".join(vals)
                for i in range(0, 3):
                    print ""
                           
        log.info("Stop Processing")