def checklogs(self, **kwargs):
    """Download pending Beaker log files and push them to the search index.

    Runs in two capped phases (at most ``settings.MAX_LOGS_IN_ONE_CHECK``
    files each, newest first):

    1. download every ``FileLog`` with ``status_code == 0``, persisting the
       returned status code even when the download fails, and parse the
       journal of each successfully downloaded file;
    2. if ``settings.ELASTICSEARCH`` is enabled, index files that are
       downloaded but not yet indexed.

    Finally asks ``FileLog.clean_old()`` to purge stale records.
    """
    logger.info("%d files to download" % FileLog.objects.filter(status_code=0).count())
    logger.info("%d files to indexing" % FileLog.objects.filter(is_indexed=False).count())
    beaker = Beaker()
    pending = FileLog.objects.filter(status_code=0).order_by("-created")
    for filelog in pending[0:settings.MAX_LOGS_IN_ONE_CHECK]:
        filelog.status_code, logpath = beaker.downloadLog(filelog.url)
        if not logpath:
            # Download failed: persist only the new status code and move on.
            filelog.save()
            continue
        filelog.path = logpath
        filelog.is_downloaded = True
        filelog.save()
        try:
            filelog.parse_journal()
        except Exception as e:
            # Parsing is best-effort; a broken journal must not abort the run.
            logger.debug("parse log file: %s" % e)
    if settings.ELASTICSEARCH:
        unindexed = FileLog.objects.filter(
            is_downloaded=True, is_indexed=False).order_by("-created")
        for filelog in unindexed[0:settings.MAX_LOGS_IN_ONE_CHECK]:
            try:
                filelog.index()
            except Exception as e:
                # Keep indexing the rest even if one file fails.
                logger.info("indexing %s: %s" % (filelog.path, e))
    FileLog.clean_old()
def download_files_from_recipe(recipe):
    """Register FileLog records for a recipe's backup-worthy log files.

    Lists the log URLs Beaker knows for *recipe* and creates a ``FileLog``
    row for each URL whose basename appears in the global ``backuplogs``
    filter.  Only the URL is recorded here; the actual download happens
    later in the periodic check.

    @param recipe: object(Recipe) whose logs should be registered
    @return: None
    """
    b = Beaker()
    for url in b.listLogs(recipe.uid):
        namefile = os.path.basename(urlparse(url).path)
        if namefile in backuplogs:
            # get_or_create already persists a newly created row, so the
            # original `if created: logfile.save()` was a redundant second
            # DB write and has been dropped.
            FileLog.objects.get_or_create(url=url, defaults={"recipe": recipe})
def init(*args, **kwargs):
    """Synchronise jobs from Beaker into the local database.

    The job list is chosen by the first matching option:

    * ``kwargs["jobs"]`` -- explicit space-separated job UIDs;
    * ``kwargs["init"]`` -- ask Beaker for jobs owned by
      ``settings.BEAKER_OWNER``, bounded below by ``kwargs["minid"]`` or,
      when absent, by the newest locally known job older than two days;
    * otherwise -- re-check every locally unfinished job.

    Each job is parsed via ``bkr.parse_job`` while progress is tracked in a
    ``CheckProgress`` row.  Recognised ``kwargs``: running, init, minid,
    date (``YYYY-MM-DD``), quiet, jobs.
    """
    progress = CheckProgress()
    bkr = Beaker()
    cfg_running = kwargs["running"]
    cfg_init = kwargs["init"]
    cfg_minid = kwargs["minid"]
    cfg_date = kwargs["date"]
    cfg_quiet = kwargs["quiet"]
    if cfg_date:
        # Reuse the already-bound local instead of re-reading kwargs.
        cfg_date = datetime.strptime(cfg_date, "%Y-%m-%d")
    if kwargs["jobs"]:
        jobslist = kwargs["jobs"].split(" ")
    elif cfg_init:
        bkr_filter = {"owner": settings.BEAKER_OWNER}
        if cfg_minid:
            bkr_filter["minid"] = cfg_minid
        else:
            # No explicit lower bound: resume from the newest job recorded
            # more than two days ago, so only newer jobs get checked again.
            minid = Job.objects.values("uid").filter(
                date__lt=(currentDate() - timedelta(days=2))).order_by("-uid")[:1]
            if minid:
                # Strip the two-character uid prefix (presumably "J:") --
                # Beaker's minid filter expects the bare numeric id.
                bkr_filter["minid"] = minid[0]["uid"][2:]
        jobslist = bkr.listJobs(bkr_filter)
    else:
        jobslist = [
            it["uid"]
            for it in Job.objects.values("uid").filter(is_finished=False)
        ]
    progress.totalsum = len(jobslist)
    progress.save()
    for it in jobslist:
        if not cfg_quiet:
            logger.info("%d/%d (%s)" % (progress.actual, progress.totalsum, it))
        bkr.parse_job(it, running=cfg_running, date_created=cfg_date)
        progress.counter()
    progress.finished()
def test_create_job(self):
    """Schedule jobs from the example XML, verify each lands in the DB, then cancel it."""
    bkr = Beaker()
    pwd = os.path.dirname(__file__)
    xmlpath = os.path.join(pwd, "tests", "example.fedora.xml")
    jobids = bkr.scheduleFromXmlFile(xmlpath)
    for jobid in jobids:
        self.assertIsNotNone(jobid, msg="job is not created")
        # Pull the freshly scheduled job back from Beaker into the local DB.
        bkr.parse_job(jobid)
        job = Job.objects.get(uid=jobid)
        # Clean up: cancel the job so it does not keep running in Beaker.
        bkr.jobCancel(job)
def __init__(self):
    # Beaker API client held for this instance's lifetime.
    self.beaker = Beaker()