Example no. 1
def handle_recipe_finished(sender, **kwargs):
    """Signal handler: download the backup logs of a finished recipe."""
    if sender:
        recipe = kwargs.get("recipe")
        b = Beaker()

        # list every log attached to the recipe and fetch only the files
        # whose name matches one of the configured backup log names
        listurls = b.listLogs("R:%d" % int(recipe.uid))
        for url in listurls:
            if any(url.endswith(it) for it in backuplogs):
                b.downloadLog(url)
        logger.debug("Download recipe log %s from %s" % (recipe, sender))
Example no. 2
    def checklogs(self, **kwargs):
        """Download pending log files, parse them and index them."""
        logger.info("%d files to download" % FileLog.objects.filter(status_code=0).count())
        logger.info("%d files to index" % FileLog.objects.filter(is_indexed=False).count())

        b = Beaker()
        # download the newest pending files (status_code == 0 means the file
        # has not been fetched yet), at most MAX_LOGS_IN_ONE_CHECK per run
        for it in FileLog.objects.filter(status_code=0)\
                         .order_by("-created")[0:settings.MAX_LOGS_IN_ONE_CHECK]:
            it.status_code, logpath = b.downloadLog(it.url)
            if not logpath:
                # download failed: save the returned status code and skip this file
                it.save()
                continue
            it.path = logpath
            it.is_downloaded = True
            it.save()
            try:
                # a parsing failure is only logged, it does not abort the run
                it.parse_journal()
            except Exception as e:
                logger.debug("parse log file: %s" % e)

        # index downloaded files that are not indexed yet, if Elasticsearch is enabled
        if settings.ELASTICSEARCH:
            for it in FileLog.objects.filter(is_downloaded=True, is_indexed=False)\
                            .order_by("-created")[0:settings.MAX_LOGS_IN_ONE_CHECK]:
                try:
                    it.index()
                except Exception as e:
                    logger.info("indexing %s: %s" % (it.path, e))

        # finally, remove old log entries
        FileLog.clean_old()
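checklogs is an instance method, so it presumably lives on a task or command class and is run repeatedly, since each pass handles at most settings.MAX_LOGS_IN_ONE_CHECK files per queue. Below is a minimal sketch of one way it could be driven, assuming a Django management command; the LogChecker class and its import path are placeholders, not the project's real layout.

from django.core.management.base import BaseCommand

# "LogChecker" stands in for whatever class actually defines checklogs();
# the import path is a placeholder for illustration only
from myapp.logs import LogChecker


class Command(BaseCommand):
    help = "Download pending Beaker logs and index them"

    def handle(self, *args, **options):
        # one bounded pass: at most MAX_LOGS_IN_ONE_CHECK downloads and indexings
        LogChecker().checklogs()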
Example no. 3
def handle_recipe_finished(sender, **kwargs):
    """Signal handler: download the backup logs of a finished recipe and
    record each downloaded file as a FileLog object."""
    if sender:
        recipe = kwargs.get("recipe")
        b = Beaker()
        # list every log attached to the recipe and fetch only the files
        # whose name matches one of the configured backup log names
        listurls = b.listLogs("R:%d" % int(recipe.uid))
        for url in listurls:
            if any(url.endswith(it) for it in backuplogs):
                logpath = b.downloadLog(url)
                if not logpath:
                    # download failed, nothing to record for this URL
                    continue
                logfile = FileLog(path=logpath, recipe=recipe)
                logfile.save()

        logger.debug("Download recipe log %s from %s" % (recipe, sender))