Example #1
0
    def checklogs(self, **kwargs):
        """Download pending log files, parse them and index them.

        Both the download pass and the (optional) elasticsearch indexing
        pass are bounded by settings.MAX_LOGS_IN_ONE_CHECK entries per
        run; outdated FileLog rows are purged at the end.
        """
        logger.info("%d files to download" %
                    FileLog.objects.filter(status_code=0).count())
        logger.info("%d files to indexing" %
                    FileLog.objects.filter(is_indexed=False).count())

        beaker = Beaker()
        pending = (FileLog.objects.filter(status_code=0)
                   .order_by("-created")[:settings.MAX_LOGS_IN_ONE_CHECK])
        for filelog in pending:
            filelog.status_code, logpath = beaker.downloadLog(filelog.url)
            if not logpath:
                # download failed -- persist the new status code only
                filelog.save()
                continue
            filelog.path = logpath
            filelog.is_downloaded = True
            filelog.save()
            try:
                filelog.parse_journal()
            except Exception as e:
                logger.debug("parse log file: %s" % e)

        if settings.ELASTICSEARCH:
            unindexed = (FileLog.objects
                         .filter(is_downloaded=True, is_indexed=False)
                         .order_by("-created")[:settings.MAX_LOGS_IN_ONE_CHECK])
            for filelog in unindexed:
                try:
                    filelog.index()
                except Exception as e:
                    logger.info("indexing %s: %s" % (filelog.path, e))

        FileLog.clean_old()
Example #2
0
    def checklogs(self, **kwargs):
        """Download waiting log files, parse them and, when elasticsearch
        is enabled, index the downloaded ones; finally purge old records.

        At most settings.MAX_LOGS_IN_ONE_CHECK files are handled per pass.
        """
        logger.info("%d files to download" % FileLog.objects.filter(status_code=0).count())
        logger.info("%d files to indexing" % FileLog.objects.filter(is_indexed=False).count())

        b = Beaker()
        # newest-first, bounded batch of not-yet-downloaded logs
        for it in FileLog.objects.filter(status_code=0)\
                         .order_by("-created")[0:settings.MAX_LOGS_IN_ONE_CHECK]:
            it.status_code, logpath = b.downloadLog(it.url)
            if not logpath:
                # download failed: persist only the status code returned above
                it.save()
                continue
            it.path = logpath
            it.is_downloaded = True
            it.save()
            try:
                it.parse_journal()
            except Exception as e:
                # parsing is best-effort; the file itself is already stored
                logger.debug("parse log file: %s" % e)

        if settings.ELASTICSEARCH:
            for it in FileLog.objects.filter(is_downloaded=True, is_indexed=False)\
                            .order_by("-created")[0:settings.MAX_LOGS_IN_ONE_CHECK]:
                try:
                    it.index()
                except Exception as e:
                    logger.info("indexing %s: %s" % (it.path, e))

        FileLog.clean_old()
Example #3
0
def handle_recipe_finished(sender, **kwargs):
    """Signal handler: fetch the backed-up log files of a finished recipe."""
    if not sender:
        return
    recipe = kwargs.get("recipe")
    beaker = Beaker()
    for url in beaker.listLogs("R:%d" % int(recipe.uid)):
        # only mirror files whose name matches the global backup filter
        if any(url.endswith(suffix) for suffix in backuplogs):
            beaker.downloadLog(url)
    logger.debug("Download recipe log %s from %s" % (recipe, sender))
Example #4
0
def handle_recipe_finished(sender, **kwargs):
    """Signal handler: download a finished recipe's backup logs and record
    each successfully fetched file as a FileLog row."""
    if sender:
        recipe = kwargs.get("recipe")
        beaker = Beaker()
        for url in beaker.listLogs("R:%d" % int(recipe.uid)):
            wanted = any(url.endswith(suffix) for suffix in backuplogs)
            if not wanted:
                continue
            logpath = beaker.downloadLog(url)
            # a falsy path means the download failed -- record nothing
            if logpath:
                FileLog(path=logpath, recipe=recipe).save()

        logger.debug("Download recipe log %s from %s" % (recipe, sender))
Example #5
0
def download_files_from_recipe(recipe):
    """
    function download log files from beaker by global filter 'backuplogs'

    @param object(Recipe)  download selected files from this recipe

    @return None
    """
    b = Beaker()
    listurls = b.listLogs("R:%d" % int(recipe.uid))
    for url in listurls:
        namefile = os.path.basename(urlparse(url).path)
        if namefile in backuplogs:
            # Look the record up by URL only and keep 'recipe' in defaults:
            # putting recipe into the lookup would duplicate an already
            # known URL under a second recipe (and could later raise
            # MultipleObjectsReturned).  get_or_create() already persists a
            # newly created row, so no extra save() is needed.
            FileLog.objects.get_or_create(url=url,
                                          defaults={"recipe": recipe})
Example #6
0
def download_files_from_recipe(recipe):
    """
    function download log files from beaker by global filter 'backuplogs'

    @param object(Recipe)  download selected files from this recipe

    @return None
    """
    b = Beaker()
    for url in b.listLogs(recipe.uid):
        namefile = os.path.basename(urlparse(url).path)
        if namefile in backuplogs:
            # get_or_create() already persists a newly created row, so the
            # former "if created: logfile.save()" was a redundant second
            # database write and has been dropped.
            FileLog.objects.get_or_create(
                url=url, defaults={"recipe": recipe})
Example #7
0
def init(*args, **kwargs):
    """Collect a list of beaker job uids and parse each job.

    The list comes from an explicit --jobs argument, from a beaker query
    (--init, bounded by --minid or by the newest job older than two days),
    or from the unfinished jobs already stored in the database.  Progress
    is tracked through a CheckProgress record.
    """
    progress = CheckProgress()
    bkr = Beaker()

    cfg_running = kwargs["running"]
    cfg_init = kwargs["init"]
    cfg_minid = kwargs["minid"]
    cfg_date = kwargs["date"]
    cfg_quiet = kwargs["quiet"]

    if cfg_date:
        # --date is expected as YYYY-MM-DD
        cfg_date = datetime.strptime(kwargs["date"], "%Y-%m-%d")

    if kwargs["jobs"]:
        # explicit, space-separated list of job uids
        jobslist = kwargs["jobs"].split(" ")
    elif cfg_init:
        bkr_filter = {"owner": settings.BEAKER_OWNER}
        if cfg_minid:
            bkr_filter["minid"] = kwargs["minid"]
        else:
            # find job from previous init (date) - checked only new jobs
            # datetime.today().date()
            minid = Job.objects.values("uid").filter(
                date__lt=(currentDate() -
                          timedelta(days=2))).order_by("-uid")[:1]
            if minid:
                # uid appears to carry a two-char prefix (presumably "J:")
                # that [2:] strips -- TODO confirm against the Job model
                bkr_filter["minid"] = minid[0]["uid"][2:]
        jobslist = bkr.listJobs(bkr_filter)
    else:
        jobslist = [
            it["uid"]
            for it in Job.objects.values("uid").filter(is_finished=False)
        ]

    progress.totalsum = len(jobslist)
    progress.save()
    for it in jobslist:
        if not cfg_quiet:
            logger.info("%d/%d (%s)" %
                        (progress.actual, progress.totalsum, it))

        bkr.parse_job(it, running=cfg_running, date_created=cfg_date)
        progress.counter()
    progress.finished()
Example #8
0
def init(*args, **kwargs):
    """Build the list of beaker jobs to check and parse each of them.

    The job list comes from --jobs, from a beaker query (--init) or from
    the unfinished jobs already stored in the database; progress is
    tracked through a CheckProgress record.
    """
    progress = CheckProgress()
    beaker = Beaker()

    run_only = kwargs["running"]
    do_init = kwargs["init"]
    min_id = kwargs["minid"]
    created = kwargs["date"]
    quiet = kwargs["quiet"]

    if created:
        created = datetime.strptime(kwargs["date"], "%Y-%m-%d")

    if kwargs["jobs"]:
        # explicit space-separated list of job uids
        jobs = kwargs["jobs"].split(" ")
    elif do_init:
        query = {"owner": settings.BEAKER_OWNER}
        if min_id:
            query["minid"] = kwargs["minid"]
        else:
            # find job from previous init (date) - checked only new jobs
            newest = Job.objects.values("uid").filter(
                date__lt=(currentDate() - timedelta(days=2))).order_by("-uid")[:1]
            if newest:
                # strip the two-character uid prefix
                query["minid"] = newest[0]["uid"][2:]
        jobs = beaker.listJobs(query)
    else:
        unfinished = Job.objects.values("uid").filter(is_finished=False)
        jobs = [row["uid"] for row in unfinished]

    progress.totalsum = len(jobs)
    progress.save()
    for uid in jobs:
        if not quiet:
            logger.info(
                "%d/%d (%s)" %
                (progress.actual, progress.totalsum, uid))

        beaker.parse_job(uid, running=run_only, date_created=created)
        progress.counter()
    progress.finished()
Example #9
0
    def handle(self, *args, **kwargs):
        """Schedule beaker jobs selected by ids, XML files or period flags.

        --ids schedules the listed templates directly; --files schedules
        raw XML job definitions; daily/weekly/tags build a filter that is
        scheduled under a TaskPeriodSchedule label.
        """
        bk = Beaker()
        label = None

        filter = dict()
        if "ids" in kwargs and kwargs["ids"]:
            for it in kwargs["ids"].split():
                jobTs = JobTemplate.objects.filter(id=int(it))
                if len(jobTs) == 0:
                    logger.error("This JobTemplate (%s) does not exist." % it)
                    continue
                bk.jobSchedule(jobTs[0])
                # Beaker guys told us we are causing too big load,
                # so adding this sleep
                # FIXME only temporary, rewrite code for scheduling to tttt
                # taskomatic
                time.sleep(4.5)
        if "files" in kwargs and kwargs["files"]:
            cfg_files = kwargs["files"].split()
            if len(cfg_files) == 0:
                logger.error("Missing XmlFile.")
                return False
            for xmlfile in cfg_files:
                print bk.scheduleFromXmlFile(xmlfile)
        if "daily" in kwargs and kwargs["daily"]:
            label = "daily-automation"
            filter["period"] = JobTemplate.DAILY
        if "weekly" in kwargs and kwargs["weekly"]:
            label = "weekly-automation"
            filter["period"] = JobTemplate.WEEKLY
        if "tags" in kwargs and kwargs["tags"]:
            filter["tags__name__in"] = kwargs["tags"].split()
        # NOTE(review): '"all" in kwargs' tests key presence, not its value;
        # with parsed command options the key usually exists even when the
        # flag was not given -- verify this condition is intended.
        if "all" in kwargs or len(filter) > 0:
            filter["is_enable"] = True
            jobTs = JobTemplate.objects.filter(**filter).distinct()
            # set schedule period run
            try:
                count = TaskPeriodSchedule.objects.filter(title=label).count()
                schedule = TaskPeriodSchedule.objects.get(
                    title=label, counter=count)
            except TaskPeriodSchedule.DoesNotExist:
                schedule = TaskPeriodSchedule.objects.create(
                    title=label,
                    counter=count,
                )

            logger.info("%s JobTemplates are prepared by schedule %s." %
                        (len(jobTs), schedule))
            for jobT in jobTs:
                job = bk.jobSchedule(jobT, simulate=True)
                logger.info("Job is created: %s" % job)
                if schedule:
                    job.schedule = schedule
                    job.save()
                # Beaker guys told us we are causing too big load,
                # so adding this sleep
                # FIXME only temporary, rewrite code for scheduling to tttt
                # taskomatic
                time.sleep(45)
                # NOTE(review): this return exits after the FIRST template,
                # so the remaining jobTs are never scheduled -- confirm
                # whether this is deliberate throttling or a bug.
                return
Example #10
0
    def test_create_job(self):
        """Schedule jobs from a sample XML file, sync them from beaker
        and cancel them again."""
        bkr = Beaker()

        xmlpath = os.path.join(os.path.dirname(__file__),
                               "tests", "example.fedora.xml")

        jobids = bkr.scheduleFromXmlFile(xmlpath)

        for jobid in jobids:
            self.assertIsNotNone(jobid, msg="job is not created")

            # pull the job state back from beaker into the database
            bkr.parse_job(jobid)
            job = Job.objects.get(uid=jobid)

            # clean up: cancel the job we just created
            bkr.jobCancel(job)
Example #11
0
    def handle(self, *args, **kwargs):
        """Entry point: route the requested beaker action to its handler."""
        if not args:
            self.print_help("./manage.py", "beaker")
            return

        self.beaker = Beaker()
        # the web command invokes us as (u'beaker', u'<action>'), the
        # console command directly as ('<action>',)
        action = args[1] if args[0] == "beaker" else args[0]

        if action == 'schedule':
            self.__scheduleActions(args, kwargs)
        elif action == 'reschedule':
            self.__rescheduleActions(args, kwargs)
        elif action == 'return2beaker':
            self.__return2beakerActions(args, kwargs)
        elif action == 'cancel':
            self.__cancelActions(args, kwargs)
        else:
            logger.error("Action %s is not supported" % action)
Example #12
0
    def test_create_job(self):
        """End-to-end check: schedule jobs from an XML file, sync them from
        beaker into the database and cancel them again."""
        bkr = Beaker()

        pwd = os.path.dirname(__file__)
        xmlpath = os.path.join(pwd, "tests", "example.fedora.xml")

        # xmlcontent = get_content_from_file(xmlpath)
        # self.assertIsNotNone(xmlcontent, msg="file %s is not load" % xmlcontent)
        jobids = bkr.scheduleFromXmlFile(xmlpath)

        for jobid in jobids:
            self.assertIsNotNone(jobid, msg="job is not created")

            # pull job state from beaker into the local database
            bkr.parse_job(jobid)

            job = Job.objects.get(uid=jobid)

            # clean up: cancel the job created above
            bkr.jobCancel(job)
Example #13
0
 def __init__(self):
     # single Beaker client reused by the other methods of this object
     self.beaker = Beaker()
Example #14
0
class Command(AdvancedCommand):
    requires_system_checks = True
    can_import_settings = True
    schedule = None

    option_list = AdvancedCommand.option_list + (
        make_option('--info',
                    action='store_true',
                    dest='info',
                    default=False,
                    help='Show more informations.'),
        make_option('--simulate',
                    action='store_true',
                    dest='simulate',
                    default=False,
                    help='Simulate action, use it with --fullinfo.'),
        make_option('--label',
                    dest='label',
                    default=False,
                    help='Name of schedule label'),
    )
    option_groups = (
        # SCHEDULE
        make_option_group(
            'Options for schedule',
            description='Options for scheduling of jobs in beaker',
            option_list=(
                make_option('--schedule-all',
                            action='store_true',
                            dest='all',
                            default=False,
                            help='Schedule all active job templates'),
                make_option('--schedule-daily',
                            action='store_true',
                            dest='daily',
                            default=False,
                            help='Schedule daily job templates'),
                make_option('--schedule-weekly',
                            action='store_true',
                            dest='weekly',
                            default=False,
                            help='Schedule weekly job templates'),
                make_option('--schedule-template',
                            dest='template',
                            help='Schedule only job templates, which are required. We '
                            'can use more values, which are separated by comma.'),
                make_option('--schedule-tags',
                            dest='tags',
                            default=False,
                            help='Schedule job templates, which have required tags.'
                            'We can use more values, separated by comma'),
                make_option('--schedule-reservsys',
                            action='store_true',
                            dest='reservsys',
                            default=False,
                            help='Adding forcibly of the reservsys task into new job.'),
                make_option('--schedule_id',
                            dest='schedule_id',
                            default=False,
                            help='Set period schedule run'),
            ),
        ),
        # RESCHEDULE
        make_option_group(
            'Options for reschedule',
            description='Options for rescheduling of jobs',
            option_list=(
                make_option('--reschedule-all',
                            action='store_true',
                            dest='all',
                            default=False,
                            help='Reschedule all active jobs'),
                make_option('--reschedule-daily',
                            action='store_true',
                            dest='daily',
                            default=False,
                            help='Reschedule daily job'),
                make_option('--reschedule-weekly',
                            action='store_true',
                            dest='weekly',
                            default=False,
                            help='Reschedule weekly jobs'),
                make_option('--reschedule-job',
                            dest='job',
                            help='Reschedule only jobs, which are required. Use UID '
                            '(J:12345) for identify of job. We can use more '
                            'values, which are separated by comma.'),
                make_option('--reschedule-template',
                            dest='template',
                            help='Reschedule only jobs, which are required. We '
                            'can use more values, which are separated by comma.'),
                make_option('--reschedule-tags',
                            dest='tags',
                            default=False,
                            help='Reschedule jobs, which have required tags.'
                            'We can use more values, separated by comma'),
                make_option('--reschedule-message',
                            dest='message',
                            default="",
                            help='The comment for rescheduling of jobs.'),
            ),
        ),
        # RETURN2BEAKER
        make_option_group(
            'Options for return2beaker',
            description='Options for return2beaker of jobs',
            option_list=(
                make_option('--return2beaker-all',
                            action='store_true',
                            dest='all',
                            default=False,
                            help='Return2beaker all active jobs'),
                make_option('--return2beaker-daily',
                            action='store_true',
                            dest='daily',
                            default=False,
                            help='Return2beaker daily jobs'),
                make_option('--return2beaker-weekly',
                            action='store_true',
                            dest='weekly',
                            default=False,
                            help='Return2beaker weekly jobs'),
                make_option('--return2beaker-recipe',
                            dest='recipe',
                            help='Return2beaker only recipes, which are required. Use '
                            'UID (R:12345) for identify of recipe. We can use '
                            'more values, which are separated by comma.'),
                make_option('--return2beaker-job',
                            dest='job',
                            help='Return2beaker only jobs, which are required. Use UID'
                            ' (J:12345) for identify of job. We can use more '
                            'values, which are separated by comma.'),
                make_option('--return2beaker-template',
                            dest='template',
                            help='Return2beaker only jobs, which are required. We '
                            'can use more values, which are separated by comma.'),
                make_option('--return2beaker-tags',
                            dest='tags',
                            default=False,
                            help='Return2beaker jobs, which have required tags.'
                            'We can use more values, separated by comma'),
            ),
        ),
        # CANCEL
        make_option_group(
            'Options for cancel',
            description='Options for canceling of jobs',
            option_list=(
                make_option('--cancel-all',
                            action='store_true',
                            dest='all',
                            default=False,
                            help='Cancel all active jobs'),
                make_option('--cancel-daily',
                            action='store_true',
                            dest='daily',
                            default=False,
                            help='Cancel today daily jobs'),
                make_option('--cancel-weekly',
                            action='store_true',
                            dest='weekly',
                            default=False,
                            help='Cancel weekly jobs'),
                make_option('--cancel-job',
                            dest='job',
                            help='Cancel only jobs, which are required. Use UID '
                            '(J:12345) for identify of job. We can use more '
                            'values, which are separated by comma.'),
                make_option('--cancel-template',
                            dest='template',
                            help='Cancel only jobs, which are scheduled from required '
                            'job templates. We can use more values, which are '
                            'separated by comma.'),
                make_option('--cancel-tags',
                            dest='tags',
                            default=False,
                            help='Cancel jobs, which have required tags.'
                            'We can use more values, separated by comma'),
                make_option('--cancel-message',
                            dest='message',
                            default="",
                            help='The comment for canceling of jobs.'),
            ),
        ),
    )

    def usage(self, subcommand):
        return (" %%prog %s (schedule|reschedule|return2beaker|cancel) "
                "[options]\n\n=== This utility provides functions for playing"
                " with beaker ==\n")\
            % subcommand

    def handle(self, *args, **kwargs):
        if len(args) == 0:
            self.print_help("./manage.py", "beaker")
            return

        self.beaker = Beaker()
        if args[0] == "beaker":
            # web commnand call argv with (u'beaker', u'reschedule')
            action = args[1]
        else:
            # commnand call argv with ('reschedule',)
            action = args[0]

        if action == 'schedule':
            self.__scheduleActions(args, kwargs)
        elif action == 'reschedule':
            self.__rescheduleActions(args, kwargs)
        elif action == 'return2beaker':
            self.__return2beakerActions(args, kwargs)
        elif action == 'cancel':
            self.__cancelActions(args, kwargs)
        else:
            logger.error("Action %s is not supported" % action)

    # --------------------------------------------------------------------------
    # SCHEDULE
    # --------------------------------------------------------------------------

    def __scheduleActions(self, args, kwargs):
        filter = dict()
        if kwargs.get("label"):
            label = kwargs.get("label")
            filter["schedule__label"] = label

        if kwargs.get("tags"):
            filter["tags__name__in"] = kwargs.get("tags", "").split(",")
            if len(filter["tags__name__in"]) == 0:
                logger.error("Minimal one tag is required.")
                return False
        if kwargs.get("all") or len(filter) > 0:
            filter["is_enable"] = True
        if kwargs.get("template"):
            filter['id__in'] = kwargs.get("template", "").split(',')
            if len(filter['id__in']) == 0:
                logger.error("Minimal one job template is required.")
                return False
        if kwargs.get("schedule_id"):
            schedule_id = int(kwargs["schedule_id"])
            try:
                self.schedule = TaskPeriodSchedule.objects.get(id=schedule_id)
            except TaskPeriodSchedule.DoesNotExist:
                coutner = len(TaskPeriodSchedule.objects.filter(period=None))
                self.schedule = TaskPeriodSchedule.objects.create(
                    title="%s" % datetime.now(), counter=coutner)
        if len(filter) > 0:
            self.__scheduleTemplates(filter,
                                     label,
                                     kwargs.get("info"),
                                     kwargs.get("simulate"),
                                     kwargs.get("reservsys"))
            return True
        if kwargs.get("files"):
            cfg_files = kwargs["files"].split()
            if len(cfg_files) == 0:
                logger.error("Minimal one XML file is required.")
                return False
            for xmlfile in cfg_files:
                res = self.beaker.scheduleFromXmlFile(xmlfile)
                if not res:
                    logger.error("Problem with schedule '%s' xml file" %
                                 xmlfile)
            return True
        logger.error("Unsupported parameters")
        return False

    def __scheduleTemplates(self, filter, label, fullInfo, simulate, reserve):
        jobTs = JobTemplate.objects.filter(**filter).distinct()
        logger.info("%s JobTemplates are prepared." % len(jobTs))
        if fullInfo:
            table = Texttable()
            table.set_deco(Texttable.HEADER)
            table.header(["Job", "Whiteboard", "Tags"])

        # do not create TaskPeriodSchedule when there are no jobs to schedule
        if len(jobTs) == 0:
            logger.info("No jobs for TaskPeriod %s" % label)
            return

        period = TaskPeriod.objects.get(label=label)
        count = TaskPeriodSchedule.objects.filter(period=period).count()
        schedule = TaskPeriodSchedule.objects.create(
            title=label,
            period=period,
            counter=count,
        )

        for jobT in jobTs:
            job = ""
            if not simulate:
                job = self.beaker.jobSchedule(jobT, reserve)
                if job:
                    job.schedule = schedule
                    job.save()
                    logger.info("%s job was successful scheduled."
                                % job.uid)
                else:
                    logger.info("Problem with scheduling of job template (%s)."
                                % jobT.id)
            if fullInfo:
                tags = ",".join([tag.name for tag in jobT.tags.all()])
                table.add_row([str(job), jobT.whiteboard, tags])
            if not simulate:
                # Beaker guys told us we are causing too big load,
                # so adding this sleep
                # FIXME only temporary, rewrite code for scheduling to tttt
                # taskomatic
                time.sleep(4.5)
        if fullInfo:
            print table.draw()

    # --------------------------------------------------------------------------
    # RESCHEDULE
    # --------------------------------------------------------------------------

    def __rescheduleActions(self, args, kwargs):
        filter = dict()
        if kwargs.get("daily"):
            filter["template__period"] = JobTemplate.DAILY
        if kwargs.get("weekly"):
            filter["template__period"] = JobTemplate.WEEKLY
        if kwargs.get("tags"):
            filter["template__tags__name__in"] = kwargs.get("tags", "")\
                                                       .split(",")
            if len(filter["template__tags__name__in"]) == 0:
                logger.error("Minimal one tag is required.")
                return False
        if kwargs.get("template"):
            filter['template__id__in'] = kwargs.get("template", "").split(',')
            if len(filter['template__id__in']) == 0:
                logger.error("Minimal one job template is required.")
                return False
        if kwargs.get("job"):
            # need escaping quotes in string !!
            filter['uid__in'] = kwargs.get(
                "job", "").replace('\'', '').split(',')
            if len(filter['uid__in']) == 0:
                logger.error("Minimal one job is required.")
                return False
        if kwargs.get("all") or len(filter) > 0:
            self.__rescheduleTemplates(filter,
                                       kwargs.get("info"),
                                       kwargs.get("simulate"),
                                       kwargs.get("message"))
            return True
        logger.error("Unsupported parameters")
        return False

    def __rescheduleTemplates(self, filter, fullInfo, simulate, message):
        jobs = Job.objects.filter(**filter).distinct()
        logger.info("%s jobs are prepared to reschedule." % len(jobs))
        if fullInfo:
            table = Texttable()
            table.set_deco(Texttable.HEADER)
            table.header(["from Job", "to Job", "Whiteboard", "Tags"])
        for job in jobs:
            jobN = ""
            if not simulate:
                jobN = self.beaker.jobReschedule(job, message)
                if jobN:
                    logger.info("%s job was rescheduled as %s."
                                % (job.uid, jobN.uid))
                else:
                    logger.info("Problem with rescheduling of job (%s)."
                                % job.uid)
            if fullInfo:
                tags = ",".join([tag.name for tag in job.template.tags.all()])
                table.add_row([job.uid, str(jobN), job.template.whiteboard,
                               tags])
            else:
                print str(jobN)
            if not simulate:
                # Beaker guys told us we are causing too big load,
                # so adding this sleep
                # FIXME only temporary, rewrite code for scheduling to tttt
                # taskomatic
                time.sleep(4.5)
        if fullInfo:
            print table.draw()

    # --------------------------------------------------------------------------
    # RETURN2BEAKER
    # --------------------------------------------------------------------------

    def __return2beakerActions(self, args, kwargs):
        filter = dict()
        if kwargs.get("daily"):
            filter["job__template__period"] = JobTemplate.DAILY
        if kwargs.get("weekly"):
            filter["job__template__period"] = JobTemplate.WEEKLY
        if kwargs.get("tags"):
            filter["job__template__tags__name__in"] = kwargs.get("tags", "")\
                                                            .split(",")
            if len(filter["job__template__tags__name__in"]) == 0:
                logger.error("Minimal one tag is required.")
                return False
        if kwargs.get("template"):
            filter['job__template__id__in'] = kwargs.get("template", "")\
                .split(',')
            if len(filter['job__template__id__in']) == 0:
                logger.error("Minimal one job template is required.")
                return False
        if kwargs.get("job"):
            filter['job__uid__in'] = kwargs.get("job", "").split(',')
            if len(filter['job__uid__in']) == 0:
                logger.error("Minimal one job is required.")
                return False
        if kwargs.get("recipe"):
            filter['uid__in'] = kwargs.get("recipe", "").replace('R:', '')\
                .replace("'", "").split(',')
            if len(filter['uid__in']) == 0:
                logger.error("Minimal one recipe is required.")
                return False
        if kwargs.get("all") or len(filter) > 0:
            filter['status'] = Recipe.RUNNING
            self.__return2beakerTemplates(filter,
                                          kwargs.get("info"),
                                          kwargs.get("simulate"))
            return True
        logger.error("Unsupported parameters")
        return False

    def __return2beakerTemplates(self, filter, fullInfo, simulate):
        recipes = Recipe.objects.filter(**filter).distinct()
        logger.info("%s recipes are prepared to return2beaker." % len(recipes))
        if fullInfo:
            table = Texttable()
            table.set_deco(Texttable.HEADER)
            table.header(["Returned", "Recipe", "Job", "Whiteboard", "Tags"])
        for recipe in recipes:
            res = False
            if not simulate:
                res = self.beaker.return2beaker(recipe)
                if res:
                    logger.info("R:%s recipes was returned to beaker."
                                % recipe.uid)
                else:
                    logger.info("Problem with returning to beaker (R:%s)."
                                % recipe.uid)
            if fullInfo:
                tags = ",".join([tag.name for tag in recipe.job.template
                                                               .tags.all()])
                table.add_row([res, "R:%s" % str(recipe), str(recipe.job),
                               "%s - %s" % (recipe.job.template.whiteboard,
                                            recipe.whiteboard), tags])
            else:
                print str(recipe)
        if fullInfo:
            print table.draw()

    # --------------------------------------------------------------------------
    # CANCEL
    # --------------------------------------------------------------------------
    def __cancelActions(self, args, kwargs):
        filter = dict()
        if kwargs.get("daily"):
            filter["template__period"] = JobTemplate.DAILY
        if kwargs.get("weekly"):
            filter["template__period"] = JobTemplate.WEEKLY
        if kwargs.get("tags"):
            filter["template__tags__name__in"] = kwargs.get("tags", "")\
                                                       .split(",")
            if len(filter["template__tags__name__in"]) == 0:
                logger.error("Minimal one tag is required.")
                return False
        if kwargs.get("template"):
            filter['template__id__in'] = kwargs.get("template", "").split(',')
            if len(filter['template__id__in']) == 0:
                logger.error("Minimal one job template is required.")
                return False
        if kwargs.get("job"):
            filter['uid__in'] = kwargs.get("job", "").split(',')
            if len(filter['uid__in']) == 0:
                logger.error("Minimal one job is required.")
                return False
        if kwargs.get("all") or len(filter) > 0:
            filter['is_finished'] = False
            self.__cancelJobs(filter, kwargs.get("info"),
                              kwargs.get("simulate"), kwargs.get("message"))
            return True
        logger.error("Unsupported parameters")
        return False

    def __cancelJobs(self, filter, fullInfo, simulate, message):
        jobs = Job.objects.filter(**filter).distinct()
        logger.info("You are going to cancel %s jobs " % len(jobs))
        if fullInfo:
            table = Texttable()
            table.set_deco(Texttable.HEADER)
            table.header(["Canceled", "Job", "Whiteboard", "Tags"])
        for job in jobs:
            res = False
            if not simulate:
                res = self.beaker.jobCancel(job, message)
                if res:
                    logger.info("%s job was cancled." % job.uid)
                else:
                    logger.info("Problem with canceling of job (%s)."
                                % job.uid)
            if fullInfo:
                tags = ",".join([tag.name for tag in job.template.tags.all()])
                table.add_row([str(res), job.uid, job.template.whiteboard,
                               tags])
            if not simulate:
                # Beaker guys told us we are causing too big load,
                # so adding this sleep
                # FIXME only temporary, rewrite code for scheduling to tttt
                # taskomatic
                time.sleep(4.5)
        if fullInfo:
            print table.draw()
Example #15
0
class BeakerCommand():

    def __init__(self):
        # Single Beaker client instance reused by every command method.
        self.beaker = Beaker()

    def return2beaker(self, return2beaker_recipe, *argvs, **kwargs):
        """Return each listed recipe's system to beaker and release it.

        return2beaker_recipe -- iterable of recipe uids prefixed "R:";
        the two-character prefix is stripped before the database lookup.
        """
        if not return2beaker_recipe:
            raise CommandError(
                "return2beaker - parameter return2beaker_recipe cannot be empty")

        for recipe_uid in return2beaker_recipe:
            # drop the leading "R:" prefix to obtain the bare uid
            recipe = Recipe.objects.get(uid=recipe_uid[2:])
            if self.beaker.return2beaker(recipe):
                logger.info("R:%s recipes was returned to beaker."
                            % recipe.uid)
            else:
                logger.info("Problem with returning to beaker (R:%s)."
                            % recipe.uid)
            self.beaker.systemRelease(recipe)

    def reschedule(self, reschedule_job, *argvs, **kwargs):
        """Reschedule every listed job, logging the uid of the new job."""
        if not reschedule_job:
            raise CommandError(
                "reschedule - parameter reschedule_job cannot be empty")

        msg = kwargs.get("reschedule-message")
        for job_uid in reschedule_job:
            original = Job.objects.get(uid=job_uid)
            rescheduled = self.beaker.jobReschedule(original, msg)
            if rescheduled:
                logger.info("%s job was rescheduled as %s."
                            % (original.uid, rescheduled.uid))
            else:
                logger.info("Problem with rescheduling of job (%s)."
                            % original.uid)

    def cancel(self, cancel_job, *argvs, **kwargs):
        """Cancel every job whose uid is listed in *cancel_job*.

        Raises CommandError when no uid is given; the optional
        "cancel-message" kwarg is forwarded to beaker.
        """
        if not cancel_job:
            raise CommandError("cancel - parameter cancel_job cannot be empty")

        message = kwargs.get("cancel-message")
        for uid in cancel_job:
            job = Job.objects.get(uid=uid)
            res = self.beaker.jobCancel(job, message)
            if res:
                # fixed typo in log message ("cancled" -> "canceled")
                logger.info("%s job was canceled." % job.uid)
            else:
                logger.info("Problem with canceling of job (%s)." % job.uid)

    def schedule(self, label="default", *argvs, **kwargs):
        """Schedule job templates selected by id or by period label.

        Recognised kwargs: list, template_id, schedule_label, simulate,
        reserve, fullInfo.
        """
        simulate = kwargs.get("simulate")
        reserver = kwargs.get("reserve")
        fullInfo = kwargs.get("fullInfo")

        if kwargs.get("list"):
            # list mode: print the known task periods, schedule nothing here
            table = Texttable()
            table.set_deco(Texttable.HEADER)
            table.header(["Id", "Title", "Label", "Schedule"])
            for period in TaskPeriod.objects.all():
                table.add_row([period.id, period.title, period.label,
                               period.cron])
            print(table.draw())

        template_ids = kwargs.get("template_id")
        if template_ids:
            logger.debug("Schedule template id %s" % template_ids)
            self.scheduleByJobTemplates(
                {"id__in": template_ids}, label, fullInfo, simulate, reserver)

        period_label = kwargs.get("schedule_label")
        if period_label:
            selector = {"schedule__label__in": period_label,
                        "is_enable": True}
            if not label:
                label = period_label
            self.scheduleByJobTemplates(
                selector, "".join(label), fullInfo, simulate, reserver)

    def checklogs(self, **kwargs):
        """Download pending beaker logs, parse them and (optionally) index them."""
        logger.info("%d files to download"
                    % FileLog.objects.filter(status_code=0).count())
        logger.info("%d files to indexing"
                    % FileLog.objects.filter(is_indexed=False).count())

        beaker = Beaker()
        pending = FileLog.objects.filter(status_code=0).order_by("-created")
        for filelog in pending[0:settings.MAX_LOGS_IN_ONE_CHECK]:
            filelog.status_code, logpath = beaker.downloadLog(filelog.url)
            if not logpath:
                # download failed: persist the status code and move on
                filelog.save()
                continue
            filelog.path = logpath
            filelog.is_downloaded = True
            filelog.save()
            try:
                filelog.parse_journal()
            except Exception as e:
                logger.debug("parse log file: %s" % e)

        if settings.ELASTICSEARCH:
            downloaded = FileLog.objects\
                .filter(is_downloaded=True, is_indexed=False)\
                .order_by("-created")
            for filelog in downloaded[0:settings.MAX_LOGS_IN_ONE_CHECK]:
                try:
                    filelog.index()
                except Exception as e:
                    logger.info("indexing %s: %s" % (filelog.path, e))

        FileLog.clean_old()

    def scheduleByJobTemplates(
            self, filter, label, fullInfo, simulate, reserve):
        """Schedule beaker jobs for every JobTemplate matching *filter*.

        filter -- dict of Django ORM lookups selecting the templates
        label -- TaskPeriod label the new TaskPeriodSchedule belongs to
        fullInfo -- when true, print a summary table of scheduled jobs
        simulate -- when true, only report; do not contact beaker
        reserve -- forwarded to Beaker.jobSchedule
        """
        jobTs = JobTemplate.objects.filter(**filter).distinct()
        logger.info("%s JobTemplates are prepared." % len(jobTs))
        if fullInfo:
            table = Texttable()
            table.set_deco(Texttable.HEADER)
            table.header(["Job", "Whiteboard", "Tags"])
        # do not create TaskPeriodSchedule when there are no jobs to schedule
        if len(jobTs) == 0:
            logger.info("No jobs for TaskPeriod %s" % label)
            return

        period = TaskPeriod.objects.get(label=label)
        count = TaskPeriodSchedule.objects.filter(
            period=period).aggregate(Max('counter')).get("counter__max")

        # continue the per-period counter, starting at 0 for the first run
        schedule = TaskPeriodSchedule.objects.create(
            title=label,
            period=period,
            counter=count + 1 if count is not None else 0,
        )

        for jobT in jobTs:
            job = ""
            if not simulate:
                job = self.beaker.jobSchedule(jobT, reserve)
                if job:
                    job.schedule = schedule
                    job.save()
                    logger.info("%s job was successful scheduled."
                                % job.uid)
                else:
                    logger.info("Problem with scheduling of job template (%s)."
                                % jobT.id)
            if fullInfo:
                tags = ",".join([tag.name for tag in jobT.tags.all()])
                table.add_row([str(job), jobT.whiteboard, tags])
            if not simulate:
                # Beaker guys told us we are causing too big load,
                # so adding this sleep
                # FIXME only temporary, rewrite code for scheduling to tttt
                # taskomatic
                time.sleep(4.5)
        if fullInfo:
            # print() call: the former Python 2 print statement is a
            # SyntaxError on Python 3 (the rest of the file uses print()).
            print(table.draw())
Example #16
0
 def __init__(self):
     # NOTE(review): the 1-space indentation looks like an extraction
     # artifact; this fragment duplicates BeakerCommand.__init__ and
     # only creates the shared Beaker client — confirm before keeping.
     self.beaker = Beaker()
Example #17
0
class BeakerCommand():
    def __init__(self):
        # Single Beaker client instance reused by every command method.
        self.beaker = Beaker()

    def return2beaker(self, return2beaker_recipe, *argvs, **kwargs):
        """Return each listed recipe's system to beaker and release it.

        return2beaker_recipe -- iterable of recipe uids prefixed "R:";
        the two-character prefix is stripped before the database lookup.
        """
        if not return2beaker_recipe:
            raise CommandError(
                "return2beaker - parameter return2beaker_recipe cannot be empty"
            )

        for recipe_uid in return2beaker_recipe:
            # drop the leading "R:" prefix to obtain the bare uid
            recipe = Recipe.objects.get(uid=recipe_uid[2:])
            if self.beaker.return2beaker(recipe):
                logger.info("R:%s recipes was returned to beaker." %
                            recipe.uid)
            else:
                logger.info("Problem with returning to beaker (R:%s)." %
                            recipe.uid)
            self.beaker.systemRelease(recipe)

    def reschedule(self, reschedule_job, *argvs, **kwargs):
        """Reschedule every listed job, logging the uid of the new job."""
        if not reschedule_job:
            raise CommandError(
                "reschedule - parameter reschedule_job cannot be empty")

        msg = kwargs.get("reschedule-message")
        for job_uid in reschedule_job:
            original = Job.objects.get(uid=job_uid)
            rescheduled = self.beaker.jobReschedule(original, msg)
            if rescheduled:
                logger.info("%s job was rescheduled as %s." %
                            (original.uid, rescheduled.uid))
            else:
                logger.info("Problem with rescheduling of job (%s)."
                            % original.uid)

    def cancel(self, cancel_job, *argvs, **kwargs):
        """Cancel every job whose uid is listed in *cancel_job*.

        Raises CommandError when no uid is given; the optional
        "cancel-message" kwarg is forwarded to beaker.
        """
        if not cancel_job:
            raise CommandError("cancel - parameter cancel_job cannot be empty")

        message = kwargs.get("cancel-message")
        for uid in cancel_job:
            job = Job.objects.get(uid=uid)
            res = self.beaker.jobCancel(job, message)
            if res:
                # fixed typo in log message ("cancled" -> "canceled")
                logger.info("%s job was canceled." % job.uid)
            else:
                logger.info("Problem with canceling of job (%s)." % job.uid)

    def schedule(self, label="default", *argvs, **kwargs):
        """Schedule job templates selected by id or by period label.

        Recognised kwargs: list, template_id, schedule_label, simulate,
        reserve, fullInfo.
        """
        simulate = kwargs.get("simulate")
        reserver = kwargs.get("reserve")
        fullInfo = kwargs.get("fullInfo")

        if kwargs.get("list"):
            # list mode: print the known task periods, schedule nothing here
            table = Texttable()
            table.set_deco(Texttable.HEADER)
            table.header(["Id", "Title", "Label", "Schedule"])
            for period in TaskPeriod.objects.all():
                table.add_row([period.id, period.title, period.label,
                               period.cron])
            print(table.draw())

        template_ids = kwargs.get("template_id")
        if template_ids:
            logger.debug("Schedule template id %s" % template_ids)
            self.scheduleByJobTemplates({"id__in": template_ids}, label,
                                        fullInfo, simulate, reserver)

        period_label = kwargs.get("schedule_label")
        if period_label:
            selector = {"schedule__label__in": period_label,
                        "is_enable": True}
            if not label:
                label = period_label
            self.scheduleByJobTemplates(selector, "".join(label), fullInfo,
                                        simulate, reserver)

    def checklogs(self, **kwargs):
        """Download pending beaker logs, parse them and (optionally) index them."""
        logger.info("%d files to download" %
                    FileLog.objects.filter(status_code=0).count())
        logger.info("%d files to indexing" %
                    FileLog.objects.filter(is_indexed=False).count())

        beaker = Beaker()
        pending = FileLog.objects.filter(status_code=0).order_by("-created")
        for filelog in pending[0:settings.MAX_LOGS_IN_ONE_CHECK]:
            filelog.status_code, logpath = beaker.downloadLog(filelog.url)
            if not logpath:
                # download failed: persist the status code and move on
                filelog.save()
                continue
            filelog.path = logpath
            filelog.is_downloaded = True
            filelog.save()
            try:
                filelog.parse_journal()
            except Exception as e:
                logger.debug("parse log file: %s" % e)

        if settings.ELASTICSEARCH:
            downloaded = FileLog.objects\
                .filter(is_downloaded=True, is_indexed=False)\
                .order_by("-created")
            for filelog in downloaded[0:settings.MAX_LOGS_IN_ONE_CHECK]:
                try:
                    filelog.index()
                except Exception as e:
                    logger.info("indexing %s: %s" % (filelog.path, e))

        FileLog.clean_old()

    def scheduleByJobTemplates(self, filter, label, fullInfo, simulate,
                               reserve):
        """Schedule beaker jobs for every JobTemplate matching *filter*.

        filter -- dict of Django ORM lookups selecting the templates
        label -- TaskPeriod label the new TaskPeriodSchedule belongs to
        fullInfo -- when true, print a summary table of scheduled jobs
        simulate -- when true, only report; do not contact beaker
        reserve -- forwarded to Beaker.jobSchedule
        """
        jobTs = JobTemplate.objects.filter(**filter).distinct()
        logger.info("%s JobTemplates are prepared." % len(jobTs))
        if fullInfo:
            table = Texttable()
            table.set_deco(Texttable.HEADER)
            table.header(["Job", "Whiteboard", "Tags"])
        # do not create TaskPeriodSchedule when there are no jobs to schedule
        if len(jobTs) == 0:
            logger.info("No jobs for TaskPeriod %s" % label)
            return

        period = TaskPeriod.objects.get(label=label)
        count = TaskPeriodSchedule.objects.filter(period=period).aggregate(
            Max('counter')).get("counter__max")

        # continue the per-period counter, starting at 0 for the first run
        schedule = TaskPeriodSchedule.objects.create(
            title=label,
            period=period,
            counter=count + 1 if count is not None else 0,
        )

        for jobT in jobTs:
            job = ""
            if not simulate:
                job = self.beaker.jobSchedule(jobT, reserve)
                if job:
                    job.schedule = schedule
                    job.save()
                    logger.info("%s job was successful scheduled." % job.uid)
                else:
                    logger.info(
                        "Problem with scheduling of job template (%s)." %
                        jobT.id)
            if fullInfo:
                tags = ",".join([tag.name for tag in jobT.tags.all()])
                table.add_row([str(job), jobT.whiteboard, tags])
            if not simulate:
                # Beaker guys told us we are causing too big load,
                # so adding this sleep
                # FIXME only temporary, rewrite code for scheduling to tttt
                # taskomatic
                time.sleep(4.5)
        if fullInfo:
            # print() call: the former Python 2 print statement is a
            # SyntaxError on Python 3 (the rest of the file uses print()).
            print(table.draw())