Example #1
def engage():
    banner.print_banner()
    oneness_scheduler = BlockingScheduler({
        'apscheduler.executors.processpool': {
            'class': 'apscheduler.executors.pool:ProcessPoolExecutor',
            'max_workers': '20'
        },
        'apscheduler.job_defaults.coalesce': 'false',
        'apscheduler.job_defaults.executor': 'processpool'
    })

    # the 'processpool' executor is already registered via the config above;
    # calling add_executor() again with the same alias would raise ValueError
    t_retweet = oneness_scheduler.add_job(twitter.retweet.start,
                                          'interval',
                                          minutes=60,
                                          id='twitter_retweet_bot')
    t_follow = oneness_scheduler.add_job(twitter.follow.start,
                                         'interval',
                                         minutes=10,
                                         id='twitter_follow_bot')

    # quoted_im_generator = oneness_scheduler.add_job(
    #     image_generator.quoted_image.start,
    #     'interval', minutes=300,
    #     id='quoted_im_generator',
    #     kwargs={'overlay_flag': True}
    # )

    im_with_quote_generator = oneness_scheduler.add_job(
        image_generator.quoted_image.start,
        'interval',
        minutes=120,
        id='image_with_quote_generator',
        kwargs={'overlay_flag': False})

    try:
        # force every job to fire immediately once the scheduler starts
        for job in oneness_scheduler.get_jobs():
            job.modify(next_run_time=datetime.now())
        oneness_scheduler.start()
    except (KeyboardInterrupt, SystemExit):
        oneness_scheduler.shutdown()
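This snippet depends on project-local modules (banner, twitter, image_generator). A minimal self-contained sketch of the same pattern, with a stand-in job function, might look like this:

from datetime import datetime

from apscheduler.schedulers.blocking import BlockingScheduler


def heartbeat():
    # stand-in for the bot jobs above
    print("tick", datetime.now())


scheduler = BlockingScheduler()
scheduler.add_job(heartbeat, 'interval', minutes=10, id='heartbeat')

try:
    # make every job fire once right away, then fall back to its normal interval
    for job in scheduler.get_jobs():
        job.modify(next_run_time=datetime.now())
    scheduler.start()
except (KeyboardInterrupt, SystemExit):
    scheduler.shutdown()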
Example #2
class DisseminationPlayer(object):

    MIDNIGHT = datetime.time(0,0,0)

    def __init__(self, top_data_dir, index_file, dir_files_to_parse, files_to_parse, job_func, destination):
        """
            :return:
        """
        self._parser = eumetsat.dmon.parsers.xferlog_parser.XferlogParser(no_gems_header = True)
        self._dir_files = dir_files_to_parse
        self._files = files_to_parse
        self._job_func = job_func
        self._scheduler = BlockingScheduler()

        res = []
        t = ftimer(Indexer.load_index, [top_data_dir, index_file], {}, res)
        print("Read index in %d seconds." % (t))
        self._index = res[0]

        # reference time = now plus the defer time (5 seconds)
        self._defer_time = 5
        self._reference_date = datetime.datetime.now() +  datetime.timedelta(seconds=self._defer_time)

        #destination info (depends on the type of job)
        self._destination = destination


    def add_jobs(self):
        """
          Create the jobs from the reference time
        :return:
        """
        for a_file in self._files:
            f_path = "%s/%s" % (self._dir_files, a_file)
            print("Parsing xferlog file %s" % f_path )
            fd = open(f_path)
            self._parser.set_lines_to_parse(fd)
            for elem in self._parser:
                #print("time = %s, filename = %s\n" % (elem['time'], elem['file']))
                #find file in index
                filepath = self._index.get(elem['file'], None)
                if filepath:
                    #get time difference
                    midnight_date = utc.localize(datetime.datetime.combine(elem['time'].date(), self.MIDNIGHT))
                    #print("midnight date = %s ///// elem[time] = %s" % (midnight_date, elem['time']))
                    time_diff = elem['time'] - midnight_date
                    scheduled_date = self._reference_date + time_diff
                    #create job and schedule it with the time difference added to the starting reference time
                    d_trigger = DateTrigger(scheduled_date)

                    self._scheduler.add_job(self._job_func, d_trigger, args=[filepath, self._destination])
                else:
                    print("Could not find %s in index" % (elem['file']))
            fd.close()

        print("Player. %d jobs scheduled.\n" % (len(self._scheduler.get_jobs())))


    def start(self):
        """
        :return:
        """
        self._scheduler.configure(jobstores=jobstores, executors=executors, job_defaults=job_defaults, timezone=utc)

        print("Start Scheduler. Jobs will start to be played in %d sec." % self._defer_time)
        self._scheduler.start()
Example #3
class TaskExecutor:
    def __init__(self, db, task_instance, task_param):
        self.task_instance = task_instance
        self.task_param = task_param
        self.db = db
        # invoke log
        self.invoke_log_map = {}
        self.jobs = {}
        logging.config.fileConfig("../logger.ini")
        self.logger = logging.getLogger("taskExecutor")
        invoke_count = int(self.task_param.get_invoke_args()['invoke_count'])
        executors = {
            'default': {
                'type': 'threadpool',
                'max_workers': invoke_count + 1
            }
        }
        self.scheduler = BlockingScheduler(executors=executors)

    def execute(self):
        self.scheduler.add_listener(
            self._job_listener,
            events.EVENT_JOB_EXECUTED | events.EVENT_JOB_ERROR
            | events.EVENT_JOB_ADDED | events.EVENT_JOB_MISSED)

        # heartbeat job: flush invoke logs and stop jobs when the task is switched off
        self.scheduler.add_job(self._invoke_break_heart, "interval", seconds=2)
        try:
            self.scheduler.start()
        except Exception as e:
            print(e)
            self.scheduler.shutdown(wait=True)

    def _job_listener(self, ev):
        """
        监听job的事件,job完成后再发起下次调用,对于异常也要处理
        :param ev:
        :return:
        """
        if self.task_instance.status == 'off':
            return
        if ev.code == events.EVENT_JOB_ADDED:
            self.jobs[ev.job_id] = self.scheduler.get_job(ev.job_id)
        elif ev.code == events.EVENT_JOB_EXECUTED or ev.code == events.EVENT_JOB_ERROR:
            if ev.code == events.EVENT_JOB_ERROR:
                self.logger.error(ev.exception)
                self.logger.error(ev.traceback)
            job = self.jobs[ev.job_id]
            self.scheduler.add_job(
                job.func,
                next_run_time=(datetime.datetime.now() +
                               datetime.timedelta(seconds=1)),
                id=ev.job_id,
                args=job.args)
        else:
            pass

    def _invoke_break_heart(self):
        if self.task_instance.status == 'off':
            jobs = self.scheduler.get_jobs()
            for job in jobs:
                try:
                    job.pause()
                    job.remove()
                except Exception as e:
                    self.logger.error(e)
        self.db.save_task_logs(self.invoke_log_map)
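TaskExecutor is tied to project-specific db/task objects and a logger.ini file. A minimal self-contained sketch of the same listener-driven re-chaining idea (names below are illustrative) could be:

import datetime

from apscheduler import events
from apscheduler.schedulers.blocking import BlockingScheduler

scheduler = BlockingScheduler()


def task():
    print("task ran at", datetime.datetime.now())


def reschedule(ev):
    # the listener mask below limits this to EXECUTED/ERROR events,
    # so each finished (or failed) run queues the next one a second later
    scheduler.add_job(task,
                      'date',
                      run_date=datetime.datetime.now() + datetime.timedelta(seconds=1),
                      id=ev.job_id,
                      replace_existing=True)


scheduler.add_listener(reschedule,
                       events.EVENT_JOB_EXECUTED | events.EVENT_JOB_ERROR)
scheduler.add_job(task,
                  'date',
                  run_date=datetime.datetime.now() + datetime.timedelta(seconds=1),
                  id='chained_task')
scheduler.start()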
Example #4
# sched.add_job(build_add, 'date', run_date='2018-03-08 18:01:50',misfire_grace_time=10000,max_instances=3)
# # # sched = BlockingScheduler(jobstores=jobstores, executors=executors, job_defaults=job_defaults)

# # print "scheduled job from store",sched.get_jobs('default')

# date='2018-03-08 18:05:58'

# @sched.scheduled_job('date', run_date=date)
# def build_scheduled():
#     # supplying the values  for kwargs

#      # server_c = jenkins.Jenkins('http://10.144.169.116:8080', username='******', password='******')
#      # print request.POST['CARD/CPS/Authorization']
#      # url=request.POST['url2']
#      # print url
#      try:
#       # build=server_c.build_job('POC/ET_Trail');
#       print "job build initiated successfully"
#       # return HttpResponse(simplejson.dumps(build), content_type='application/json')
#      except Exception as e:
#         print e
#      with open("test.txt", "a") as myfile:
#          myfile.write("2017-03-07 13:37:40' printed")

print "scheduled job from store", sched.get_jobs('default')
# print "scheduled jobs are ",sched.get_jobs(pending='True')

sched.start()
sched.wakeup()
sched.shutdown(wait=True)
Example #5
                     start_date='2017-06-19 10:42:40',
                     coalesce=True,
                     seconds=5)
'''
# TODO scheduled (date) job for 2017-6-19 10:27:30
job = sched.add_job(job_fun, 'date', run_date=datetime(2017, 6, 19, 10, 27, 30),args=['this is date job'])
'''
'''
# TODO cron job: run Mon-Fri at 21:39, starting 2017-06-30 and ending 2017-09-30
job = sched.add_job(job_fun, 'cron', day_of_week='mon-fri', hour=21, minute=39, start_date='2017-06-30' ,end_date='2017-09-30')
'''

# TODO print job attributes and status
sched.print_jobs()
# TODO get job ids and their callables
print(sched.get_jobs())
# TODO remove a job so it no longer runs
#job.remove()
'''
# TODO pause and resume a job
job2.pause()
job2.resume()
'''
print("press Ctrl+{} TO STOP TASK".format('break' if platform.system() ==
                                          'Windows' else 'C'))
try:
    sched.start()
except (SyntaxWarning, BlockingIOError, SystemError) as e:
    print(e)
except (KeyboardInterrupt, SystemExit):
    print('clean the job')
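The fragment above refers to a sched scheduler and a job_fun callable defined outside the excerpt. A self-contained sketch covering the same trigger types (dates and ids are illustrative) might be:

import platform
from datetime import datetime

from apscheduler.schedulers.blocking import BlockingScheduler


def job_fun(message):
    print(message, datetime.now())


sched = BlockingScheduler()
# one-off date job
sched.add_job(job_fun, 'date', run_date=datetime(2027, 6, 19, 10, 27, 30),
              args=['this is a date job'])
# cron job: Mon-Fri at 21:39 between a start and an end date
sched.add_job(job_fun, 'cron', day_of_week='mon-fri', hour=21, minute=39,
              start_date='2027-06-30', end_date='2027-09-30',
              args=['this is a cron job'])
# recurring interval job
sched.add_job(job_fun, 'interval', seconds=5, coalesce=True,
              args=['this is an interval job'])

sched.print_jobs()

print("Press Ctrl+{} to stop".format('Break' if platform.system() == 'Windows' else 'C'))
try:
    sched.start()
except (KeyboardInterrupt, SystemExit):
    print('clean the job')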
Example #6
class JobLauncher(object):
    def __init__(self, background=False, daemon=True, **kwargs):
        logging.basicConfig(format="[%(asctime)s] %(message)s",
                            datefmt="%Y-%m-%d %H:%M:%S")
        logging.getLogger('apscheduler').setLevel(logging.DEBUG)

        if background:
            self.sched = BackgroundScheduler(daemon=daemon)  # run jobs in a background thread
        else:
            self.sched = BlockingScheduler(daemon=daemon)  # block in the foreground

        # TODO: Read from configuration file.
        self.sched.configure(
            jobstores={
                # "sqlite": SQLAlchemyJobStore(url='sqlite:///app/database/example.db'),
                # "default": MemoryJobStore()
                "default":
                SQLAlchemyJobStore(url='sqlite:///app/database/example.db')
            },
            executors={
                'default': ThreadPoolExecutor(20),
                'processpool': ProcessPoolExecutor(5)
            },
            job_defaults={
                'coalesce': False,
                'max_instances': 3
            },
            timezone=get_localzone()  # Asia/Seoul
        )

        self.retried = 0
        self.logger = logging.getLogger('apscheduler')

        super(JobLauncher, self).__init__()

    def start(self):
        try:
            if self.sched.state != STATE_RUNNING:
                self.printJobs(jobstore='default')
                self.sched.start()

        except ConflictingIdError as e:
            traceback.print_exc()

        except KeyboardInterrupt as e:
            traceback.print_exc()

        finally:
            # Remove any jobs remaining in the store here if needed:
            # self.sched.remove_all_jobs()
            # for job in self.getJobs():
            #   if job.pending:
            #     job.pause()

            self.logger.info('Finished')
            self.logger.info(self.getJobs())
            self.printJobs()

    def stop(self, wait=False):
        if self.sched.state == STATE_RUNNING:
            self.sched.shutdown(wait=wait)

    def resume(self):
        # resuming only makes sense when the scheduler is paused
        # (STATE_PAUSED comes from apscheduler.schedulers.base, like STATE_RUNNING)
        if self.sched.state == STATE_PAUSED:
            self.sched.resume()

    def pause(self):
        if self.sched.state == STATE_RUNNING:
            self.sched.pause()

    def addListener(self, listener, types):
        self.sched.add_listener(listener, types)

    def addJob(self, job, **kwargs):
        execute, trigger, options = job.build(**kwargs)

        added_job = self.sched.add_job(execute, trigger, **options)

        self.printJobs()

        return added_job

    def getJob(self, job_id):
        return self.sched.get_job(job_id)

    def getJobs(self, jobstore=None):
        return self.sched.get_jobs(jobstore=jobstore)

    def removeJob(self, job_id, jobstore=None):
        return self.sched.remove_job(job_id, jobstore=jobstore)

    def removeAllJob(self, jobstore=None):
        return self.sched.remove_all_jobs(jobstore=jobstore)

    def printJobs(self, jobstore=None, out=None):
        return self.sched.print_jobs(jobstore=jobstore, out=out)

    def getJobState(self, job_id=None, jobstore=None):
        state = list()

        if job_id is not None:
            job = self.sched.get_job(job_id, jobstore=jobstore)

            if job is not None:
                temp = dict()
                temp[job.id] = {
                    "next_run_time": job.next_run_time,
                    "state": job.pending,
                }
                state.append(temp)

        else:
            for job in self.sched.get_jobs(jobstore=jobstore):
                temp = dict()
                temp[job.id] = {
                    "next_run_time": job.next_run_time,
                    "state": job.pending,
                }
                state.append(temp)

        return state