from models import db_session
from schema import schema
from update import main as main_update

# ------------------------------
app = Flask(__name__)
cors = CORS(app, resources={r'/graphql/*': {'origins': "http://localhost:3000"}})
# crontab = Crontab(app)
app.debug = True

# ------------------------------
# CRON JOBS HERE
cron = Scheduler(daemon=True)
cron.start()

@cron.interval_schedule(hours=6)
def database_update():
    main_update()

# shut down the cron thread after the web process has ended
atexit.register(lambda: cron.shutdown(wait=False))

# -----------------------------
# GraphQL setup
app.add_url_rule('/graphql', view_func=GraphQLView.as_view(
    'graphql',
    schema=schema,
    graphiql=True
))
print "saveing!" data = getJson() date = datetime.now().date() dateStr = str(date) hour = datetime.now().hour hourStr = str(hour) nameStr = dateStr+'-'+hourStr file(nameStr+'.json','w').write(json.dumps(data,indent=2)) f = open(nameStr+'.json') jsoned = json.load(f) f.close() with open(nameStr+'.csv','wb+') as csv_file: csv_writer = csv.writer(csv_file) for item in jsoned: csv_writer.writerow([item[u'pm2_5'],item[u'primary_pollutant'],item[u'co'],item[u'pm10'],item[u'area'],item[u'o3_8h'], item[u'o3'],item[u'o3_24h'],item[u'station_code'],item[u'quality'],item[u'co_24h'],item[u'no2_24h'], item[u'so2'],item[u'so2_24h'],item[u'time_point'],item[u'pm2_5_24h'],item[u'position_name'], item[u'o3_8h_24h'],item[u'aqi'],item[u'pm10_24h'],item[u'no2']]) if __name__ == '__main__': saveData() scheduler = Scheduler() scheduler.daemonic = False scheduler.add_interval_job(saveData,seconds=3600) print('One Hour') scheduler.start()
def send_mail(mailto_list, sub, context):
    me = '*****@*****.**'
    msg = MIMEText(context)
    msg['Subject'] = sub
    msg['From'] = '*****@*****.**'
    msg['To'] = COMMASPACE.join(mailto_list)
    send_smtp = smtplib.SMTP(mail_host)
    send_smtp.sendmail(me, mailto_list, msg.as_string())
    send_smtp.close()

def job():
    bookList = []
    isSendMail = False
    context = 'Today free books are'
    mailto_list = ['*****@*****.**']
    bookList = get_book(bookList)
    context, isSendMail = record_book(bookList, context, isSendMail)
    if isSendMail:
        send_mail(mailto_list, 'Free Book is Update', context)

def job_listener(event):
    logging.basicConfig()
    if event.exception:
        print 'job failed'
    else:
        print 'job succeed'

if __name__ == '__main__':
    sched = Scheduler(daemonic=False)
    sched.add_listener(job_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
    sched.add_interval_job(job, minutes=30)
    sched.start()
    path('exportBookExcel/', proApp_book.exportBookExcel, name='exportBookExcel'),  # export
    path('importBook/', proApp_book.importBook, name='importBook'),  # import
    path('uploadFile/', proApp_book.uploadFile, name='uploadFile'),  # single attachment upload
    path('uploadMoreFile/', proApp_book.uploadMoreFile, name='uploadMoreFile'),  # multiple attachment upload
    path('uploadBookCover/', proApp_book.uploadBookCover, name='uploadBookCover'),  # book cover (image) upload
]

urlpatterns = pageurlpatterns + interfaceurlpatterns

# Scheduled task 1
from apscheduler.scheduler import Scheduler
import datetime, time
from proApp.task import tasks

sched = Scheduler()  # create the scheduler

@sched.interval_schedule(minutes=100)
def my_task():
    task = tasks()
    task.testFuncton()

sched.start()

# Scheduled task 2 (blocking: the next job runs only after the previous one finishes; single-threaded)
import sched, time

schedule = sched.scheduler(time.time, time.sleep)

def print_time():
    print("From print_time: ", time.time())

def func(string1, float1):
# print(v)
# time.sleep(300)

def appdo():
    v = 1
    text = 'doing this'
    # while v==1:
    v = send_message(text)
    print(v)
    # v=1
    # time.sleep(300)

cron = Scheduler(daemon=True)
# Explicitly kick off the background thread
cron.start()

@cron.interval_schedule(minutes=5)
def job_function():
    appdo()

# Shutdown your cron thread if the web process is stopped
# atexit.register(lambda: cron.shutdown(wait=False))

if __name__ == '__main__':
    app.run()
def stop(self):
    if self.__scheduler is not None:
        self.__scheduler.shutdown(wait=False)
    self.__scheduler = Scheduler(AlertSchedulerHandler.APS_CONFIG)
    logger.info("[AlertScheduler] Stopped the alert scheduler.")
def __init__(self):
    # check to see if values are in the text file, otherwise load defaults
    try:
        f = open('spyeconfig.txt', 'r')
    # problem opening the file, load the default values
    except:
        logger.warn("Could not open spyeconfig.txt")
        self.filepath = Observable(
            "c:/users/public/documents/spyeworks/content/")
        self.mac = Observable("00:00:00:00:00:00")
        self.ipaddy = Observable("192.168.1.110")
        self.active = Observable("active")
        self.activedelay = Observable("10")
        self.idle = Observable("idle")
        self.idledelay = Observable("10")
        self.daysLabel = Observable("Daily")
        self.onHour = Observable(7)
        self.onMin = Observable(0)
        self.offHour = Observable(19)
        self.offMin = Observable(0)
        self.UpdateTextFile()
        logger.warn("spyeconfig.txt created with default values.")
    else:
        logger.info("Parsing spyeconfig.txt...")
        self.filepath = Observable(f.readline()[:-1])
        self.mac = Observable(f.readline()[:-1])
        # self.ipaddy = Observable(find_mac_on_network(self.mac.get()))
        self.ipaddy = Observable('0.0.0.0')
        self.GetIP(self.mac.get())
        self.active = Observable(f.readline()[:-1])
        self.activedelay = Observable(f.readline()[:-1])
        self.idle = Observable(f.readline()[:-1])
        self.idledelay = Observable(f.readline()[:-1])
        self.daysLabel = Observable(f.readline()[:-1])
        self.onHour = Observable(int(f.readline()[:-1]))
        self.onMin = Observable(int(f.readline()[:-1]))
        self.offHour = Observable(int(f.readline()[:-1]))
        self.offMin = Observable(int(f.readline()[:-1]))
        logger.info("Parsing complete.")
        # close the file
        f.close()

    # add the sensor
    self.sensorstate = Sensor(23)
    # add the tv
    self.tv = Display('ttyUSB0')

    # Start the scheduler
    self.sched = Scheduler()
    self.sched.start()
    # set turn on and turn off times
    self.days = dayOptions[self.daysLabel.get()]
    self.DisplayOnJob = self.sched.add_cron_job(
        self.displayPowerOn, day_of_week=self.days,
        hour=str(self.onHour.get()), minute=str(self.onMin.get()))
    self.DisplayOffJob = self.sched.add_cron_job(
        self.displayPowerOff, day_of_week=self.days,
        hour=str(self.offHour.get()), minute=str(self.offMin.get()))

    # add the spyeworks player
    self.spyeworks = Spyeworks(self.ipaddy.get(), self.filepath.get(),
                               self.active.get(), self.idle.get())
    # setup spyeworks callbacks
    self.ipaddy.addCallback(self.updateSpyeworksIP)
    self.filepath.addCallback(self.updateSpyeworksFilepath)
    self.active.addCallback(self.updateSpyeworksActive)
    self.idle.addCallback(self.updateSpyeworksIdle)
import logging
from datetime import datetime

from django.core.exceptions import ValidationError
from django.core.mail import send_mail
from django.db.models.signals import post_save
from django.dispatch import receiver
from apscheduler.scheduler import Scheduler

from api.models import User

logging.basicConfig()
cron = Scheduler()

@cron.interval_schedule(minutes=5)
def check_boards():
    from api.models import Building, User
    board_list = []
    for building in Building.objects.all():
        message = ''
        for board in building.board_set.all():
            if (board.last_request and board.status() == False and board.last_notification
                    and board.last_request > board.last_notification):
                # i.e. "Screen %s has not been online since %s."
                message += 'Bildschirm %s ist seit %s nicht mehr online.\n' % (
                    board.title, board.last_request)
                board.last_notification = board.last_request
                board.save()
### Libraries required by this Node
import logging
import atexit
from apscheduler.scheduler import Scheduler
import re
import itertools
from datetime import date

### Parameters used by this Node
param_schedule = Parameter('''{
  "title": "Schedule",
  "group": "Schedule",
  "schema": {
    "type": "array", "title": "Schedule", "required": false,
    "items": {
      "type": "object", "required": false,
      "properties": {
        "cron":   { "type": "string", "format": "cron", "required": true, "title": "Cron",
                    "desc": "Format: <minute> <hour> <day> <month> <day of week>" },
        "signal": { "type": "string", "required": true, "title": "Signal", "format": "event" },
        "except": { "type": "array", "required": false, "title": "Except",
                    "items": { "type": "object", "required": false,
                               "properties": { "date": { "type": "string", "format": "date",
                                                         "title": "Date", "required": false } } } },
        "note":   { "type": "string", "format": "long", "required": false, "title": "Notes",
                    "desc": "Schedule notes" }
      }
    }
  }
}''')

logging.basicConfig()

# 10 second grace time on jobs
sched = Scheduler(misfire_grace_time=10)

def cleanup():
    sched.shutdown()

atexit.register(cleanup)

_split_re = re.compile(r"\s+")
_cron_re = re.compile(r"^(?:[0-9-,*/]+\s){4}[0-9-,*/]+$")
_sched_seq = ('minute', 'hour', 'day', 'month', 'day_of_week')

### Local events this Node provides
local_event_AllOn = LocalEvent('{ "title" : "AllOn", "group" : "Input" }')
local_event_AllOff = LocalEvent('{ "title" : "AllOff", "group" : "Input" }')
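The schema above documents the cron field as "<minute> <hour> <day> <month> <day of week>". The snippet does not show how a schedule entry is turned into a job, so the following is only a sketch of how the _cron_re / _split_re / _sched_seq helpers could map such a string onto add_cron_job (the function name and its wiring are assumptions, not code from the original node):

# Sketch only: validate a five-field cron string and hand it to APScheduler
# using the helpers defined above. 'handler' is whatever callable should fire.
def add_schedule_entry(cron, handler):
    cron = cron.strip()
    if not _cron_re.match(cron):
        raise ValueError('bad cron expression: %s' % cron)
    fields = dict(zip(_sched_seq, _split_re.split(cron)))
    return sched.add_cron_job(handler, **fields)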
    return None, None

def cronTask():
    logger.info('enter task')
    dbFile = 'orderbook_' + datetime.now().strftime("%Y-%m-%d") + '.db'
    conn = sqlite3.connect(dbFile)
    cursor = conn.cursor()
    cursor.execute(CREATE_SQL)
    pair = 'ETC_USDT'
    askBook1, bidBook1 = OKEXTask(pair)
    askBook2, bidBook2 = poloniex(pair)
    lst = []
    ts = int(time.time() * 1000)
    lst.append((pair, askBook1, bidBook1, ts, 'okex'))
    lst.append((pair, askBook2, bidBook2, ts, 'poloniex'))
    cursor.executemany(INSERT_SQL, lst)
    cursor.connection.commit()
    conn.close()
    logger.info('end task')

if __name__ == '__main__':
    sched = Scheduler(standalone=True, misfire_grace_time=5)
    sched.add_interval_job(cronTask, seconds=5, coalesce=True)
    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        pass
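CREATE_SQL and INSERT_SQL are referenced but not shown in this excerpt. A plausible pair of statements, inferred only from the five-element tuples inserted above (the table and column names below are hypothetical, not from the original project):

# Hypothetical schema matching the (pair, ask, bid, ts, exchange) tuples above.
CREATE_SQL = '''CREATE TABLE IF NOT EXISTS orderbook (
    pair TEXT, ask_book TEXT, bid_book TEXT, ts INTEGER, exchange TEXT)'''
INSERT_SQL = ('INSERT INTO orderbook (pair, ask_book, bid_book, ts, exchange) '
              'VALUES (?, ?, ?, ?, ?)')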
import logging

from apscheduler.scheduler import Scheduler
#from apscheduler.jobstores.sqlalchemy_store import SQLAlchemyJobStore
#from precious.utils import get_db_uri

from tasks import *

logger = logging.getLogger("scheduler")

scheduler = Scheduler(standalone=True, misfire_grace_time=10, coalesce=False)
#scheduler.add_jobstore(SQLAlchemyJobStore(get_db_uri()), 'file')

scheduler.add_interval_job(check_remote_git_revision, seconds=30)
scheduler.add_interval_job(check_should_buid, minutes=30)
scheduler.add_interval_job(check_project_buid_web, seconds=10)
def run(self, input, sender, channel):
    regex = r'(.*)\s%s\s+(\S*)' % self.keyword
    method_checker = re.match(regex, input)
    response = 'default'
    if(method_checker.group(2) == 'archive'):
        """ banana:: slack archive """
        self.archive()
        response = 'Slack Archived!'
    elif(method_checker.group(2) == 'cron'):
        """ banana:: slack cron """
        sched = Scheduler()
        sched.add_cron_job(self.archive, hour=1, minute=0)
        sched.start()
        response = 'Started cron'
    elif(method_checker.group(2) == 'getArchive'):
        """ banana:: slack getArchive """
        getparams = {'token': self.slack_token}
        req = requests.get('https://slack.com/api/channels.list', params=getparams, verify=False)
        channellist = json.loads(req.content)
        path = "archive/"
        response = ''
        f = ''
        for channelobj in channellist['channels']:
            channelid = channelobj['id']
            if channelid == channel:
                channelname = channelobj['name']
                try:
                    with open(path + channelname + '-ch.log', 'r') as f:
                        data = f.readlines()
                        for dataitem in data:
                            response = response + dataitem + '\n'
                except IOError:
                    response = 'Not yet archived.'
                    return response
                postparams = {"content": response}
                getparams = {"token": self.slack_token, "channels": channel,
                             "filetype": "txt", "title": channelname + ' logs'}
                req = requests.post('https://slack.com/api/files.upload',
                                    params=getparams, data=postparams, verify=False)
                response = 'Returning archives'
        getparams = {'token': self.slack_token}
        req = requests.get('https://slack.com/api/groups.list', params=getparams, verify=False)
        grouplist = json.loads(req.content)
        for group in grouplist['groups']:
            groupid = group['id']
            if groupid == channel:
                channelname = group['name']
                try:
                    with open(path + channelname + '-pg.log', 'r') as f:
                        data = f.readlines()
                        for dataitem in data:
                            response = response + dataitem + '\n'
                except IOError:
                    response = 'Not yet archived.'
                    return response
                postparams = {"content": response}
                getparams = {"token": self.slack_token, "channels": channel,
                             "filetype": "txt", "title": channelname + ' logs'}
                req = requests.post('https://slack.com/api/files.upload',
                                    params=getparams, data=postparams, verify=False)
                response = 'Returning archives'
    return response
from django.conf.urls import url
from . import views
from apscheduler.scheduler import Scheduler
from tutorial.views import Monitor

sched = Scheduler()  # instantiate the scheduler (fixed pattern)

@sched.interval_schedule(seconds=120)  # decorator; seconds=120 means this function runs once every 2 minutes
def mytask():
    Monitor()

sched.start()  # start the scheduler

urlpatterns = [
    # /tutorial
    url('signin', views.sign_in, name='signin'),
    url('callback', views.callback, name='callback'),
    url('signout', views.sign_out, name='signout'),
    url(r"^calendar(?P<pIndex>[0-9]*)/$", views.calendar, name='calendar'),
    url('', views.home, name='home'),
]
def __init__(self, **kwargs):
    self.schedudler = Scheduler(daemonic=kwargs['daemonic'])
def setup(self):
    self.scheduler = Scheduler()
def __init__(self):
    self._sched = Scheduler()
    self._sched.start()
    self._jobs = {}
def setup(self):
    self.scheduler = Scheduler()
    self.scheduler.start()
def _clear_old_exceptions():
    print("Clearing out exceptions that have an expired TTL...")
    clear_old_exceptions()
    print("Completed clearing out exceptions that have an expired TTL.")

pool = ThreadPool(
    core_threads=app.config.get('CORE_THREADS', 25),
    max_threads=app.config.get('MAX_THREADS', 30),
    keepalive=0
)

scheduler = Scheduler(
    standalone=True,
    threadpool=pool,
    coalesce=True,
    misfire_grace_time=30
)

def setup_scheduler():
    """Sets up the APScheduler"""
    log = logging.getLogger('apscheduler')

    try:
        accounts = Account.query.filter(Account.third_party == False).filter(Account.active == True).all()  # noqa
        accounts = [account.name for account in accounts]
        for account in accounts:
            print("Scheduler adding account {}".format(account))
            rep = Reporter(accounts=[account])
            for period in rep.get_intervals(account):
LOG_LIST = []
PORT = None
DATABASE = None
INIT_LOCK = threading.Lock()
__INITIALIZED__ = False
DEVELOPMENT = False
WEBROOT = ''
rovlogger = None
pandoraplayer = None
SERVER = None
HOST = '0.0.0.0'
KIOSK = False
DATA_DIR = None
SCRIPT_DIR = None
THREADS = []
SCHEDULE = Scheduler()
UPDATER = True
CURRENT_COMMIT = None
LATEST_COMMIT = None
COMMITS_BEHIND = 0
COMMITS_COMPARE_URL = ''
FIRST_RUN = 0

def initialize():
    """Init function for this module"""
    with INIT_LOCK:
        global __INITIALIZED__, app, FULL_PATH, RUNDIR, ARGS, DAEMON, PIDFILE, VERBOSE, LOG_FILE, LOG_DIR, rovlogger, \
            PORT, SERVER, DATABASE, AUTH, \
            UPDATER, CURRENT_COMMIT, LATEST_COMMIT, COMMITS_BEHIND, COMMITS_COMPARE_URL, USE_GIT, WEBROOT, HOST, KIOSK, DATA_DIR, SCRIPT_DIR, \
#!/bin/env python2.7
# coding:utf-8
##### code by mik  ####
##### qiangchuan.wu ####
from utils_getRemoteuser import getUserInfo
from conf import conf
from apscheduler.scheduler import Scheduler

cron_py = Scheduler(daemonic=False)

# set the update interval directly
@cron_py.interval_schedule(seconds=conf.diskInterval)
# cron_py.cron_schedule(hours='02,08,12,16,22')
def disk():
    getUserInfo()

cron_py.start()
from datetime import datetime
from apscheduler.scheduler import Scheduler

def tick():
    print('Tick! The time is: %s' % datetime.now())

if __name__ == '__main__':
    scheduler = Scheduler(standalone=True)
    scheduler.add_interval_job(tick, seconds=3)
    print('Press Ctrl+C to exit')
    try:
        scheduler.start()
    except (KeyboardInterrupt, SystemExit):
        pass
def __init__(self, db_path='sqlite:///scheduler.db'):
    self.scheduler = Scheduler()
    self.scheduler.add_jobstore(SQLAlchemyJobStore(url=db_path), 'default')
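Jobs added to this scheduler can be routed into the SQLAlchemy-backed store by name so they survive restarts. A minimal sketch of a companion method, assumed to sit on the same class as the __init__ above (the method name, job function and interval are illustrative only):

# Hypothetical helper on the same class: persist an hourly job in the
# 'default' jobstore registered in __init__.
def add_hourly_job(self, func):
    return self.scheduler.add_interval_job(func, hours=1, jobstore='default')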
    # get valid user
    cursor.execute(
        "SELECT ip_addr FROM ip WHERE title = %s and ip_addr > %s ORDER BY ip_addr LIMIT 1;",
        (row[0], current_user))
    valid_user = cursor.fetchone()
    # update current user value to valid user
    cursor.execute(
        "UPDATE stories SET current_ip_addr = %s WHERE title = %s",
        (valid_user, row[0]))
    db.commit()
    db.close()

sched = Scheduler(daemon=True)
sched.start()
sched.add_interval_job(time_out_user, minutes=10)

@app.route('/story/start', methods=["POST"])
def start_story():
    user_ip = request.remote_addr
    if request.headers['Content-Type'] == 'application/json':
        arguments = request.get_json()
        title = arguments.get("title")
        text = arguments.get("text")
        if check_grammar_bot(text) == True:
def __init__(self, **kwargs):
    """
    Args:
        weeks: number of weeks to wait (DEFAULT: 0).
        days: number of days to wait (DEFAULT: 0).
        hours: number of hours to wait (DEFAULT: 0).
        minutes: number of minutes to wait (DEFAULT: 0).
        seconds: number of seconds to wait (DEFAULT: 0).
        mailto: The scheduler will send an email to `mailto` every `remindme_s` seconds.
            (DEFAULT: None i.e. not used).
        verbose: (int) verbosity level. (DEFAULT: 0)
        use_dynamic_manager: "yes" if the :class:`TaskManager` must be re-initialized from file
            before launching the jobs. (DEFAULT: "no")
        max_njobs_inqueue: Limit on the number of jobs that can be present in the queue. (DEFAULT: 200)
        remindme_s: The scheduler will send an email to the user specified by `mailto` every
            `remindme_s` seconds. (int, DEFAULT: 1 day).
        max_num_pyexcs: The scheduler will exit if the number of python exceptions is > max_num_pyexcs
            (int, DEFAULT: 0)
        max_num_abierrs: The scheduler will exit if the number of errored tasks is > max_num_abierrs
            (int, DEFAULT: 0)
        safety_ratio: The scheduler will exit if the number of jobs launched becomes greater than
            `safety_ratio` * total_number_of_tasks_in_flow. (int, DEFAULT: 5)
        max_nlaunches: Maximum number of tasks launched in a single iteration of the scheduler.
            (DEFAULT: -1 i.e. no limit)
        debug: Debug level. Use 0 for production (int, DEFAULT: 0)
        fix_qcritical: "yes" if the launcher should try to fix QCritical Errors (DEFAULT: "yes")
        rmflow: If "yes", the scheduler will remove the flow directory if the calculation
            completed successfully. (DEFAULT: "no")
        killjobs_if_errors: "yes" if the scheduler should try to kill all the running jobs
            before exiting due to an error. (DEFAULT: "yes")
    """
    # Options passed to the scheduler.
    self.sched_options = AttrDict(
        weeks=kwargs.pop("weeks", 0),
        days=kwargs.pop("days", 0),
        hours=kwargs.pop("hours", 0),
        minutes=kwargs.pop("minutes", 0),
        seconds=kwargs.pop("seconds", 0),
        #start_date=kwargs.pop("start_date", None),
    )
    if all(not v for v in self.sched_options.values()):
        raise self.Error("Wrong set of options passed to the scheduler.")

    self.mailto = kwargs.pop("mailto", None)
    self.verbose = int(kwargs.pop("verbose", 0))
    self.use_dynamic_manager = as_bool(kwargs.pop("use_dynamic_manager", False))
    self.max_njobs_inqueue = kwargs.pop("max_njobs_inqueue", 200)
    self.max_ncores_used = kwargs.pop("max_ncores_used", None)
    self.contact_resource_manager = as_bool(kwargs.pop("contact_resource_manager", False))

    self.remindme_s = float(kwargs.pop("remindme_s", 1 * 24 * 3600))
    self.max_num_pyexcs = int(kwargs.pop("max_num_pyexcs", 0))
    self.max_num_abierrs = int(kwargs.pop("max_num_abierrs", 0))
    self.safety_ratio = int(kwargs.pop("safety_ratio", 5))
    #self.max_etime_s = kwargs.pop("max_etime_s", )
    self.max_nlaunches = kwargs.pop("max_nlaunches", -1)
    self.debug = kwargs.pop("debug", 0)
    self.fix_qcritical = as_bool(kwargs.pop("fix_qcritical", True))
    self.rmflow = as_bool(kwargs.pop("rmflow", False))
    self.killjobs_if_errors = as_bool(kwargs.pop("killjobs_if_errors", True))

    self.customer_service_dir = kwargs.pop("customer_service_dir", None)
    if self.customer_service_dir is not None:
        self.customer_service_dir = Directory(self.customer_service_dir)
        self._validate_customer_service()

    if kwargs:
        raise self.Error("Unknown arguments %s" % kwargs)

    if not has_apscheduler:
        raise RuntimeError("Install apscheduler with pip")

    if has_sched_v3:
        logger.warning("Using scheduler v>=3.0.0")
        from apscheduler.schedulers.blocking import BlockingScheduler
        self.sched = BlockingScheduler()
    else:
        from apscheduler.scheduler import Scheduler
        self.sched = Scheduler(standalone=True)

    self.nlaunch = 0
    self.num_reminders = 1

    # Used to keep track of the exceptions raised while the scheduler is running
    self.exceptions = deque(maxlen=self.max_num_pyexcs + 10)

    # Used to push additional info during the execution.
    self.history = deque(maxlen=100)
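The options documented in the docstring are plain keyword arguments consumed by this constructor. A hypothetical instantiation of the enclosing class (called PyFlowScheduler here only as an assumption; the excerpt does not show the class name or these values):

# Hypothetical usage; class name and option values are assumptions.
sched = PyFlowScheduler(hours=6, mailto="user@example.com",
                        max_njobs_inqueue=100, rmflow="yes")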
from io import StringIO

from apscheduler.scheduler import Scheduler
from django.conf import settings

schedudler = Scheduler(daemonic=False)
if settings.SCHEDULE_START:
    schedudler.start()

def startSchedule():
    schedudler.start()

def addScheduler(jobname, crontab, jobfunc, jobfuncparamslist):
    minute, hour, day, month, dayofweek = tuple(crontab.strip().split(" "))
    schedudler.add_cron_job(jobfunc, minute=minute, hour=hour, day=day, month=month,
                            day_of_week=dayofweek, name=jobname, args=jobfuncparamslist)

def removeScheduler(jobname):
    try:
        jobs = schedudler.get_jobs()
        for job in jobs:
            if job.name == jobname:
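Because addScheduler splits the crontab argument on single spaces, it expects exactly five fields in minute hour day month day-of-week order. A small usage sketch (the job name, cron string and job function below are made up for illustration):

# Hypothetical call: run nightly_sync("books") at 02:30 every day.
def nightly_sync(label):
    print("running %s" % label)

addScheduler("nightly_sync", "30 2 * * *", nightly_sync, ["books"])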
def start_sch(JobName, projectName, version, user, password, Environment, Day_of_Week, Hours, Minutes):
    s = Scheduler()
    s.add_cron_job(call_mat, args=[JobName, projectName, version, user, password, Environment],
                   day_of_week=Day_of_Week, hour=Hours, minute=Minutes)
    s.start()
def __prepare_schedule(self):
    self.__load_programs()
    self.__scheduler = Scheduler(timezone=pytz.utc)
    self.__scheduled_jobs = dict()
def tag_check_task(tag, min=1):
    cron = Scheduler(daemon=True)
    cron.start()

    @cron.interval_schedule(minutes=min)
    def job_function():
        now_day = datetime.now().date()
        tag_db = Tags().get_or_create(tag, now_day)
        tag_list = tag_filter(tag, 100)
        tag_list.reverse()
        new_tag = {}
        for tags in tag_list:
            _create_time = datetime.strptime(
                tags['created'], '%Y-%m-%dT%H:%M:%S') + timedelta(hours=3)
            if now_day.day == _create_time.day:
                new_tag[tags['id']] = tags
        post_list = list(new_tag.keys())
        post_list.sort()
        if not tag_db.last:
            tag_db.last = post_list[-1]
            post_ids = tag_db.last
            db.session.add(tag_db)
            db.session.commit()
        else:
            try:
                post_list = post_list[post_list.index(tag_db.last) + 1:]
            except:
                last_post = Posts().query.order_by('-id').first()
                tag_db.last = None
                db.session.delete(last_post)
                db.session.add(tag_db)
                db.session.commit()
                last_tag_get_update()
                return
        if not len(post_list):
            return
        for post_db in post_list:
            if not Posts.query.filter_by(post_id=post_db, tag=tag_db).first() or False:
                _post = Posts()
                _post.post_id = post_db
                _post.author = new_tag[post_db]['author']
                _post.title = new_tag[post_db]['title']
                _post.url = new_tag[post_db]['url']
                _post.tag = tag_db
                _post.date = datetime.strptime(
                    new_tag[post_db]['created'], '%Y-%m-%dT%H:%M:%S') + timedelta(hours=3)
                db.session.add(_post)
                db.session.commit()
                tag_db.last = post_db
                db.session.add(tag_db)
                db.session.commit()

    atexit.register(lambda: cron.shutdown(wait=False))
        onlyfiles = []
        spath = "static/images/SnapView"
        sbasefolder = "images/SnapView/"
        nfdate = datetime.datetime.today().strftime("%Y-%m-%d")
    else:
        spath = "static/images/SnapView/%s" % sdate
        sbasefolder = "images/SnapView/%s" % sdate
        nfdate = datetime.datetime.strptime(str(sdate), "%Y%m%d").strftime('%Y-%m-%d')
        onlyfiles = get_detections(sdate)
    sact = getActivity()
    sres = getTop()
    return render_template('index.html', detections=sdate, files=onlyfiles, path=sbasefolder,
                           dtvalue=nfdate, activities=sact, recent=sres)

# fswebcam -d /dev/video0 -r 640x480 /opt/SnapView/test1.jpg

with app.app_context():
    setup_app()
    mail = Mail(app)

global sched
sched = Scheduler()
sched.add_interval_job(autoshot, seconds=60)
sched.start()
def start(self):
    self.scheduler = Scheduler()
    self.scheduler.start()
    self.scheduler.add_interval_job(self.evaluate, seconds=CONST_TIME)