Example #1
def consumer_():
    """Consumer thread, tweets the book updates."""
    con_schedule = Scheduler(config.TIMETABLE_TWI)
    if config.BOT_TYPE == 'DesktopBot':
        bot = DesktopBot()
        bot.sign_in(auth.user, auth.password)
    else:
        bot = TwitterAPIBot()
        bot.sign_in()
    while True:
        if con_schedule.is_time():
            book = new_books.get()
            total_len = len(book[0]) + len(book[1]) + 1
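            # Trim the title so "<title> <url>" fits within the 140-character tweet limit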
            if total_len > 140:
                book = (book[0][:-(total_len - 140)], book[1])
            bot.tweet(book[0] + " " + book[1])
            try:
                print " [ consumer@%s ] Tweet: %s" % (strftime("%H:%M:%S, %d/%m/%y"),
                                                      (book[0] + " " + book[1]))
            except:
                print " [ consumer@%s ] Tweet." % strftime("%H:%M:%S, %d/%m/%y")
            with to_mark_lock:
                to_mark.append(book[1])
                to_mark_lock.notify()
            sleep(config.TW_FREQ)
        else:
            sleep(60)
Example #2
def nda_loop():
	ndutil.setTimezone()

	ndlCom = NdlCom('nDroid-Executer', '127.0.0.1', 12322)
	ndlCom.doCom('Initiating')

	ndlCom.doCom('Loading Config')
	cnfManager = CnfManager()
	cnfManager.load('./nde.cnf')
	cnfData = cnfManager.getCnfData()

	nsQueue = Queue()
	nsLock = threading.Lock()
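	# nsQueue and nsLock are shared between the UDP NetManager and the Scheduler thread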

	netManager = NetManager()
	netManager.setNdlCom(ndlCom)
	netManager.setNsQueue(nsQueue, nsLock)

	ndlCom.doCom('Starting Threads')
	scheduler = Scheduler([ndlCom, nsQueue, nsLock], 'Scheduler')

	scheduler.start()

	reactor.listenUDP(cnfData['comPort'], netManager)
	ndlCom.doCom('Listening Com Port')
	reactor.run()

	scheduler.join()
Example #3
    def testRunEnterDaysLessThanOne(self):
        schedulefiledata = FileTester.read_file(
            FileTester.test_enter_lessthan1
        )
        tempschedulefile = FileTester.write_to_temp_file(
            FileTester.test_enter_lessthan1,
            schedulefiledata
        )
        schedulefile = ScheduleFile(tempschedulefile)

        templedgerfile = FileTester.create_temp_file('')
        ledgerfile = LedgerFile(templedgerfile)

        scheduler = Scheduler(ledgerfile, schedulefile)
        scheduler.run()

        ledgerfile.write_file()
        schedulefile.write_file()

        schedulefile_actual = FileTester.read_file(tempschedulefile)
        schedulefile_expected = FileTester.read_file(
            FileTester.test_enter_lessthan1
        )

        os.remove(templedgerfile)
        os.remove(tempschedulefile)

        self.assertEqual(
            schedulefile_expected,
            schedulefile_actual
        )
Example #4
def run_scheduler():
    from scheduler import Scheduler
    scheduler = Scheduler(taskdb=get_taskdb(), projectdb=get_projectdb(),
            newtask_queue=newtask_queue, status_queue=status_queue, out_queue=scheduler2fetcher)

    run_in_thread(scheduler.xmlrpc_run, port=scheduler_xmlrpc_port)
    scheduler.run()
Example #5
 def save(self, args):
     if args["oldName"] == "":
         key = args["name"]
         av = Availability()
     else:
         key = args["oldName"]
         av = Availability.find(self.env.get_db_cnx(), key)
     av.name = args["name"]
     av.validFrom = args["validFrom"]
     av.validUntil = args["validUntil"]
     av.resources = args["resources"]
     av.weekdays = ""
     self.appendWeekdays(av, args, "1")
     self.appendWeekdays(av, args, "2")
     self.appendWeekdays(av, args, "3")
     self.appendWeekdays(av, args, "4")
     self.appendWeekdays(av, args, "5")
     self.appendWeekdays(av, args, "6")
     self.appendWeekdays(av, args, "0")
     av.workFrom = args["workFrom"]
     av.workUntil = args["workUntil"]
     av.save(self.env.get_db_cnx(), key)
     scheduler = Scheduler()
     scheduler.schedule(self.env, self.config);
     return 'admin_availability.html', {"availabilities" : self.availabilities(), "view": "list"}
Example #6
def producer_():
    """Producer thread, checks the book pages."""
    pro_schedule = Scheduler(config.TIMETABLE_SCRA)
    crawlers = []
    for subcrawler in BookCrawler.__subclasses__():
        crawlers.append(BookCrawler.factory(subcrawler.__name__))
    while True:
        if pro_schedule.is_time():
            for crawler in crawlers:
                books = crawler.get_books()
                for book in books.keys():
                    if is_new_book(book):
                        # url of web page, book name, book url
                        insert_book(crawler.get_url(), books[book], book)
                        try:
                            print " [ producer@%s ] New book: %s" % (strftime("%H:%M:%S, %d/%m/%y"),
                                                                     books[book] + \
                                                                     " - " + \
                                                                     crawler.get_url() + \
                                                                     book)
                        except:
                            print " [ producer@%s ] New book." % strftime("%H:%M:%S, %d/%m/%y")
                        new_books.put((books[book], crawler.get_url() + book))
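                        # Wait until the consumer thread has pushed the tweeted URL onto to_mark, then record it as tweeted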
                        with to_mark_lock:
                            while not to_mark:
                                to_mark_lock.wait()
                            mark = to_mark.pop(0)
                            mark_tweeted(mark)
                        sleep(1)
            # Wait
            sleep(config.S_FREQ)
        else:
            sleep(60)
Example #7
    def bootstrap_scheduler(self):
        bootstrapped = False
        try:
            self.save("plugin_modules_library", self._plugin_modules_library)
            Scheduler.clear_locks(self)
            self.scheduler = Scheduler()

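            # Register each decorated periodic and random task using the metadata attached to the plugin function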
            for plugin_info, fn, function_name in self.periodic_tasks:
                meta = fn.will_fn_metadata
                self.add_periodic_task(
                    plugin_info["full_module_name"],
                    plugin_info["name"],
                    function_name,
                    meta["sched_args"],
                    meta["sched_kwargs"],
                    meta["function_name"],
                )
            for plugin_info, fn, function_name in self.random_tasks:
                meta = fn.will_fn_metadata
                self.add_random_tasks(
                    plugin_info["full_module_name"],
                    plugin_info["name"],
                    function_name,
                    meta["start_hour"],
                    meta["end_hour"],
                    meta["day_of_week"],
                    meta["num_times_per_day"]
                )
            bootstrapped = True
        except Exception, e:
            self.startup_error("Error bootstrapping scheduler", e)
Example #8
    def test_scheduler_bind_config(self):
        scheduler = Scheduler()

        config = Config('''
                        timer test_timer {
                            interval: 2
                            event: test_timer_event
                        }

                        process demo_add {
                            trigger: op1_value, op2_value
                            script: {
                                sum := op1_value + op2_value
                                emit sum_value(sum)
                            }
                        }

                        process gen_op1 {
                            trigger: test_timer_event
                            script: {
                                emit op1_value(2)
                            }
                        }

                        process gen_op2 {
                            trigger: test_timer_event
                            script: {
                                emit op2_value(3)
                            }
                        }''')

        scheduler.bind(config)
Example #9
def scheduler(ctx):
    """Run Scheduler."""
    from fulmar.scheduler.projectdb import projectdb
    from fulmar.message_queue import newtask_queue, ready_queue, cron_queue
    from scheduler import Scheduler
    scheduler = Scheduler(newtask_queue, ready_queue, cron_queue, projectdb)
    scheduler.run()
Example #10
class Executor(object):
	def __init__(self, someprocess=None, logit=True, ticks=sys.maxint):
		super(Executor,self).__init__()
		if someprocess:
			self.process = someprocess
		else:
			if ticks < sys.maxint:
				ticks += 1
			self.process = Scheduler(ticks=ticks, name="")
		self.logit = logit
		self.linker = Linker()

	def schedule(self, components):
		if type(components) is not list:
			components = components['components']

		self.process.send(('activate',components), 'control')
	
	def kill(self, components):
		if type(components) is not list:
			components = components['components']

		self.process.send(('deactivate',components), 'control')

	def build(self, links):
		self.graph = self.linker.link(links)
		self.schedule(self.graph)

	def run(self):
		for _ in self.process.run():
			if self.logit:
				print utils.COLOR.cyan,
				print "\tExecd: ", _,
				print utils.COLOR.white
Example #11
def main():
    auth = Conf("bot.conf").getSection("auth")
    if not "login" in auth or not "password" in auth:
        print("Configuration not entirely filled.. Try again.")
        sys.exit(1)
    xmpp = Jarvis(auth["login"], auth["password"], auth["room"])
    xmpp.ssl_version = ssl.PROTOCOL_SSLv3
    xmpp.register_plugin('xep_0030') # Service Discovery
    xmpp.register_plugin('xep_0199')
    xmpp.register_plugin('xep_0085') # status message
    xmpp.register_plugin('xep_0071') # Xhtml
    xmpp.register_plugin('xep_0045') # multi user chan
    xmpp.register_plugin('xep_0203')  # XMPP Delayed messages
    xmpp.register_plugin('xep_0249')  # XMPP direct MUC invites
    e = Events(xmpp)
    e.start()
    s = Scheduler(xmpp)
    s.start()
    t = ClientTask(xmpp)
    t.start()

    if xmpp.connect():
        xmpp.process(block=True)
    else:
        print('Unable to connect')
Example #12
	def test_block(self):
		# Test that packets are not generated when the link is blocked.
		
		scheduler = Scheduler()
		system = SpiNNakerSystem(scheduler, 10)
		
		link = DeadLink(scheduler)
		
		# Uniform generator node
		tg = SpiNNakerTrafficGenerator( scheduler
		                              , system
		                              , 1
		                              , 0.1
		                              , link
		                              , link
		                              )
		tg.set_mesh_dimensions(100,100)
		tg.set_mesh_position(50,50)
		
		it = scheduler.run()
		
		# Perform 1000 cycles
		while it.next() < 2000 and tg.counters["generator_cycles"] < 1000:
			pass
		
		# Should have done 1000 cycles
		self.assertEqual(tg.counters["generator_cycles"], 1000)
		
		# We should have tried to send some number of packets that isn't all the
		# time and not never (well, in theory we might not but hey, if this is going
		# wrong you've got a bad day on your hands).
		self.assertTrue(10 < tg.counters["generator_dropped_packets"] < 1000)
		
		# None should have gone out
		self.assertEqual(tg.counters["generator_injected_packets"], 0)
Example #13
    def bootstrap_scheduler(self):
        print "Bootstrapping scheduler..."
        bootstrapped = False
        try:

            self.save("plugin_modules_library", self._plugin_modules_library)
            Scheduler.clear_locks(self)
            self.scheduler = Scheduler()

            for plugin_info, fn, function_name in self.periodic_tasks:
                self.add_periodic_task(
                    plugin_info["full_module_name"],
                    plugin_info["name"],
                    function_name,
                    fn.sched_args,
                    fn.sched_kwargs,
                    fn.function_name,
                )
            for plugin_info, fn, function_name in self.random_tasks:
                self.add_random_tasks(
                    plugin_info["full_module_name"],
                    plugin_info["name"],
                    function_name,
                    fn.start_hour,
                    fn.end_hour,
                    fn.day_of_week,
                    fn.num_times_per_day,
                )
            bootstrapped = True
        except Exception, e:
            self.startup_error("Error bootstrapping scheduler", e)
Example #14
	def test_do_now(self):
		# If I schedule something now, it happens in the zeroth clock and then it
		# exits
		s = Scheduler()
		s.do_now((lambda: None))
		iterator = s.run()
		self.assertEqual(iterator.next(), 0)
		self.assertRaises(StopIteration, iterator.next)
Example #15
class SpiNNaker101Tests(unittest.TestCase):
	"""
	Tests a chip in a very vague way...
	"""
	
	def setUp(self):
		# Test that packets are generated appropriately when distributing with a
		# uniform distribution.
		
		self.scheduler = Scheduler()
		self.system = SpiNNakerSystem(self.scheduler, 50000000)
		
		self.chip = SpiNNaker101( self.scheduler
		                        , self.system
		                        , 4 # injection_buffer_length
		                        , 10 # router_period
		                        , 300000000
		                        , 600000000
		                        , 1 # core_period
		                        , 1.0
		                        , None
		                        )
	
	
	def test_loopback(self):
		it = self.scheduler.run()
		
		# Perform 1000 cycles
		while it.next() < 4001:
			pass
		
		# Should have allowed all but 4 packets which are still in the queue
		self.assertEqual(
			self.chip.traffic_generator.counters["generator_injected_packets"] -
			self.chip.traffic_generator.counters["generator_packets_received"],
			4)
		
		# Should have routed one packet per ten cycles...
		self.assertEqual(self.chip.router.counters["packets_routed"], 400)
	
	
	def test_external(self):
		# Put the chip in a large mesh so stuff ends up there
		self.chip.set_mesh_dimensions(1000,1000)
		
		it = self.scheduler.run()
		
		# Perform 1000 cycles
		while it.next() < 4001:
			pass
		
		# Should have allowed very few packets through
		self.assertTrue(
			self.chip.traffic_generator.counters["generator_injected_packets"] < 10)
		
		# The router should be very frustrated
		self.assertTrue(self.chip.router.counters["router_blocked_cycles"] > 300)
Example #16
	def test_silistix_link(self):
		s = Scheduler()
		sl = SilistixLink(s, 10, 5)
		
		# A simple packet container
		class Packet(object):
			def __init__(self,data,length):
				self.data   = data
				self.length = length
		
		# Initially can send
		self.assertTrue(sl.can_send())
		self.assertFalse(sl.can_receive())
		
		sl.send(Packet(123,2))
		
		# Can't send after sending something
		self.assertFalse(sl.can_send())
		self.assertFalse(sl.can_receive())
		
		it = s.run()
		
		# Can't send or receive until send delay has elapsed
		while it.next() != 10*2 + 5*1:
			self.assertFalse(sl.can_send())
			self.assertFalse(sl.can_receive())
		
		# Can only receive once data is stable
		self.assertFalse(sl.can_send())
		self.assertTrue(sl.can_receive())
		
		# Can peek
		self.assertEqual(sl.peek().data, 123)
		self.assertFalse(sl.can_send())
		self.assertTrue(sl.can_receive())
		self.assertEqual(sl.peek().data, 123)
		self.assertFalse(sl.can_send())
		self.assertTrue(sl.can_receive())
		
		# Received data is correct
		self.assertEqual(sl.receive().data, 123)
		
		# Can't receive any more
		self.assertFalse(sl.can_send())
		self.assertFalse(sl.can_receive())
		
		# Can't send or receive until Acknowledge arrives
		while it.next() != 10*2 + 5*2:
			self.assertFalse(sl.can_send())
			self.assertFalse(sl.can_receive())
		
		# Can send once ack is back
		self.assertTrue(sl.can_send())
		self.assertFalse(sl.can_receive())
		
		# Nothing else got scheduled...
		self.assertRaises(StopIteration, it.next)
Example #17
File: job.py Project: wehu/pydv
 def cmd_done(cls, data):
     #print(data)
     agent_id = data['agent_id']
     cmd_spec = cls.running_cmds[agent_id]
     cmd_spec['exitcode'] = data['exitcode']
     cmd_spec['errmsg']   = data['errmsg']
     del cls.running_cmds[agent_id]
     cls.idle_agents.add(agent_id)
     Scheduler.wake(cls.agent_visitor[agent_id])
Example #18
	def test_dead_link(self):
		s = Scheduler()
		dl = DeadLink(s)
		
		# Can't do anything...
		self.assertFalse(dl.can_send())
		self.assertFalse(dl.can_receive())
		
		# Didn't schedule anything
		self.assertRaises(StopIteration, s.run().next)
Example #19
 def test_scheduler_insert_event(self):
     scheduler = Scheduler()
     
     t = time()
     while time() < t+1:
         self.assertEqual(scheduler.schedule(),'idle')
         sleep(0.05)
  
     event = Event(time(), 'test_event')
     self.assertEqual(scheduler.schedule([event]),event)
Example #20
    def test_start_stop(self):
        cp = Scheduler(
            [("odd_year", "* * * * * *")],
            [("even_year", "* * * * * */2")],
            start_year=1990, stop_year=1999
        )
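        # With start_year=1990 and stop_year=1999, a date in 2000 matches no rules (asserted below)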

        self.assertEqual(cp.get_matching_rules(datetime(1991, 12, 19, 18, 31))[0], "odd_year")
        self.assertEqual(cp.get_matching_rules(datetime(1992, 12, 19, 18, 31))[0], "even_year")
        self.assertEqual(cp.get_matching_rules(datetime(2000, 12, 19, 18, 31)), [])
Example #21
def run_scheduler(g=g):
    from scheduler import Scheduler
    scheduler = Scheduler(taskdb=g.taskdb, projectdb=g.projectdb, resultdb=g.resultdb,
            newtask_queue=g.newtask_queue, status_queue=g.status_queue,
            out_queue=g.scheduler2fetcher)
    if g.demo_mode:
        scheduler.INQUEUE_LIMIT = 1000

    run_in_thread(scheduler.xmlrpc_run, port=g.scheduler_xmlrpc_port, bind=g.webui_host)
    scheduler.run()
Example #22
 def update(self, nffg, delete = False):        
     session = Session().get_active_user_session_by_nf_fg_id(nffg.id, error_aware=True)
     Session().updateStatus(session.id, 'updating')
     
     # Get profile from session
     graphs_ref = Graph().getGraphs(session.id)
     if len(graphs_ref) > 1:
         # If the graph has been split, the smart update is not supported
         logging.warning("The graph has been split in various nffg, in this case the smart update is not supported.")
         self.delete(nffg.id)
     else:
         
         old_nffg = Graph().get_nffg(graphs_ref[0].id)
         logging.debug('NF-FG that has to be updated: '+old_nffg.getJSON())
         nffg.db_id = old_nffg.db_id
         
         # Get VNFs templates
         self.prepareNFFG(nffg)
 
         # Get the component adapter associated  to the node where the nffg was instantiated
         old_node = Node().getNode(Graph().getNodeID(graphs_ref[0].id))
         scheduler = Scheduler(old_nffg.db_id, self.user_data)
         orchestrator, new_node = scheduler.schedule(nffg)
         
         # If the orchestrator has to connect two graphs in different nodes,
         # the end-points must be characterized to allow a connection between nodes
         remote_nffgs_dict = self.analizeRemoteConnection(nffg, new_node)
         
         # If needed, update the remote graph
         self.updateRemoteGraph(remote_nffgs_dict)
         
         if new_node.id != old_node.id:
             logging.warning("The graph will be instantiated in a different node, in this case the smart update is not supported.")
             orchestrator.deinstantiateProfile(nffg, old_node)
             Graph().delete_session(session.id)
             Graph().addNFFG(nffg, session.id)
             Graph().setNodeID(graphs_ref[0].id, Node().getNodeFromDomainID(new_node.domain_id).id)
             try:
                 orchestrator.instantiateProfile(nffg, new_node)
             except Exception as ex:
                 logging.exception(ex)
                 Session().set_error(session.id)          
                 raise ex
         else:
             # Update the nffg
             try:
                 orchestrator.updateProfile(nffg, old_nffg, new_node)
             except Exception as ex:
                 logging.exception(ex)
                 Session().set_error(session.id)
                 raise ex
     
     Session().updateStatus(session.id, 'complete')
     Session().updateSessionNode(session.id, new_node.id, new_node.id)
     return session.id
Example #23
    def test_get_matching_rules(self):
        cp = Scheduler(
            [("open", "7:00 19:30 * * * *"), ("closed", "* 6 * * * *"), ("closed", "19:31 23:59 * * * *")],
            [("closed", "0:00 8:30 * * 6-7 *"), ("closed", "18:30 23:59 * * 6-7 *"), ("closed", "* * 24,25 12 * *"), ("closed", "* * 4 7 * *"), ("closed", "* * 5 4 * 2015")]
        )


        # Weekday
        self.assertEqual(cp.get_matching_rules(datetime(2014, 12, 19, 18, 31))[0], "open")

        # Weekday Night
        self.assertEqual(cp.get_matching_rules(datetime(2014, 12, 19, 19, 31))[0], "closed")

        # Weekend Morning
        self.assertEqual(cp.get_matching_rules(datetime(2014, 12, 20, 8, 0))[0], "closed")

        # Weekend Day
        self.assertEqual(cp.get_matching_rules(datetime(2014, 12, 20, 12, 0))[0], "open")

        # #Weekend Night
        self.assertEqual(cp.get_matching_rules(datetime(2014, 12, 20, 18, 31))[0], "closed")

        # Christmas (Thursday)
        self.assertEqual(cp.get_matching_rules(datetime(2014, 12, 25, 12, 0))[0], "closed")

        # Christmas Eve (Wednesday)
        self.assertEqual(cp.get_matching_rules(datetime(2014, 12, 24, 12, 0))[0], "closed")

        # Easter (Sunday)
        self.assertEqual(cp.get_matching_rules(datetime(2015, 4, 5, 12, 0))[0], "closed")
Example #24
def run():
    s = Scheduler()
    s.start()
    try:
        templates = "%s/views" % os.path.realpath(os.path.dirname(__file__))
        bottle.TEMPLATE_PATH.insert(0, templates)
        # bottle.debug(True)
        bottle.run(host='', port=6060) #, reloader=True)
    except KeyboardInterrupt:
        pass
    s.stop()
Example #25
class Thermostat(object):
    last_outside_fetch = -1
    outside_cache = None
    OUTSIDE_REFRESH_SECS = 60 * 15
    
    last_inside_fetch = -1
    inside_cache = None
    INSIDE_REFRESH_SECS = 30
    
    ## TODO: update last_inside_fetch on mutation so it updates without latency
    
    def __init__(self, imp_url, wunderground_url, password):
    	self.imp_url = imp_url
    	self.wunderground_url = wunderground_url
    	self.password = password
    	self.scheduler = Scheduler(self.set_temp)
    
    def therm_info(self):
    	if self.inside_cache == None or time.time() - self.last_inside_fetch > self.INSIDE_REFRESH_SECS:
    		print "Downloading temp from imp"
    		response = requests.get(self.imp_url + "/status")
    		self.inside_cache = response.json()
    		self.last_inside_fetch = time.time()
    		print "Got temp from imp"
    	return self.inside_cache
    
    def inside_temp(self):
    	return self.therm_info()['temp']
    
    def setpoint(self):
    	return self.therm_info()['setpoint']
    
    def heat_on(self):
    	return bool(self.therm_info()['heat'])
    
    def outside_temp(self):
    	if self.outside_cache == None or time.time() - self.last_outside_fetch > self.OUTSIDE_REFRESH_SECS:
    		print "Downloading temp from wunderground"
    		response = requests.get(self.wunderground_url)
    		self.last_outside_fetch = time.time()
    		self.outside_cache = response.json()['current_observation']['temp_f']
    
    	return self.outside_cache
    
    def set_override(self, temp, time_minutes):
    	print "Setting temperature to ", temp, "for", time_minutes
        if time_minutes == None:
            time_minutes = 60
    	self.scheduler.add_override(temp, time_minutes)
    	self.last_inside_fetch = -1
    
    def set_temp(self, temp):
        requests.post(self.imp_url + "/set", json = {"password": self.password, "temp": str(temp)})
Example #26
    def test_traverse_course_dependencies(self):
        geometry = Course('geometry')
        physics = Course('physics')
        algebra = Course('algebra')
        physics.add_prerequisite_courses(algebra)
        physics.add_prerequisite_courses(geometry)

        scheduler = Scheduler('mock_courses.json')
        scheduler.traverse_course_dependencies(physics)

        self.assertEqual(len(scheduler.ordered_course_list), 3)
        self.assertEqual(scheduler.ordered_course_list[2].name, 'Physics')
Example #27
 def __init__(self, config, issue_num_hint_s=None):
    ''' 
    Initializes this panel.
    
    'config' -> the shared global Configuration object
    'issue_num_hint_s' -> issue number to use for figuring out which cover
        art to display when the currently set ref is a SeriesRef.
    '''
    
    # the shared global configuration
    self.__config = config
    
    # the issue number to use when finding cover art for a SeriesRef
    self.__issue_num_hint_s = issue_num_hint_s
    
    # a PictureBox that displays the cover art for the current selected issue
    self.__coverpanel = None
    
    # a linklabel describing the currently displayed cover image
    self.__label = None
    
    # the function that gets called if a link on self.__label gets clicked
    self.__link_callback = None
    
    # the "next" button for seeing a ref's next available cover
    self.__nextbutton = None
    
    # the "prev" button for seeing a ref's previous cover
    self.__prevbutton = None
    
    # the IssueRef or SeriesRef whose cover we are currently displaying
    self.__ref = None
    
    # a mapping of refs to _ButtonModels.  Basically caches the 
    # next/prev button state for each ref.
    self.__button_cache = {}
    
    # a mapping of SeriesRefs to IssueRefs (or SeriesRefs).  this cache lets
    # us avoid querying the database twice for the same SeriesRef.
    self.__series_cache = {}
    
    # a scheduler (thread) for finding cover images...
    self.__finder_scheduler = Scheduler()
    
    # a scheduler (thread) for setting new refs...
    self.__setter_scheduler = Scheduler()
    
    # a tuple containing the user's alternate cover art choice (a url) for 
    # a specific IssueRef i.e. (IssueRef, url). None if no alt choice was made.
    self.__alt_cover_choice = None
    
    Panel.__init__(self)
    self.__build_gui()
Example #28
def main():
	global api
	
	root_app = bottle.Bottle()
	result = config.validate(validator, copy = True)

	if result is False:
		print "Config file validation failed"
		sys.exit(1)

	api = API(app, config, VERSION)
	ConfigAPI(api, config)
	tellstick = TellstickAPI(api, config, VERSION)
	
	scheduler = Scheduler(config, tellstick)
	SchedulerAPI(api, config)
	
	class ConfigWatcher(object):
		def notify(self, observable, key):
			print "writing"
			config.write()

	watcher = ConfigWatcher()
	config.observe(watcher)
	
	if config['installed'] != VERSION:
		api.install()
	
	if config['webroot']:
		root_app.mount(config['webroot'], app)
	else:
		root_app.merge(app)

	session_opts = {
		'session.type': 'cookie',
		'session.validate_key': config['cookieKey'],
		'session.auto': True,
	}

	bottle.run(SessionMiddleware(root_app, session_opts),
		host = config['host'],
		port = config['port'],
		debug = config['debug'],
		reloader = False,
		server = 'cherrypy')

	if scheduler:
		scheduler.stop()
	
	# The goal was to have all changes to the config call the watcher
	# method which writes to file. Unfortunately subsections wouldn't
	# play along, so write to file once we've finished with the service
	config.write()
Example #29
def main():
    floor_num = input("Please enter the number of floors:")
    ele_num = input("Please enter the number of elevators:")
    scheduler = Scheduler(ele_num, floor_num)
    # elevator = Elevator(floor_num)
    try:
        while True:
            scheduler.get_request()
            print scheduler
            scheduler.execute()
    except KeyboardInterrupt:
        print "Exiting elevator simulator...."
Example #30
 def getResourcesStatus(self, session_id):
     graphs_ref = Graph().getGraphs(session_id)
     for graph_ref in graphs_ref:
         # Check where the nffg is instantiated and get the instance of the CA and the endpoint of the node
         node = Node().getNode(Graph().getNodeID(graph_ref.id))
         
         # Get the status of the resources
         scheduler = Scheduler(graph_ref.id, self.user_data)  
         orchestrator = scheduler.getInstance(node)
         status = orchestrator.getStatus(node)
         logging.debug(status)
         return status
Example #31
    async def check_queue(self):
        logger.info('Checking the queue; maybe there is already something to post.')
        nearest_post = await self.pop()

        if nearest_post is None:
            logger.info('Looks like the queue is empty.')
            return self.STATUS_EMPTY

        post_dtime = datetime.fromisoformat(nearest_post['post_time'])

        if self.its_time_to_post(Scheduler.get_current_datetime(), post_dtime):
            scheduled_message = await self._bot.forward_message(
                chat_id=nearest_post['chat_id'],
                from_chat_id=nearest_post['chat_id'],
                message_id=nearest_post['message_id'],
                disable_notification=True)

            # give the message the user's ID rather than the bot's, otherwise
            # the user's storage will not load
            scheduled_message.from_user.id = nearest_post['user_id']

            await self.share_message(scheduled_message)

            return self.STATUS_OK
        else:
            await self.append(post_dtime, nearest_post['chat_id'],
                              nearest_post['message_id'],
                              nearest_post['user_id'], True)

            return self.STATUS_TOO_EARLY
Example #32
    def run(self):
        simulator = Simulator(self.plant)
        evaluator = Evaluator.fromXmlFile(self.configFilename, self.plant)
        optimizer = Optimizer.fromXmlFile(self.configFilename, self.plant,
                                          self.orderList, simulator, evaluator)
        scheduler = Scheduler(self.plant, self.orderList)

        result = scheduler.start()
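        # Parse the scheduler's result into candidate solutions, then let the optimizer refine them and report the best one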
        if result != None:
            solutions = parseSolutions(result, self.plant, self.orderList)
            for s in solutions:
                s.loadStartTimes(self.plant)
            result = optimizer.run(solutions)
            best = bestSolution(result)
            best.unNormalize(self.normValue)
            print best
Example #33
def sampleSchedule():
    GMT = TimeTape('GMT', 0)
    EST = TimeTape('EST', -5)

    column1 = Column('Larry', languages, abr='L')
    column2 = Column('Gerry', languages, abr='G')
    column3 = Column('Alp', languages, abr='A')
    column4 = Column('Balp', languages, abr='B')
    column5 = Column('Calp', languages, abr='C')
    column6 = Column('Dalp', languages, abr='D')
    column7 = Column('Ealp', languages, abr='E')
    '''
    spaghet = Event('3:00','4:00','Spaghetti', 'Eat it', 'Eat with fork',
                    languages)
    column1.addEvent(spaghet)
    '''
    column3.events = [
        Event('3:00', '4:00', 'Search', 'Eat it', 'Eat with fork', languages),
        Event('4:45', '5:00', 'Sleep', 'In the bed', 'For lots of time',
              languages)
    ]

    column2.events = [
        Event('12:30', '12:35', 'Experiment', 'In the lab', 'throw the lab',
              languages)
    ]

    schedule = Scheduler(
        [GMT, EST],
        [column1, column2, column3, column4, column5, column6, column7])

    return schedule
Example #34
	def __init__(self, identifier, memory):
		super(Core, self).__init__()
		log.info("Initializing core: %d", identifier)

		## Instruction memory
		self.memory         = memory
		## Identifier of this core. (integer)
		self.identifier     = identifier
		## See if this core is running.
		self.active         = True
		## Message Queues of the other cores
		self.cores          = None
		## Highest index of the core array
		self.maxIdx         = None

		## Message Queue of this core
		self.inbox          = multiprocessing.Queue()
		## Context creator for this core
		self.contextCreator = ContextCreator(self)
		## Tokenizer for this core
		self.tokenizer      = Tokenizer(self)
		## Token dispatcher for this core
		self.dispatcher     = TokenDispatcher(self)
		## Scheduler for this core
		self.scheduler      = Scheduler(self)
		## Context matcher for this core
		self.matcher        = ContextMatcher(self)
Example #35
    def execute(self, environ, start_response):
        # Match the URL route and return (handler, kwargs).
        handler, kwargs = Router.instance.match(environ)

        if handler is None:
            return not_found(start_response)

        # Build the call arguments dynamically from the handler's parameter list.
        # Unneeded intermediate objects are skipped to improve performance and reduce GC pressure.
        handler_args = handler.func_code.co_varnames[:handler.func_code.
                                                     co_argcount]

        if "environ" in handler_args:
            kwargs["environ"] = environ
        if "start_response" in handler_args:
            kwargs["start_response"] = start_response
        if "request" in handler_args:
            kwargs["request"] = Request(environ)
        if "response" in handler_args:
            kwargs["response"] = Response()

        # Scheduler (with exception protection)
        with Scheduler(self._engine, handler) as execute:
            ret = execute(**kwargs)

        # Handle the result.
        if "response" in handler_args:
            return kwargs["response"](environ, start_response)
        elif not "start_response" in handler_args:
            return Response(ret)(environ, start_response)
        elif hasattr(ret, "__iter__"):
            return ret

        return (ret, )
Example #36
    def __init__(self, scene, view):
        # Record the scene and view
        self.scene = scene
        self.view = view
        self.scheduler = Scheduler()
        self.animation = Animation(self.scene, self.scheduler)
        self.mouseX = 0  # Current mouse X coordinate in the window
        self.leftButtonState = GLUT_UP  # Current left-button state

        glutDisplayFunc(self.view.display)
        glutReshapeFunc(self.view.reshape)
        glutMotionFunc(self.mouseMotionFunc)
        glutMouseFunc(self.mouseButtonFunc)
        glutKeyboardFunc(self.keyFunc)
        glutSpecialFunc(self.specialFunc)
        glutIdleFunc(self.idleFunc)
Example #37
def main(argv=None):
    if argv is None:
        argv = sys.argv

    config = ConfigParser.ConfigParser()
    config.read('config.ini')
    try:
        server_ip = config.get('system', 'host')
        server_port = config.get('system', 'port')
        instance_type = config.get('system', 'instance_type')
        instance_num = config.get('system', 'instance_num')
        jobs_log_dir = config.get('system', 'jobs_log_dir')
        bot_file = config.get('system', 'bot_file')
    except ConfigParser.NoOptionError:
        raise Exception('[-] missing config item in the "system" section')

    # Bag of tasks is in the form of {job_id: Job}
    bot = {}

    lines = open(bot_file).readlines()
    for i in range(0, len(lines)):
        bot[i] = Job(lines[i].strip(), i)

    scheduler = Scheduler(bot, instance_num, instance_type, server_ip,
                          server_port, jobs_log_dir)
Example #38
    def __init__( self, nets=None, parent=None, width=3 ):
        Frame.__init__( self, parent )

        #data structure
        self.netManager = NetManager()
        self.dataManager = DataManager()
        self.scheduler = Scheduler()
        self.hosts = []
        # UI
        self.top = self.winfo_toplevel()
        self.top.title( 'Mininet Node Scheduling Subsystem' )
        self.createMenuBar()
        self.menubar = self.createFramBar()
        self.createCfram()
        cleanUpScreens()
        self.pack( expand=True, fill='both' )
Example #39
def main():
    menu = Menu()
    f = None
    experiments = import_module("experiments").__all__
    for exp_name in experiments:
        if exp_name.startswith("*"):
            f = exp_name
            break
    if not f:
        f = menu.experiments_prompt()
    curr_exp = Current_experiment(import_module("experiments." + f))

    arduino = arduino_serial.Arduino(serial_name=curr_exp.serial_name)
    arduino.start()

    scheduler = Scheduler(curr_exp)

    controller = Controller(curr_exp, scheduler.schedule_a,
                            scheduler.schedule_b, scheduler.schedule_mat)
    controller.send_scheduled_commands()

    tagger = Data_handler(curr_exp, scheduler.schedule_a, scheduler.schedule_b)
    previous_time = datetime.now()

    running = True
    while running:
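        # Re-send the scheduled commands every 5 seconds while continuously grabbing and tagging Arduino data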
        try:
            if (datetime.now() - previous_time).total_seconds() > 5:
                controller.send_scheduled_commands()
                previous_time = datetime.now()

            tagger.ard_grab_and_tag_data(arduino)
        except KeyboardInterrupt:
            running = False
Example #40
def run():
    global rousers, scheduler
    args = docopt(__doc__)

    interfaces = []
    for key, email_info in emails.items():
        interface = EmailInterface(**email_info)
        interface.startup()
        interfaces.append(interface)

    rousers = []
    for rouser_name, rouser_config in rouser_configs.items():
        rouser_config['name'] = rouser_name
        rouser_config['alarms'] = alarms.get(rouser_name, {})

        rouser = Rouser(**rouser_config)
        rousers.append(rouser)

        rouser_thread = Thread(target=rouser.main_loop)
        rouser_thread.start()

    scheduler = Scheduler('schedule_rules.json',
                          rousers=rousers,
                          interfaces=interfaces)
    scheduler_thread = Thread(target=scheduler.main_loop)
    scheduler_thread.start()

    signal.signal(signal.SIGINT, shutdown)
    signal.signal(signal.SIGTERM, shutdown)

    if args.get('--shell'):
        IPython.embed()
        shutdown()
Example #41
    def bootstrap_scheduler(self):
        bootstrapped = False
        try:
            self.save("plugin_modules_library", self._plugin_modules_library)
            Scheduler.clear_locks(self)
            self.scheduler = Scheduler()

            for plugin_info, fn, function_name in self.periodic_tasks:
                meta = fn.will_fn_metadata
                self.add_periodic_task(plugin_info["full_module_name"], plugin_info["name"], function_name, meta["sched_args"], meta["sched_kwargs"], meta["function_name"],)
            for plugin_info, fn, function_name in self.random_tasks:
                meta = fn.will_fn_metadata
                self.add_random_tasks(plugin_info["full_module_name"], plugin_info["name"], function_name, meta["start_hour"], meta["end_hour"], meta["day_of_week"], meta["num_times_per_day"])
            bootstrapped = True
        except Exception, e:
            self.startup_error("Error bootstrapping scheduler", e)
Example #42
def main(loop):
    config = TomlConfig("config.toml", "config.template.toml")
    if config.is_new:
        logger.warning("No config detected, extracting from the template...")
        return
    tickets_manager = TicketsManager(config)
    tickets_manager.load()
    if config.auto_clean:
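        # With auto_clean enabled, schedule tickets.clean to run repeatedly every config.task_delay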
        scheduler = Scheduler(loop)
        scheduler.schedule_repeating_task(
            config.task_delay,
            tickets.clean,
            tickets_manager=tickets_manager,
            config=config,
        )
    WebAPI(config, tickets_manager).start()
Example #43
    def execute_schedule(self,
                         schedule,
                         track_full_pass=False,
                         compute_ant_points=True,
                         N=None):
        if self._disabled:  # Note, the scheduler itself is also enabled/disabled. But also want to prevent the creation of new schedules.
            raise Exception(
                "RPCSchedulerServer: Currently disabled. Call enable() first")

        schedule = Schedule.from_json(schedule)
        self.scheduler = Scheduler(self.gs,
                                   schedule,
                                   track_full_pass=track_full_pass,
                                   compute_ant_points=compute_ant_points,
                                   enforce_signoffs=self._enforce_signoffs)
        self.scheduler.execute(N=N)
Example #44
def main():
    # Start the scheduler
    Scheduler.runScheduler()
    # Start the fetcher
    Fetcher.runFetcher(processNum=10)
    # Start the processor
    Processor.runProcessor()
    # print(u"cpu count: " + str(multiprocessing.cpu_count()))
    # for fetcher in multiprocessing.active_children():
    #     fetcher.join()
    #     print("child   p.name:" + fetcher.name + "\tp.id" + str(fetcher.pid))

    for p in SafeQueue.processList:
        p.start()
    for p in SafeQueue.processList:
        p.join()
Example #45
class scheduler_test1(unittest.TestCase):
    def setUp(self):
        self.s = Scheduler(app=None)
        #self.s.parseJSON("schedule.json")

    def test1_load(self):
        self.s.parseJSON("schedule.json")
        self.s.new_day("mo")
        r = self.s.rules.find("1234-1234")
        self.assertTrue(r.execute())

    #def test2_execute_rule(self):
    #    r = self.s.rules.find("1234-1234")
    #    self.assertTrue(r.execute())

    def tearDown(self):
        pass
Example #46
    def __init__(self, agent_id, host, port):
        self._server_url = "http://hungrycats.herokuapp.com"
        self._heartbeat_interval = 120
        self._ip = self._get_ip()

        print('ip: ' + self._ip)

        self._id = agent_id  #TODO persist this, maybe just in a text file

        self._host = host
        self._port = port

        self._app = Bottle()
        self._scheduler = Scheduler()
        self._setup_routes()

        self._create_checkin(self._server_url)
Example #47
def main():
    # Entry point: run the scheduler
    try:
        # Change the default encoding of standard output
        sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf8')
        Scheduler().run()
    except Exception as result:
        print("The <main> program got an exception:\"{}\"".format(result))
Example #48
class WeatherFacade:
    def __init__(self, config):
        self.__storage = Storage()
        self.__client = HttpClient(config.proxy, 10)
        self.__weather_service = YahooWeatherApi(self.__client)
        self.__update_weather()
        self.__weather_scheduler = Scheduler(self.__update_weather)
        self.__weather_scheduler.start(3600, 00)

    def get_weather_forecast(self):
        weather = self.__storage.get('weather')
        return weather

    def __update_weather(self):
        weather = self.__weather_service.get_forecast(834463).to_json()
        logging.info(u'Updated weather')
        self.__storage.add('weather', weather)
Example #49
class RatesFacade:
    def __init__(self, config):
        self.__storage = Storage()
        self.__client = HttpClient(config.proxy, 10)
        self.__rates_service = NbrbRates(self.__client)
        self.__update_rates()
        self.__rates_scheduler = Scheduler(self.__update_rates)
        self.__rates_scheduler.start(3600 * 24, 00, 00)

    def get_rate(self):
        cur_rate = self.__storage.get('cur_rate')
        res = json.loads(cur_rate)
        return res['result']

    def get_rates(self, currency, tenor, start=None, end=None):
        if tenor is not None:
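            # A tenor such as '3m' or '2w' is split into a count and a month/week unit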
            match = re.search('(\\d+)([mMwW])', tenor)
            if match is not None:
                tenor_i = int(match.group(1))
                m_w = match.group(2)
                key = 'rates-' + str(
                    datetime.date.today()) + str(tenor_i) + m_w
                res = self.__storage.get(key)
                if res is None:
                    end = datetime.date.today()
                    start = None
                    if m_w in ['m', 'M']:
                        start = utils.month_delta(end, tenor_i * -1)
                    elif m_w in ['w', 'W']:
                        start = end - datetime.timedelta(days=7 * tenor_i)
                    rates = self.__rates_service.get_rates_dynamics(
                        currency, start, end).to_json()
                    self.__storage.add(key, rates)
                    return rates
                else:
                    return res
        else:
            s_start = utils.string_to_date(start, "%Y-%m-%d")
            s_end = utils.string_to_date(end, "%Y-%m-%d")
            return self.__rates_service.get_rates_dynamics(
                currency, s_start, s_end).to_json()

    def __update_rates(self):
        cur_rate = self.__rates_service.get_today_rate('USD').to_json()
        logging.info(u'Updated rates')
        self.__storage.add('cur_rate', cur_rate)
Example #50
    def __init__(self):
        #self.spider = Spider()
        #self.spiders = spiders  # spiders is a dict containing the spider objects
        self.spiders = self._auto_import_module_cls(SPIDERS, True)

        self.scheduler = Scheduler()
        self.downloader = Downloader()

        #self.pipelines = pipelines
        self.pipelines = self._auto_import_module_cls(PIPELINES)

        #self.downloader_mids = downloader_mids
        self.downloader_mids = self._auto_import_module_cls(
            DOWNLOADER_MIDDLEWARES)

        #self.spider_mids = spider_mids
        self.spider_mids = self._auto_import_module_cls(SPIDER_MIDDLEWARES)
Example #51
    def testSchedule(self):
        mock = Mock()
        mock.time.return_value = 100
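        # The mocked clock reports 100, so scheduling for 300 should sleep for the remaining 200 seconds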

        scheduler = Scheduler(mock.time, mock.sleep)

        mock.return_value = "foo"

        result = scheduler.schedule(300, mock)

        self.assertEquals(
            result, "foo",
            "schedule did not return result of calling function")

        self.assertTrue(mock.time.called)
        self.assertTrue(mock.called)
        mock.sleep.assert_called_with(200)
Example #52
def main():
    """Run the radio scheduler interface."""
    net = RadioNetwork()
    net.start()

    sched = Scheduler()
    sched.start()

    sleep(1.5)
    sched.register_client(1000, 1)
    job_id = sched.insert_job(1000, 5.0)
    sleep(0.75)
    sched.register_client(1001, 1)
    job_id = sched.insert_job(1001, 8.0)

    while True:
        pass
Example #53
 def test_cycle_detection(self):
     """Test public methods on a task set, which forms a cycle."""
     a_scheduler = Scheduler()
     # Build the dependency graph, which is a cycle:
     #   A -> B -> C -> A
     a_scheduler.add_a_prerequisite(task='A', prerequisite='B')
     a_scheduler.add_a_prerequisite(task='B', prerequisite='C')
     a_scheduler.add_a_prerequisite(task='C', prerequisite='A')
     with self.assertRaises(AssertionError):
         a_scheduler.schedule()
Example #54
    def __init__(self,
                 cluster,
                 preference_value,
                 json_dir,
                 user_number,
                 flag="initial"):
        self.flag = flag
        self.cluster = cluster
        self.preference_value = preference_value
        self.json_dir = json_dir
        self.cluster = cluster
        self.scheduler = Scheduler(cluster)
        self.block_list = list()
        self.job_list = list()  # list of lists. A job list for each user.
        self.event_queue = Q.PriorityQueue()
        self.timestamp = 0
        self.user_number = user_number
        self.job_durations = {}
        self.stage_durations = {}
        self.job_execution_profile = {
        }  # record the execution information of jobs
        self.time_out = 3  # added for delay scheduling
        self.threshold = 0.8
        self.threshold_step = 0.2

        # add by xiandong
        for user_index in range(0, user_number):
            self.job_execution_profile[user_index] = {}

        for user_index in range(0, user_number):
            """currently, we load the 'job info (job, stage, runtime)' for each user separately.
            which is equivalent to each user having the 'exact same' job submission now!!! by xiandong
            """
            stage_profile_path = "Workloads/stage_profile.json"
            self.stage_profile = json.load(open(stage_profile_path, 'r'),
                                           object_pairs_hook=OrderedDict)

            runtime_path = "Workloads/runtime.json"
            self.runtime_profile = json.load(open(runtime_path, 'r'),
                                             object_pairs_hook=OrderedDict)

            job_path = "Workloads/job.json"
            self.job_profile = json.load(open(job_path, 'r'),
                                         object_pairs_hook=OrderedDict)
            self.generate_job_profile(user_index)
Example #55
def test_start():
    start_time = utils.getFutureTime(datetime.datetime.now(), 1)
    task_start = f"""
    {{
        "time": "{start_time}",
        "verb": "start",
        "program_name": "./dummy",
        "pidfile_name": "test_1.pid"
    }}
    """
    print(task_start)

    sched1 = Scheduler(task_start, 30)
    t = threading.Thread(target=lambda sched:
                         (sched.waitUntilComplete(), sched.stopAll()),
                         args=[sched1]).start()
    sched1.run()
    assert sched1._Scheduler__isProcessRunningByName("dummy") == True
Example #56
 def test_2_scheduler_vars(self):
     """Test variables of the scheduler class."""
     obj = Scheduler()
     self.assertIsNotNone(obj._name)
     self.assertIsNotNone(obj._session)
     self.assertIsNotNone(obj._qs)
     self.assertIsNotNone(obj.LOGGER)
     self.assertIsNotNone(obj.cf)
     self.assertIsNotNone(obj.sns)
Example #57
def instantiate(path2graph):

    with open(path2graph) as fh:
        graphDesc = json.load(fh)

    with open(path2net) as fh:
        networkDesc = json.load(fh)

    return Scheduler(graphDesc, networkDesc)
Example #58
def check_24_hours():
    s = Scheduler(reload_freq=timedelta(seconds=12))
    asyncio.ensure_future(s.run())

    # EXAMPLE DOWNLOAD START
    rule1 = rrule(freq=SECONDLY, interval=5, dtstart=datetime.now(), count=60)
    rule2 = rrule(freq=SECONDLY, interval=9, dtstart=datetime.now(), count=60)
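    # Two recurrence rules: one event every 5 seconds and one every 9 seconds, 60 occurrences each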

    e1 = Event()
    e1.recurrences = [str(rule1)]
    e1.actions = [{"name": "action for event 1"}]

    e2 = Event()
    e2.recurrences = [str(rule2)]
    e2.actions = [{"name": "action for event 2"}]
    # EXAMPLE DOWNLOAD END

    s.push_events([e1, e2])
Example #59
    def __init__(self, config_dir, app_data_dir):
        self._lock = threading.RLock()
        self._active = False

        # load config # todo: move to launch?
        self._config_dir = config_dir
        config_path = os.path.join(self.config_dir, 'core.config')
        self.config = load_config(config_path, TPersonalAssistantConfig)
        self._app_data_dir = app_data_dir

        # configurable # todo: move to launch?
        plugins = [p.Name.lower() for p in self.config.Plugins if p.IsActive]
        self._plugins = {p: PluginMap[p](self) for p in plugins}

        self.scheduler = Scheduler()

        self._callback_ringbell = threading.Condition(
        )  # todo: consider moving ringbell into scheduler
Example #60
 def __init__(self): 
    ''' Defines member variables for new instances of this class. '''
    # the ref of whatever image should currently be displayed, or None
    self.__current_image_ref = None
    
    # a simple "last-in-and-ignore-everything-else" scheduler
    self.__scheduler = Scheduler()
    
    # our cache of loaded image objects. {(imageref,issuehint)->Image}
    self.__image_cache = {}
    
    # the image that gets displayed if we have nothing else to display
    self.__unknown_image = Resources.createComicVineLogo()
    
    # the image that gets displayed while we are loading another image
    self.__loading_image = self.__copy_transparent(self.__unknown_image)
    
    self._initialize()