Example #1
    def daemonize(self):
        if DEBUG:
            print "TASK %s IS NOW BEING DAEMONIZED. LOG FOUND AT %s" % (
                self.task_path, self.log_file)

        # Detach into the background, then persist the new state.
        startDaemon(self.log_file, self.pid_file)
        self.daemonized = True
        self.save()
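# startDaemon(log_file, pid_file) is called throughout these examples but
# is not shown on this page. A minimal sketch of such a helper, assuming
# the classic double-fork recipe (the body below is an assumption, not
# the project's actual implementation):
import os, sys

def startDaemon(log_file, pid_file):
	if os.fork() > 0:
		os._exit(0)  # detach from the parent process
	os.setsid()      # become leader of a new session
	if os.fork() > 0:
		os._exit(0)  # drop session leadership so no terminal can be reacquired
	# Redirect stdout/stderr to the log file.
	log = open(log_file, 'a+')
	os.dup2(log.fileno(), sys.stdout.fileno())
	os.dup2(log.fileno(), sys.stderr.fileno())
	# Record the daemon's PID so it can be stopped later.
	with open(pid_file, 'w') as f:
		f.write(str(os.getpid()))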
    def startAnnexObserver(self):
        print "STARTING OBSERVER on %s" % ANNEX_DIR

        startDaemon(self.watcher_log_file, self.watcher_pid_file)
        # The object itself is registered as the watchdog event handler.
        self.annex_observer.schedule(self, ANNEX_DIR, recursive=True)
        self.annex_observer.start()

        # Keep the daemonized process alive; the observer has its own thread.
        while True:
            sleep(1)
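# schedule(self, ...) registers the object itself as the event handler,
# so this class presumably subclasses watchdog's FileSystemEventHandler.
# A self-contained sketch of the same pattern with the watchdog package
# (handler name and path are placeholders):
from time import sleep
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler

class AnnexHandler(FileSystemEventHandler):
	def on_created(self, event):
		print("new file in annex: %s" % event.src_path)

observer = Observer()
observer.schedule(AnnexHandler(), "/path/to/annex", recursive=True)
observer.start()
try:
	while True:
		sleep(1)  # the observer runs on its own thread
except KeyboardInterrupt:
	observer.stop()
observer.join()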
	def create(self):
		ClientXMPP.__init__(self, self.cred['jid'], self.cred['pwd'])

		for plugin in ['xep_0030', 'xep_0199', 'xep_0060']:
			self.register_plugin(plugin)

		self.add_event_handler("session_start", self.__on_session_start)
		self.add_event_handler("message", self.__on_message)

		startDaemon(self.log_file, self.pid_file)

		print "attempting connection..."
		connected = self.connect()
		print "connected? %s" % connected

		if connected:
			self.process(block=True)
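# register_plugin / add_event_handler / process(block=True) match
# SleekXMPP's ClientXMPP API. The two handlers are not shown on this
# page; the conventional SleekXMPP bodies would look roughly like this
# (assumed, not the project's actual handlers):
def __on_session_start(self, event):
	self.send_presence()  # announce availability
	self.get_roster()     # fetch contacts before handling messages

def __on_message(self, msg):
	if msg['type'] in ('chat', 'normal'):
		msg.reply("received: %(body)s" % msg).send()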
	def startElasticsearch(self, catch=True):
		cmd = [ELS_ROOT, '-Des.max-open-files=true',
			'-Des.config=%s' % os.path.join(CONF_ROOT, "els.settings.yaml")]

		print "elasticsearch running as a daemon."
		print cmd

		# Stream stdout until Elasticsearch logs that it has started.
		p = Popen(cmd, stdout=PIPE, close_fds=True)
		data = p.stdout.readline()

		while data:
			print data
			if re.match(r'.*started$', data):
				print "STARTED: %s" % data
				with open(self.els_status_file, 'wb+') as f:
					f.write("True")
				sleep(1)

				if self.first_use:
					self.initElasticsearch()
				break

			data = p.stdout.readline()

		p.stdout.close()

		startDaemon(self.els_log_file, self.els_pid_file)
		self.startCronJobs()

		# Queue up any initial tasks shipped with the install.
		try:
			with open(os.path.join(CONF_ROOT, "initial_tasks.json"), 'rb') as IT:
				from lib.Worker.Models.uv_task import UnveillanceTask
				for i_task in json.loads(IT.read()):
					task = UnveillanceTask(inflate=i_task)

					try:
						task.run()
					except Exception as e:
						if DEBUG:
							print "TASK ERROR: %s" % e

		except Exception as e:
			if DEBUG:
				print "No initial tasks...\n%s" % e

		if catch:
			while True:
				sleep(1)
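# The readline loop above polls Elasticsearch's stdout for a line ending
# in "started" before touching the status file. The same readiness wait,
# written with iter() to avoid the duplicated readline call (standalone
# sketch; cmd and status_file come from the caller):
import re
from subprocess import Popen, PIPE

def wait_until_started(cmd, status_file):
	p = Popen(cmd, stdout=PIPE, close_fds=True)
	for line in iter(p.stdout.readline, ''):  # sentinel is b'' on Python 3
		if re.match(r'.*started$', line):
			with open(status_file, 'w') as f:
				f.write("True")
			break
	p.stdout.close()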
	def startRESTAPI(self):
		if DEBUG:
			print "Starting REST API on port %d" % API_PORT

		# Catch-all route: any path that does not begin with a reserved route.
		rr_group = r"/(?:(?!%s))([a-zA-Z0-9_/]*/$)?" % "|".join(self.reserved_routes)
		self.routes.append((re.compile(rr_group).pattern, self.RouteHandler))

		# Cached session for outbound HTTP requests.
		s = requests.session()
		self.uv_cache = CacheControl(s)

		tornado.web.Application.__init__(self, self.routes,
			cookie_secret=UV_COOKIE_SECRET, xsrf_cookies=True)

		startDaemon(self.api_log_file, self.api_pid_file)

		server = tornado.httpserver.HTTPServer(self)
		server.bind(API_PORT)
		server.start(NUM_PROCESSES)

		tornado.ioloop.IOLoop.instance().start()
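The catch-all regex above routes every path that does not begin with a reserved route to self.RouteHandler, with the remainder of the path captured as an optional group. RouteHandler itself is not shown on this page; a minimal Tornado handler with a matching signature might look like this (the body is an assumption):

import tornado.web

class RouteHandler(tornado.web.RequestHandler):
	def get(self, route=None):
		# `route` is the group captured by the catch-all pattern;
		# it is None when the request is for "/" itself.
		self.write({'requested_route': route})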
Example #7
	def startServer(self):
		from fabric.api import local, settings

		# Remember the caller's working directory before chdir below.
		this_dir = os.getcwd()
		cmd = ("java -mx1000m -cp stanford-ner.jar edu.stanford.nlp.ie.NERServer "
			"-loadClassifier classifiers/english.all.3class.distsim.crf.ser.gz "
			"-port %d -outputFormat inlineXML") % getConfig('nlp_server.port')

		if DEBUG:
			print "STARTING NLP SERVER:"
			print cmd

		os.chdir(getConfig('nlp_ner_base'))
		with settings(warn_only=True):
			# Free the port if a previous server instance is still holding it.
			local("kill $(lsof -t -i:%d)" % getConfig('nlp_server.port'))
			start_cmd = local(cmd)

		print start_cmd

		startDaemon(self.log_file, self.pid_file)
		while True:
			sleep(1)
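# Once NERServer is up it speaks a plain line protocol over TCP: write a
# line of text, read back the inlineXML-tagged result. A minimal client
# sketch (host and default port are assumptions; the real port is the
# -port flag above):
import socket

def tag_text(text, host="localhost", port=9191):
	s = socket.create_connection((host, port))
	try:
		s.sendall(text.encode('utf-8') + b'\n')
		data = b''
		while not data.endswith(b'\n'):
			chunk = s.recv(4096)
			if not chunk:
				break
			data += chunk
		return data.decode('utf-8').strip()
	finally:
		s.close()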
	def startRESTAPI(self):
		if DEBUG:
			print "Starting REST API on port %d" % API_PORT

		rr_group = r"/(?:(?!%s))([a-zA-Z0-9_/]*/$)?" % "|".join(self.reserved_routes)
		self.routes.append((re.compile(rr_group).pattern, self.RouteHandler))
		tornado.web.Application.__init__(self, self.routes)

		server = tornado.httpserver.HTTPServer(self)
		try:
			server.bind(API_PORT)
		except Exception as e:
			if DEBUG:
				print "** FAILED TO START UP ON PORT %d\n%s" % (API_PORT, e)

			# Port is taken: kill whatever holds it, then bind again.
			from fabric.api import settings, local

			with settings(warn_only=True):
				local("kill $(lsof -t -i:%d)" % API_PORT)

			server.bind(API_PORT)

		startDaemon(self.api_log_file, self.api_pid_file)
		server.start(NUM_PROCESSES)
		tornado.ioloop.IOLoop.instance().start()
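The except branch above frees a busy port by shelling out to lsof and kill before binding a second time. If killing the stale process is not an option, the same condition can be detected in-process by probing the port with a throwaway socket first; a sketch:

import socket

def port_is_free(port):
	probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
	try:
		probe.bind(('', port))
		return True
	except socket.error:
		return False
	finally:
		probe.close()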
Example #9
    def startRESTAPI(self):
        if DEBUG: print "Starting REST API on port %d" % API_PORT

        rr_group = r"/(?:(?!%s))([a-zA-Z0-9_/]*/$)?" % "|".join(
            self.reserved_routes)
        self.routes.append((re.compile(rr_group).pattern, self.RouteHandler))

        s = requests.session()
        self.uv_cache = CacheControl(s)

        tornado.web.Application.__init__(
            self, self.routes, **{
                'cookie_secret': UV_COOKIE_SECRET,
                'xsrf_cookies': True
            })

        startDaemon(self.api_log_file, self.api_pid_file)

        server = tornado.httpserver.HTTPServer(self)
        server.bind(API_PORT)
        server.start(NUM_PROCESSES)

        tornado.ioloop.IOLoop.instance().start()
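CacheControl(s) wraps the requests session in an HTTP-caching adapter and returns it; the wrapped session is then used exactly like a plain requests session. A short usage sketch (the URL is a placeholder):

import requests
from cachecontrol import CacheControl

cached = CacheControl(requests.session())
# Repeat GETs honor the response's Cache-Control headers and may be
# answered from the local cache instead of the network.
r = cached.get("http://example.com/resource")
print(r.status_code)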
Example #10
	def startWorker(self):
		printAsLog("STARTING CELERY WORKER!")

		from lib.Worker.vars import TASKS_ROOT, buildCeleryTaskList, ALL_WORKERS
		self.celery_tasks = buildCeleryTaskList()

		# Consume from both the shared queue and this instance's own queue.
		sys.argv.extend(['worker', '-l', 'info', '-Q', ",".join([ALL_WORKERS, UUID])])

		self.celery_app = Celery(TASKS_ROOT,
			broker='amqp://guest@localhost//', include=self.celery_tasks)

		startDaemon(self.worker_log_file, self.worker_pid_file)
		logging.getLogger().setLevel(logging.DEBUG)

		# Task status updates are pushed to clients over a SockJS channel.
		self.task_channel = sockjs.tornado.SockJSRouter(TaskChannel, '/annex_channel')
		tc = tornado.web.Application(
			[(r'/info', TaskChannel.InfoHandler)] + self.task_channel.urls)
		tc.listen(TASK_CHANNEL_PORT, no_keep_alive=True)

		if DEBUG:
			print "TaskChannel started on port %d" % TASK_CHANNEL_PORT

		tornado.ioloop.IOLoop.instance().start()
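buildCeleryTaskList() evidently returns the module paths handed to Celery's include= argument, and the -Q flag subscribes the worker to both the shared ALL_WORKERS queue and this instance's UUID queue. A minimal sketch of what one included task module might look like (all names here are assumptions):

from celery import Celery

app = Celery('tasks', broker='amqp://guest@localhost//')

@app.task
def run_task(task_path):
	# Worker-side entry point; the real modules presumably inflate an
	# UnveillanceTask from the given path and run it.
	print("running %s" % task_path)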