Example #1
    def __init__(self, func, domain, trials):
        """
            domain: a list or function from which you can define the input
            function: a function that will be repeatedly run on a random value in domain
            trails: number of trials that can be run
        """
        self.func = func
        self.domain = domain
        self.trials = trials

        self.data = []

        if type(self.domain) != list or type(self.trials) != int: 
            raise TypeError

        self.q = Queue(maxsize=0)
        # generally recommended to have a max of 4 threads per core (according to my boss)
        num_threads = psutil.cpu_count() * 4

        for i in xrange(num_threads):
            worker = Thread(target = self.do_trial)
            worker.setDaemon(True)
            worker.start()
            sys.stdout.flush()
            sys.stdout.write("\rStarted %d threads... " % (i + 1))
        print "all threads started!"
Example #2
 def __init__(self, req, proxy, logger, task, exit_check=None, ignored_errors=[]):
     Thread.__init__(self, name = "monitor%s" % task.guid)
     Thread.setDaemon(self, True)
     # the count of votes per error code
     self.vote_result = {}
     # the error code to be ignored
     self.vote_cleared = set().union(ignored_errors)
     self.thread_last_seen = {}
     self.dctlock = RLock()
     self.votelock = RLock()
     self.thread_ref = {}
     self.thread_zombie = set()
     # HttpReq instance
     self.req = req
     # proxy.Pool instance
     self.proxy = proxy
     self.logger = logger
     self.task = task
     self._exit = exit_check if exit_check else lambda x: False
     self._cleaning_up = False
     if os.name == "nt":
         self.set_title = lambda s:os.system("TITLE %s" % (
             s if PY3K else s.encode(CODEPAGE, 'replace')))
     elif os.name == 'posix':
         import sys
         self.set_title = lambda s:sys.stdout.write("\033]2;%s\007" % (
             s if PY3K else s.encode(CODEPAGE, 'replace')))
Example #3
 def listen_threaded(cls, port, callback):
     """ Starts a new thread listening to the given port """
     thread = Thread(target=cls.__listen_forever, args=(port, callback),
                     name="NC-ListenForever")
     thread.setDaemon(True)
     thread.start()
     return thread
Example #4
def main():

    # Configure the example
    opt_parser = optparse.OptionParser()
    opt_parser.add_option("-s", "--source", dest="source", default="stdin", help="Where to read the events from. \
        Can be 'local' (simulates an embedded driver), \
        'http' (listening to XML events submitted by HTTP POST) or \
        'stdin' (reading events on the standard output, separated by an empty line). Defaults to 'stdin'")
    opt_parser.add_option("-p", "--port", dest="port", default="8080",
        help="If source is HTTP, listen on this port for HTTP POST events. Defaults to 8080")
    (options, args) = opt_parser.parse_args()

    # Start the logger thread
    logger_thread = Thread(target=logger_loop)
    logger_thread.setDaemon(True)
    logger_thread.start()

    # Start the chosen listener
    if options.source == 'http':
        listen_http(int(options.port))

    if options.source == 'stdin':
        listen_stdin()

    if options.source == 'local':
        listen_local()

    # Wait
    logger_thread.join()
Example #5
    def __init__(self, tname, task_queue, flt, suc, fail, headers={}, proxy=None, proxy_policy=None,
            retry=3, timeout=10, logger=None, keep_alive=None, stream_mode=False):
        """
        Construct a new 'HttpWorker' object

        :param tname: The name of this http worker
        :param task_queue: The task Queue instance
        :param flt: the filter function
        :param suc: the function to call when succeeded
        :param fail: the function to call when failed
        :param headers: custom HTTP headers
        :param proxy: proxy dict
        :param proxy_policy: a function to determine whether proxy should be used
        :param retry: retry count
        :param timeout: timeout in seconds
        :param logger: the Logger instance
        :param keep_alive: the callback to send keep alive
        :param stream_mode: set the request to use stream mode, keep_alive will be called every iteration
        :return: returns nothing
        """
        HttpReq.__init__(self, headers, proxy, proxy_policy, retry, timeout, logger, tname = tname)
        Thread.__init__(self, name = tname)
        Thread.setDaemon(self, True)
        self.task_queue = task_queue
        self.logger = logger
        self._keepalive = keep_alive
        self._exit = lambda x: False
        self.flt = flt
        self.f_suc = suc
        self.f_fail = fail
        self.stream_mode = stream_mode
        # if we don't check in within zombie_threshold seconds, the monitor will regard us as a zombie
        self.zombie_threshold = timeout * (retry + 1) 
        self.run_once = False
Example #6
def Watch( Module, Callback, * Args, ** Kwds ):
  if Module.__file__ in WatchList:
    return

  T = Thread(target = WatchThread, args=(Module,Callback,Args,Kwds))
  T.setDaemon(True)
  T.start()
Example #7
 def send_async(cls, port, message):
     """ Starts a new thread which sends a given message to a port """
     thread = Thread(target=cls.__send_message_async, args=(port, message),
                     name="NC-SendAsync")
     thread.setDaemon(True)
     thread.start()
     return thread
Example #8
    def _move(self, pos, velocity, acceleration, deceleration, relative, block, units):
        if velocity is None:
            velocity = self.initial_velocity

        if acceleration is None:
            acceleration = self.acceleration
        if deceleration is None:
            deceleration = self.deceleration

        pos = self._get_steps(pos, units)
        self.debug('converted steps={}'.format(pos))

        def func():
            self.set_initial_velocity(velocity)
            self.set_acceleration(acceleration)
            self.set_deceleration(deceleration)

            cmd = 'MR' if relative else 'MA'
            self.tell('{} {}'.format(cmd, pos))
            self._block()

        if block:
            func()
            return True
        else:
            t = Thread(target=func)
            t.setDaemon(True)
            t.start()
            return True
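The block flag above switches between running the motion synchronously and handing it to a daemon thread. A stripped-down sketch of that pattern, with a hypothetical do_work() standing in for the motion commands:

from threading import Thread

def do_work():
    # stand-in for set_initial_velocity / tell / _block above
    print("moving...")

def move(block=True):
    if block:
        do_work()          # caller waits for the move to finish
        return True
    t = Thread(target=do_work, daemon=True)
    t.start()              # caller returns immediately; the move runs in the background
    return True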
Example #9
class TestStatsdLoggingDelegation(unittest.TestCase):
    def setUp(self):
        self.port = 9177
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.bind(('localhost', self.port))
        self.queue = Queue()
        self.reader_thread = Thread(target=self.statsd_reader)
        self.reader_thread.setDaemon(1)
        self.reader_thread.start()

    def tearDown(self):
        # The "no-op when disabled" test doesn't set up a real logger, so
        # create one here so we can tell the reader thread to stop.
        if not getattr(self, 'logger', None):
            self.logger = utils.get_logger({
                'log_statsd_host': 'localhost',
                'log_statsd_port': str(self.port),
            }, 'some-name')
        self.logger.increment('STOP')
        self.reader_thread.join(timeout=4)
        self.sock.close()
        del self.logger
        time.sleep(0.15)  # avoid occasional "Address already in use"?

    def statsd_reader(self):
        while True:
            try:
                payload = self.sock.recv(4096)
                if payload and 'STOP' in payload:
                    return 42
                self.queue.put(payload)
            except Exception, e:
                sys.stderr.write('statsd_reader thread: %r' % (e,))
                break
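A rough sketch of the same test-fixture idea on Python 3: a daemon reader thread drains a UDP socket into a queue until a sentinel arrives, so teardown can stop it by sending 'STOP'. The port choice and the sentinel are arbitrary here, not the statsd test's actual values.

import socket
from queue import Queue
from threading import Thread

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(('localhost', 0))          # let the OS pick a free port
port = sock.getsockname()[1]
received = Queue()

def reader():
    while True:
        payload = sock.recv(4096)
        if b'STOP' in payload:       # sentinel tells the thread to exit
            return
        received.put(payload)

Thread(target=reader, daemon=True).start()

# exercise it, then shut the reader down
sender = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sender.sendto(b'metric:1|c', ('localhost', port))
print(received.get(timeout=2))                 # -> b'metric:1|c'
sender.sendto(b'STOP', ('localhost', port))    # lets the reader thread return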
Example #10
def update_servers():
    server_path = os.path.join(config.get_runtime_path(), "servers", '*.xml')

    server_files = sorted(glob.glob(server_path))

    # ----------------------------
    import xbmc
    import xbmcgui
    progress = xbmcgui.DialogProgressBG()
    progress.create("Update servers list")
    # ----------------------------

    for index, server in enumerate(server_files):
        # ----------------------------
        percentage = index * 100 / len(server_files)
        # ----------------------------
        server_name = os.path.basename(server)[:-4]
        t = Thread(target=updater.update_server, args=[server_name])
        t.setDaemon(True)
        t.start()
        # ----------------------------
        progress.update(percentage, ' Update server: ' + server_name)
        # ----------------------------
        while threading.active_count() >= MAX_THREADS:
            xbmc.sleep(500)

    # ----------------------------
    progress.close()
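The loop above throttles thread creation by polling threading.active_count(). A tiny sketch of that throttle outside Kodi (plain time.sleep instead of xbmc.sleep), with a hypothetical update_server(name):

import threading
import time

MAX_THREADS = 8

def update_server(name):
    time.sleep(0.1)            # stand-in for the real update work

for name in ['srv%d' % i for i in range(40)]:
    t = threading.Thread(target=update_server, args=(name,), daemon=True)
    t.start()
    # never allow more than MAX_THREADS threads (including the main one) at once
    while threading.active_count() >= MAX_THREADS:
        time.sleep(0.5)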
Example #11
def do_main_program():
	
	do_scheduler()

	# read config and init sensors
	
	global sensors
	sensors = config.readConfig()
	
	logger.debug(sensors.keys())

	
	
	
	threadHTTP = Thread(target=inetServer.threadHTTP)
	threadHTTP.setDaemon(True)
	threadHTTP.start()

	
	while 1:
		try:
			time.sleep(0.1)
		except KeyboardInterrupt:
			print >> sys.stderr, '\nExiting by user request.\n'
			sys.exit(0)
Example #12
def main():
    global client
    global loop
    client = WebsocketClient()
    device_index = config.get('speech_client').get('device_index')
    if device_index:
        device_index = int(device_index)
    loop = RecognizerLoop(device_index=device_index)
    loop.on('recognizer_loop:utterance', handle_utterance)
    loop.on('recognizer_loop:record_begin', handle_record_begin)
    loop.on('recognizer_loop:wakeword', handle_wakeword)
    loop.on('recognizer_loop:record_end', handle_record_end)
    loop.on('speak', handle_speak)
    client.on('speak', handle_speak)
    client.on(
        'multi_utterance_intent_failure',
        handle_multi_utterance_intent_failure)
    client.on('recognizer_loop:sleep', handle_sleep)
    client.on('recognizer_loop:wake_up', handle_wake_up)
    client.on('mycroft.stop', handle_stop)
    event_thread = Thread(target=connect)
    event_thread.setDaemon(True)
    event_thread.start()

    try:
        subprocess.call('echo "eyes.reset" >/dev/ttyAMA0', shell=True)
    except:
        pass

    try:
        loop.run()
    except KeyboardInterrupt, e:
        logger.exception(e)
        event_thread.exit()
        sys.exit()
Example #13
    def run(self):
        '''
        Does the job
        '''
        self.parser.add_option("-l", "--list", default=False, action="store_true", 
            help = "If present, list hosts configured in site.xml")
        self.parser.add_option("-a", "--artm", default=False, action="store_true", 
            help = "If present, include lo-art-1 to cycle/off")
        self.parser.add_option("-c", "--cob", default=False, action="store_true", 
            help = "If present, reboot only cob-* machines. cob-dmc is not rebooted because it belong to CONTROL subsystem.")
        self.parser.add_option("-o", "--off", default=False, action="store_true", 
            help = "If present, turn off the machines instead cycle them.")
        self.parser.add_option("-t", "--timeout", default=150, 
            help = "Set timeout to wait the recovered hosts. Default is 150 secs")
        self.parse()
        self.parse_args()
        self.get_hosts()
        if self.list is False:
            lastpdu = 'none'
            for host in self.hosts:
                currentpdu = str(self.get_pdu(host)[0])
                if currentpdu != lastpdu:
                    lastpdu = currentpdu
                    self.pstrip_cmd(self.get_pdu(host))
                    time.sleep(1)
                else:
                    time.sleep(2)
                    lastpdu = currentpdu
                    self.pstrip_cmd(self.get_pdu(host))
            if self.verbose:
                print self._get_time()+" Waiting for hosts ..."
            if self.off is False:
                queue = Queue()
                for host in self.hosts:
                    queue.put(host)
                    self.remaining_hosts.append(host)
                for host in self.hosts:
                    rh =  Thread(target=self.recover_host,args=(host, self.timeout,queue))
                    rh.setDaemon(True)
                    rh.start()
                queue.all_tasks_done.acquire()
                try:
                    endtime = time.time() + self.timeout
                    while queue.unfinished_tasks:
                        remaining = endtime -  time.time()
                        if remaining <= 0.0:
                            raise timeOut('Time Out Raise!!!')
                        queue.all_tasks_done.wait(remaining)
                except timeOut:
                    print "%s Probably %d hosts are still rebooting, please check ..." % (self._get_time(), int(queue.unfinished_tasks))
                    print "%s Please check these hosts:" % self._get_time()
                    for h in self.remaining_hosts:
                        print "%s ---> \033[31m%s\033[0m" % (self._get_time(), h)
                finally:
                    queue.all_tasks_done.release()

        else:
            print "Hosts configured in site.xml"
            for host in self.hosts:
                print host
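Queue.join() has no timeout parameter, which is why the example above waits on the queue's all_tasks_done condition directly. A compact sketch of that trick, factored into a helper; slow_worker() is a hypothetical stand-in for recover_host:

import time
from queue import Queue
from threading import Thread

def join_with_timeout(q, timeout):
    """Like q.join(), but give up after `timeout` seconds. Returns True if all tasks finished."""
    deadline = time.time() + timeout
    with q.all_tasks_done:
        while q.unfinished_tasks:
            remaining = deadline - time.time()
            if remaining <= 0.0:
                return False
            q.all_tasks_done.wait(remaining)
    return True

q = Queue()

def slow_worker():
    while True:
        q.get()
        time.sleep(0.2)        # pretend to recover a host
        q.task_done()

Thread(target=slow_worker, daemon=True).start()
for host in ['a', 'b', 'c']:
    q.put(host)
print('all done' if join_with_timeout(q, 5) else 'timed out')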
Example #14
    def __init__( self, date, warcs, viral, logs, identifiers ):
        self.warcs = []
        self.viral = []
        self.date = date
        self.wq = Queue()
        self.vq = Queue()

        for i in range(NUM_THREADS):
            worker = Thread(target=create_warcs, args=(self.wq, self.warcs))
            worker.setDaemon(True)
            worker.start()

        for warc in warcs:
            self.wq.put(warc)
        self.wq.join()

        for i in range(NUM_THREADS):
            worker = Thread(target=create_warcs, args=(self.vq, self.viral))
            worker.setDaemon(True)
            worker.start()

        for warc in viral:
            self.vq.put(warc)
        self.vq.join()

        self.logs = []
        for log in logs:
            self.logs.append( ZipContainer( path=log ) )
        self.identifiers = identifiers
        self.createDomainMets()
        self.createCrawlerMets()
Example #15
 def main(self):
     global QUEUE
     QUEUE = TaskQueue(self.config)
     
     indexers = self.buildIndexers()
            
     for indexer in indexers: 
         QUEUE.put(indexer)
     
     #start stat printing
     if self.statManager != None:
         timer = Timer()
         timer.scheduleAtFixedRate(StatLoggerTask(self.statManager, indexers), 0, self.config.statLoggingFrequency*60*1000)
     
     #start worker threads        
     workers = []
     for i in range(self.config.numThreads):
         t = Thread (target=ISWorker(self.config, self.statManager))
         workers.append(t)
         t.setDaemon(1)
         t.start()
             
     for t in workers:
         t.join()
         
     log('Done!')
Example #16
    def __init__(self, url, dirname, urlfile):
        """
        Creates a random secret, instantiates a ListableNode with that secret,
        starts a Thread with the ListableNode's _start method (making sure the
        Thread is a daemon so it will quit when the application quits),
        reads all the URLs from the URL file and introduces the Node to
        them. Finally, sets up the GUI.
        """
        # Give the server a head start:
        try:
            sleep(HEAD_START)
            self.server = ServerProxy(url)
            super(Client, self).__init__()
            self.secret = randomString(SECRET_LENGTH)
            n = ListableNode(url, dirname, self.secret)
            t = Thread(target=n._start)
            t.setDaemon(1)
            t.start()
            for line in open(urlfile):
                line = line.strip()
                self.server.hello(line)

        except:
            print "init fail"
            exit(1)
Example #17
class Worker:
    def __init__(self):
        self.q = Queue()
        self.t = Thread(target=self._handle)
        self.t.setDaemon(True)
        self.t.start()

    def _handle(self):
        while True:
            reset_caches()

            fn = self.q.get()
            try:
                fn()
                self.q.task_done()
            except:
                import traceback
                print traceback.format_exc()

    def do(self, fn, *a, **kw):
        fn1 = lambda: fn(*a, **kw)
        self.q.put(fn1)

    def join(self):
        self.q.join()
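Typical use of the Worker above, assuming the snippet's reset_caches() is defined (a no-op will do) and Queue/Thread are imported: queue a few callables from the main thread and wait for the daemon thread to drain them.

w = Worker()
results = []
w.do(results.append, 1)
w.do(results.append, 2)
w.join()          # blocks until both queued callables have run
print(results)    # [1, 2]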
Example #18
 def get_historical_rates(self, ccy):
     result = self.history.get(ccy)
     if not result and ccy in self.history_ccys():
         t = Thread(target=self.get_historical_rates_safe, args=(ccy,))
         t.setDaemon(True)
         t.start()
     return result
Example #19
            def thread_do(env, q, envid, election_status, must_be_master) :
                while True :
                    v=q.get()
                    if v is None : return
                    r = env.rep_process_message(v[0],v[1],envid)
                    if must_be_master and self.confirmed_master :
                        self.dbenvMaster.rep_start(flags = db.DB_REP_MASTER)
                        must_be_master = False

                    if r[0] == db.DB_REP_HOLDELECTION :
                        def elect() :
                            while True :
                                try :
                                    env.rep_elect(2, 1)
                                    election_status[0] = False
                                    break
                                except db.DBRepUnavailError :
                                    pass
                        if not election_status[0] and not self.confirmed_master :
                            from threading import Thread
                            election_status[0] = True
                            t=Thread(target=elect)
                            import sys
                            if sys.version_info[0] < 3 :
                                t.setDaemon(True)
                            else :
                                t.daemon = True
                            t.start()
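The snippet above branches on the Python version because setDaemon() is the old camelCase API. Assigning the daemon attribute works on Python 2.6+ and Python 3 alike (and Python 3.3+ also accepts daemon=True in the constructor), so the branch can usually be collapsed:

from threading import Thread

def elect():
    pass   # stand-in for the election retry loop above

t = Thread(target=elect)
t.daemon = True    # works the same on Python 2.6+ and Python 3.x
t.start()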
Example #20
class MultiHTTPServer(ThreadingMixIn,VideoHTTPServer):
    """ MuliThreaded HTTP Server """

    __single = None
    
    def __init__(self,port):
        if MultiHTTPServer.__single:
            raise RuntimeError, "MultiHTTPServer is Singleton"
        MultiHTTPServer.__single = self 

        self.port = port
        BaseHTTPServer.HTTPServer.__init__( self, ("127.0.0.1",self.port), SimpleServer )
        self.daemon_threads = True
        self.allow_reuse_address = True
        #self.request_queue_size = 10

        self.lock = RLock()        
        
        self.urlpath2streaminfo = {} # Maps URL to streaminfo
        self.mappers = [] # List of PathMappers
        
        self.errorcallback = None
        self.statuscallback = None

    def background_serve( self ):
        name = "MultiHTTPServerThread-1"
        self.thread2 = Thread(target=self.serve_forever,name=name)
        self.thread2.setDaemon(True)
        self.thread2.start()
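A minimal runnable sketch of the same "threaded HTTP server in a background daemon thread" idea, using the standard library's ThreadingHTTPServer (Python 3.7+) instead of the project's VideoHTTPServer; the handler and port are illustrative.

from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
from threading import Thread

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'hello\n')

server = ThreadingHTTPServer(('127.0.0.1', 0), Handler)
server.daemon_threads = True                 # per-request handler threads are daemons

thread = Thread(target=server.serve_forever, name='MultiHTTPServerThread-1', daemon=True)
thread.start()
print('serving on port', server.server_address[1])
# ... later: server.shutdown(); thread.join()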
Example #21
    def startSimulation(self):
        simulation_thread = Thread(name="ert_gui_simulation_thread")
        simulation_thread.setDaemon(True)
        simulation_thread.run = self.__run_model.startSimulations
        simulation_thread.start()

        self.__update_timer.start()
Example #22
    def ssh_submit(nworker, nserver, pass_envs):
        """
        customized submit script
        """
        # thread func to run the job
        def run(prog):
            subprocess.check_call(prog, shell=True)

        # sync programs if necessary
        local_dir = os.getcwd() + "/"
        working_dir = local_dir
        if args.sync_dst_dir is not None and args.sync_dst_dir != "None":
            working_dir = args.sync_dst_dir
            for h in hosts:
                sync_dir(local_dir, h, working_dir)

        # launch jobs
        for i in range(nworker + nserver):
            pass_envs["DMLC_ROLE"] = "server" if i < nserver else "worker"
            (node, port) = hosts[i % len(hosts)]
            prog = get_env(pass_envs) + " cd " + working_dir + "; " + (" ".join(args.command))
            prog = "ssh -o StrictHostKeyChecking=no " + node + " -p " + port + " '" + prog + "'"
            thread = Thread(target=run, args=(prog,))
            thread.setDaemon(True)
            thread.start()

        return ssh_submit
Example #23
def setWorkers():
    for i in range(num_threads):
        worker = Thread(target=threadFunc, args=(i,queue))
        # these need to be daemon threads, otherwise the main program will not exit
        # after queue.join()
        worker.setDaemon(True)
        worker.start()
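The comment above is the crux of most of these examples: worker threads block forever in q.get(), so queue.join() only tells you the work is done, it does not make the workers exit. Marking them as daemons lets the interpreter exit anyway once the main thread finishes. A compact, self-contained demonstration of that behaviour:

from queue import Queue
from threading import Thread

queue = Queue()

def threadFunc(i, q):
    while True:                 # this loop never ends on its own
        item = q.get()
        print('worker %d handled %r' % (i, item))
        q.task_done()

for i in range(3):
    worker = Thread(target=threadFunc, args=(i, queue))
    worker.setDaemon(True)      # without this, the process would hang after join()
    worker.start()

for item in 'abcdef':
    queue.put(item)
queue.join()                    # returns once every item has been task_done()
# the main thread ends here; the still-blocked daemon workers die with it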
Example #24
def put_fb_page(fb_user_obj, brand_name, page_obj):
    url = "%s" % (page_obj["id"])
    page_data = GraphAPI(page_obj["access_token"]).request(url)

    fb_page = FBPage()
    fb_page.page_id = page_data["id"]
    fb_page.name = page_data["name"]
    page_data.pop("id", None)
    page_data.pop("name", None)
    fb_page.fields = page_data

    def save_obj(page):
        return FBPage.collection().save(page.serialize())

    dupe_obj = FBPage.collection().find_one({"page_id": page_obj["id"]})
    if dupe_obj is None:
        fb_page._id = save_obj(fb_page)
    else:
        FBPage.collection().update({"page_id": page_obj["id"]}, fb_page.serialize())
        fb_page = FBPage.unserialize(FBPage.collection().find_one({"page_id": page_obj["id"]}))

    update_brand_mapping(fb_user_obj.u_id, brand_name, "facebook", page_obj["id"], page_obj["access_token"])

    t = Thread(target=load_fb_page_to_db, args=(page_obj["id"], page_obj["access_token"]))
    t.setDaemon(False)
    t.start()

    return fb_page
Example #25
 def __init__(self):
     self.for_upload = []
     self.url_stats = {}
     self.tempdir = 'tmp'
     self.current_date = datetime.datetime.today().strftime("%Y-%m-%d")
     self.create_temp_dir()
     self.get_image_data()
     for chunk in self.chunks(glob.glob1(self.tempdir, "*.jpg"), 50):
         worker = Thread(target=self.create_thumbnail, args=(chunk,))
         worker.setDaemon(True)
         worker.start()
     while (activeCount() > 1):
         time.sleep(5)
     s3key = 'AKIAIYZERMTB6Z5NPF5Q'
     s3secret = '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'
     bucket_name = "hrachya-test"
     self.s3_conn = boto.connect_s3(s3key, s3secret)
     self.bucket_obj = self.s3_conn.get_bucket(bucket_name)
     for chunk in self.chunks(glob.glob1(self.tempdir, "*.jpg"), 100):
         worker = Thread(target=self.aws_s3_uploader, args=(chunk,))
         worker.setDaemon(True)
         worker.start()
     while (activeCount() > 1):
         time.sleep(5)
     #self.aws_s3_uploader()
     self.update_record()
     self.cleaner()
Example #26
def start_model_pulling_in_worker(model_server: Optional[EndpointConfig],
                                  wait_time_between_pulls: int,
                                  project: 'Project') -> None:
    worker = Thread(target=_run_model_pulling_worker,
                    args=(model_server, wait_time_between_pulls, project))
    worker.setDaemon(True)
    worker.start()
Example #27
def main():
    global ws
    global loop
    ws = WebsocketClient()
    tts.init(ws)
    ConfigurationManager.init(ws)
    loop = RecognizerLoop()
    loop.on('recognizer_loop:utterance', handle_utterance)
    loop.on('recognizer_loop:record_begin', handle_record_begin)
    loop.on('recognizer_loop:wakeword', handle_wakeword)
    loop.on('recognizer_loop:record_end', handle_record_end)
    loop.on('speak', handle_speak)
    ws.on('open', handle_open)
    ws.on('speak', handle_speak)
    ws.on(
        'multi_utterance_intent_failure',
        handle_multi_utterance_intent_failure)
    ws.on('recognizer_loop:sleep', handle_sleep)
    ws.on('recognizer_loop:wake_up', handle_wake_up)
    ws.on('mycroft.stop', handle_stop)
    ws.on("mycroft.paired", handle_paired)
    event_thread = Thread(target=connect)
    event_thread.setDaemon(True)
    event_thread.start()

    try:
        loop.run()
    except KeyboardInterrupt, e:
        logger.exception(e)
        event_thread.exit()
        sys.exit()
Example #28
 def fetchDatas(self, flow, step=0, *args, **kwargs):
     try:
         start = time.time()
         self.fire(flow, step, *args, **kwargs)
         if self.timeout > -1:
             def check(self, timeout):
                 time.sleep(timeout)
                 self.exit()
                 print 'Time out of %s. ' % str(self.timeout)
             watcher = Thread(
                 target=check, args=(self, self.timeout - (time.time() - start)))
             watcher.setDaemon(True)
             watcher.start()
         self.waitComplete()
         it = self.tinder(flow)
         while True:
             if hasattr(it, 'store'):
                 try:
                     it.store(None, forcexe=True)
                 except:
                     t, v, b = sys.exc_info()
                     err_messages = traceback.format_exception(t, v, b)
                     print(': %s, %s \n' % (str(args), str(kwargs)),
                           ','.join(err_messages), '\n')
             if hasattr(it, 'next'):
                 it = it.next
             else:
                 break
         self.dones.add(flow)
         end = time.time()
         self.totaltime = end - start
         return True
     except:
         return False
Example #29
def main():
    global best
    parser = argparse.ArgumentParser(description="ping multiple hosts")
    parser.add_argument("-f", "--file", help="a file contain host list, one line one host")
    parser.add_argument("hosts", nargs="*", metavar="HOST", help="host to ping")
    args = parser.parse_args()

    if args.file is not None:
        try:
            for line in open(args.file).readlines():
                line = line.strip()
                if line == "" or line[0] == "#":
                    continue
                args.hosts.append(line)
        except IOError:
            pass

    for ip in args.hosts:
        queue.put(ip.strip())

    for i in range(num_threads):
        worker = Thread(target=ping)
        worker.setDaemon(True)
        worker.start()

    queue.join()
    print "The best host is \033[92m %s : %s" % (best['host'], best['speed'])
Example #30
class Network(object):
    def __init__(self, config,log):
        self.log = log
        
        xmlport = config.getValue("xmlport")
        xmlhost = config.getValue("xmlhost")
        udpport = config.getValue("udpport")
        udphost = config.getValue("udphost")

        self.xml = SimpleXMLRPCServer((xmlhost, xmlport))
        self.udp = netServer(udphost,udpport,log)

        log.StartupMessage("* Attempting to start XML-RPC Server")
        self.udp.serve_forever()
        self.xmlThread = Thread( target = self.startXMLRPCServer )
        self.xmlThread.setDaemon( True )
        self.xmlThread.start()
        log.StartupMessage( "    XML-RPC Server is up at port %d" % xmlport)
        
    def register_function(self,func,funcname):
        self.xml.register_function(func,funcname)
        self.udp.register_function(func,funcname)
        self.log.Notice("Registered function %s for network access" % funcname)

    def stopServices(self):
        self.udp.stopServer()
        self.udp.join()
        
        self.log.StartupMessage("* Attempting to stop XML-RPC Server")
        self.xml.server_close()
        #self.xmlThread.join()

    def startXMLRPCServer(self):
        self.xml.serve_forever()
Example #31
def start():
    t = Thread(target=job)
    t.setDaemon(True)
    t.start()
    print instace_id


def worker():
    # this worker function assigns each queued instance id to a worker thread
    while True:
        Instance_id = q.get()
        backup_task(Instance_id)
        q.task_done()


reservations = ec.describe_instances().get('Reservations', [])

instances = sum(
    [
        [q.put(i['InstanceId'])
         for i in r['Instances']]  # adding instance id to queue
        for r in reservations
    ],
    [])

print "Found %d instances that need backing up" % len(instances)

#creating thread for each instance id
for i in range(len(instances)):
    t = Thread(target=worker)
    t.setDaemon(True)
    t.start()

q.join()  #wait for all threads to complete
Example #33
class CobraDaemon(ThreadingTCPServer):

    def __init__(self, host="", port=COBRA_PORT, sslcrt=None, sslkey=None, sslca=None, msgpack=False, json=False):
        '''
        Construct a cobra daemon object.

        Parameters:
        host        - Optional hostname/ip to bind the service to (default: inaddr_any)
        port        - The port to bind (Default: COBRA_PORT)
        msgpack     - Use msgpack serialization

        # SSL Options
        sslcrt / sslkey     - Specify sslcrt and sslkey to enable SSL server side
        sslca               - Specify an SSL CA key to use validating client certs

        '''
        self.thr = None
        self.run = True
        self.shared = {}
        self.dowith = {}
        self.host = host
        self.port = port
        self.reflock = RLock()
        self.refcnts = {}
        self.authmod = None
        self.sflags = 0

        if msgpack and json:
            raise Exception('CobraDaemon can not use both msgpack *and* json!')

        if msgpack:
            requireMsgpack()
            self.sflags |= SFLAG_MSGPACK

        if json:
            self.sflags |= SFLAG_JSON

        # SSL Options
        self.sslca = sslca
        self.sslcrt = sslcrt
        self.sslkey = sslkey

        self.cansetattr = True
        self.cangetattr = True

        if sslcrt and not os.path.isfile(sslcrt):
            raise Exception('CobraDaemon: sslcrt param must be a file!')

        if sslkey and not os.path.isfile(sslkey):
            raise Exception('CobraDaemon: sslkey param must be a file!')

        if sslca and not os.path.isfile(sslca):
            raise Exception('CobraDaemon: sslca param must be a file!')

        self.allow_reuse_address = True
        ThreadingTCPServer.__init__(self, (host, port), CobraRequestHandler)

        if port == 0:
            self.port = self.socket.getsockname()[1]

        self.daemon_threads = True
        self.recvtimeout = None

    def logCallerError(self, oname, args, msg=""):
        pass

    def setGetAttrEnabled(self, status):
        self.cangetattr = status

    def setSetAttrEnabled(self, status):
        self.cansetattr = status

    def setSslCa(self, crtfile):
        '''
        Set the SSL Certificate Authority used by this server.
        ( to validate client certs )
        '''
        self.sslca = crtfile

    def setSslServerCert(self, crtfile, keyfile):
        '''
        Set the cert/key used by this server to negotiate SSL.
        '''
        self.sslcrt = crtfile
        self.sslkey = keyfile

    def fireThread(self):
        self.thr = Thread(target=self.serve_forever)
        self.thr.setDaemon(True)
        self.thr.start()

    def stopServer(self):
        self.run = False
        self.server_close()
        self.thr.join()

    def serve_forever(self):
        try:

            ThreadingTCPServer.serve_forever(self)

        except Exception as e:
            if not self.run:
                return

            raise

    def setAuthModule(self, authmod):
        '''
        Enable an authentication module for this server
        ( all connections *must* be authenticated through the authmod )

        NOTE: See cobra.auth.* for various auth module implementations

        Example:
            import cobra.auth.shadow as c_a_shadow
            authmod = c_a_shadow.ShadowFileAuth('passwdfile.txt')
            cdaemon = CobraDaemon()
            cdaemon.setAuthModule(authmod)
        '''
        self.authmod = authmod

    def getSharedObject(self, name):
        return self.shared.get(name, None)

    def getSharedObjects(self):
        '''
        Return a list of (name, obj) for the currently shared objects.

        Example:
            for name,obj in daemon.getSharedObjects():
                print('%s: %r' % (name,obj))
        '''
        return self.shared.items()

    def getSharedName(self, obj):
        '''
        If this object is shared already, get the name...
        '''
        for name, sobj in self.shared.items():
            if sobj == obj:
                return name
        return None

    def getRandomName(self):
        ret = ""
        for byte in os.urandom(16):
            ret += "%.2x" % ord(byte)
        return ret

    def shareObject(self, obj, name=None, doref=False, dowith=False):
        """
        Share an object in this cobra server.  By specifying
        doref=True you will let CobraProxy objects decide that
        the object is done and should be un-shared.  Also, if
        name is None a random name is chosen.  Use dowith=True
        to cause sharing/unsharing to enter/exit (requires doref=True).

        Returns: name (or the newly generated random one)

        """
        refcnt = None
        if dowith and not doref:
            raise Exception('dowith *requires* doref!')

        if doref:
            refcnt = 0

        if dowith:
            obj.__enter__()

        if name is None:
            name = self.getRandomName()

        self.shared[name] = obj
        self.dowith[name] = dowith
        self.refcnts[name] = refcnt
        return name

    def getObjectRefCount(self, name):
        return self.refcnts.get(name)

    def decrefObject(self, name, ok=True):
        """
        Decref this object and if it reaches 0, unshare it.
        """
        logger.debug('Decrementing: %s', name)
        self.reflock.acquire()
        try:

            refcnt = self.refcnts.get(name, None)
            if refcnt is not None:
                refcnt -= 1
                self.refcnts[name] = refcnt
                if refcnt == 0:
                    self.unshareObject(name,ok=ok)

        finally:
            self.reflock.release()

    def increfObject(self, name):
        logger.debug('Incrementing: %s', name)
        self.reflock.acquire()
        try:
            refcnt = self.refcnts.get(name, None)
            if refcnt is not None:
                refcnt += 1
                self.refcnts[name] = refcnt
        finally:
            self.reflock.release()

    def unshareObject(self, name, ok=True):
        logger.debug('Unsharing %s', name)
        self.refcnts.pop(name, None)
        obj = self.shared.pop(name, None)

        # If we are using a with block, notify it
        if self.dowith.pop(name, False):
            args = (None,None,None)
            if not ok:
                args = (Exception, Exception('with boom'), None)
            obj.__exit__(*args)
        return obj
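The incref/decref bookkeeping above is essentially a reference-counted registry guarded by an RLock. A stripped-down sketch of that idea (the names here are illustrative, not Cobra's API):

from threading import RLock

class SharedRegistry:
    def __init__(self):
        self.lock = RLock()
        self.objects = {}
        self.refcnts = {}

    def share(self, name, obj):
        with self.lock:
            self.objects[name] = obj
            self.refcnts[name] = 0

    def incref(self, name):
        with self.lock:
            self.refcnts[name] += 1

    def decref(self, name):
        with self.lock:
            self.refcnts[name] -= 1
            if self.refcnts[name] == 0:
                self.refcnts.pop(name)
                return self.objects.pop(name)   # unshare once nobody holds a reference
        return None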
Example #34
def run_test(i):
    i = i.strip()
    street = i.split(',')[0]
    city = i.split(',')[1]
    zip = i.split(',')[2]
    emm_stuff = i.split(',')[3:]
    test(street, city, zip, emm_stuff)


def do_stuff(q):
    while True:
        run_test(q.get())
        q.task_done()


q = queue.Queue(maxsize=0)
num_threads = 5

for i in range(num_threads):
    worker = Thread(target=do_stuff, args=(q, ))
    worker.setDaemon(True)
    worker.start()

houses = open('addresses', 'r')

for x in houses.readlines():
    q.put(x)

q.join()
Example #35
def display_start():
    t = Thread(target=run)
    t.setDaemon(True)
    t.start()
Example #36
class WebTrader(object):
    global_config_path = os.path.dirname(__file__) + '/config/global.json'
    config_path = ''

    def __init__(self, debug=True):
        self.__read_config()
        self.trade_prefix = self.config['prefix']
        self.account_config = ''
        self.heart_active = True
        self.heart_thread = Thread(target=self.send_heartbeat)
        self.heart_thread.setDaemon(True)

        self.log_level = logging.DEBUG if debug else logging.INFO

    def read_config(self, path):
        try:
            self.account_config = helpers.file2dict(path)
        except ValueError:
            log.error('Config file format is invalid; do not edit it with Notepad, use notepad++ or sublime text instead')
        for v in self.account_config:
            if type(v) is int:
                log.warn('Values in the config file should be wrapped in double quotes (strings); otherwise unpredictable problems may occur')

    def prepare(self, config_file=None, user=None, password=None, **kwargs):
        """登录的统一接口
        :param config_file 登录数据文件,若无则选择参数登录模式
        :param user: 各家券商的账号或者雪球的用户名
        :param password: 密码, 券商为加密后的密码,雪球为明文密码
        :param account: [雪球登录需要]雪球手机号(邮箱手机二选一)
        :param portfolio_code: [雪球登录需要]组合代码
        :param portfolio_market: [雪球登录需要]交易市场, 可选['cn', 'us', 'hk'] 默认 'cn'
        """
        if config_file is not None:
            self.read_config(config_file)
        else:
            self._prepare_account(user, password, **kwargs)
        self.autologin()

    def _prepare_account(self, user, password, **kwargs):
        """映射用户名密码到对应的字段"""
        raise Exception('支持参数登录需要实现此方法')

    def autologin(self, limit=10):
        """实现自动登录
        :param limit: 登录次数限制
        """
        for _ in range(limit):
            if self.login():
                break
        else:
            raise NotLoginError('Too many failed login attempts; check whether the password is correct / the broker server is under maintenance / the network connection is working')
        self.keepalive()

    def login(self):
        pass

    def keepalive(self):
        """启动保持在线的进程 """
        if self.heart_thread.is_alive():
            self.heart_active = True
        else:
            self.heart_thread.start()

    def send_heartbeat(self):
        """每隔10秒查询指定接口保持 token 的有效性"""
        while True:
            if self.heart_active:
                log.setLevel(logging.ERROR)
                try:
                    response = self.heartbeat()
                    self.check_account_live(response)
                except Exception as e:
                    log.setLevel(self.log_level)
                    log.error('Heartbeat thread detected an account error: {}, trying to log in again'.format(e))
                    self.autologin()
                finally:
                    log.setLevel(self.log_level)
                time.sleep(30)
            else:
                time.sleep(1)

    def heartbeat(self):
        return self.balance

    def check_account_live(self, response):
        pass

    def exit(self):
        """结束保持 token 在线的进程"""
        self.heart_active = False

    def __read_config(self):
        """读取 config"""
        self.config = helpers.file2dict(self.config_path)
        self.global_config = helpers.file2dict(self.global_config_path)
        self.config.update(self.global_config)

    @property
    def balance(self):
        return self.get_balance()

    def get_balance(self):
        """获取账户资金状况"""
        return self.do(self.config['balance'])

    @property
    def position(self):
        return self.get_position()

    def get_position(self):
        """获取持仓"""
        return self.do(self.config['position'])

    @property
    def entrust(self):
        return self.get_entrust()

    def get_entrust(self):
        """获取当日委托列表"""
        return self.do(self.config['entrust'])

    @property
    def current_deal(self):
        return self.get_current_deal()

    def get_current_deal(self):
        """获取当日委托列表"""
        # return self.do(self.config['current_deal'])
        log.warning('目前仅在 佣金宝/银河子类 中实现, 其余券商需要补充')

    @property
    def exchangebill(self):
        """
        By default returns the exchange bills for the last 30 days; usually only the most recent 90 days of data within the query range can be returned.
        :return:
        """
        # TODO currently only implemented in the 华泰 (Huatai) subclass
        start_date, end_date = helpers.get_30_date()
        return self.get_exchangebill(start_date, end_date)

    def get_exchangebill(self, start_date, end_date):
        """
        Query the exchange bills within the given date range
        :param start_date: 20160211
        :param end_date: 20160211
        :return:
        """
        log.warning('Currently only implemented in the 华泰 subclass; other brokers still need support')

    def get_ipo_limit(self, stock_code):
        """
        Query the IPO subscription quota / upper limit
        :param stock_code: subscription code ID
        :return:
        """
        log.warning('Currently only implemented in the 佣金宝 subclass; other brokers still need support')

    def do(self, params):
        """发起对 api 的请求并过滤返回结果
        :param params: 交易所需的动态参数"""
        request_params = self.create_basic_params()
        request_params.update(params)
        response_data = self.request(request_params)
        try:
            format_json_data = self.format_response_data(response_data)
        except:
            # Caused by the server forcing a logout
            return None
        return_data = self.fix_error_data(format_json_data)
        try:
            self.check_login_status(return_data)
        except NotLoginError:
            self.autologin()
        return return_data

    def create_basic_params(self):
        """生成基本的参数"""
        pass

    def request(self, params):
        """请求并获取 JSON 数据
        :param params: Get 参数"""
        pass

    def format_response_data(self, data):
        """格式化返回的 json 数据
        :param data: 请求返回的数据 """
        pass

    def fix_error_data(self, data):
        """若是返回错误移除外层的列表
        :param data: 需要判断是否包含错误信息的数据"""
        return data

    def format_response_data_type(self, response_data):
        """格式化返回的值为正确的类型
        :param response_data: 返回的数据
        """
        if type(response_data) is not list:
            return response_data

        int_match_str = '|'.join(self.config['response_format']['int'])
        float_match_str = '|'.join(self.config['response_format']['float'])
        for item in response_data:
            for key in item:
                try:
                    if re.search(int_match_str, key) is not None:
                        item[key] = helpers.str2num(item[key], 'int')
                    elif re.search(float_match_str, key) is not None:
                        item[key] = helpers.str2num(item[key], 'float')
                except ValueError:
                    continue
        return response_data

    def check_login_status(self, return_data):
        pass
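The heartbeat thread above is a pausable keep-alive loop: a daemon thread that polls while a flag is set and otherwise idles. A small generic sketch of that shape, with a hypothetical send_keepalive() in place of the broker-specific heartbeat:

import time
from threading import Thread

class KeepAlive:
    def __init__(self, interval=30):
        self.interval = interval
        self.active = True
        self.thread = Thread(target=self._loop, daemon=True)

    def _loop(self):
        while True:
            if self.active:
                self.send_keepalive()
                time.sleep(self.interval)
            else:
                time.sleep(1)       # paused: just poll the flag cheaply

    def send_keepalive(self):
        print('heartbeat')          # stand-in for the real token refresh

    def start(self):
        if not self.thread.is_alive():
            self.thread.start()

    def pause(self):
        self.active = False

    def resume(self):
        self.active = True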
Example #37
from threading import Thread
from time import sleep

def fun():
    sleep(3)
    print('thread attribute test')

t = Thread(target=fun,name='02')
t.start()

p =Thread(target=fun,name='016')
p.setDaemon(True)
p.start()

t.setName('nico')
print(p.getName())
print(t.getName())
t.join()
print(p.isAlive())
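setDaemon, setName, getName, and isAlive are the legacy camelCase names (isAlive was removed in Python 3.9; the others emit DeprecationWarning since 3.10). The same script rewritten with the current attribute-based API:

from threading import Thread
from time import sleep

def fun():
    sleep(3)
    print('thread attribute test')

t = Thread(target=fun, name='02')
t.start()

p = Thread(target=fun, name='016', daemon=True)
p.start()

t.name = 'nico'
print(p.name)
print(t.name)
t.join()
print(p.is_alive())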
Example #38
def main(args=None):

    # Construct the argument parser
    parser = argparse.ArgumentParser(
        description=
        "Halide Trace Analysis Tool: give reuse distance information from trace-enabled halide program"
    )

    parser.add_argument('-o',
                        '--output',
                        dest='outFile',
                        action='store',
                        required=False,
                        default=None,
                        help="Csv file to store reuse distance frequencies")

    _LOG_LEVEL_STRINGS = ['ERROR', 'INFO', 'DEBUG']

    def _log_level_string_to_int(log_level_string):
        if not log_level_string in _LOG_LEVEL_STRINGS:
            message = 'invalid choice: {0} (choose from {1})'.format(
                log_level_string, _LOG_LEVEL_STRINGS)
            raise argparse.ArgumentTypeError(message)

        log_level_int = getattr(logging, log_level_string, logging.ERROR)
        # check the logging log_level_choices have not changed from our expected values
        assert isinstance(log_level_int, int)

        return log_level_int

    parser.add_argument(
        '--log-level',
        default='ERROR',
        dest='log_level',
        type=_log_level_string_to_int,
        nargs='?',
        help='Set the logging output level. {0}'.format(_LOG_LEVEL_STRINGS))

    # Parse arguments
    args = parser.parse_args()

    #Construct logger
    logging.basicConfig(level=args.log_level,
                        format="%(threadName)s:%(message)s")
    log = logging.getLogger()

    with multiprocessing.Manager() as manager:

        hist = manager.dict()
        load_counter = manager.Value(int, 0)
        load_counter.value = 0

        #Internal settings
        q = Queue(maxsize=1)

        def index(a, x):
            'Locate the leftmost value exactly equal to x'
            i = bisect.bisect_left(a, x)
            if i != len(a) and a[i] == x:
                return i
            raise ValueError

        #thread function to process lines from the halide program
        def process_lines(q, load_counter, hist):

            #keep track of last access
            last_access_addr2time = {}
            last_access_sorted_time = []
            lhist = {'inf': 0}
            load_cnt = 0

            #daemon loop
            while True:

                #get linebuf from the queue
                linebuf = q.get()

                #decode to regular string (python 3)
                linebuf = linebuf.decode('utf8')

                #process all lines in the buffer
                for line in linebuf.split('\n'):
                    if line.startswith('Store'):  # and ')' in line:

                        #Note: uncomment for intermediate storing for backup
                        #if load_cnt&0xFFFFFF==0:
                        #    with open('tmp_histogram.txt','wt') as f:
                        #        f.write(str(lhist))

                        buf = line[6:line.index(')') + 1]

                        # #never accessed before
                        if buf not in last_access_addr2time:
                            #+1 cold miss
                            lhist['inf'] += 1

                        #accessed before, update hist
                        else:
                            # get index of the last time this element was accessed
                            last_time_idx = index(last_access_sorted_time,
                                                  last_access_addr2time[buf])

                            ## number of unique accesses since then
                            unique = len(
                                last_access_sorted_time) - last_time_idx

                            ##remove old last access time
                            del last_access_sorted_time[last_time_idx]

                            ##update the histogram
                            if unique not in lhist:
                                lhist[unique] = 0
                            lhist[unique] += 1

                        #store current access as last access
                        last_access_addr2time[buf] = load_cnt

                        #update last access time
                        last_access_sorted_time += [load_cnt]

                        #increment counter
                        load_cnt += 1

                    elif line[0:4] == 'Load':
                        #we don't care about loads here
                        pass
                    else:
                        sline = line.strip()
                        if sline != '':
                            #This is not a load or store line, pass it through
                            sys.stdout.write(line + '\n')
                            sys.stdout.flush()

                #share results with parent
                #note: explicit assignment to trigger proper sharing with manager object
                for k, v in lhist.items():
                    hist[k] = v
                load_counter.value = load_cnt

                #signal that the linebuf is processed
                q.task_done()

        #launch worker thread
        log.info('Launch worker thread to process lines')
        worker = Thread(target=process_lines, args=(q, load_counter, hist))
        worker.setDaemon(True)
        worker.start()

        #read lines from stdin and put into queue for processing
        log.info('Processing stdin')
        remaining = ''
        while True:
            output = sys.stdin.read(8192 * 32)
            if output == '':
                break

            #prefix output with whatever was remaining from previous iteration
            output = remaining + output

            #find the last newline
            newline_idx = output.rfind('\n')

            #everything after the last new line will be kept until next iteration
            remaining = output[newline_idx + 1:]

            #if there was at least a valid newline, we can yield the output until the last newline
            if newline_idx != -1:
                q.put(output[0:newline_idx])

        #perhaps there was some trailing output that didn't terminate with a newline, yield it here
        if remaining != '':
            q.put(remaining)
        log.info('Done reading from stdin')

        #wait for all lines to get processed
        log.info('Waiting for threads to finish processing')
        q.join()

        #store to csv file
        if args.outFile:
            with open(args.outFile, 'wt') as f:
                writer = csv.writer(f, delimiter=',')
                for dist in sorted(hist.keys()):
                    freq = hist[dist]
                    writer.writerow([dist, freq])
            log.info("Saved to %s" % (args.outFile))

        #nicely format results and report to command line
        s = ['Total loads: %d' % load_counter.value]
        for dist in sorted(hist.keys()):
            freq = hist[dist]
            s += [' %s: %s ' % (dist, freq)]
        log.info('\n'.join(s))
Example #39
 def on_cmd_click(self, e):
     t = Thread(target=cmd_thr, args=(self, e.GetId()))
     t.setDaemon(True)
     t.start()
Example #40
class RealEdge(AbstractEdge):

    SLOW_DOWN_SPEED = 80

    def __init__(self,
                 i,
                 io,
                 config,
                 verbose=False,
                 update_rate_hz=20,
                 calibration=None):
        super().__init__(i)

        # Hardware config
        self.motor_driver = MotorDriver(
            io['pca9685'],
            config['subsystems']['motors'][self.id - 1]['pwm_io'],
            config['subsystems']['motors'][self.id - 1]['dir_io'],
            config['subsystems']['motors'][self.id - 1]['dir_io_type'],
            config['subsystems']['motors'][self.id - 1]['polarity'])
        beg_mcp_id = config['subsystems']['limit_switches'][self.id -
                                                            1]['beg_mcp_id']
        beg_mcp_io = config['subsystems']['limit_switches'][self.id -
                                                            1]['beg_mcp_io']
        end_mcp_id = config['subsystems']['limit_switches'][self.id -
                                                            1]['end_mcp_id']
        end_mcp_io = config['subsystems']['limit_switches'][self.id -
                                                            1]['end_mcp_io']
        self.limit_switch_beg = LimitSwitch(io['mcp23017'][beg_mcp_id],
                                            beg_mcp_io)
        self.limit_switch_end = LimitSwitch(io['mcp23017'][end_mcp_id],
                                            end_mcp_io)
        self.valve = Valve(config['subsystems']['valves'][self.id - 1]['gpio'])
        self.igniter = Igniter(config['subsystems']['igniters'][self.id -
                                                                1]['gpio'])
        self.dir_polarity = config['subsystems']['motors'][self.id -
                                                           1]['polarity']

        self.position = 0

        self.verbose = verbose

        # Thread daemon inits
        self.speed_request = -1
        self.dir_request = 0
        self._ignore_limit = False
        self.update_rate_hz = update_rate_hz

        # Safety params
        self.time_of_last_limit_switch = time()

        if calibration is None:
            # insert a default calibration
            self.set_calibration(EdgeCalibration(self))

        self.set_stall_time()

        # let limit switches settle
        try_decorator(
            timeout=5)(lambda: not all(self.get_limit_switch_state()))()

        # Threading
        self.lock = Lock()
        self.runner = Thread(target=self.loop)
        self.runner.setDaemon(True)
        self.runner.start()

        logging.info('Starting edge %d' % (self.id),
                     extra={'edge_id': self.id})

    def __str__(self):
        s = "igniter: %d, valve: %d, beg. limit: %d, end. limit: %d, motor speed: %f, motor dir: %s" % (
            self.igniter.get_state(), self.valve.get_state(),
            self.limit_switch_beg.get_state(),
            self.limit_switch_end.get_state(), self.motor_driver.get_speed(),
            self.motor_driver.get_dir_str())
        return s

    def set_stall_time(self):
        rev = self.get_calibration().get_cal_time(1, -1)
        fwd = self.get_calibration().get_cal_time(1, 1)
        self.stall_time = max(rev, fwd) * 2.0

    def loop(self):
        self.pleaseExit = False

        last_cal_time = 0
        prev_time = time()

        while (not self.pleaseExit):
            self.lock.acquire()
            now = time()

            if (not last_cal_time == 0 and self.dir_request != 0):
                add_pos = (now - prev_time) / last_cal_time
                self.position += add_pos
                if (self.position > 1):
                    self.position = 1
                if (self.position < 0):
                    self.position = 0

            prev_time = now

            #safety checks
            #both limit switches on
            if all(self.get_limit_switch_state()):
                raise Exception('Both limit switches on for edge', self.id)

            #stalled motor
            if (any(self.get_limit_switch_state())
                    or self.motor_driver.get_speed() == 0):
                self.time_of_last_limit_switch = time()
            if (self.motor_driver.get_speed() > 0 and
                    time() - self.time_of_last_limit_switch > self.stall_time):
                logging.info('id:%d theoretically stalled' % (self.id),
                             extra={'edge_id': self.id})
                pass
                #raise Exception('Stalled motor')

            self.motor_driver.set_dir(MotorDriver.REVERSE if self.dir_request
                                      == -1 else MotorDriver.FORWARD)

            if (self.motor_driver.get_dir() == MotorDriver.FORWARD
                    and self.motor_driver.get_speed() > 0
                    and not self._ignore_limit
                    and self.get_forward_limit_switch_state() == True):
                self.motor_driver.stop()
                self.speed_request = 0
                self.position = 1
                last_cal_time = 0
                if (self.verbose):
                    logging.info('Fwd limit switch hit for id:%d' % (self.id),
                                 extra={'edge_id': self.id})
            elif (self.motor_driver.get_dir() == MotorDriver.REVERSE
                  and self.motor_driver.get_speed() > 0
                  and not self._ignore_limit
                  and self.get_reverse_limit_switch_state() == True):
                self.motor_driver.stop()
                self.speed_request = 0
                self.position = 0
                last_cal_time = 0
                if (self.verbose):
                    logging.info('Rev limit switch hit for id:%d' % (self.id),
                                 extra={'edge_id': self.id})
            elif (self.speed_request >= 0):

                self.motor_driver.set_speed(self.speed_request)

                if (self.speed_request != 0):
                    last_cal_time = self.calibration.get_cal_time(
                        self.speed_request, self.dir_request)
                else:
                    last_cal_time = 0
                last_cal_time = last_cal_time * self.dir_request
                self.speed_request = -1

            # Slow down at stops
            speed = self.motor_driver.get_speed()
            if (speed > RealEdge.SLOW_DOWN_SPEED and self.dir_request == 1
                    and self._ignore_limit is False and self.position > 0.90):
                if self.verbose:
                    logging.info('slowing down :%d' % (self.id),
                                 extra={'edge_id': self.id})
                self.motor_driver.set_speed(RealEdge.SLOW_DOWN_SPEED)

            if (speed > RealEdge.SLOW_DOWN_SPEED and self.dir_request == -1
                    and self._ignore_limit is False and self.position < 0.10):
                if self.verbose:
                    logging.info('slowing down :%d' % (self.id),
                                 extra={'edge_id': self.id})
                self.motor_driver.set_speed(RealEdge.SLOW_DOWN_SPEED)

            self.lock.release()

            tosleep = 1.0 / self.update_rate_hz - (time() - now)
            if (tosleep > 0):
                sleep(tosleep)
            else:
                print('edge not calling sleep(), something weird is going on')

    def _ignore_limit_switch(self, state):
        self.lock.acquire()
        self._ignore_limit = state
        self.lock.release()

    def set_motor_state(self, direction, speed):
        self.lock.acquire()
        self.dir_request = direction
        self.speed_request = speed
        if (self.verbose):
            logging.info('Setting edge:%d to spd:%d and dir:%d' %
                         (self.id, speed, direction),
                         extra={'edge_id': self.id})
        self.lock.release()

    def set_valve_state(self, v):
        if (self.verbose):
            logging.info('Valve edge:%d %s' % (self.id, 'ON' if v else 'OFF'),
                         extra={'edge_id': self.id})
        self.valve.set_state(v)

    def set_igniter_state(self, g):
        if (self.verbose):
            logging.info('Igniter edge:%d %s' %
                         (self.id, 'ON' if g else 'OFF'),
                         extra={'edge_id': self.id})
        self.igniter.set_state(g)

    def get_limit_switch_state(self):
        return [
            self.limit_switch_beg.get_state(),
            self.limit_switch_end.get_state()
        ]

    def at_limit(self):
        return self.limit_switch_beg.get_state(
        ) or self.limit_switch_end.get_state()

    def move_to_start(self):
        self.set_motor_state(-1, 75)

    def calibrate(self):
        self._ignore_limit_switch(True)
        self.calibration.calibrate()
        self.set_stall_time()
        self._ignore_limit_switch(False)

    def get_position(self):
        return self.position

    def get_valve_state(self):
        return self.valve.get_state()

    def get_igniter_state(self):
        return self.igniter.get_state()

    def get_calibration(self):
        return self.calibration

    def kill(self):
        logging.info('Stopping edge %d' % (self.id),
                     extra={'edge_id': self.id})
        self.motor_driver.stop()
        self.valve.set_state(0)
        self.igniter.set_state(0)
        self.pleaseExit = True
        self.runner.join(5000)

    def is_healthy(self):
        return self.runner.is_alive()

    def __del__(self):
        self.kill()
Example #41
0
def init_game():
    """
    初始化游戏
    """
    global g_screen, g_sur_role, g_player, g_font

    # Initialize pygame
    pygame.init()
    pygame.display.set_caption('网络游戏Demo')
    g_screen = pygame.display.set_mode([WIDTH, HEIGHT])
    g_sur_role = pygame.image.load("./role.png").convert_alpha()  # character sprite
    g_font = pygame.font.SysFont("fangsong", 24)
    # Seed the random number generator
    random.seed(int(time.time()))
    # Create the player character
    # Generate a random name
    last_name = [
        '赵',
        '钱',
        '孙',
        '李',
        '周',
        '吴',
        '郑',
        '王',
        '冯',
        '陈',
        '褚',
        '卫',
        '蒋',
        '沈',
        '韩',
        '杨',
        '朱',
        '秦',
        '尤',
        '许',
        '何',
        '吕',
        '施',
        '张',
        '孔',
        '曹',
        '严',
        '华',
        '金',
        '魏',
        '陶',
        '姜',
        '戚',
        '谢',
        '邹',
        '喻',
    ]
    first_name = [
        '梦琪', '忆柳', '之桃', '慕青', '问兰', '尔岚', '元香', '初夏', '沛菡', '傲珊', '曼文', '乐菱',
        '痴珊', '孤风', '雅彤', '宛筠', '飞松', '初瑶', '夜云', '乐珍'
    ]
    name = random.choice(last_name) + random.choice(first_name)
    print("你的昵称是:", name)
    g_player = Role(randint(100, 500), randint(100, 300), name)

    # Connect to the server
    g_client.connect(ADDRESS)
    # Start receiving messages from the server
    thread = Thread(target=msg_handler)
    thread.setDaemon(True)
    thread.start()
    # Tell the server a new player has joined
    send_new_role()
Example #42
0
    while True:
        data = client.recv(1024)
        if len(data) == 0:
            client.close()
            # remove this connection from the pool
            g_conn_pool.remove(client)
            print("有一个客户端下线了。")
            break
        print("客户端消息:", data.decode(encoding='utf8'))


if __name__ == '__main__':
    init()
    # Start a new thread to accept incoming connections
    thread = Thread(target=accept_client)
    thread.setDaemon(True)
    thread.start()
    # Main-thread logic
    while True:
        cmd = input("""--------------------------
输入1:查看当前在线人数
输入2:给指定客户端发送消息
输入3:关闭服务端
""")
        if cmd == '1':
            print("--------------------------")
            print("当前在线人数:", len(g_conn_pool))
        elif cmd == '2':
            print("--------------------------")
            index, msg = input("请输入“索引,消息”的形式:").split(",")
            g_conn_pool[int(index)].sendall(msg.encode(encoding='utf8'))
Example #43
0
def do_search(item, categories=[]):
    multithread = config.get_setting("multithread", "buscador")
    result_mode = config.get_setting("result_mode", "buscador")
    logger.info()

    tecleado = item.extra

    itemlist = []

    channels_path = os.path.join(config.get_runtime_path(), "channels", '*.xml')
    logger.info("channels_path="+channels_path)

    channel_language = config.get_setting("channel_language")
    logger.info("channel_language="+channel_language)
    if channel_language == "":
        channel_language = "all"
        logger.info("channel_language="+channel_language)

    # On Kodi we need to wait before showing the progress dialog, otherwise it
    # ends up "behind" the "loading..." dialog and cannot be cancelled
    time.sleep(0.5)
    progreso = platformtools.dialog_progress("Buscando " + tecleado, "")
    channel_files = glob.glob(channels_path)
    number_of_channels = len(channel_files)

    searches = []
    search_results = {}
    start_time = time.time()

    if multithread:
        progreso.update(0, "Buscando %s..." % tecleado)

    for index, infile in enumerate(channel_files):
        percentage = index*100/number_of_channels

        basename = os.path.basename(infile)
        basename_without_extension = basename[:-4]

        channel_parameters = channeltools.get_channel_parameters(basename_without_extension)

        # Skip inactive channels
        if channel_parameters["active"] != "true":
            continue

        # When searching by category
        if categories:
            if not any(cat in channel_parameters["categories"] for cat in categories):
                continue

        # Skip adult channels when adult mode is disabled
        if channel_parameters["adult"] == "true" and config.get_setting("adult_mode") == "false":
            continue

        # Skip channels whose language is filtered out
        if channel_language != "all" and channel_parameters["language"] != channel_language:
            continue

        # Skip channels excluded from global search
        include_in_global_search = channel_parameters["include_in_global_search"]
        if include_in_global_search in ["", "true"]:
            # Look it up in the channel's own settings
            include_in_global_search = str(config.get_setting("include_in_global_search", basename_without_extension))
            # If the channel setting is empty, include it, since that was the previous default
            if include_in_global_search == "":
                include_in_global_search = "true"

        if include_in_global_search.lower() != "true":
            continue

        if progreso.iscanceled():
            break

        # Multi-threaded mode
        if multithread:
            t = Thread(target=channel_search, args=[search_results, channel_parameters, tecleado])
            t.setDaemon(True)
            t.start()
            searches.append(t)

        # Single-threaded mode
        else:
            logger.info("Intentado busqueda en " + basename_without_extension + " de " + tecleado)

            progreso.update(percentage, "Buscando %s en %s..." % (tecleado, channel_parameters["title"]))
            channel_search(search_results, channel_parameters, tecleado)

    # Multi-threaded mode
    # Using isAlive() avoids the need for a try-except, since (unlike is_alive())
    # that method is available in both old and new Python versions
    if multithread:
        pendent = len([a for a in searches if a.isAlive()])
        while pendent:
            pendent = len([a for a in searches if a.isAlive()])
            percentage = (len(searches) - pendent) * 100 / len(searches)
            progreso.update(percentage, "Buscando %s en %d canales..." % (tecleado, len(searches)))
            if progreso.iscanceled():
                break
            time.sleep(0.5)

    total = 0

    for channel in sorted(search_results.keys()):
        for search in search_results[channel]:
            total += len(search["itemlist"])
            if result_mode == 0:
                title = channel
                if len(search_results[channel]) > 1:
                    title += " [" + search["item"].title.strip() + "]"
                title += " (" + str(len(search["itemlist"])) + ")"

                title = re.sub("\[COLOR [^\]]+\]", "", title)
                title = re.sub("\[/COLOR]", "", title)

                extra = search["item"].extra + "{}" + search["item"].channel + "{}" + tecleado
                itemlist.append(Item(title=title, channel="buscador", action="channel_result", url=search["item"].url,
                                     extra=extra, folder=True))
            else:
                itemlist.extend(search["itemlist"])

    title = "Buscando: '%s' | Encontrado: %d vídeos | Tiempo: %2.f segundos" % (tecleado, total, time.time()-start_time)
    itemlist.insert(0, Item(title=title, color='yellow'))

    progreso.close()

    return itemlist
Example #44
0
 def on_evt_click(self, e):
     t = Thread(target=evt_thr, args=(self, ))
     t.setDaemon(True)
     t.start()
Example #45
0
class RTPReceiveClient:
    RTP_TIMEOUT = 10000  # in milliseconds

    def __init__(self, host_address, rtp_port: int):

        self.host_address = host_address
        self.rtp_port = rtp_port
        self._frame_buffer = []  #TODO.. FrameSize Limit
        self.is_receiving_rtp = False
        self.current_frame_number = -1

    def get_next_frame(self):
        if self._frame_buffer:
            self.current_frame_number += 1
            return self._frame_buffer.pop(0), self.current_frame_number

        return None

    def start(self, callback):
        self.is_receiving_rtp = True
        self._start_rtp_receive_thread()
        self.callback = callback

    def pause(self):
        #TODO
        self.is_receiving_rtp = False

    def _handle_audio_receive(self):
        self._rtp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self._rtp_socket.bind((self.host_address, self.rtp_port))
        self._rtp_socket.settimeout(self.RTP_TIMEOUT / 1000.)
        while True:
            if not self.is_receiving_rtp:
                sleep(self.RTP_TIMEOUT / 1000.)  # diminish cpu hogging
                continue

            print('receive packet')
            packet = self._recv_rtp_packet()
            # for debugging
            # packet.print_header()

            audio_data = packet.payload
            print('audio data length :', len(audio_data))
            print('audio data ts :', packet.timestamp)
            self.callback(audio_data)

    def _recv_rtp_packet(self, size=DEFAULT_CHUNK_SIZE) -> RTPPacket:
        recv = bytes()
        print('Waiting RTP packet...')
        while True:
            try:
                recv = self._rtp_socket.recv(size)
                # TODO.. maybe check if packet is full.
                break
            except socket.timeout:
                print('Receive RTP Socket timeout')
                continue
        # print(f"Received from server: {repr(recv)}")

        return RTPPacket.from_packet(recv)

    def _start_rtp_receive_thread(self):
        self._rtp_receive_thread = Thread(target=self._handle_audio_receive)
        self._rtp_receive_thread.setDaemon(True)
        self._rtp_receive_thread.start()
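
A minimal usage sketch for the class above, based only on the methods it defines; the address, port, and callback below are hypothetical example values:

def print_audio_chunk(audio_data):
    # The callback receives the raw RTP payload bytes of each packet.
    print('received %d bytes of audio' % len(audio_data))

client = RTPReceiveClient('0.0.0.0', 5004)  # 5004 is just an example port
client.start(print_audio_chunk)             # spawns the daemon receive thread
# keep the main thread alive; the daemon receive thread dies with it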
Example #46
0
class JoystickController(object):
    '''
    Joystick client using access to local physical input
    '''
    def __init__(self,
                 poll_delay=0.0166,
                 max_throttle=1.0,
                 steering_axis='x',
                 throttle_axis='rz',
                 steering_scale=1.0,
                 throttle_scale=-1.0,
                 dev_fn='/dev/input/js0',
                 auto_record_on_throttle=True):

        self.angle = 0.0
        self.throttle = 0.0
        self.mode = 'user'
        self.poll_delay = poll_delay
        self.running = True
        self.max_throttle = max_throttle
        self.steering_axis = steering_axis
        self.throttle_axis = throttle_axis
        self.steering_scale = steering_scale
        self.throttle_scale = throttle_scale
        self.recording = False
        self.constant_throttle = False
        self.auto_record_on_throttle = auto_record_on_throttle

        #init joystick
        self.js = Joystick(dev_fn)
        self.js.init()

        #start thread to poll it
        self.thread = Thread(target=self.update)
        self.thread.setDaemon(True)
        self.thread.start()

    def on_throttle_changes(self):
        '''
        turn on recording when non zero throttle in the user mode.
        '''
        if self.auto_record_on_throttle:
            self.recording = (self.throttle != 0.0 and self.mode == 'user')

    def update(self):
        '''
        poll a joystick for input events

        button map name => PS3 button => function
        * top2 = PS3 dpad up => increase throttle scale
        * base = PS3 dpad down => decrease throttle scale
        * base2 = PS3 dpad left => increase steering scale 
        * pinkie = PS3 dpad right => decrease steering scale
        * trigger = PS3 select => switch modes
        * top = PS3 start => toggle constant throttle
        * base5 = PS3 left trigger 1 
        * base3 = PS3 left trigger 2
        * base6 = PS3 right trigger 1 
        * base4 = PS3 right trigger 2
        * thumb2 = PS3 right thumb
        * thumb = PS3 left thumb
        * circle = PS3 circle => toggle recording
        * triangle = PS3 triangle => increase max throttle
        * cross = PS3 cross => decrease max throttle
        '''

        while self.running:
            button, button_state, axis, axis_val = self.js.poll()

            if axis == self.steering_axis:
                self.angle = self.steering_scale * axis_val
                print("angle", self.angle)

            if axis == self.throttle_axis:
                #this value is often reversed, with positive value when pulling down
                self.throttle = (self.throttle_scale * axis_val *
                                 self.max_throttle)
                print("throttle", self.throttle)
                self.on_throttle_changes()

            if button == 'trigger' and button_state == 1:
                '''
                switch modes from:
                user: human controlled steer and throttle
                local_angle: ai steering, human throttle
                local: ai steering, ai throttle
                '''
                if self.mode == 'user':
                    self.mode = 'local_angle'
                elif self.mode == 'local_angle':
                    self.mode = 'local'
                else:
                    self.mode = 'user'
                print('new mode:', self.mode)

            if button == 'circle' and button_state == 1:
                '''
                toggle recording on/off
                '''
                if self.auto_record_on_throttle:
                    print('auto record on throttle is enabled.')
                elif self.recording:
                    self.recording = False
                else:
                    self.recording = True

                print('recording:', self.recording)

            if button == 'triangle' and button_state == 1:
                '''
                increase max throttle setting
                '''
                self.max_throttle = round(min(1.0, self.max_throttle + 0.01),
                                          2)
                if self.constant_throttle:
                    self.throttle = self.max_throttle
                    self.on_throttle_changes()

                print('max_throttle:', self.max_throttle)

            if button == 'cross' and button_state == 1:
                '''
                decrease max throttle setting
                '''
                self.max_throttle = round(max(0.0, self.max_throttle - 0.01),
                                          2)
                if self.constant_throttle:
                    self.throttle = self.max_throttle
                    self.on_throttle_changes()

                print('max_throttle:', self.max_throttle)

            if button == 'base' and button_state == 1:
                '''
                increase throttle scale
                '''
                self.throttle_scale = round(
                    min(0.0, self.throttle_scale + 0.05), 2)
                print('throttle_scale:', self.throttle_scale)

            if button == 'top2' and button_state == 1:
                '''
                decrease throttle scale
                '''
                self.throttle_scale = round(
                    max(-1.0, self.throttle_scale - 0.05), 2)
                print('throttle_scale:', self.throttle_scale)

            if button == 'base2' and button_state == 1:
                '''
                increase steering scale
                '''
                self.steering_scale = round(
                    min(1.0, self.steering_scale + 0.05), 2)
                print('steering_scale:', self.steering_scale)

            if button == 'pinkie' and button_state == 1:
                '''
                decrease steering scale
                '''
                self.steering_scale = round(
                    max(0.0, self.steering_scale - 0.05), 2)
                print('steering_scale:', self.steering_scale)

            if button == 'top' and button_state == 1:
                '''
                toggle constant throttle
                '''
                if self.constant_throttle:
                    self.constant_throttle = False
                    self.throttle = 0
                    self.on_throttle_changes()
                else:
                    self.constant_throttle = True
                    self.throttle = self.max_throttle
                    self.on_throttle_changes()
                print('constant_throttle:', self.constant_throttle)

            time.sleep(self.poll_delay)

    def run_threaded(self, img_arr_top=None, img_arr_bot=None):
        self.img_arr_top = img_arr_top
        self.img_arr_bot = img_arr_bot
        return self.angle, self.throttle, self.mode, self.recording

    def shutdown(self):
        self.running = False
        time.sleep(0.5)
Example #47
0
class DataBatcher(object):
    """
        Data batcher with queue for loading big dataset
    """

    def __init__(self, data_dir, file_list, batch_size, num_epoch, shuffle=False):
        self.data_dir = data_dir
        self.file_list = file_list
        self.batch_size = batch_size
        self.num_epoch = num_epoch
        self.shuffle = shuffle

        self.cur_epoch = 0
        self.loader_queue = Queue(maxsize=CHUNK_NUM)
        self.loader_queue_size = 0
        self.batch_iter = self.batch_generator()
        self.input_gen = self.loader_generator()

        # Start the threads that load the queues
        self.loader_q_thread = Thread(target=self.fill_loader_queue)
        self.loader_q_thread.setDaemon(True)
        self.loader_q_thread.start()

        # Start a thread that watches the other threads and restarts them if they're dead
        self.watch_thread = Thread(target=self.monitor_threads)
        self.watch_thread.setDaemon(True)
        self.watch_thread.start()

    def get_batch(self):
        try:
            batch_data, local_size = next(self.batch_iter)
        except StopIteration:
            batch_data = None
            local_size = 0
        return batch_data, local_size

    def get_epoch(self):
        return self.cur_epoch

    def full(self):
        if self.loader_queue_size == CHUNK_NUM:
            return True
        else:
            return False

    def batch_generator(self):
        while self.loader_queue_size > 0:
            data_loader = self.loader_queue.get()
            n_batch = data_loader.n_batch
            self.loader_queue_size -= 1
            for batch_idx in range(n_batch):
                batch_data, local_size = data_loader.get_batch(batch_idx=batch_idx)
                yield batch_data, local_size

    def loader_generator(self):
        for epoch in range(self.num_epoch):
            self.cur_epoch = epoch
            if self.shuffle:
                np.random.shuffle(self.file_list)
            for idx, f in enumerate(self.file_list):
                reader = open("%s/%s" % (self.data_dir, f), 'br')
                q_dict = pickle.load(reader)
                data_loader = DataLoader(batch_size=self.batch_size)
                data_loader.feed_by_data(q_dict)
                yield data_loader

    def fill_loader_queue(self):
        while True:
            if self.loader_queue_size <= CHUNK_NUM:
                try:
                    data_loader = next(self.input_gen)
                    self.loader_queue.put(data_loader)
                    self.loader_queue_size += 1
                except StopIteration:
                    break

    def monitor_threads(self):
        """Watch loader queue thread and restart if dead."""
        while True:
            time.sleep(60)
            if not self.loader_q_thread.is_alive():  # if the thread is dead
                print('Found loader queue thread dead. Restarting.')
                new_t = Thread(target=self.fill_loader_queue)
                self.loader_q_thread = new_t
                new_t.daemon = True
                new_t.start()
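
A sketch of how a consumer loop might drive this batcher, using only get_batch() and get_epoch() from the class above (the file names and batch size are made-up example values):

batcher = DataBatcher(data_dir='data', file_list=['chunk0.pkl', 'chunk1.pkl'],
                      batch_size=32, num_epoch=2, shuffle=True)
while True:
    batch_data, local_size = batcher.get_batch()
    if batch_data is None:  # generator exhausted: all epochs consumed
        break
    # a training step on batch_data would go here
    print('epoch', batcher.get_epoch(), 'items in batch', local_size)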
Example #48
0
def listen_for_midi():
    mido.set_backend(name='mido.backends.rtmidi_python', load=True)

    midi_thread = Thread(target=_forward_midi, name="MidoListeningThread")
    midi_thread.setDaemon(True)
    midi_thread.start()
Example #49
0
File: misc.py Project: gryf/wicd
 def wrapper(*args, **kwargs):
     t = Thread(target=f, args=args, kwargs=kwargs)
     t.setDaemon(True)
     t.start()
Example #50
0
 def async_func(*args, **kwargs):
     func_hl = Thread(target=func, args=args, kwargs=kwargs)
     func_hl.setDaemon(True)
     func_hl.start()
     return func_hl
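
The snippet above is the body of a common "fire and forget" decorator. A self-contained sketch of the full pattern (the decorator name run_async is hypothetical) might look like this:

import time
from threading import Thread

def run_async(func):
    """Run the wrapped function in a daemon thread and return that Thread."""
    def async_func(*args, **kwargs):
        func_hl = Thread(target=func, args=args, kwargs=kwargs)
        func_hl.daemon = True  # same effect as setDaemon(True)
        func_hl.start()
        return func_hl
    return async_func

@run_async
def slow_task(n):
    time.sleep(n)
    print('done after', n, 'seconds')

t = slow_task(1)  # returns immediately with the running Thread
t.join()          # optional: wait for it to finish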
Example #51
0
class KeyboardAutoSwitchApplet(Budgie.Applet):
    """ Budgie.Applet is in fact a Gtk.Bin """
    def __init__(self, uuid):
        Budgie.Applet.__init__(self)
        self.uuid = uuid
        # setup watching applet presence
        self.currpanelsubject_settings = None
        GLib.timeout_add_seconds(1, self.watchout)
        self.kbautoswitch_onpanel = True
        # general stuff
        self.key = "org.gnome.desktop.input-sources"
        self.settings = Gio.Settings.new(self.key)
        # menugrid
        self.menugrid = Gtk.Grid()
        self.menugrid.set_row_spacing(5)
        self.menugrid.set_column_spacing(20)
        # left space
        self.menugrid.attach(Gtk.Label("   "), 0, 0, 1, 1)
        # Default language section
        self.menugrid.attach(Gtk.Label("Default layout:", xalign=0), 1, 1, 1,
                             1)
        self.langlist_combo = Gtk.ComboBoxText()
        self.langlist_combo.set_entry_text_column(0)
        self.langlist_combo.set_size_request(185, 20)
        self.menugrid.attach(self.langlist_combo, 1, 2, 1, 1)
        self.menugrid.attach(Gtk.Label("\nExceptions: ", xalign=0), 1, 4, 1, 1)
        # Exceptions section
        self.exc_combo = Gtk.ComboBoxText()
        self.exc_combo.set_entry_text_column(0)
        self.menugrid.attach(self.exc_combo, 1, 5, 1, 1)
        delete_button = Gtk.Button()
        delete_img = self.seticon = Gtk.Image.new_from_icon_name(
            "user-trash-symbolic", Gtk.IconSize.MENU)
        delete_button.set_image(delete_img)
        self.menugrid.attach(delete_button, 2, 5, 1, 1)
        # end spacer
        spacer_end = Gtk.Label("")
        self.menugrid.attach(spacer_end, 3, 10, 1, 1)
        # panel
        self.seticon = Gtk.Image.new_from_icon_name(
            "budgie-keyboard-autoswitch-symbolic", Gtk.IconSize.MENU)
        self.box = Gtk.EventBox()
        self.box.add(self.seticon)
        self.add(self.box)
        self.popover = Budgie.Popover.new(self.box)
        self.popover.add(self.menugrid)
        self.popover.get_child().show_all()
        self.box.show_all()
        self.show_all()
        self.box.connect("button-press-event", self.on_press)
        # initiate
        try:
            # get the possible existing dict data
            f = open(lang_datafile)
            val = f.read()
            if val == "":
                raise FileNotFoundError
            self.langdata = ast.literal_eval(val.strip())

        except FileNotFoundError:
            self.langdata = {}
        try:
            # get the possible previously set (and saved) default language
            self.default_lang = open(default_langfile).read().strip()
        except FileNotFoundError:
            lang_index = self.settings.get_uint("current")
            self.default_lang = self.readable_lang(
                self.settings.get_value("sources")[lang_index][1])
        self.langlist_selection_id = self.langlist_combo.connect(
            "changed",
            self.change_ondeflang_select,
        )
        delete_button.connect("clicked", self.remove_exception)
        self.act_on_gsettingschange()
        self.settings.connect("changed::sources", self.act_on_gsettingschange)

        # Use dbus connection to check for the screensaver activity
        self.bus = dbus.SessionBus()
        self.screensaver = None

        # thread
        GObject.threads_init()
        # thread
        self.update = Thread(target=self.watch_yourlanguage)
        # daemonize the thread to make the indicator stopable
        self.update.setDaemon(True)
        self.update.start()

    def watchout(self):
        path = "com.solus-project.budgie-panel"
        panelpath_prestring = "/com/solus-project/budgie-panel/panels/"
        panel_settings = Gio.Settings.new(path)
        allpanels_list = panel_settings.get_strv("panels")
        for p in allpanels_list:
            panelpath = panelpath_prestring + "{" + p + "}/"
            self.currpanelsubject_settings = Gio.Settings.new_with_path(
                path + ".panel", panelpath)
            applets = self.currpanelsubject_settings.get_strv("applets")
            if self.uuid in applets:
                self.currpanelsubject_settings.connect("changed",
                                                       self.check_ifonpanel)
        return False

    def check_ifonpanel(self, *args):
        applets = self.currpanelsubject_settings.get_strv("applets")
        self.kbautoswitch_onpanel = self.uuid in applets

    def on_press(self, box, arg):
        self.manager.show_popover(self.box)

    def do_update_popovers(self, manager):
        self.manager = manager
        self.manager.register_popover(self.box, self.popover)

    def readable_lang(self, lang):
        lang = lang.split("+")
        try:
            lang[1] = "(" + lang[1] + ")"
        except IndexError:
            return lang[0]
        else:
            return " ".join(lang)

    def lockscreen_check(self):
        try:
            if self.screensaver is None:
                self.screensaver = self.bus.get_object("org.gnome.ScreenSaver",
                                                       "/")
            val = self.screensaver.get_dbus_method('GetActive')
            return val()
        except dbus.exceptions.DBusException:
            self.screensaver = None
            return False

    def change_ondeflang_select(self, widget):
        """
        change the default language, update settings file and exceptions list
        """
        self.default_lang = self.langlist_combo.get_active_text()
        open(default_langfile, "wt").write(str(self.default_lang))
        self.clear_deflangkey()
        open(lang_datafile, "wt").write(str(self.langdata))
        self.update_exceptions_gui()

    def clear_deflangkey(self):
        """
        if th newly set default language has any exceptions, they should be
        cleared. there is no point in setting classes as exceptions (anymore)
        then. this function is called on change of the default language from
        the menu.
        """
        # list keys
        keys = list(self.langdata.keys())
        # find index of default lang dict-item
        sub = [self.langdata[k]["readable"]
               for k in keys].index(self.default_lang)
        key = keys[sub]
        # clear the exceptions list of the default language
        self.langdata[key]["classes"] = []

    def remove_wmclass(self, wmclass):
        """
        finds occurrences of wmclass in the lang_data, removes them
        """
        keys = list(self.langdata.keys())
        sub = [self.langdata[k]["classes"] for k in keys]
        for s in sub:
            if wmclass in s:
                index = sub.index(s)
                self.langdata[keys[sub.index(s)]]["classes"].remove(wmclass)
                break
        open(lang_datafile, "wt").write(str(self.langdata))

    def update_langlist_gui(self):
        """
        update the list of languages, as it appears in "Default language"
        -options.
        """
        # disconnect
        self.langlist_combo.disconnect(self.langlist_selection_id)
        # why not fetch from self.raw_langlist?
        readable_list = []
        # delete all entries
        self.langlist_combo.remove_all()
        # add to readable list temporarily to determine index to set
        for k in self.langdata.keys():
            name = self.langdata[k]["readable"]
            readable_list.append(name)
        # add all languages to gui
        for n in readable_list:
            self.langlist_combo.append_text(n)
        # find index
        try:
            index = readable_list.index(self.default_lang)
        except ValueError:
            index = 0
        self.default_lang = readable_list[index]
        open(default_langfile, "wt").write(str(self.default_lang))
        self.langlist_combo.set_active(index)
        # set the connection again
        self.langlist_selection_id = self.langlist_combo.connect(
            "changed",
            self.change_ondeflang_select,
        )

    def act_on_gsettingschange(self, *args):
        """
        fetch current languages, update language data (dict), remove
        obsolete langs, add new ones. then save the new dict to file.
        """
        # fetch current languages from gsettings
        self.raw_langlist = [
            item[1] for item in self.settings.get_value("sources")
        ]
        # add new languages
        curr_keys = list(self.langdata.keys())
        for sl in self.raw_langlist:
            readable = self.readable_lang(sl)
            if sl not in curr_keys:
                self.langdata[sl] = {"classes": [], "readable": readable}
        # remove obsolete languages + data
        for k in curr_keys:
            if k not in self.raw_langlist:
                del self.langdata[k]
        open(lang_datafile, "wt").write(str(self.langdata))
        self.update_langlist_gui()

    def find_exception(self, wmclass):
        """
        search self.langdata for existing exceptions
        """
        keys = self.langdata.keys()
        exist = None
        for k in keys:
            data = self.langdata[k]["classes"]
            if wmclass in data:
                exist = k
                break
        # output = raw lang!
        return exist

    def set_lang_onclasschange(self, wmclass, lang):
        """
        if the wmclass changes (window change), check if an exception exists
        on the wmclass. switch language if lang is not the currently active one
        """
        curr_exception = self.find_exception(wmclass)
        if curr_exception:
            # if the window is an exception, *and* another language; set lang
            if lang != curr_exception:
                self.set_newlang(newlang=curr_exception)
        elif self.readable_lang(lang) != self.default_lang:
            self.set_newlang(default=True)

    def set_newlang(self, newlang=None, default=False):
        """
        switch source for the currently active window
        """
        if newlang:
            index = self.raw_langlist.index(newlang)
        elif default:
            getreadables = [self.readable_lang(sl) for sl in self.raw_langlist]
            index = getreadables.index(self.default_lang)
        self.settings.set_uint("current", index)

    def lock_state(self, oldlang):
        while True:
            time.sleep(1)
            if not self.lockscreen_check() or not self.kbautoswitch_onpanel:
                break
        self.set_newlang(oldlang)

    def watch_yourlanguage(self):
        # fill exceptions (gui) list with data
        self.update_exceptions_gui()
        # fetch set initial data
        wmclass1 = self.get_activeclass()
        activelang1 = self.get_currlangname()
        while self.kbautoswitch_onpanel:
            time.sleep(1)
            # if language is changed during lockstate, revert afterwards
            if self.lockscreen_check():
                self.lock_state(activelang1)
            wmclass2 = self.get_activeclass()
            activelang2 = self.get_currlangname()
            # first set a few conditions to act *at all*
            if all([
                    wmclass2, wmclass2 != "raven",
                    wmclass2 != "Wprviews_window", activelang2
            ]):
                classchange = wmclass2 != wmclass1
                langchange = activelang2 != activelang1
                if classchange:
                    self.set_lang_onclasschange(wmclass2, activelang2)
                    activelang2 = self.get_currlangname()
                elif langchange:
                    self.set_exception(activelang2, wmclass2)
                    GObject.idle_add(
                        self.update_exceptions_gui,
                        priority=GObject.PRIORITY_DEFAULT,
                    )
                    open(lang_datafile, "wt").write(str(self.langdata))
                wmclass1 = wmclass2
                activelang1 = activelang2

    def update_exceptions_gui(self):
        self.exc_combo.remove_all()
        keys = list(self.langdata.keys())
        for k in keys:
            wmclasses = self.langdata[k]["classes"]
            for cl in wmclasses:
                mention = ", ".join([cl, k])
                self.exc_combo.append_text(mention)
        self.exc_combo.set_active(0)

    def remove_exception(self, button):
        """
        remove an exception from the menu (gui)
        """
        try:
            toremove = self.exc_combo.get_active_text().split(", ")[0]
        except AttributeError:
            pass
        else:
            self.remove_wmclass(toremove)
            self.update_exceptions_gui()

    def set_exception(self, lang, wmclass):
        lang = self.readable_lang(lang)
        # remove possible existing exception
        self.remove_wmclass(wmclass)
        # add new exception
        keys = list(self.langdata.keys())
        sub = [self.langdata[k]["readable"] for k in keys].index(lang)
        if lang != self.default_lang:
            self.langdata[keys[sub]]["classes"].append(wmclass)

    def get(self, cmd):
        try:
            return subprocess.check_output(cmd).decode("utf-8").strip()
        except subprocess.CalledProcessError:
            pass

    def show_wmclass(self, wid):
        # handle special cases
        try:
            cl = self.get(["/usr/bin/xprop", "-id", wid, "WM_CLASS"
                           ]).split("=")[-1].split(",")[-1].strip().strip('"')
        except (IndexError, AttributeError):
            pass
        else:
            # exceptions; one application, multiple WM_CLASS
            if "Thunderbird" in cl:
                return "Thunderbird"
            elif "Toplevel" in cl:
                return "Toplevel"
            else:
                return cl

    def get_activeclass(self):
        # get WM_CLASS of active window
        currfront = self.get(["/usr/bin/xdotool", "getactivewindow"])
        return self.show_wmclass(currfront) if currfront else None

    def get_currlangname(self):
        i = self.settings.get_uint("current")
        try:
            return self.raw_langlist[i]
        except IndexError:
            pass
Example #52
0
class NetworkCommandListner(object):
    def __init__(self, hardware, gridmap=None):

        self.logr = logging.getLogger('car.network')
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.csocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.socket.bind(('', UDP_PORT_INCOMING))
        self.logr.info(
            "UDP Server Operational, waiting for connections incoming on port %d, outgoing on port %d"
            % (UDP_PORT_INCOMING, UDP_PORT_OUTGOING))
        self.info = Info()
        self.cmd = Command()
        self.grid = gridmap
        self.network_direct = False
        self.running = True
        self.hw = hardware
        self.net_th = Thread(target=self.listen_th)
        self.net_th.setDaemon(True)
        self.net_th.start()
        self.sendInfothread = Thread(target=self.sendInfo_th)
        self.sendInfothread.setDaemon(True)
        self.sendInfothread.start()

    def hasRequest(self):
        if self.network_direct == True:
            return True
        else:
            return False

    def run_direct(self):
        if self.network_direct == False:
            self.logr.warning(
                "network command received but network direct currently disabled"
            )
        else:
            self.logr.info("network command received, %s ; executing..." %
                           (self.cmd.name))
            if self.cmd.name == 'stop':
                self.hw['mainEngine'].move(0)
            elif self.cmd.name == 'go':
                self.hw['mainEngine'].move(self.cmd.value)
            elif self.cmd.name == 'turn':
                if int(self.cmd.value) == 0:
                    self.hw['wheel'].turn('center')
                elif int(self.cmd.value) == -100:
                    self.hw['wheel'].turn('left')
                elif int(self.cmd.value) == 100:
                    self.hw['wheel'].turn('right')

    def sendInfo_th(self):

        while True:
            info = {
                'bsens': self.hw['bsens'].getReading(),
                'fsens': self.hw['fsens'].getReading(),
                'lsens': self.hw['lsens'].getReading(),
                'rsens': self.hw['rsens'].getReading(),
                'bpress': self.hw['tempsens'].read_pressure(),
                'temp': self.hw['tempsens'].read_temperature(),
                'cpu': psutil.cpu_percent()
            }

            if self.grid != None:
                info['grid'] = str((self.grid.prox())).replace('array',
                                                               '').strip()
            self.sendInfo(info)

            time.sleep(1)

    def sendInfo(self, info):

        nSuccess = 0
        for name in info:
            if name in SUPPORTED_PROTO_INFO:
                setattr(self.info, name, info[name])
                nSuccess += 1
        if nSuccess > 0:
            self.send(self.info.SerializeToString())

    def send(self, data):
        self.csocket.sendto(data, (REMOTE_ADDRESS, UDP_PORT_OUTGOING))

    def listen_th(self):

        while self.running == True:

            dataFromClient, address = self.socket.recvfrom(
                60000)  # blocks until packet received
            self.cmd.ParseFromString(dataFromClient)
            if self.cmd.name == "SIG_DIRECT_ENABLE":
                self.network_direct = True
                self.logr.info("Direct network-to-hardware enabled")
            elif self.cmd.name == "SIG_DIRECT_DISABLE":
                self.network_direct = False
                self.logr.info("Direct network-to-hardware disabled")
            else:
                self.run_direct()
#
###############################################################

Example #53
0
# Queue experiment
nrows = df.shape[0]
thread_size = 100
q = Queue(maxsize=0)  # maxsize=0 makes the queue unbounded; a positive value caps how many items it can hold
_logger = logging.getLogger('Queue Process')
for index, row in df.iterrows():
    q.put((index, row['finallink']), block = True, timeout=2)

results = {}
for i in range(thread_size):
    _logger.info(f'Starting thread {i}')
    workers = Thread(target = crawl_queue, args = (q, results))
    workers.setDaemon(True)  # daemon threads are killed automatically when the main thread exits
    workers.start()

# ensure all workers are done
q.join()
_logger.info('All tasks are completed')

return_dict = parse_results(results)

match_df = pd.DataFrame.from_dict(return_dict, orient = 'index').sort_index()
df_new = pd.concat([df.zip_, match_df], axis = 1 )
df_new.columns = ['old', 'new', 'clss', 'ziptype' , 'tmz','city1', 'state']
df_new.query('old != new')
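
The worker function crawl_queue is not shown above; for q.join() to return, each worker has to call q.task_done() once per item it pulls off the queue. A hypothetical worker matching that contract might look like this (the per-item work is only a placeholder):

def crawl_queue(q, results):
    # Drain (index, url) pairs from the shared queue.
    while not q.empty():
        index, link = q.get()
        try:
            # the real crawling/parsing of `link` would happen here
            results[index] = link
        except Exception as exc:
            results[index] = exc
        finally:
            q.task_done()  # required so q.join() can return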


Example #54
0
    def threaded_send_request_offer(self, client_id, launcher, receiver, virtual_speaker_id):
        thread = Thread(target=self._threaded_send_request_offer, args= (client_id, launcher, receiver, virtual_speaker_id))
        thread.setDaemon(True)
        thread.start()

        self.request_offer_threads.append(thread)
Example #55
0
class CobraDaemon(ThreadingTCPServer):
    def __init__(self,
                 host="",
                 port=COBRA_PORT,
                 sslcrt=None,
                 sslkey=None,
                 sslca=None,
                 msgpack=False,
                 json=False):
        '''
        Construct a cobra daemon object.

        Parameters:
        host        - Optional hostname/ip to bind the service to (default: inaddr_any)
        port        - The port to bind (Default: COBRA_PORT)
        msgpack     - Use msgpack serialization

        # SSL Options
        sslcrt / sslkey     - Specify sslcrt and sslkey to enable SSL server side
        sslca               - Specify an SSL CA key to use validating client certs

        '''
        self.thr = None
        self.run = True
        self.shared = {}
        self.dowith = {}
        self.host = host
        self.port = port
        self.reflock = RLock()
        self.refcnts = {}
        self.authmod = None
        self.sflags = 0

        if msgpack and json:
            raise Exception('CobraDaemon can not use both msgpack *and* json!')

        if msgpack:
            requireMsgpack()
            self.sflags |= SFLAG_MSGPACK

        if json:
            self.sflags |= SFLAG_JSON

        # SSL Options
        self.sslca = sslca
        self.sslcrt = sslcrt
        self.sslkey = sslkey

        self.cansetattr = True
        self.cangetattr = True

        if sslcrt and not os.path.isfile(sslcrt):
            raise Exception('CobraDaemon: sslcrt param must be a file!')

        if sslkey and not os.path.isfile(sslkey):
            raise Exception('CobraDaemon: sslkey param must be a file!')

        if sslca and not os.path.isfile(sslca):
            raise Exception('CobraDaemon: sslca param must be a file!')

        self.allow_reuse_address = True
        ThreadingTCPServer.__init__(self, (host, port), CobraRequestHandler)

        if port == 0:
            self.port = self.socket.getsockname()[1]

        self.daemon_threads = True
        self.recvtimeout = None

    def logCallerError(self, oname, args, msg=""):
        pass

    def setGetAttrEnabled(self, status):
        self.cangetattr = status

    def setSetAttrEnabled(self, status):
        self.cansetattr = status

    def setSslCa(self, crtfile):
        '''
        Set the SSL Certificate Authority by this server.
        ( to validate client certs )
        '''
        self.sslca = crtfile

    def setSslServerCert(self, crtfile, keyfile):
        '''
        Set the cert/key used by this server to negotiate SSL.
        '''
        self.sslcrt = crtfile
        self.sslkey = keyfile

    def fireThread(self):
        self.thr = Thread(target=self.serve_forever)
        self.thr.setDaemon(True)
        self.thr.start()

    def stopServer(self):
        self.run = False
        self.server_close()
        self.thr.join()

    def serve_forever(self):
        try:

            ThreadingTCPServer.serve_forever(self)

        except Exception:
            if not self.run:
                return

            raise
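
A minimal way to drive the daemon defined above, using only the methods shown (fireThread() runs serve_forever() in a daemon thread, stopServer() shuts it down); the host and port are example values:

daemon = CobraDaemon(host='127.0.0.1', port=0)  # port 0 lets the OS pick a free port
print('cobra daemon listening on port', daemon.port)
daemon.fireThread()   # serve_forever() now runs in a background daemon thread
# ... register/share objects and do other work here ...
daemon.stopServer()   # stops the server loop and joins the thread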
Example #56
0
class WebTrader(object):
    global_config_path = os.path.dirname(__file__) + '/config/global.json'
    config_path = ''

    def __init__(self):
        self.__read_config()
        self.trade_prefix = self.config['prefix']
        self.account_config = ''
        self.heart_active = True
        if six.PY2:
            self.heart_thread = Thread(target=self.send_heartbeat)
            self.heart_thread.setDaemon(True)
        else:
            self.heart_thread = Thread(target=self.send_heartbeat, daemon=True)

    def read_config(self, path):
        try:
            self.account_config = helpers.file2dict(path)
        except ValueError:
            log.error('配置文件格式有误,请勿使用记事本编辑,推荐使用 notepad++ 或者 sublime text')
        for v in self.account_config:
            if type(v) is int:
                log.warn('配置文件的值最好使用双引号包裹,使用字符串类型,否则可能导致不可知的问题')

    def prepare(self, need_data):
        """登录的统一接口
        :param need_data 登录所需数据"""
        self.read_config(need_data)
        self.autologin()

    def autologin(self):
        """实现自动登录"""
        is_login_ok = self.login()
        if not is_login_ok:
            self.autologin()
        self.keepalive()

    def login(self):
        pass

    def keepalive(self):
        """启动保持在线的进程 """
        if self.heart_thread.is_alive():
            self.heart_active = True
        else:
            self.heart_thread.start()

    def send_heartbeat(self):
        """每隔10秒查询指定接口保持 token 的有效性"""
        while True:
            if self.heart_active:
                try:
                    response = self.balance
                except:
                    pass
                self.check_account_live(response)
                time.sleep(10)
            else:
                time.sleep(1)

    def check_account_live(self, response):
        pass

    def exit(self):
        """结束保持 token 在线的进程"""
        self.heart_active = False

    def __read_config(self):
        """读取 config"""
        self.config = helpers.file2dict(self.config_path)
        self.global_config = helpers.file2dict(self.global_config_path)
        self.config.update(self.global_config)

    @property
    def balance(self):
        return self.get_balance()

    def get_balance(self):
        """获取账户资金状况"""
        return self.do(self.config['balance'])

    @property
    def position(self):
        return self.get_position()

    def get_position(self):
        """获取持仓"""
        return self.do(self.config['position'])

    @property
    def entrust(self):
        return self.get_entrust()

    def get_entrust(self):
        """获取当日委托列表"""
        return self.do(self.config['entrust'])

    @property
    def exchangebill(self):
        """
        默认提供最近30天的交割单, 通常只能返回查询日期内最新的 90 天数据。
        :return:
        """
        # TODO 目前仅在 华泰子类 中实现
        start_date, end_date = helpers.get_30_date()
        return self.get_exchangebill(start_date, end_date)

    def get_exchangebill(self, start_date, end_date):
        """
        查询指定日期内的交割单
        :param start_date: 20160211
        :param end_date: 20160211
        :return:
        """
        # TODO 目前仅在 华泰子类 中实现
        log.info('目前仅在 华泰子类 中实现, 其余券商需要补充')

    def do(self, params):
        """发起对 api 的请求并过滤返回结果
        :param params: 交易所需的动态参数"""
        request_params = self.create_basic_params()
        request_params.update(params)
        response_data = self.request(request_params)
        format_json_data = self.format_response_data(response_data)
        return_data = self.fix_error_data(format_json_data)
        try:
            self.check_login_status(return_data)
        except NotLoginError:
            self.autologin()
        return return_data

    def create_basic_params(self):
        """生成基本的参数"""
        pass

    def request(self, params):
        """请求并获取 JSON 数据
        :param params: Get 参数"""
        pass

    def format_response_data(self, data):
        """格式化返回的 json 数据
        :param data: 请求返回的数据 """
        pass

    def fix_error_data(self, data):
        """若是返回错误移除外层的列表
        :param data: 需要判断是否包含错误信息的数据"""
        pass

    def format_response_data_type(self, response_data):
        """格式化返回的值为正确的类型
        :param response_data: 返回的数据
        """
        if type(response_data) is not list:
            return response_data

        int_match_str = '|'.join(self.config['response_format']['int'])
        float_match_str = '|'.join(self.config['response_format']['float'])
        for item in response_data:
            for key in item:
                try:
                    if re.search(int_match_str, key) is not None:
                        item[key] = helpers.str2num(item[key], 'int')
                    elif re.search(float_match_str, key) is not None:
                        item[key] = helpers.str2num(item[key], 'float')
                except ValueError:
                    continue
        return response_data

    def check_login_status(self, return_data):
        pass
Example #57
0
    card_id, text = reader.read_no_block()
    while not card_id:
        card_id, text = reader.read_no_block()
        time.sleep(0.1)
    return card_id


def rfid(q):
    reader = SimpleMFRC522()
    while True:
        card_id = rfid_s()
        #card_id = reader.read_id()
        q.put({'src': 'rfid', 'val': card_id})
        time.sleep(0.1)


if __name__ == "__main__":

    def sig_end(sig, frame):
        GPIO.cleanup()
        sys.exit(0)

    signal.signal(signal.SIGINT, sig_end)
    q = Queue()
    thr = Thread(target=rfid, args=(q, ))
    thr.setDaemon(True)
    thr.start()
    while True:
        item = q.get()
        print(item['src'], item['val'])
        q.task_done()
Example #58
0
class PCPBCCModule(PCPBCCBase):
    """ PCP BCC biotop module """
    def __init__(self, config, log, err):
        """ Constructor """
        PCPBCCBase.__init__(self, MODULE, config, log, err)

        self.ipv4_stats = {}
        self.ipv6_stats = {}

        self.lock = Lock()
        self.thread = Thread(name="bpfpoller", target=self.poller)
        self.thread.setDaemon(True)

        self.log("Initialized.")

    @staticmethod
    def pid_alive(pid):
        """ Test liveliness of PID """
        try:
            kill(int(pid), 0)
            return True
        except Exception:  # pylint: disable=broad-except
            return False

    def poller(self):
        """ BPF poller """
        try:
            while self.bpf:
                self.bpf.kprobe_poll()
        except Exception as error:  # pylint: disable=broad-except
            self.err(str(error))
            self.err("BPF kprobe poll failed!")
        self.log("Poller thread exiting.")

    def handle_ipv4_event(self, _cpu, data, _size):
        """ IPv4 event handler """
        event = ct.cast(data, ct.POINTER(Data_ipv4)).contents
        pid = str(event.pid).zfill(6)
        self.lock.acquire()
        if pid not in self.ipv4_stats:
            self.ipv4_stats[pid] = [int(event.tx_b), int(event.rx_b)]
        else:
            self.ipv4_stats[pid][0] += int(event.tx_b)
            self.ipv4_stats[pid][1] += int(event.rx_b)
        self.lock.release()

    def handle_ipv6_event(self, _cpu, data, _size):
        """ IPv6 event handler """
        event = ct.cast(data, ct.POINTER(Data_ipv6)).contents
        pid = str(event.pid).zfill(6)
        self.lock.acquire()
        if pid not in self.ipv6_stats:
            self.ipv6_stats[pid] = [int(event.tx_b), int(event.rx_b)]
        else:
            self.ipv6_stats[pid][0] += int(event.tx_b)
            self.ipv6_stats[pid][1] += int(event.rx_b)
        self.lock.release()

    def metrics(self):
        """ Get metric definitions """
        name = BASENS
        self.items = (
            # Name - reserved - type - semantics - units - help
            (name + 'tx', None, PM_TYPE_U64, PM_SEM_COUNTER, units_bytes,
             'tcp tx per pid'),
            (name + 'rx', None, PM_TYPE_U64, PM_SEM_COUNTER, units_bytes,
             'tcp rx per pid'),
        )
        return True, self.items

    def compile(self):
        """ Compile BPF """
        try:
            self.bpf = BPF(src_file=bpf_src)
            self.bpf["ipv4_events"].open_perf_buffer(self.handle_ipv4_event,
                                                     page_cnt=64)
            self.bpf["ipv6_events"].open_perf_buffer(self.handle_ipv6_event,
                                                     page_cnt=64)
            self.thread.start()
            self.log("Compiled.")
        except Exception as error:  # pylint: disable=broad-except
            self.err(str(error))
            self.err("Module NOT active!")
            self.bpf = None

    def refresh(self):
        """ Refresh BPF data """
        if self.bpf is None:
            return

        self.insts = {}

        self.lock.acquire()
        for pid in list(self.ipv4_stats):
            if not self.pid_alive(pid):
                del self.ipv4_stats[pid]
            else:
                self.insts[pid] = c_int(1)
        for pid in list(self.ipv6_stats):
            if not self.pid_alive(pid):
                del self.ipv6_stats[pid]
            else:
                self.insts[pid] = c_int(1)
        self.lock.release()

        return self.insts

    def bpfdata(self, item, inst):
        """ Return BPF data as PCP metric value """
        try:
            self.lock.acquire()
            key = self.pmdaIndom.inst_name_lookup(inst)
            value = 0
            if key in self.ipv4_stats:
                value += self.ipv4_stats[key][item]
            if key in self.ipv6_stats:
                value += self.ipv6_stats[key][item]
            self.lock.release()
            return [value, 1]
        except Exception:  # pylint: disable=broad-except
            self.lock.release()
            return [PM_ERR_AGAIN, 0]
Example #59
0
        session = DB_SESSION()
        if msg["type"] == "ScanRecord":
            scan = ScanIn(msg["payload"]["member_id"],
                          msg["payload"]["store_id"],
                          msg["payload"]["timestamp"])
            session.add(scan)
            logger.info("Adding ScanRecord")
            logger.debug(msg)
        else:
            bi = BodyInfo(msg["payload"]["member_id"],
                          msg["payload"]["store_id"],
                          msg["payload"]["timestamp"],
                          msg["payload"]["body_info"]["weight"],
                          msg["payload"]["body_info"]["body_fat"])
            session.add(bi)
            logger.info("Adding BodyInfoRecord")
            logger.debug(msg)
        session.commit()
        session.close()


app = connexion.FlaskApp(__name__, specification_dir='')
app.add_api("openapi.yaml")
CORS(app.app)
app.app.config['CORS_HEADERS'] = 'Content-Type'

if __name__ == "__main__":
    t1 = Thread(target=process_messages)
    t1.setDaemon(True)
    t1.start()
    app.run(port=8090)
Example #60
0
s.bind(ADDR)
s.listen(3)


# Handle one client connection
def handle(c):
    print("connect from ", c.getpeername())
    while True:
        data = c.recv(1024)
        if not data:
            break
        print(data.decode())
        c.send(b'ok')
    c.close()


# Loop, waiting for client connections
while True:
    try:
        c, addr = s.accept()
    except KeyboardInterrupt:
        sys.exit("退出服务器")
    except Exception as e:
        print(e)
        continue

    # Spawn a new thread to handle the client
    t = Thread(target=handle, args=(c, ))
    t.setDaemon(True)  # daemon thread: exits when the main thread exits
    t.start()