def MoveToJointPositions(limb, moves, queue, write = True):
    try:
        for move in moves:
            thread = threading.Thread(
                target=move_thread,
                args=(limb,move, queue, write)
            )
            if (move.values()):
                thread.daemon = True
                thread.start()
                baxter_dataflow.wait_for(
                    lambda: not (thread.is_alive()),
                    timeout=20.0,
                    timeout_msg=("Timeout while waiting for %s move thread"
                                 " to finish" % limb.name),
                    rate=10,
                )
                thread.join()
                result = queue.get()
                if result is not None:
                    raise result
                rospy.sleep(1.0)
    except Exception as exception:
        queue.put(traceback.format_exc())
        queue.put(exception)
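The pattern above reports failures back to the caller by pushing the traceback and the exception object onto a shared queue so the main thread can re-raise them. A minimal, self-contained sketch of that report-errors-through-a-queue idiom (hypothetical worker, Python 3 names, not taken from the code above):

import queue
import threading
import traceback

def _worker(q):
    try:
        raise ValueError("boom")  # stand-in for the real work
    except Exception as exc:
        q.put((traceback.format_exc(), exc))  # hand the failure to the caller
    else:
        q.put(None)  # signal success

errors = queue.Queue()
t = threading.Thread(target=_worker, args=(errors,), daemon=True)
t.start()
t.join()

outcome = errors.get()
if outcome is not None:
    tb, exc = outcome
    print(tb)
    raise exc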
Example #2
def checkTimeOutPut(args):
    t = None
    global currCommandProcess
    global stde
    global stdo
    stde = None
    stdo = None
    def executeCommand():
        global currCommandProcess
        global stdo
        global stde
        try:
            stdo, stde = currCommandProcess.communicate()
            printLog('stdout:\n'+str(stdo))
            printLog('stderr:\n'+str(stde))
        except:
            printLog("ERROR: UNKNOWN Exception - +checkWinTimeOutPut()::executeCommand()")

    currCommandProcess = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    thread = Thread(target=executeCommand)
    thread.start()
    thread.join(TIMOUT_VAL) #wait for the thread to complete 
    if thread.is_alive():
        printLog('ERROR: Killing the process - terminating thread because it is taking too much of time to execute')
        currCommandProcess.kill()
        printLog('ERROR: Timed out exception')
        raise errorHandler.ApplicationException(__file__, errorHandler.TIME_OUT)
    if stdo == "" or stdo==None:
        errCode = currCommandProcess.poll()
        printLog('ERROR: @@@@@Raising Called processor exception')
        raise subprocess.CalledProcessError(errCode, args, output=stde)
    return stdo
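checkTimeOutPut() builds a kill-on-timeout wrapper out of a helper thread, join(TIMOUT_VAL) and is_alive(). On Python 3 the same behaviour is available directly from the standard library; a hedged sketch (the function name and timeout value are illustrative, not part of the original code):

import subprocess

def run_with_timeout(args, timeout=30):
    try:
        # subprocess.run kills the child and raises TimeoutExpired on its own
        completed = subprocess.run(args, capture_output=True, text=True, timeout=timeout)
    except subprocess.TimeoutExpired:
        raise RuntimeError("command timed out: %r" % (args,))
    if completed.returncode != 0 or not completed.stdout:
        raise subprocess.CalledProcessError(completed.returncode, args, output=completed.stderr)
    return completed.stdout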
Example #3
 def testFoo(self):
     def foo(): pass
     t, thread = GetRemoteTasklet(foo, ())
     try:
         t.run()
     finally:
         thread.join(2)
Example #4
  def __createSeries(self, seasonType, seasonLink):
    firstSeason = TVCriticInfo(seasonType, seasonLink)
    self.series.append(firstSeason)

    soup = firstSeason.page

    soup = soup.find('li', {'class': 'summary_detail product_seasons'})

    seasonData = soup.find('span', {'class': 'data'})

    seasonLinks = seasonData.find_all('a')

    self.__getTitle(firstSeason.title)

    for link in seasonLinks:
      link = BASE_URL+link['href']
      mythread = threading.Thread(target = self.__updateSeries,
        args = (seasonType, link))
      mythread.start()
    
    for thread in threading.enumerate():
      if thread is not threading.currentThread():
        thread.join()
        
    return self.__sortSeries()
Example #5
def show_lem_in_output(map_array, ant_array, output, screen, pygame,
                       circle_red, circle_red_pos):

    ant_moves_line = []
    total_moves = output.split(' ')
    screen_copy = screen.copy()
    for n in total_moves[0:-1]:
        ant_nbr = int(n.split('-')[0][1:])
        ant_nbr -= 1
        room_nbr = int(n.split('-')[1])
        depart_x = ant_array[ant_nbr].x
        depart_y = ant_array[ant_nbr].y
        ant_array[ant_nbr].x = map_array[room_nbr].x
        ant_array[ant_nbr].y = map_array[room_nbr].y
        arrive_x = ant_array[ant_nbr].x
        arrive_y = ant_array[ant_nbr].y
        vec_x = (arrive_x - depart_x) / NB_STEP
        vec_y = (arrive_y - depart_y) / NB_STEP
        ant_moves_line.append(
            Ant_move(depart_x, vec_x, depart_y, vec_y, arrive_x, arrive_y))
    for i in range(1, NB_STEP):
        thread = Thread(target=show_movements_1_turn,
                        args=(screen, pygame, ant_moves_line, i, map_array,
                              circle_red, circle_red_pos, screen_copy))
        thread.start()
        thread.join()
    screen.blit(screen_copy, (0, 0))
Example #6
    def run(self):
        self._in_thread.start()

        thread.join([
            self._in_thread, self._sensors["thermistor"].thread
            # TODO: Join on threads from output channels
        ])
Example #7
def calculateAverage(period, classname):
    now = datetime.datetime.utcnow().replace(tzinfo=utc)
    round_now = now - datetime.timedelta(seconds=now.second, microseconds=now.microsecond)
    for server in Server.objects.all().select_related():
        try:
            threads = []
            for probe in server.probes.exclude(graph_type__name__in=['text']):
                thread = threading.Thread(
                    target=calculateAveragesForPeriod,
                    args=[period, classname, server, probe],
                    name="SkwisshAverage.%s.%s" % (
                        classname.__name__,
                        probe.display_name.encode('utf-8').replace(" ", "_")))
                thread.setDaemon(False)
                thread.start()
                threads.append(thread)

            for thread in threads:
                thread.join()

            end = datetime.datetime.utcnow().replace(tzinfo=utc)
            total_time = end - now
            duration = float(int((total_time.seconds * 1000000) + total_time.microseconds) / 1000000.0)
            success = True
            message = "Calculated averages values for last %d minutes (server %s)" % (period, server.hostname)
        except:
            success = False
            duration = 0.0
            message = traceback.format_exc()

        CronLog.objects.create(timestamp=round_now, action="average %dmin" % period, server=server, success=success, duration=duration, message=message)
Example #8
def ParallelJoin(InputTable1, InputTable2, Table1JoinColumn, Table2JoinColumn,
                 OutputTable, openconnection):
    print("--Parallel Join")

    cur = openconnection.cursor()

    cmd = "SELECT MIN(%s) FROM %s" % (Table1JoinColumn, InputTable1)
    cur.execute(cmd)
    min1 = cur.fetchone()[0]
    cmd = "SELECT MIN(%s) FROM %s" % (Table2JoinColumn, InputTable2)
    cur.execute(cmd)
    min2 = cur.fetchone()[0]
    min = min1 if min1 < min2 else min2

    cmd = "SELECT MAX(%s) FROM %s" % (Table1JoinColumn, InputTable1)
    cur.execute(cmd)
    max1 = cur.fetchone()[0]
    cmd = "SELECT MAX(%s) FROM %s" % (Table2JoinColumn, InputTable2)
    cur.execute(cmd)
    max2 = cur.fetchone()[0]
    max = max1 if max1 > max2 else max2

    interval = (max - min) / 5

    for i in range(0, 5):
        s = min
        e = min + interval
        thread = threading.Thread(
            target=joinvalues,
            args=(i, s, e, InputTable1, InputTable2, Table1JoinColumn,
                  Table2JoinColumn, OutputTable, openconnection))
        thread.start()
        thread.join()
        min = e

    openconnection.commit()
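Even with the target/args fix above, ParallelJoin starts and joins one thread per iteration, so the five partitions still run one after another. A self-contained sketch of the start-everything-then-join variant that actually overlaps the work (generic worker instead of joinvalues(), since sharing one database connection across threads would need its own care):

import threading

def process_partition(idx, lo, hi, results):
    # stand-in for joinvalues(); records which range this worker handled
    results[idx] = (lo, hi)

def run_partitions(minimum, maximum, parts=5):
    interval = (maximum - minimum) / float(parts)
    results = [None] * parts
    threads = []
    lo = minimum
    for i in range(parts):
        hi = lo + interval
        t = threading.Thread(target=process_partition, args=(i, lo, hi, results))
        threads.append(t)
        t.start()  # start every worker first ...
        lo = hi
    for t in threads:
        t.join()   # ... then wait for all of them
    return results

print(run_partitions(0, 100))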
Example #9
 def run(self):
     if self.getConn():
         self.down_parse()
         for thread in self.threads:
             if thread.isAlive():
                 thread.join()
         if self.imap_server.state == 'AUTH':
             self.imap_server.logout()
         else:
             print self.imap_server.state
             self.imap_server.close()
             self.imap_server.logout()
         logging.info("task: %s parse success" % self.username)
         file_num = 0
         file_dir = self.files_path + "\\parsed"
         for root, dirs, files in os.walk(file_dir):
             for file in files:
                 ext = file.split(".")[-1]
                 #print file, ext
                 if ext != "eml":
                     file_num = file_num + 1
         #print file_num
         data = {"username": self.username, "status": "parse", "result":"finished", "file_num":str(file_num)}
         info_str = json.dumps(data)
         send_msg(info_str)
     else:
         data = {"username": self.username, "status": "login", "result": "error"}
         info_str = json.dumps(data)
         send_msg(info_str)
Example #10
def join_terminated_client_threads():
    for thread in client_threads:
        if not thread.isAlive():
            log.debug("Client thread %s is not alive, joining it" %
                      thread.getName())
            thread.join()
    log.debug("All client threads not alive have been joined now")
Example #11
 def run(self):
     if self.getConn():
         self.down_parse()
         self.pop_server.quit()
         for thread in self.threads:
             if thread.isAlive():
                 thread.join()
         file_num = 0
         file_dir = self.rootDir + "\\parsed"
         for root, dirs, files in os.walk(file_dir):
             for file in files:
                 ext = file.split(".")[-1]
                 #print file, ext
                 if ext != "eml":
                     file_num = file_num + 1
         print file_num
         replyToWeb(self.taskId, 'finished', 'parse', self.username,
                    str(file_num))
         for ins in Controller.Controller.threads[self.taskId]:
             if ins.kill:
                 Controller.Controller.q.acquire()
                 Controller.Controller.threads[self.taskId].remove(ins)
                 Controller.Controller.q.release()
         if len(Controller.Controller.threads[self.taskId]) == 0:
             Controller.Controller.q.acquire()
             Controller.Controller.taskIdList.remove(self.taskId)
             Controller.Controller.q.release()
     else:
         return False
Example #12
	def __init__(self):
		queue = Queue.Queue() # for return value from thread
		lock = threading.Lock()
		counter=0
		global archive
		archive = zipfile.ZipFile(args.file)
		archive.setpassword(args.password)
		fileList = []
		if (args.logfile):
			fileList=archive.namelist()
			self.writeObject(fileList,args.logfile)
		else:
			fileList=self.readObject(args.savedLogfile)
			args.logfile=args.savedLogfile# for simplicity later on


		threadList=[]
		for a in range (args.thread):
			t = threading.Thread(target=self.looper, args=(archive,fileList, queue))
			t.start()
			threadList.append(t)

		for thread in threadList:
			thread.join()
		self.writeObject(queue.get(),args.logfile)
Example #13
def checkTimeOutPut(args):
    t = None
    global currCommandProcess
    global stde
    global stdo
    stde = None
    stdo = None
    def executeCommand():
        global currCommandProcess
        global stdo
        global stde
        try:
            stdo, stde = currCommandProcess.communicate()
            printLog('stdout:\n'+str(stdo))
            printLog('stderr:\n'+str(stde))
        except:
            printLog("ERROR: UNKNOWN Exception - +checkWinTimeOutPut()::executeCommand()")

    currCommandProcess = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,shell=True)
    thread = Thread(target=executeCommand)
    thread.start()
    thread.join(TIMOUT_VAL) #wait for the thread to complete 
    if thread.is_alive():
        printLog('ERROR: Killing the process - terminating thread because it is taking too much of time to execute')
        currCommandProcess.kill()
        printLog('ERROR: Timed out exception')
        raise errorHandler.ApplicationException(__file__, errorHandler.TIME_OUT)
    if stdo == "" or stdo==None:
        errCode = currCommandProcess.poll()
        printLog('ERROR: @@@@@Raising Called processor exception')
        raise subprocess.CalledProcessError(errCode, args, output=stde)
    return stdo
Example #14
def ParallelSort(InputTable, SortingColumnName, OutputTable, openconnection):
    print("---Parallel Sort")

    cur = openconnection.cursor()

    cmd = "SELECT MIN(%s) FROM %s" % (SortingColumnName, InputTable)
    cur.execute(cmd)
    min = cur.fetchone()[0]

    cmd = "SELECT MAX(%s) FROM %s" % (SortingColumnName, InputTable)
    cur.execute(cmd)
    max = cur.fetchone()[0]

    interval = (max - min) / 5

    cmd = "DROP TABLE IF EXISTS %s" % OutputTable
    cur.execute(cmd)
    cmd = "CREATE TABLE IF NOT EXISTS %s (LIKE %s)" % (OutputTable, InputTable)
    cur.execute(cmd)

    for i in range(0, 5):
        s = min
        e = min + interval
        thread = threading.Thread(
            target=sortvalues,
            args=(i, s, e, InputTable, OutputTable,
                  SortingColumnName, openconnection))
        thread.start()
        thread.join()
        min = e

    openconnection.commit()
Example #15
def getListJoinThreadx(parent, queue):
	categoryList = []
	for thread in queue:
		thread.join()
		if thread.response is not None:
			categoryList.extend(getCategoryPartLists(parent, thread.response))
			thread.response = None
	return categoryList
Example #16
 def multi_threads(self):
     thread_list = []
     for i in range(2000):
         t1 = threading.Thread(target=self.multi_trigger)
         t1.start()
         thread_list.append(t1)
         print("thread id is %d :" % i)
     for thread in thread_list:
         thread.join()
Example #17
 def testInsert(self):
     def foo():
         self.events.append(0)
     t, thread = GetRemoteTasklet(foo, ())
     try:
         t.insert()
     finally:
         thread.join(2)
     self.assertEqual(self.events, list(range(len(self.events))))
Example #18
    def run(self):
        self._in_thread.start()

        thread.join([
            self._in_thread,
            # Join on threads only from frequency-based sensors
            self._sensors["thermistor"].thread
            # Join on threads from output channels
        ])
Example #19
def solicitarValoraciones():
    logger.info("Starting routine ratings request")
    thread = threading.Thread(target=comprobarYValorar)
    thread.start()
    logger.info("Routine ratings request finished")
    thread.join()
    sleep(120)

    solicitarValoraciones()
Example #20
    def join(self):
        """Stop processing work, and shut down the threads."""

        # Add the sentinels
        for thread in self.threads:
            self.ready_queue.put(None)

        for thread in self.threads:
            thread.join()
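The join() method above shuts the pool down by pushing one None sentinel per worker thread and then joining them all. A small, self-contained sketch of that sentinel pattern (hypothetical worker function, Python 3 queue module):

import queue
import threading

def worker(ready_queue):
    while True:
        item = ready_queue.get()
        if item is None:  # sentinel: stop processing work
            break
        print("processed", item)

ready_queue = queue.Queue()
threads = [threading.Thread(target=worker, args=(ready_queue,)) for _ in range(3)]
for t in threads:
    t.start()

for item in range(10):
    ready_queue.put(item)

for _ in threads:  # one sentinel per worker
    ready_queue.put(None)
for t in threads:
    t.join()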
Example #21
 def _cancel_all_threads(self):
     for thread, event in self._threads:
         SetEvent(event)
         try:
             thread.join()
         except RuntimeError:
             pass
         CloseHandle(event)
     self._threads = []
Example #22
 def __call__(self, *args, **kwArgs):
     thread = TimeoutHelperThread(self._func, args, kwArgs, name=self._name)
     thread.start()
     thread.join(self._timeout)
     if thread.isAlive():
         raise chakana.error.Timeout(thread, self._timeout)
     if thread.error is None:
         return thread.result
     raise chakana.error.ChildException(thread.error, thread.exc_info)
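__call__ here runs self._func on a helper thread, waits up to self._timeout, and then re-raises either a Timeout or the child's exception. A hedged sketch of the same run-with-a-deadline idea using concurrent.futures (illustrative only; like the thread version, it abandons a still-running function rather than stopping it):

import concurrent.futures

def call_with_timeout(func, timeout, *args, **kwargs):
    pool = concurrent.futures.ThreadPoolExecutor(max_workers=1)
    future = pool.submit(func, *args, **kwargs)
    try:
        # result() re-raises any exception raised by func, or raises TimeoutError
        return future.result(timeout=timeout)
    finally:
        pool.shutdown(wait=False)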
Example #23
 def testInsert(self):
     def foo():
         self.events.append(0)
     t, thread = GetRemoteTasklet(foo, ())
     try:
         t.insert()
     finally:
         thread.join(2)
     self.assertEqual(self.events, list(range(len(self.events))))
Example #24
 def stop_all_threads(self, block=False):
     """
     Stops all threads. If block is True then actually wait for the thread
     to finish (may block the UI)
     """
     for thread in self.fooThreads.values():
         thread.cancel()
         if block:
             if thread.isAlive():
                 thread.join()
Example #25
 def ThreadsJoin(self):
     try:
         for thread in self.threads:
             thread.join()
             print('A thread exited!')
     except RuntimeError:
         print('Exited successfully!')
     except:
         print('Exception on ThreadsJoin in CameraControl')
         raise
Example #26
def start_processing(url_list, key, email):
    # iterate over a copy so removing short URLs doesn't skip entries
    for url in list(url_list):
        if len(url) < 3:
            url_list.remove(url)
        else:
            thread = urlFetch(url, key)
            thread.start()
            if email:
                thread.join()
    if email:
        sendEmail(email, key)
Example #27
	def refreshpeerfileslooper(self):
		while(True):
			#for each peer in the database, get the file listing, which stores in db. When done, update screen.
			for i in range(len(self.database.getAllPeers())):
				thread = Thread(target=self.peerclient.getPeerListing, args=(self.database.getAllPeers()[i][0],))
				thread.start()
			try:
				thread.join()
			except:
				pass
			self.refreshCurrent()
			time.sleep(15)
Example #28
 def serve_forever_child( self ):
     # self.thread_pool = ThreadPool( self.nworkers, "ThreadPoolServer on %s:%d" % self.server_address )
     self.workers = []
     for i in range( self.nworkers ):
         worker = threading.Thread( target=self.serve_forever_thread )
         worker.start()
         self.workers.append( worker )
     self.time_to_terminate.wait()
     print "Terminating"
     for thread in self.workers:
         thread.join()
     self.socket.close()
Example #29
    def test_qpid_topic_and_fanout(self):
        for receiver_id in range(self.no_receivers):
            consumer = self.consumer_cls(self.conf, self.session_receive,
                                         self.receive_topic,
                                         self.consumer_callback)
            self._receivers.append(consumer)

            # create receivers threads
            thread = threading.Thread(target=self._try_receive_msg,
                                      args=(
                                          receiver_id,
                                          self.no_msgs,
                                      ))
            self._receiver_threads.append(thread)

        for sender_id in range(self.no_senders):
            publisher = self.publisher_cls(self.conf, self.session_send,
                                           self.topic)
            self._senders.append(publisher)

            # create sender threads
            thread = threading.Thread(target=self._try_send_msg,
                                      args=(
                                          sender_id,
                                          self.no_msgs,
                                      ))
            self._sender_threads.append(thread)

        for thread in self._receiver_threads:
            thread.start()

        for thread in self._sender_threads:
            thread.start()

        for thread in self._receiver_threads:
            thread.join()

        for thread in self._sender_threads:
            thread.join()

        # Each receiver should receive all the messages sent by
        # the sender(s).
        # So, Iterate through each of the receiver items in
        # self._messages and compare with the expected messages
        # messages.

        self.assertEqual(self.no_senders, len(self._expected))
        self.assertEqual(self.no_receivers, len(self._messages))

        for key, messages in self._messages.iteritems():
            self.assertEqual(self._expected, messages)
Example #30
    def run(self, timeout):
        print "running " + self.cmd
        def target():
            self.process = subprocess.Popen(self.cmd, shell=True)
            self.process.communicate()

        thread = threading.Thread(target=target)
        thread.start()

        thread.join(timeout)
        if thread.is_alive():
            print 'Terminating process'
            self.process.terminate()
            thread.join()
Example #31
def ParallelSort(Table, SortingColumnName, OutputTable, openconnection):

    conn = openconnection
    cur = conn.cursor()
    cur.execute("SELECT MIN(" + SortingColumnName + ") FROM " + Table + ";")
    min_list = cur.fetchall()
    min_tuple = min_list[0]
    minimum = min_tuple[0]

    cur.execute("SELECT MAX(" + SortingColumnName + ") FROM " + Table + ";")
    max_list = cur.fetchall()
    max_tuple = max_list[0]
    maximum = max_tuple[0]

    rangepartition(Table, 5, minimum, maximum, SortingColumnName, conn)
    cur.execute("SELECT COUNT(*) FROM " + Table + "meta;")
    thread_count_list = cur.fetchall()
    thread_count_values = thread_count_list[0]
    thread_count = int(re.sub('[^0-9]', '', str(thread_count_values)))
    print thread_count
    thread_list = []

    for t in range(thread_count):

        threads = threading.Thread(
            target=firstthread,
            args=(Table, t + 1, SortingColumnName, openconnection))
        thread_list.append(threads)

    for tr in thread_list:
        tr.start()
    for thread in thread_list:
        thread.join()

    cur.execute("CREATE TABLE " + OutputTable + " AS SELECT * FROM " + Table +
                " WHERE FALSE;")

    cur.execute("ALTER TABLE " + OutputTable + " ADD tupleOrder INT;")

    index = 0
    for t in range(thread_count):
        cur.execute("SELECT * FROM Repartitioned_" + Table + str(t) + ";")
        data = cur.fetchall()
        for q in data:
            index = index + 1
            q = q + (index, )
            cur.execute("INSERT INTO " + OutputTable + " VALUES" + str(q) +
                        ";")

    conn.commit()
Example #32
 def test_insert_balance(self):
     """ Test that insert into the runqueue of a remote thread does not affect the
     bookkeeping of the current thread.
     """
     thread, task = self.create_thread_task()
     try:
         task.remove()
         before = stackless.getruncount()
         task.insert()
         after = stackless.getruncount()
         # only the runnable count on the remote thread
         # should change
         self.assertEqual(before, after)
     finally:
         thread.join()
Example #33
def test_thread(names):
	i.append(0)
	j=0
	while len(names):
		try:
			if i[0]<th:
				n = names.pop(0)
				i[0]=i[0]+1
				thread=force(n)
				thread.start()
				j=j+1
		except KeyboardInterrupt:
			print "Attack suspended by user..\n"
			sys.exit()
	thread.join()
Example #34
def test_thread(names):
    i.append(0)
    j = 0
    while len(names):
        try:
            if i[0] < th:
                n = names.pop(0)
                i[0] = i[0] + 1
                thread = force(n, j)
                thread.start()
                j = j + 1
        except KeyboardInterrupt:
            print "Attack suspended by user..\n"
            sys.exit()
    thread.join()
Example #35
    def run(self, timeout):
        print "running " + self.cmd

        def target():
            self.process = subprocess.Popen(self.cmd, shell=True)
            self.process.communicate()

        thread = threading.Thread(target=target)
        thread.start()

        thread.join(timeout)
        if thread.is_alive():
            print 'Terminating process'
            self.process.terminate()
            thread.join()
Example #36
 def test_insert_balance(self):
     """ Test that insert into the runqueue of a remote thread does not affect the
     bookkeeping of the current thread.
     """
     thread, task = self.create_thread_task()
     try:
         task.remove()
         before = stackless.getruncount()
         task.insert()
         after = stackless.getruncount()
         # only the runnable count on the remote thread
         # should change
         self.assertEqual(before, after)
     finally:
         thread.join()
Example #37
 def run(self, timeout=0):
     def target():
         print 'Thread started'
         self.process = subprocess.Popen(self.cmd, shell=True)
         self.process.communicate()
         print 'Thread finished'
     thread = threading.Thread(target=target)
     thread.start()
     if timeout == 0:
         return
     thread.join(timeout)
     if thread.is_alive():
         print 'Terminating process'
         self.process.terminate()
         thread.join()
     print self.process.returncode
Example #38
def shutdown():
  global go
  go = None
  print("Shutting down all threads...")
  currentThread = threading.currentThread()
  for thread in threading.enumerate():
    if thread != currentThread:
      thread.join()
  print("All threads finished.")
  if lcd is not None:
    print("Clearing LCD and turning it off...")
    lcd.lcd_clear()
    sleep(1)
    lcd.backlight(0)
    print("LCD backlight off.")
  return
Example #39
    def test_qpid_topic_and_fanout(self):
        for receiver_id in range(self.no_receivers):
            consumer = self.consumer_cls(self.qpid_conf,
                                         self.session_receive,
                                         self.receive_topic,
                                         self.consumer_callback)
            self._receivers.append(consumer)

            # create receivers threads
            thread = threading.Thread(target=self._try_receive_msg,
                                      args=(receiver_id, self.no_msgs,))
            self._receiver_threads.append(thread)

        for sender_id in range(self.no_senders):
            publisher = self.publisher_cls(self.qpid_conf,
                                           self.session_send,
                                           self.topic)
            self._senders.append(publisher)

            # create sender threads
            thread = threading.Thread(target=self._try_send_msg,
                                      args=(sender_id, self.no_msgs,))
            self._sender_threads.append(thread)

        for thread in self._receiver_threads:
            thread.start()

        for thread in self._sender_threads:
            thread.start()

        for thread in self._receiver_threads:
            thread.join()

        for thread in self._sender_threads:
            thread.join()

        # Each receiver should receive all the messages sent by
        # the sender(s).
        # So, Iterate through each of the receiver items in
        # self._messages and compare with the expected messages
        # messages.

        self.assertEqual(len(self._expected), self.no_senders)
        self.assertEqual(len(self._messages), self.no_receivers)

        for key, messages in self._messages.iteritems():
            self.assertEqual(self._expected, messages)
Example #40
    def testRunOrder(self):
        def a():
            self.events.append(0)
        def b():
            self.events.append(1)
        def c():
            self.events.append(2)

        (t1, t2, t3), thread = GetRemoteTasklets((a, b, c))
        try:
            with stackless.atomic():
                t2.insert()
                t3.insert()
                t1.run() #t1 should run first
        finally:
            thread.join(2)
        self.assertEqual(self.events, list(range(3)))
Example #41
    def run(self, timeout=0):
        def target():
            print 'Thread started'
            self.process = subprocess.Popen(self.cmd, shell=True)
            self.process.communicate()
            print 'Thread finished'

        thread = threading.Thread(target=target)
        thread.start()
        if timeout == 0:
            return
        thread.join(timeout)
        if thread.is_alive():
            print 'Terminating process'
            self.process.terminate()
            thread.join()
        print self.process.returncode
Example #42
def ParallelSort(Table, SortingColumnName, OutputTable, openconnection):

    conn = openconnection
    cur = conn.cursor()
    cur.execute("SELECT MIN("+SortingColumnName+") FROM "+Table+";")
    min_list = cur.fetchall()
    min_tuple = min_list[0]
    minimum = min_tuple[0]

    cur.execute("SELECT MAX("+SortingColumnName+") FROM "+Table+";")
    max_list = cur.fetchall()
    max_tuple = max_list[0]
    maximum = max_tuple[0]

    rangepartition(Table, 5, minimum, maximum, SortingColumnName, conn)
    cur.execute("SELECT COUNT(*) FROM "+Table+"meta;")
    thread_count_list = cur.fetchall()
    thread_count_values = thread_count_list[0]
    thread_count = int(re.sub('[^0-9]','',str(thread_count_values)))
    print thread_count
    thread_list = []

    for t in range(thread_count):

        threads = threading.Thread(
            target=firstthread,
            args=(Table, t+1, SortingColumnName, openconnection))
        thread_list.append(threads)

    for tr in thread_list:
        tr.start()
    for thread in thread_list:
        thread.join()

    cur.execute("CREATE TABLE "+OutputTable+" AS SELECT * FROM "+Table+" WHERE FALSE;")

    cur.execute("ALTER TABLE "+OutputTable+" ADD tupleOrder INT;") 
 
    index = 0
    for t in range(thread_count):
        cur.execute("SELECT * FROM Repartitioned_"+Table+str(t)+";")
        data = cur.fetchall()
        for q in data:
            index = index + 1
            q = q + (index,)
            cur.execute("INSERT INTO "+OutputTable+" VALUES"+str(q)+";")

    conn.commit()
Example #43
 def exit(self, name=None, jabber=False):
     """ call exit on all bots. """
     if not name:
         threads = []
         for bot in self.bots:
             if jabber and bot.type != 'sxmpp' and bot.type != 'jabber': continue
             threads.append(start_new_thread(bot.exit, ()))
         for thread in threads: thread.join()
         return
     for bot in self.bots:
         if bot.cfg.name == name:
             if jabber and bot.type != 'sxmpp' and bot.type != 'jabber': continue
             try: bot.exit()
             except: handle_exception()
             self.remove(bot)
             return True
     return False
Example #44
    def go(self):
        self.timeLog("日志启动于 %s" % self.getStartRunningTime().strftime(self.TimeFormatForLog))
        self.timeLog("开始cancel pending orders")
        self.huobi_cancel_pending_orders()
        self.timeLog("完成cancel pending orders")

        thread_pool = []
        thread_pool.append(Thread(target=self.trade_thread, args=()))
        if self.need_rebalance:
            spot_rebalance = SpotRebalance(self.heart_beat_time, self.coinMarketType, depth_data=self.depth_data,
                                           transaction_info=self.order_info_queue)
            thread_pool.append(Thread(target=spot_rebalance.go, args=()))
        for thread in thread_pool:
            thread.setDaemon(True)
            thread.start()
        for thread in thread_pool:
            thread.join()
Example #45
def _xray_frame_process(queue, linger=True, wait=None):
  """The _xray_frame_process() function starts the viewer in a
  separate thread.  It then continuously reads data from @p queue and
  dispatches update events to the viewer.  The function returns when
  it reads a @c None object from @p queue or when the viewer thread
  has exited.
  """

  from Queue import Empty
  import rstbx.viewer

  # Start the viewer's main loop in its own thread, and get the
  # interface for sending updates to the frame.
  thread = _XrayFrameThread()
  send_data = thread.send_data

  while True:
    try:
      payload = queue.get(timeout=1)

      if payload is None:
        if linger:
          thread.join()
        else:
          thread.stop()
        return

      if not thread.isAlive():
        thread.join()
        return

      if wait is not None:
        time.sleep(wait)

      # All kinds of exceptions--not just PyDeadObjectError--may occur
      # if the viewer process exits during this call.  XXX This may be
      # dangerous!
      try:
        send_data(rstbx.viewer.image(payload[0]), payload[1])
      except Exception:
        pass
    except Empty:
      pass
Example #46
    def __call__(self, *args, **kwargs):
        import threading
        lock = threading.RLock()
        result = {}

        def invoke(alias, object):
            value = object(*args, **kwargs)
            lock.acquire()
            result[alias] = value
            lock.release()

        threadlist = [threading.Thread(
                      target=invoke, args=item)
                      for item in self.items()]
        for thread in threadlist:
            thread.start()
        for thread in threadlist:
            thread.join()
        return result
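The __call__ above fans a single call out to every stored callable and collects the values under their aliases. A hedged usage sketch, assuming the object is a dict-like mapping of alias to callable (the class name and callables here are made up):

import threading

class ParallelCalls(dict):
    """Call every stored callable with the same arguments, each in its own thread."""
    def __call__(self, *args, **kwargs):
        lock = threading.RLock()
        result = {}

        def invoke(alias, func):
            value = func(*args, **kwargs)
            with lock:  # serialize writes to the shared result dict
                result[alias] = value

        threads = [threading.Thread(target=invoke, args=item) for item in self.items()]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        return result

calls = ParallelCalls(double=lambda x: 2 * x, square=lambda x: x * x)
print(calls(3))  # {'double': 6, 'square': 9}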
Example #47
    def _check_proxy_list(self, step=100):
        # check the proxy list in batches, a thread per proxy
        proxy_length = len(self.proxy_list)
        temp_list = []
        for proxy in self.proxy_list:
            temp_list.append(proxy)

        print "There are " + str(proxy_length) + " proxies to check"
        start = 0
        while start <= proxy_length:
            end = start + step
            threads = []
            print "Progress: " + str(start)
            # start a thread for each proxy in this batch
            for proxy in temp_list[start:end]:
                # create the opener and request

                proxy_url = "http://" + proxy
                proxy_support = urllib2.ProxyHandler({"http": proxy_url})
                opener = urllib2.build_opener(proxy_support, urllib2.HTTPHandler)
                url = "http://www.amazon.cn"
                r = urllib2.Request(url)
                r.add_header("Accept-Language", "zh-cn")  # 加入头信息,这样可以避免403错误
                r.add_header("Content-Type", "text/html; charset=gb2312")
                r.add_header("User-Agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.2; .NET CLR 1.1.4322)")
                # start the thread
                thread = CheckProxyThread(proxy, r, opener)
                threads.append(thread)
                thread.start()
                time.sleep(0.3)
            for thread in threads:
                thread.join(5)
            for thread in threads:
                if not thread.does_func:
                    self.proxy_list.remove(thread.proxy)
            print "剩余proxy数量为" + str(len(self.proxy_list))
            start = start + step
        return None
Example #48
def optimumClusters(inp):
    numOfClusters = range(2, len(inp))
    
    v = []
    threads = []
    for numOfCluster in numOfClusters:
        threads.append(threading.Thread(target=findClusterQuality, args=(numOfCluster, inp, v)))

    for thread in threads:
        thread.start()

    count = 0
    for thread in threads:
        thread.join()
        print float(count) / len(threads)
        count += 1

    optimumNumber = min(v)[1]
    clusters = clustersByKMeans(inp, optimumNumber)
    clusters = [cluster for cluster in clusters if cluster != []]
    return clusters
Example #49
 def run(self):
     # first get commands
     self.getCommands()
     if VERBOSE:
         print self.commands
     for c in self.commands:
         c.finished = False
     # next get first commands
     self.first_commands = InstanceCommand.getFirstCommands(self.commands)
     # add commands to run to a queue
     threads = []
     for command in self.first_commands:
         threads.append(RunCommandThread(command, worker=self))
     # start all threads; when a thread finishes its command, it launches a new
     # set of threads for commands that are ready but not yet completed and waits
     # for them, so once the initial threads finish, all threads have finished
     for thread in threads:
         thread.start()
     # wait for threads to finish
     for thread in threads:
         thread.join()
Example #50
def main():
    threads = []
    db = setup_db()

    with open("subreddits.txt") as f:
        SUBREDDITS = f.read().split("\n")

    for index in xrange(0, len(SUBREDDITS), 4):
        for x in xrange(index, index + 4):
            if x >= len(SUBREDDITS):
                break
            thread = Thread(target=mine, args=(db,), kwargs={"mined_from": SUBREDDITS[x]})
            thread.start()
            threads.append(thread)
            sleep(randint(3, 5))

        for i, thread in enumerate(threads):
            logging.info("Finished thread: %s" % SUBREDDITS[index + i])
            thread.join()

        threads = []
Example #51
 def tcpdumpThreads(self, port1Name, port2Name, clientCommandLine):
     """
     runs tcpdump threads and kills them 
     """
     threadList = []
     threadList.append(Thread(target = self.tcpdump, args = (port1Name, )))
     
     if self.parser.getNIC() != "CX4-LX":
         threadList.append(Thread(target = self.tcpdump, args = (port2Name, )))            
     
     threadList.append(Thread(target = self.tcpdumpKill))
                       
     for thread in threadList:
         thread.start()
                       
     self.executeCommand(clientCommandLine)
                 
     self.tcpdumpKill()
     
     for thread in threadList:
         thread.join()
Example #52
    def __init__(self):
        self.id = random.randrange(0, 1000, 1)
        self.message = Message.Message()
        self.ser = serial.Serial('/dev/ttyS0', 38400, timeout=1)
        self.game_on = True
        self.client = mqtt.Client(str(self.id), userdata=str(self.id))
        self.distance = 1
        print("Client created")

        self.handle_methods()
        self.client.connect("192.168.17.52")
        self.subscribe_on_topics()
        self.client.publish(main_topic+"/"+Topics.registering, str(self.id))

        thread = Thread(target=self.controller_loop, args=())
        thread.start()

        print("debug")
        sys.stdout.flush()
        self.client.loop_forever()
        thread.join()
Example #53
 def __call__(self, * args, ** kwArgs):
   thread = TimeoutHelperThread(self._func, args, kwArgs, name = self._name)
   thread.start()
   thread.join(self._timeout)
   if thread.isAlive():
     raise chakana.error.Timeout(thread, self._timeout)
   if thread.error is None:
     return thread.result
   raise chakana.error.ChildException(thread.error, thread.exc_info)