Example #1
    def add_playlist(self, playlist_id):
        def update_track_uri():
            while True:
                try:
                    current_track = q.get(block=False)
                    current_track.url = self.session.get_media_url(current_track.id)
                    q.task_done()
                except Empty:
                    # queue drained; nothing left to do
                    break

        playlist_guid = UUID(int=int(playlist_id))
        tracks = self.session.get_playlist_tracks(playlist_guid)
        q = Queue()
        for track in tracks:
            self.tracklist.add_track(track)
            q.put(track)

        for i in range(0, self.max_concurrent_url_resolvers):
            t = Thread(target=update_track_uri)
            t.start()

        q.join()
        return {"total_tracks": len(self.tracklist)}
Example #2
def start_testing(unique_info, test_continues):
    """Multithreaded testing of files fetched from the database.
    Reports the first buttons found. Composes a list of unique files with
    the specified extensions."""
    worker_threads = Queue(TESTING_THREADS_ALLOWED)
    print("starting", TESTING_THREADS_ALLOWED, "threads\n")
    test_thread_id = 0
    all_files_tested = False
    # Spawn threads to fetch and test files and update the database until
    # all files are uploaded to the DB and tested, or no changes have
    # happened to the DB for DB_WATCHDOG_TIME seconds.
    while (time.time() - db.time_of_update[0] < DB_WATCHDOG_TIME
           and not all_files_tested):
        print(time.time() - db.time_of_update[0])
        if worker_threads.qsize() < TESTING_THREADS_ALLOWED:
            worker_threads.put(test_thread_id)
            worker = threading.Thread(target=tester.tester,
                                      args=(worker_threads,
                                            conn_data,
                                            unique_info,
                                            EXTENTION_TO_FIND,
                                            ALLOWED_APP_RUNTIME))
            worker.daemon = True
            worker.start()
            test_thread_id += 1
            time.sleep(0.01)
            if test_continues.qsize() < 2:  # tree composed and uploaded
                all_files_tested = db.check_test_completion(conn_data,
                                                            EXTENTION_TO_FIND)
    print("Testing thread waiting for all worker-threads to complete\n")
    worker_threads.join()
    print("Testing thread checked all unique", EXTENTION_TO_FIND, "files\n")
    test_continues.get()
    test_continues.task_done()
Example #3
class PreviewDispatcherThread(QThread):
    """
    Thread used to dispatch the element to each preview worker thread.

    :param queue: The main queue containing the elements to process.
    :param mo_signal: The signal for the MO preview worker (arrives via **kwargs), updates the MO preview.
    :param nmm_signal: The signal for the NMM preview worker (arrives via **kwargs), updates the NMM preview.
    :param code_signal: The signal to pass to the code preview worker, updates the code preview.
    """
    def __init__(self, queue, code_signal, **kwargs):
        super().__init__()
        self.queue = queue
        self.gui_queue = Queue()
        self.code_queue = Queue()

        self.code_thread = PreviewCodeWorker(self.code_queue, code_signal)
        self.code_thread.start()
        self.gui_thread = PreviewGuiWorker(self.gui_queue, **kwargs)
        self.gui_thread.start()

    def run(self):
        while True:
            # wait for next element
            element = self.queue.get()

            if element is not None:
                element.write_attribs()
                element.load_metadata()
                element.sort()

            # dispatch to every queue
            self.gui_queue.put(element)
            self.code_queue.put(element)
Example #4
def main():
    """ Function makes whole job.
    """
    queue = Queue()
    pages = (URL + str(i + 1) for i in range(44))

    t0 = time()

    with Pool(10) as p:
        for links in p.imap_unordered(get_links, pages):
            for link in links:
                queue.put(link)

    t1 = time()

    with Pool(20) as p:
        for entry in p.imap_unordered(get_entry, drain(queue)):
            queue.put(entry)

    t2 = time()

    print()
    print(t1 - t0)
    print('entries:', queue.qsize())
    print(t2 - t1)

    with open('data.json', 'w') as f:
        json.dump(queue2list(queue), f)
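The drain and queue2list helpers are assumed by this snippet but not shown; minimal sketches consistent with how they are called (single-threaded draining only):

def drain(queue):
    """Yield items until the queue is empty."""
    while not queue.empty():
        yield queue.get()

def queue2list(queue):
    """Exhaust the queue into a list."""
    return list(drain(queue))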
Example #5
class Actor:
    def __init__(self):
        self._mailbox = Queue()

    def send(self, msg):
        self._mailbox.put(msg)

    def recv(self):
        msg = self._mailbox.get()
        if msg is ActorExit:
            raise ActorExit()
        return msg

    def start(self):
        self._terminated = Event()
        t = Thread(target=self._bootstrap)
        t.daemon = True
        t.start()

    def _bootstrap(self):
        try:
            self.run()
        except ActorExit:
            pass
        finally:
            self._terminated.set()

    def join(self):
        self._terminated.wait()

    def run(self):
        while True:
            msg = self.recv()
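The snippet relies on an ActorExit sentinel and the Thread/Event/Queue imports it does not show. A minimal usage sketch under those assumptions (PrintActor is hypothetical):

class ActorExit(Exception):
    """Sentinel: sending the class itself shuts the actor down."""
    pass

class PrintActor(Actor):
    def run(self):
        while True:
            msg = self.recv()  # raises ActorExit on the sentinel
            print('Got:', msg)

p = PrintActor()
p.start()
p.send('hello')
p.send(ActorExit)  # terminates the actor's run loop
p.join()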
Example #6
class Uploader(threading.Thread):

    def __init__(self, backend):
        super().__init__()
        self._backend = backend
        self._upload_queue = Queue()
        self._keep_going = True
        self._retry = None

    def queue_image(self, path):
        """Queue an image for upload."""
        self._upload_queue.put(path)

    def run(self):
        while self._keep_going:
            if self._retry is None:
                path = self._upload_queue.get()
            else:
                path = self._retry
            if path is None:
                break
            with open(path, "rb") as img:
                success = self._backend.upload_image(img.read())
            if not success:
                # try again
                self._retry = path
            else:
                self._retry = None

    def stop(self):
        """Terminate the thread."""
        self._keep_going = False
        self._upload_queue.put(None)
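A short driver sketch; DummyBackend and the sample path are illustrative assumptions, not part of the original:

class DummyBackend:
    def upload_image(self, data):
        print('uploaded %d bytes' % len(data))
        return True  # pretend the upload always succeeds

uploader = Uploader(DummyBackend())
uploader.start()
uploader.queue_image('photo.jpg')  # hypothetical path
uploader.stop()  # enqueues the None sentinel so run() unblocks and exits
uploader.join()

Note that run() retries a failed path in a tight loop without any backoff; a production version would sleep between attempts.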
Example #7
def astar(maze, start, end):
    cost = lambda distance, current: distance + ((end[0] - current[0]) ** 2
                                                 + (end[1] - current[1]) ** 2) ** 0.5

    visited = set()
    will_visit = set()

    queue = Queue()
    queue.put((start, list()))
    will_visit.add(start)

    while not queue.empty():
        current, path = queue.get()
        path.append(current)
        visited.add(current)

        if current == end:
            return path, len(path), len(visited)

        options = moves(maze, current)
        options.sort(key=partial(cost, len(path)))
        for potential in options:
            if potential in will_visit:
                continue

            will_visit.add(potential)
            queue.put((potential, path[:]))
Example #8
class ScraperThread(QThread):

    result_signal = pyqtSignal(dict)

    def __init__(self, parent=None):
        super(ScraperThread, self).__init__(parent)
        self._queue = Queue()
        self._stop = False

    def run(self):
        self._stop = False
        while not self._queue.empty() and not self._stop:
            processed_url = self._queue.get()
            result = requests.get(processed_url)
            self.result_signal.emit({'headers': result.headers})

    def clear_queue(self):
        self._queue = Queue()

    @property
    def queue(self):
        return self._queue

    @queue.setter
    def queue(self, urls):
        for url in urls:
            self._queue.put(url.strip())

    @property
    def stop(self):
        return self._stop

    @stop.setter
    def stop(self, stop):
        self._stop = stop
Example #9
  def crawl(self, urls, follow_links=False):
    links, seen = set(), set()
    queue = Queue()
    converged = threading.Event()

    def execute():
      while not converged.is_set():
        try:
          url = queue.get(timeout=0.1)
        except Empty:
          continue
        if url not in seen:
          seen.add(url)
          hrefs, rel_hrefs = self.execute(url)
          links.update(hrefs)
          if follow_links:
            for href in rel_hrefs:
              if href not in seen:
                queue.put(href)
        queue.task_done()

    for url in urls:
      queue.put(url)
    for _ in range(self._threads):
      worker = threading.Thread(target=execute)
      worker.daemon = True
      worker.start()
    queue.join()
    converged.set()
    return links
Example #10
def main():
    # These three parameters are user defined
    client_id = "*****"
    username = "******"
    password = "******"

    tokenDict = get_tokenDict(client_id, username, password)

    ts = time()
    download_dir = setup_download_dir()
    links = [l for l in get_links(client_id, tokenDict) if l.endswith(".jpg")]
    # Create a queue to communicate with the worker threads
    queue = Queue()
    # Create worker threads

    for x in range(int(argv[1])):
        worker = DownloadWorker(queue)
        # Setting daemon to True will let the main thread exit even though the workers are blocking
        worker.daemon = True
        worker.start()
    # Put the tasks into the queue as a tuple
    for link in links:
        logger.info("Queueing {}".format(link))
        queue.put((download_dir, link))
    # Causes the main thread to wait for the queue to finish processing all the tasks
    queue.join()
    print("Took {}".format(time() - ts))
Example #11
class BlockingInProcessChannel(InProcessChannel):

    def __init__(self, *args, **kwds):
        super(BlockingInProcessChannel, self).__init__(*args, **kwds)
        self._in_queue = Queue()

    def call_handlers(self, msg):
        self._in_queue.put(msg)

    def get_msg(self, block=True, timeout=None):
        """ Gets a message if there is one that is ready. """
        if timeout is None:
            # Queue.get(timeout=None) has stupid uninterruptible
            # behavior, so wait for a week instead
            timeout = 604800
        return self._in_queue.get(block, timeout)

    def get_msgs(self):
        """ Get all messages that are currently ready. """
        msgs = []
        while True:
            try:
                msgs.append(self.get_msg(block=False))
            except Empty:
                break
        return msgs

    def msg_ready(self):
        """ Is there a message that has been received? """
        return not self._in_queue.empty()
Example #12
    def count_links(self):
        """Initialises Url ,Counter threads and jsonify_word_count"""
        queue_url = Queue()
        queue_url_data = Queue()
        queue_word_count = Queue()
        links = self.get_links(self.feed_url)

        for i in range(self.thread_count):
            t = UrlDownloadThread(queue_url, queue_url_data)
            t.daemon = True
            t.start()

        for i in range(self.number_of_links):
            queue_url.put(links[i])

        for i in range(self.thread_count):
            wct = WordCountThread(
                queue_url_data, queue_word_count, self.stop_words)
            wct.daemon = True
            wct.start()
        queue_url.join()
        queue_url_data.join()
        queue_word_count.join()
        for _ in range(self.number_of_links):
            link, word_count = queue_word_count.get()
            self.word_count[link] = word_count
Example #13
class Metric(object):
    """
    This class stores generic time-series data in a queue.
    Values are stored as (timestamp, value) tuples
    """

    def __init__(self):
        self.metric = Queue()

    def push(self, value, timestamp=None):
        if timestamp is None:
            timestamp = int(time.time())
        elif not isinstance(timestamp, int):
            raise ValueError(
                "Timestamp should be an integer, but it is '%s'" %
                type(timestamp))
        self.metric.put((timestamp, value))

    def next(self):
        try:
            return self.metric.get_nowait()
        except Empty:
            raise StopIteration

    # alias so instances also iterate under Python 3
    __next__ = next

    def get(self):
        # TODO: decide what we should return here
        return None

    def __iter__(self):
        return self
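A short usage sketch; iteration drains the queue destructively and relies on the __next__ alias added above:

m = Metric()
m.push(0.5)
m.push(0.7, timestamp=1700000000)
for ts, value in m:  # consumes the queue until it is empty
    print(ts, value)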
Example #14
def spin_server(queue: Queue, transport: AbstractTransport):
    while True:
        # do stuff
        b64image = transport.handle_client()
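        # block=False: put() raises queue.Full immediately if the queue is bounded and full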
        queue.put(b64image, False)
Example #15
def main():
    ts = time.time()
    # create a queue to communicate with the worker threads
    queue = Queue()
    # create 6 worker threads
    for x in range(6):
        worker = doExpbatWorker(queue)
        # setting daemon to True will let the main thread exit even though the workers are blocking
        worker.daemon = True
        worker.start()
    #for i in range(9):
    #    queue.put(('~/'+str(i)+'.bat','dfdf'))

    jb = []
    batpath = 'g:/migration/exp_script/'
    csvpath = 'g:/migration/mig_xw/'
    jb.append((batpath+'cps_xw_studentcourse.bat', batpath+'cps_xw_studentcourse.bat '+' AcademicAdministration '+ csvpath+'cps_xw_studentcourse.csv 202.205.160.199 jwc wangbin'))
    jb.append((batpath+'cps_xw_avgscore.bat', batpath+'cps_xw_avgscore.bat '+' AcademicAdministration '+ csvpath+'cps_xw_avgscore.csv 202.205.160.199 jwc wangbin'))
    #jb.append((batpath+'exmm_composescore330.bat', batpath+'exmm_composescore330.bat '+' zhejiang '+ csvpath+'exmm_composescore330.csv 202.205.160.183 sa !!!WKSdatatest!!!'))
    #jb.append((batpath+'cps_xw_avgscore.bat', batpath+'cps_xw_avgscore.bat '+' AcademicAdministration '+ csvpath+'cps_xw_avgscore.csv 202.205.160.199 jwc wangbin'))
    #jb.append((batpath+'exmm_xkStandardplan330.bat', batpath+'exmm_xkStandardplan330.bat '+' zhejiang '+ csvpath+'exmm_xkstandsartplan330.csv 202.205.160.183 sa !!!WKSdatatest!!!'))
    #jb.append((batpath+'exmm_xkStandard330.bat', batpath+'exmm_xkStandard330.bat '+' zhejiang '+ csvpath+'exmm_xkstandsart330.csv 202.205.160.183 sa !!!WKSdatatest!!!'))
    excl = []
    for item in jb:
        find = False
        for i in excl:
            if i in item[0]:
                find = True
                break
        if not find:
            #if 'exemptapply' in item[0]:
            queue.put(item)
    queue.join()
    print('took %s minutes' % ((time.time() - ts) / 60,))
Example #16
class BlockingShellSocketChannel(ShellSocketChannel):

    def __init__(self, context, session, address=None):
        super(BlockingShellSocketChannel, self).__init__(context, session,
                                                        address)
        self._in_queue = Queue()

    def call_handlers(self, msg):
        #io.rprint('[[Shell]]', msg) # dbg
        self._in_queue.put(msg)

    def msg_ready(self):
        """Is there a message that has been received?"""
        return self._in_queue.qsize() > 0

    def get_msg(self, block=True, timeout=None):
        """Get a message if there is one that is ready."""
        return self._in_queue.get(block, timeout)

    def get_msgs(self):
        """Get all messages that are currently ready."""
        msgs = []
        while True:
            try:
                msgs.append(self.get_msg(block=False))
            except Empty:
                break
        return msgs
Example #17
class Mission:
    def __init__(self, max_thread):
        self.queue = Queue()
        self.max_thread = max_thread

    def __enter__(self):
        for x in range(self.max_thread):
            thread = Thread(target=self._threader)
            thread.daemon = True
            thread.start()
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        self.queue.join()

    def send_task(self, func, *args):
        self.queue.put((func, args))

    def _threader(self):
        while True:
            # a blocking get() never raises queue.Empty, so the original
            # except clause was dead; task_done() in a finally keeps join()
            # from hanging when a task raises
            func, args = self.queue.get()
            try:
                func(*args)
            finally:
                self.queue.task_done()
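A usage sketch (fetch and the URLs are placeholders); __exit__ blocks on queue.join() until every submitted task is marked done:

def fetch(url):
    print('fetching', url)

with Mission(max_thread=4) as mission:
    for url in ('https://example.com/a', 'https://example.com/b'):
        mission.send_task(fetch, url)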
Example #18
def wordcount(start_url, max_depth, word_length):
    fetch_queue = Queue()  # (crawl_depth, url)
    fetch_queue.put((0, canonicalize(start_url)))
    count_queue = Queue()  # (url, data)

    seen_urls = set()
    func = lambda: fetcher(fetch_queue, max_depth, seen_urls, count_queue)
    for _ in range(3):
        Thread(target=func, daemon=True).start()

    result_queue = Queue()  # (url, {word: count})
    func = lambda: counter(count_queue, word_length, result_queue)
    for _ in range(3):
        Thread(target=func, daemon=True).start()

    done_object = object()
    output_queue = Queue()  # Will contain the single, final result
    func = lambda: fan_in(result_queue, output_queue, done_object)
    Thread(target=func, daemon=True).start()

    fetch_queue.join()
    count_queue.join()
    result_queue.put(done_object)  # Special signal for "done" :/

    return output_queue.get()
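Of the helpers this function wires together, fan_in is the least self-explanatory; a sketch of the merging worker it presumably names, with the item shape inferred from the queue comments above:

def fan_in(result_queue, output_queue, done_object):
    """Merge per-URL word counts into one dict; emit it on the done signal."""
    totals = {}
    while True:
        item = result_queue.get()
        if item is done_object:
            break
        url, counts = item  # (url, {word: count})
        for word, n in counts.items():
            totals[word] = totals.get(word, 0) + n
    output_queue.put(totals)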
Example #19
class EventListener(FileSystemEventHandler):
    """
    Listens for changes to files and re-runs tests after each change.
    """
    def __init__(self, extensions=None):  # avoid a mutable default argument
        super(EventListener, self).__init__()
        self.event_queue = Queue()
        self.extensions = extensions or DEFAULT_EXTENSIONS

    def on_any_event(self, event):
        """
        Called when a file event occurs.
        Note that this gets called on a worker thread.
        """
        # Filter for allowed event types
        if not isinstance(event, WATCHED_EVENTS):
            return

        src_path = os.path.relpath(event.src_path)
        dest_path = None
        if isinstance(event, FileMovedEvent):
            dest_path = os.path.relpath(event.dest_path)

        # Filter files that don't match the allowed extensions
        if not event.is_directory and self.extensions != ALL_EXTENSIONS:
            src_ext = os.path.splitext(src_path)[1].lower()
            src_included = src_ext in self.extensions
            dest_included = False
            if dest_path:
                dest_ext = os.path.splitext(dest_path)[1].lower()
                dest_included = dest_ext in self.extensions
            if not src_included and not dest_included:
                return

        self.event_queue.put((type(event), src_path, dest_path))
Example #20
class HandlerThread(Thread):
    def __init__(self, bot, lock):
        self.bot = bot
        self.queue = Queue()
        self.lock = lock
        super().__init__()

    def run(self):
        while True:
            try:
                args = self.queue.get()
                with self.lock:
                    items = self.bot.__irccallbacks__[args[0]]

                for item in items:
                    if not get_core(item):
                        if self.bot.verbose:
                            print("[command thread:%s] calling fn %s" % (datetime.datetime.utcnow(), item.__name__))
                        item(self.bot, *(args[1]))

            except BaseException as e:
                if not isinstance(e, (SystemExit, KeyboardInterrupt)):
                    traceback.print_exc()

    def push(self, cname, *args):
        self.queue.put(tuple([cname] + list(args)))
Example #21
    def ac3(self, var_index):
        from queue import Queue

        # Put arcs into the queue: with X = self.var_list[var_index],
        # push the arc (Y, X) for each unassigned variable Y.
        queue = Queue()
        for i in range(len(self.assignment)):
            item_list = self.assignment[i]
            if not item_list:
                # here's variable Y = self.var_list[i]; create arc (Y, X)
                arc = Arc(i, var_index)
                queue.put(arc)

        # get each arc from the queue and propagate
        while not queue.empty():
            arc = queue.get()
            # if for this arc it is possible to reduce the domain of X
            if self.revise_ac3(arc):
                # if no value is left in the domain of X, we reached a failure
                if len(self.domain_list[arc.x]) < 1:
                    return False
                # The domain of X has been updated, so propagate to its
                # neighbours by pushing those arcs onto the queue.
                # (save arc.x first: rebinding `arc` inside the loop would
                # otherwise change it mid-iteration)
                x = arc.x
                for i in range(len(self.assignment)):
                    item_list = self.assignment[i]
                    if i != x and not item_list:
                        queue.put(Arc(i, x))
        return True
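Arc is not defined in the snippet; from its uses (Arc(i, var_index), arc.x) it is presumably a plain pair of variable indices:

class Arc:
    """Directed arc between two variable indices, as consumed by revise_ac3."""
    def __init__(self, x, y):
        self.x = x
        self.y = y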
Example #22
def is_alive(ip_addr):
    lock = threading.Lock()
    probe_ports = [22, 3389]
    q = Queue()
    status = False
    for port in probe_ports:
        q.put(port)

    class Probe(threading.Thread):

        def __init__(self):
            threading.Thread.__init__(self)

        def run(self):
            try:
                self.port = q.get(block=False)
            except Empty:
                return False
            if tcp_probe(ip_addr, self.port):
                with lock:
                    nonlocal status
                    status = True
                # print("Success to connect to " + ip_addr + " " + str(self.port))
            # else:
                # print("Failed to connect to " + ip_addr + " " + str(self.port))
            q.task_done()

    for x in range(5):
        p = Probe()
        p.daemon = True
        p.start()

    q.join()
    return status
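tcp_probe is referenced but not shown; a plausible implementation, assuming it simply attempts a TCP connection:

import socket

def tcp_probe(ip_addr, port, timeout=2.0):
    """Return True if a TCP connection to (ip_addr, port) succeeds."""
    try:
        with socket.create_connection((ip_addr, port), timeout=timeout):
            return True
    except OSError:
        return False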
Example #23
class Work:
    def __init__(self, threads=100, offset=0, timeout=15.0):
        self.lines = Lines(f=_DOMAINS_FILE)
        self.num_lines = len(self.lines.d)
        self.concurrent = threads
        self.q = Queue(self.concurrent * 2)
        for self.i in range(self.concurrent):
            self.t = Thread(target=self.doWork)
            self.t.daemon = True
            self.t.start()
        try:
            for self.i in range(offset, self.num_lines):
                self.q.put(self.i)
            self.q.join()
        except KeyboardInterrupt:
            sys.exit(1)

    def doWork(self):
        while True:
            n = int(self.q.get())
            _domain = self.lines.find_line(n=n)

            c = Check(url = _domain)
            ip = c.ip
            if ip:
                print(ip, end=' ')
                print(_domain)
                save(ip=ip, domain=_domain)

            self.q.task_done()
Example #24
def main(filename):
    username = input('User: ')
    # password prompt reconstructed; the original line was mangled and getpass is assumed
    password = getpass('Password: ')

    print(time_stamp(), ': script started')

    task_queue = Queue()

    new_task = Task(task_queue)
    new_task.read(filename)

    while not task_queue.empty():
        host_queue = Queue()
        log_queue = Queue()
        task_block = task_queue.get()
        for host_ip in task_block['hosts']:
            host_queue.put(host_ip)
        config_list = task_block['tasks']

        # spawn a fixed pool of workers once; the original re-spawned fifty
        # workers for as long as host_queue was non-empty, which could create
        # an unbounded number of threads
        for x in range(50):
            worker = ExecuteTask(host_queue, config_list, log_queue, username, password, mode=task_block['mode'])
            worker.daemon = True
            worker.start()

        host_queue.join()

        while not log_queue.empty():
            st = log_queue.get()
            print(st)

    print(time_stamp(), ': script stopped')
Example #25
    def diagram(self):
        from graphviz import Digraph
        from queue import Queue
        diagram=Digraph(comment='The Trie')

        i=0

        diagram.attr('node', shape='circle')
        diagram.node(str(i), self.root.getValue())

        q=Queue()
        q.put((self.root, i))
        
        while not q.empty():

            node, parent_index=q.get()

            for child in node.getChildren():
                i+=1
                if child.getEnding():
                    diagram.attr('node', shape='doublecircle')
                    diagram.node(str(i), child.getValue())
                    diagram.attr('node', shape='circle')
                else:
                    diagram.node(str(i), child.getValue())
                diagram.edge(str(parent_index), str(i))
                q.put((child, i))

        with open('trie_dot.gv', 'w') as o:
            o.write(diagram.source)
        diagram.render('trie_dot.gv', view=True)  # also writes 'trie_dot.gv.pdf'
Example #26
def solve():
    
    (node, edge) = read(int)
    graph = [[] for _ in range(node)]
    cost = [-1 for _ in range(node)]
    for i in range(edge):
        (fro, to) = read(int)
        fro, to = fro-1, to-1
        if to not in graph[fro]:
            graph[fro].append(to)
            graph[to].append(fro)

    start = read(int)[0] - 1
    cost[start] = 0
    q = Queue()
    q.put(start)
    while not q.empty():
        select = q.get()
        for i in graph[select]:
            if cost[i] == -1:
                cost[i] = cost[select] + 6
                q.put(i)

    for i in range(node):
        if start == i:
            continue
        end = ' ' if i < node -1 else '\n'
        print(cost[i], end=end)
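read is an input helper not shown here; given calls like (node, edge) = read(int) and read(int)[0], it presumably parses one whitespace-separated line of stdin:

def read(cast):
    """Read one line from stdin and cast each token."""
    return [cast(tok) for tok in input().split()]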
Example #27
class GdbBreakpoint(Breakpoint):
    def __init__(self, system, bkpt_num):
        super().__init__()
        self._system = system
        self._bkpt_num = bkpt_num
        self._queue = Queue()
        system.register_event_listener(self._event_receiver)
        
    def wait(self, timeout = None):
        if self._handler:
            raise Exception("Breakpoint cannot have a handler and be waited on")

        if timeout == 0:
            return self._queue.get(False)
        else:
            return self._queue.get(True, timeout)
    
    def delete(self):
        self._system.unregister_event_listener(self._event_receiver)
        self._system.get_target()._gdb_interface.delete_breakpoint(self._bkpt_num)
        
    def _event_receiver(self, evt):
        if EVENT_BREAKPOINT in evt["tags"] and \
                evt["source"] == "target" and \
                evt["properties"]["bkpt_number"] == self._bkpt_num:
            if self._handler:
                self._handler(self._system, self)
            else:
                self._queue.put(evt)
        elif EVENT_SIGABRT in evt["tags"]:
                self._queue.put(evt)
Example #28
    def test_producer_consumer_with_queues(self):
        # we currently just stress yappi, no functionality test is done here.
        yappi.start()
        import time
        if utils.is_py3x():
            from queue import Queue
        else:
            from Queue import Queue
        from threading import Thread
        WORKER_THREAD_COUNT = 50
        WORK_ITEM_COUNT = 2000
        def worker():
            while True:
                item = q.get()                
                # do the work with item
                q.task_done()

        q = Queue()
        for i in range(WORKER_THREAD_COUNT):
            t = Thread(target=worker)
            t.daemon = True
            t.start()
             
        for item in range(WORK_ITEM_COUNT):
            q.put(item)
        q.join()  # block until all tasks are done
        #yappi.get_func_stats().sort("callcount").print_all()
        yappi.stop()
Example #29
class TTS(object):

    def __init__(self):
        self.clients = []
        self.voice_choices = []
        self.queue = Queue()
        if 'win32com' not in globals():
            return
        Thread(target=self._background).start()

    def _background(self):
        pythoncom.CoInitialize()
        self.tts = win32com.client.Dispatch("SAPI.SpVoice")
        self.voices = self.tts.GetVoices()
        self.voices = [self.voices.Item(i) for i in range(self.voices.Count)]
        self.voice_choices = [dict(desc=v.GetDescription(), id=i) for i, v in enumerate(self.voices)]
        self.tts.Rate = -5
        self.event_sink = win32com.client.WithEvents(self.tts, TTSEventSink)
        self.event_sink.setTTS(self)
        while True:
            self._speak(self.queue.get(True))

    def _speak(self, text):
        self._speaking = True
        self.tts.Skip("Sentence", INT32_MAX)
        self.tts.Speak(text, SVSFlagsAsync)
        self._pump()

    def speak(self, text):
        while True:
            try:
                self.queue.get(False)
            except Empty:
                break
        self.queue.put(text)

    def get_voice_choices(self):
        return self.voice_choices

    def set_voice(self, voice_id):
        self.tts.Voice = self.voices[voice_id]

    def handle_event(self, event, *args):
        msg = dict(type=event)
        if event == 'end':
            self._speaking = False
        elif event == 'word':
            msg.update(dict(char_pos=args[0], length=args[1]))
        msg = json.dumps(msg)
        for c in self.clients:
            c.write_message(msg)

    def _pump(self):
        skipped = False
        while self._speaking:
            if not skipped and not self.queue.empty():
                self.tts.Skip("Sentence", INT32_MAX)
                skipped = True
            pythoncom.PumpWaitingMessages()
            time.sleep(0.05)
Example #30
class JQueryChaliceRequestHandler(BaseHTTPRequestHandler):

	server_version = "Extremon/0.1"

	def do_GET(self):
		self.outq=Queue(maxsize=10)
		self.running=True
		self.server.add_consumer(self)

		self.send_response(200)
		self.send_header("Content-type", "text/plain")
		self.send_header("Access-Control-Allow-Origin", "*")
		self.end_headers()
		self.missed=0
		self.running=True

		try:
			while self.running:
				try:
					message = self.outq.get() + bytes('%s.timestamp=%.2f\n%s.missed=%d\n\n' % (self.server.prefix,time.time(),self.server.prefix,self.missed),'UTF-8')
					self.wfile.write(bytes(str(len(message)) + ";", 'UTF-8'))
					self.wfile.write(message)
					self.wfile.write(b';')
					self.outq.task_done()
				except error:  # socket.error while writing to the client
					self.running=False
		finally:
			self.server.remove_consumer(self)

	def write(self,data):
		try:
			self.outq.put(data,block=False)
		except Full:
			self.missed+=1
Example #31
class Runner:
    def __init__(self, model, ds, threads, post_proc=None):
        self.tasks = Queue(maxsize=threads * 5)
        self.workers = []
        self.model = model
        self.post_process = post_proc
        self.threads = threads
        self.result_dict = {}
        self.take_accuracy = False
        self.ds = ds

    def handle_tasks(self, tasks_queue):
        """Worker thread."""
        while True:
            qitem = tasks_queue.get()
            if qitem is None:
                # None in the queue indicates the parent wants us to exit
                tasks_queue.task_done()
                break

            try:
                # run the prediction
                results = self.model.predict({self.model.inputs[0]: qitem.img})
                if self.take_accuracy:
                    response = self.post_process(results, qitem.content_id,
                                                 qitem.label, self.result_dict)
            except Exception as ex:  # pylint: disable=broad-except
                src = [self.ds.get_item_loc(i) for i in qitem.content_id]
                log.error("thread: failed on contentid=%s, %s", src, ex)
            finally:
                response = []
                for query_id in qitem.query_id:
                    # FIXME: unclear what to return here
                    response.append(lg.QuerySampleResponse(query_id, 0, 0))
                lg.QuerySamplesComplete(response)
            tasks_queue.task_done()

    def handle_tasks_nolg(self, tasks_queue):
        """Worker thread."""
        while True:
            qitem = tasks_queue.get()
            if qitem is None:
                # None in the queue indicates the parent wants us to exit
                tasks_queue.task_done()
                break

            try:
                # run the prediction
                start = time.time()
                results = self.model.predict({self.model.inputs[0]: qitem.img})
                self.result_dict["timing"].append(time.time() - start)
                if self.take_accuracy:
                    response = self.post_process(results, qitem.content_id,
                                                 qitem.label, self.result_dict)
            except Exception as ex:  # pylint: disable=broad-except
                src = [self.ds.get_item_loc(i) for i in qitem.content_id]
                log.error("thread: failed on contentid=%s, %s", src, ex)
            finally:
                tasks_queue.task_done()

    def start_pool(self, nolg=False):
        if nolg:
            handler = self.handle_tasks_nolg
        else:
            handler = self.handle_tasks
        for _ in range(self.threads):
            worker = threading.Thread(target=handler, args=(self.tasks, ))
            worker.daemon = True
            self.workers.append(worker)
            worker.start()

    def start_run(self, result_dict, take_accuracy):
        self.result_dict = result_dict
        self.take_accuracy = take_accuracy
        self.post_process.start()

    def enqueue(self, id, ids, data, label):
        item = Item(id, ids, data, label)
        self.tasks.put(item)

    def finish(self):
        # exit all threads
        for _ in self.workers:
            self.tasks.put(None)
        for worker in self.workers:
            worker.join()
Example #32
def worker(name):
    # the head of this example was lost in extraction; the loop below is
    # reconstructed around the surviving lines, with an assumed timeout on
    # get() so idle workers eventually exit
    hasItem = True
    while hasItem:
        try:
            item = q.get(timeout=1)
            time.sleep(3)
            #do_work(item)
            print('%s processed value %s' % (name, item))
            q.task_done()
        except:
            hasItem = False
        finally:
            print(name + " thread is done")


num_worker_threads = 3
q = Queue()

threads = []
for i in range(num_worker_threads):
    t = Thread(target=worker, kwargs={"name": str(i)})
    t.daemon = True
    t.start()
    threads.append(t)

for item in range(10):
    q.put(item)

q.join()  # block until all tasks are done

print('stop')
for t in threads:
    t.join()

print('stopped')
Example #33
def threader():
    """Get a worker item from the queue, then run the job for it."""
    while True:
        worker = q.get()
        example_job(worker)
        q.task_done()

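example_job is assumed rather than shown; a minimal stand-in that serialises output with the print_lock defined below:

def example_job(worker):
    with print_lock:
        print(threading.current_thread().name, 'processed', worker)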

if __name__ == '__main__':
    # the worker threads are daemons, so they die when the main thread dies
    print_lock = threading.Lock()
    q = Queue()

    for x in range(10):
        t = threading.Thread(target=threader)  # Target is all important
        t.daemon = True  # lest they live on
        t.start()

    start_time = time.time()

    for worker in range(20):
        q.put(worker)

    q.join()

    print('Entire job took:', time.time() - start_time)
Example #34
class OandaStream(Stream):
    def __init__(self, stream, candle, sleep=10):
        assert isinstance(stream, OandaPriceStream)
        assert isinstance(candle, OandaCandleUpdater)
        self.stream = stream
        self.candle = candle
        self.cache = {}
        self.sleep = sleep
        self.running = False
        self.queue = Queue()
        self.price_thread = Thread(target=self.price_stream, daemon=True)
        self.candle_thread = Thread(target=self.candle_stream, daemon=True)
        self.methods = {
            CANDLE_TAG: self.on_candle,
            MESSAGE: self.on_price,
            LOG: self.on_log
        }

    @classmethod
    def conf(cls,
             token,
             instruments,
             accountID=None,
             sleep=10,
             trade_type=api.PRACTICE):
        assert isinstance(instruments, list)
        _api = api.OandaAPI(token, trade_type=trade_type)
        stream = OandaPriceStream(_api, instruments, accountID)
        candle = OandaCandleUpdater(_api, instruments)
        return cls(stream, candle, sleep)

    def price_stream(self):
        stream = self.stream.__iter__()
        while self.running:
            messages = next(stream)
            self.queue.put(messages)

    def candle_stream(self):
        while self.running:
            for tag, candle in self.candle:
                if tag == CANDLE_SUC:
                    self.queue.put((CANDLE_TAG, candle))
                else:
                    self.queue.put((LOG, candle))
            time.sleep(self.sleep)

    def on_price(self, price):
        instrument = price["instrument"]
        candle = self.cache.get(instrument)
        if candle:
            # return make_ind(price, candle)
            return {"price": price, "candle": candle}

    def on_candle(self, candle):
        instrument = candle["instrument"]
        self.cache[instrument] = candle

    def on_log(self, log):
        return log

    def start(self):
        self.running = True
        if self.price_thread is None:
            self.price_thread = Thread(target=self.price_stream, daemon=True)
        if not self.price_thread.is_alive():
            self.price_thread.start()

        if self.candle_thread is None:
            self.candle_thread = Thread(target=self.candle_stream, daemon=True)
        if not self.candle_thread.is_alive():
            self.candle_thread.start()

    def stop(self):
        self.running = False
        self.join()

    def join(self):
        if self.price_thread and self.price_thread.is_alive():
            self.price_thread.join()
        if self.candle_thread and self.candle_thread.is_alive():
            self.candle_thread.join()

    def __iter__(self):
        if not self.running:
            self.start()
        while self.running or self.queue.qsize():
            try:
                TAG, data = self.queue.get(timeout=2)
            except Empty:
                continue

            try:
                method = self.methods[TAG]
                result = method(data)
            except Exception as e:
                logging.error("Oanda handle | %s | %s | %s", TAG, data, e)
            else:
                if result:
                    yield TAG, result
Example #35
class ReaderThread(threading.Thread):

    TIMEOUT = 15
    
    def __init__(self, sock):
        threading.Thread.__init__(self)
        try:
            from queue import Queue
        except ImportError:
            from Queue import Queue
            
        self.daemon = True
        self.sock = sock
        self._queue = Queue()
        self.all_received = []
        self._kill = False
        
    def set_timeout(self, timeout):
        self.TIMEOUT = timeout

    def get_next_message(self, context_message):
        try:
            msg = self._queue.get(block=True, timeout=self.TIMEOUT)
        except:
            raise AssertionError('No message was written in %s seconds. Error message:\n%s' % (self.TIMEOUT, context_message,))
        else:
            frame = sys._getframe().f_back
            frame_info = ''
            while frame:
                stack_msg = ' --  File "%s", line %s, in %s\n' % (frame.f_code.co_filename, frame.f_lineno, frame.f_code.co_name)
                if 'run' == frame.f_code.co_name:
                    frame_info = stack_msg  # Ok, found the writer thread 'run' method (show only that).
                    break
                frame_info += stack_msg
                frame = frame.f_back
            frame = None
            sys.stdout.write('Message returned in get_next_message(): %s --  ctx: %s, asked at:\n%s\n' % (unquote_plus(unquote_plus(msg)), context_message, frame_info))
        return msg

    def run(self):
        try:
            buf = ''
            while not self._kill:
                l = self.sock.recv(1024)
                if IS_PY3K:
                    l = l.decode('utf-8')
                self.all_received.append(l)
                buf += l

                while '\n' in buf:
                    # Print each part...
                    i = buf.index('\n')+1
                    last_received = buf[:i]
                    buf = buf[i:]

                    if SHOW_WRITES_AND_READS:
                        print('Test Reader Thread Received %s' % (last_received, ))
                        
                    self._queue.put(last_received)
        except:
            pass  # ok, finished it
        finally:
            del self.all_received[:]

    def do_kill(self):
        self._kill = True
        if hasattr(self, 'sock'):
            self.sock.close()
Example #36
class Logger(object):
    """
    Logger class with option for remote connection to log server (Arkenstone)

    Args:
        logfile (str): Path to logfile
        remote (bool): Use remote connection or not
    """
    def __init__(self, logfile, remote=False):
        self.logger = self.get_logger(logfile)
        self.msg_queue = Queue()
        self.client = None

        if remote:
            try:
                self.client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                self.client.connect(ARKENSTONE)
                self.remote_initialisation()
                self.log_thread = threading.Thread(target=self.remote_log, args=())
                self.log_thread.start()
            except Exception as e:
                print("Error connecting to Arkenstone, continuing offline.\nError: {}".format(e))


    def __del__(self):
        """
        Destructor for cleaning up the remote connection thread
        """
        try:
            self.msg_queue.put("TERMINATE")
            self.log_thread.join()
        except:
            pass # Only cleans up the thread if remote is active


    def get_logger(self, logfile):
        """
        Sets up the logger object for logging messages

        Args:
            logfile (str): Path to logfile

        Returns:
            logger (Logger): Logger object
        """
        logger = logging.getLogger(PLATFORM_NAME)
        logger.setLevel(logging.DEBUG)

        fh = logging.FileHandler(filename=logfile)
        fh.setLevel(logging.DEBUG)

        ch = logging.StreamHandler()
        ch.setLevel(logging.DEBUG)

        formatter = logging.Formatter("%(asctime)s - %(name)s::%(levelname)s -- %(message)s")
        ch.setFormatter(formatter)
        fh.setFormatter(formatter)

        logger.addHandler(fh)
        logger.addHandler(ch)

        return logger


    def info(self, msg):
        """
        Logs a message with the logger.
        If there is a remote connection, add it to the message queue to be sent

        Args:
            msg (str): Message to log
        """
        msg = "{} -- {}".format(time.strftime("%d_%m_%Y:%H%M"), msg)
        if self.client:
            self.msg_queue.put(msg)
        
        self.logger.info(msg)


    def remote_initialisation(self):
        """
        Initialises the platform on the remote server
        """
        self.client.sendall("{}::INIT".format(PLATFORM_NAME).encode())

    def remote_log(self):
        """
        Thread method for sending logged messages to the remote server
        """
        while True:
            msg = self.msg_queue.get()

            if msg == "TERMINATE":  # matches the sentinel string put by __del__
                self.cleanup()
                break

            msg = "{0}::LOG::{1}".format(PLATFORM_NAME, msg)
            self.client.sendall(msg.encode())


    def cleanup(self):
        """
        Closes down the connection to the remote server, if present
        """
        if self.client:
            print("Shutting down Arkenstone client...")
            self.client.shutdown(2)
            self.client.close()
Example #37
    def predict(self):
        def eval(queue, steps, TPs, relevant, selected, task_names,
                 n_classes_list, lock):
            step = 0
            while step < steps:
                # Get prediction and true labels from prediction queue
                step += 1
                pred, true = queue.get(block=True)

                for p, y, task_name, n_classes in zip(pred, true, task_names,
                                                      n_classes_list):
                    # Argmax and CM elements
                    p = p.argmax(-1).ravel()
                    y = y.ravel()

                    # Compute relevant CM elements
                    # We select the number following the largest class integer when
                    # y != pred, then bincount and remove the added dummy class
                    tps = np.bincount(np.where(y == p, y, n_classes),
                                      minlength=n_classes + 1)[:-1]
                    rel = np.bincount(y, minlength=n_classes)
                    sel = np.bincount(p, minlength=n_classes)

                    # Update counts on shared lists
                    lock.acquire()
                    TPs[task_name] += tps.astype(np.uint64)
                    relevant[task_name] += rel.astype(np.uint64)
                    selected[task_name] += sel.astype(np.uint64)
                    lock.release()

        # Fetch some validation images from the generator
        pool = ThreadPoolExecutor(max_workers=7)
        result = pool.map(self.data.__getitem__, np.arange(self.steps))

        # Prepare arrays for CM summary stats
        TPs, relevant, selected = {}, {}, {}
        for task_name, n_classes in zip(self.task_names, self.n_classes):
            TPs[task_name] = np.zeros(shape=(n_classes, ), dtype=np.uint64)
            relevant[task_name] = np.zeros(shape=(n_classes, ),
                                           dtype=np.uint64)
            selected[task_name] = np.zeros(shape=(n_classes, ),
                                           dtype=np.uint64)

        # Prepare queue and thread for computing counts
        from queue import Queue
        from threading import Thread
        count_queue = Queue(maxsize=self.steps)
        count_thread = Thread(target=eval,
                              args=[
                                  count_queue, self.steps, TPs, relevant,
                                  selected, self.task_names, self.n_classes,
                                  Lock()
                              ])
        count_thread.start()

        # Get tensors to run and their names
        try:
            metrics_tensors = self.model.metrics_tensors
        except AttributeError:
            metrics_tensors = self.model._all_metrics_tensors
        metrics_names = self.model.metrics_names
        assert "loss" in metrics_names and metrics_names.index("loss") == 0
        assert len(metrics_names) - 1 == len(metrics_tensors)
        tensors = [self.model.total_loss
                   ] + metrics_tensors + self.model.outputs
        metrics_sums = {name: 0.0 for name in self.model.metrics_names}

        # Predict on all
        self.logger("")
        sess = tf.keras.backend.get_session()
        for i, res in enumerate(result):
            if self.verbose:
                print("   Validation: %i/%i" % (i + 1, self.steps),
                      end="\r",
                      flush=True)

            if len(res) == 3:
                X, y, _ = res
            else:
                X, y = res
            if len(self.task_names) == 1:
                X, y = [X], [y]
            X_ins = {
                in_tens: in_
                for in_tens, in_ in zip(self.model.inputs, X)
            }
            y_ins = {
                in_tens: in_
                for in_tens, in_ in zip(self.model.targets, y)
            }
            ins = {**X_ins, **y_ins}

            # Run the specified tensors
            outs = sess.run(tensors, feed_dict=ins)
            loss = outs[0]
            metrics = outs[1:1 + len(metrics_tensors)]
            pred = outs[1 + len(metrics_tensors):]

            # Put values in the queue for counting
            count_queue.put([pred, y])

            # Update metric sums
            for name, value in zip(metrics_names, [loss] + metrics):
                metrics_sums[name] += value

        # Compute mean metrics
        metric_means = {
            name: value / self.steps
            for name, value in metrics_sums.items()
        }

        # Terminate count thread
        self.logger("Waiting for counting queue to terminate...")
        count_thread.join()
        pool.shutdown()

        return TPs, relevant, selected, metric_means
Example #38
def main():
  usage = 'usage: %prog [options] <params_file> <model_file> <bed_file>'
  parser = OptionParser(usage)
  parser.add_option('-f', dest='genome_fasta',
      default=None,
      help='Genome FASTA for sequences [Default: %default]')
  parser.add_option('-l', dest='mut_len',
      default=200, type='int',
      help='Length of center sequence to mutate [Default: %default]')
  parser.add_option('-o', dest='out_dir',
      default='sat_mut', help='Output directory [Default: %default]')
  parser.add_option('--plots', dest='plots',
      default=False, action='store_true',
      help='Make heatmap plots [Default: %default]')
  parser.add_option('-p', dest='processes',
      default=None, type='int',
      help='Number of processes, passed by multi script')
  parser.add_option('--rc', dest='rc',
      default=False, action='store_true',
      help='Ensemble forward and reverse complement predictions [Default: %default]')
  parser.add_option('--shifts', dest='shifts',
      default='0',
      help='Ensemble prediction shifts [Default: %default]')
  parser.add_option('--stats', dest='sad_stats',
      default='sum',
      help='Comma-separated list of stats to save. [Default: %default]')
  parser.add_option('-t', dest='targets_file',
      default=None, type='str',
      help='File specifying target indexes and labels in table format')
  (options, args) = parser.parse_args()

  if len(args) == 3:
    # single worker
    params_file = args[0]
    model_file = args[1]
    bed_file = args[2]

  elif len(args) == 4:
    # master script
    options_pkl_file = args[0]
    params_file = args[1]
    model_file = args[2]
    bed_file = args[3]

    # load options
    options_pkl = open(options_pkl_file, 'rb')
    options = pickle.load(options_pkl)
    options_pkl.close()

  elif len(args) == 5:
    # multi worker
    options_pkl_file = args[0]
    params_file = args[1]
    model_file = args[2]
    bed_file = args[3]
    worker_index = int(args[4])

    # load options
    options_pkl = open(options_pkl_file, 'rb')
    options = pickle.load(options_pkl)
    options_pkl.close()

    # update output directory
    options.out_dir = '%s/job%d' % (options.out_dir, worker_index)

  else:
    parser.error('Must provide parameter and model files and BED file')

  if not os.path.isdir(options.out_dir):
    os.mkdir(options.out_dir)

  options.shifts = [int(shift) for shift in options.shifts.split(',')]
  options.sad_stats = [sad_stat.lower() for sad_stat in options.sad_stats.split(',')]

  #################################################################
  # read parameters and targets

  # read model parameters
  with open(params_file) as params_open:
    params = json.load(params_open)
  params_model = params['model']
  params_train = params['train']

  # read targets
  if options.targets_file is None:
    target_slice = None
  else:
    targets_df = pd.read_table(options.targets_file, index_col=0)
    target_slice = targets_df.index

  #################################################################
  # setup model

  seqnn_model = seqnn.SeqNN(params_model)
  seqnn_model.restore(model_file)
  seqnn_model.build_slice(target_slice)
  seqnn_model.build_ensemble(options.rc, options.shifts)

  num_targets = seqnn_model.num_targets()

  #################################################################
  # sequence dataset

  # read sequences from BED
  seqs_dna, seqs_coords = bed.make_bed_seqs(
    bed_file, options.genome_fasta, params_model['seq_length'], stranded=True)

  # filter for worker SNPs
  if options.processes is not None:
    worker_bounds = np.linspace(0, len(seqs_dna), options.processes+1, dtype='int')
    seqs_dna = seqs_dna[worker_bounds[worker_index]:worker_bounds[worker_index+1]]
    seqs_coords = seqs_coords[worker_bounds[worker_index]:worker_bounds[worker_index+1]]

  num_seqs = len(seqs_dna)

  # determine mutation region limits
  seq_mid = params_model['seq_length'] // 2
  mut_start = seq_mid - options.mut_len // 2
  mut_end = mut_start + options.mut_len

  # make sequence generator
  seqs_gen = satmut_gen(seqs_dna, mut_start, mut_end)

  #################################################################
  # setup output

  scores_h5_file = '%s/scores.h5' % options.out_dir
  if os.path.isfile(scores_h5_file):
    os.remove(scores_h5_file)
  scores_h5 = h5py.File('%s/scores.h5' % options.out_dir, 'w')
  scores_h5.create_dataset('seqs', dtype='bool',
      shape=(num_seqs, options.mut_len, 4))
  for sad_stat in options.sad_stats:
    scores_h5.create_dataset(sad_stat, dtype='float16',
        shape=(num_seqs, options.mut_len, 4, num_targets))

  # store mutagenesis sequence coordinates
  seqs_chr, seqs_start, _, seqs_strand = zip(*seqs_coords)
  seqs_chr = np.array(seqs_chr, dtype='S')
  seqs_start = np.array(seqs_start) + mut_start
  seqs_end = seqs_start + options.mut_len
  seqs_strand = np.array(seqs_strand, dtype='S')
  scores_h5.create_dataset('chrom', data=seqs_chr)
  scores_h5.create_dataset('start', data=seqs_start)
  scores_h5.create_dataset('end', data=seqs_end)
  scores_h5.create_dataset('strand', data=seqs_strand)

  preds_per_seq = 1 + 3*options.mut_len

  score_threads = []
  score_queue = Queue()
  for i in range(1):
    sw = ScoreWorker(score_queue, scores_h5, options.sad_stats)
    sw.start()
    score_threads.append(sw)

  #################################################################
  # predict scores, write output

  # find center
  preds_length = seqnn_model.target_lengths[0]
  center_start = preds_length // 2
  if preds_length % 2 == 0:
    center_end = center_start + 2
  else:
    center_end = center_start + 1

  # initialize predictions stream
  preds_stream = stream.PredStreamGen(seqnn_model, seqs_gen, params['train']['batch_size'])

  # predictions index
  pi = 0

  for si in range(num_seqs):
    print('Predicting %d' % si, flush=True)

    # collect sequence predictions
    seq_preds_sum = []
    seq_preds_center = []
    seq_preds_scd = []
    preds_mut0 = preds_stream[pi]
    for spi in range(preds_per_seq):
      preds_mut = preds_stream[pi]
      preds_sum = preds_mut.sum(axis=0)
      seq_preds_sum.append(preds_sum)
      if 'center' in options.sad_stats:
        preds_center = preds_mut[center_start:center_end,:].sum(axis=0)
        seq_preds_center.append(preds_center)
      if 'scd' in options.sad_stats:
        preds_scd = np.sqrt(((preds_mut-preds_mut0)**2).sum(axis=0))
        seq_preds_scd.append(preds_scd)
      pi += 1
    seq_preds_sum = np.array(seq_preds_sum)
    seq_preds_center = np.array(seq_preds_center)
    seq_preds_scd = np.array(seq_preds_scd)

    # wait for previous to finish
    score_queue.join()

    # queue sequence for scoring
    seq_pred_stats = (seq_preds_sum, seq_preds_center, seq_preds_scd)
    score_queue.put((seqs_dna[si], seq_pred_stats, si))
    
    # queue sequence for plotting; plot_queue and its worker are assumed to be
    # created elsewhere when --plots is given (they are not shown in this snippet)
    if options.plots:
      plot_queue.put((seqs_dna[si], seq_preds_sum, si))

    gc.collect()

  # finish queue
  print('Waiting for threads to finish.', flush=True)
  score_queue.join()

  # close output HDF5
  scores_h5.close()
Example #39
from queue import Queue


def reverse(queue):
    if queue.empty():
        return queue
    else:
        data = queue.queue[0]
        queue.get()
        reverse(queue)
        queue.put(data)


def printQueue(queue):
    while (not queue.empty()):
        data = queue.queue[0]
        queue.get()
        print(data, end=" ")


if __name__ == '__main__':
    queue = Queue()
    queue.put(1)
    queue.put(2)
    queue.put(3)
    queue.put(4)
    reverse(queue)
    printQueue(queue)
Example #40
class SCU: # SCU: Single Coalesced Unit
    def __init__(self, mtu=1500): # MTU: Maximum Transmission Unit
        self.mtu = mtu

    def bind_as_sender(self, receiver_address):
        self.mode = SCUMode.SendMode
        self.connection_manager = {}

        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.receiver_address = receiver_address
        self.lock = threading.Lock()

        sender_packet_loop_thread = threading.Thread(target=self._sender_packet_loop)
        sender_packet_loop_thread.daemon = True
        sender_packet_loop_thread.start()

    def bind_as_receiver(self, receiver_address):
        self.mode = SCUMode.RecvMode
        self.received_files_data = {}

        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.socket.bind(receiver_address)

        self.file_received = Queue()

        receiver_packet_loop_thread = threading.Thread(target=self._receiver_packet_loop)
        receiver_packet_loop_thread.daemon = True
        receiver_packet_loop_thread.start()

    def drop(self):
        if self.mode == SCUMode.SendMode:
            self.connection_manager.clear()
            self.socket.close()

    def _sender_packet_loop(self):
        if self.mode == SCUMode.RecvMode:
            raise Exception
        while True:
            try:
                packet = SCUPacket()
                packet.from_raw(self.socket.recv(2048))
                if packet.header.id not in self.connection_manager:
                    continue
                if packet.header.typ == SCUPacketType.Fin.value:
                    self.connection_manager[packet.header.id].put((True, packet.header.seq))
                elif packet.header.typ == SCUPacketType.Rtr.value:
                    self.connection_manager[packet.header.id].put((False, packet.header.seq))
            except KeyboardInterrupt:
                raise
            except Exception:  # recv or put failed; log it and keep the loop alive
                import traceback
                traceback.print_exc()

    def send(self, filepath, id): # will lock the thread
        if self.mode == SCUMode.RecvMode:
            raise Exception
        queue = Queue()
        self.connection_manager[id] = queue # Register a connection

        data_fragments = utils.split_file_into_mtu(filepath, self.mtu)

        all_packets = []
        for (seq, df) in enumerate(data_fragments):
            # create header
            header = SCUHeader()
            if seq == len(data_fragments) - 1:
                header.from_dict({ "typ": SCUPacketType.DataEnd.value, "id": id, "seq": seq, })
            else:
                header.from_dict({ "typ": SCUPacketType.Data.value, "id": id, "seq": seq, })
            # create packet
            packet = SCUPacket()
            packet.from_dict({ "header": header, "payload": df, })

            all_packets.append(packet)

        retransmit_seq = 0  # resend point: the earliest packet the receiver reports missing
        seq = 0
        while True:
            try:
                while True:
                    try:
                        fin, sq = queue.get(block=False)  # resend request or completion report
                        if fin:  # receiver has the whole file
                            del self.connection_manager[id]  # disconnect
                            return
                        elif sq < len(all_packets):  # retransmission requested
                            retransmit_seq = max(sq, retransmit_seq)
                    except Empty:  # queue.Empty: no pending reports from the receiver
                        break
                with self.lock: # Lock required as multiple send methods may be running concurrently in parallel
                    self.socket.sendto(all_packets[seq].raw(), self.receiver_address) # Packet transmission

                seq = max(seq + 1, retransmit_seq)  # advance seq, never falling behind the resend point
                if seq >= len(all_packets):
                    seq = retransmit_seq
            except KeyboardInterrupt:
                raise
            except Exception:  # sendto failed; log it and retry
                import traceback
                traceback.print_exc()

    def _receiver_packet_loop(self):
        if self.mode == SCUMode.SendMode:
            raise Exception
        received_files_flag = {}
        received_files_length = {}
        while True:
            try:
                data, from_addr = self.socket.recvfrom(2048)
                packet = SCUPacket()
                packet.from_raw(data)

                key = utils.endpoint2str(from_addr, packet.header.id)
                if key not in self.received_files_data:
                    self.received_files_data[key] = [b""]*100
                    received_files_flag[key] = False

                if received_files_flag[key]:
                    self.response(SCUPacketType.Fin.value, from_addr, packet.header.id, 0)
                    continue

                if packet.header.typ == SCUPacketType.DataEnd.value or packet.header.typ == SCUPacketType.Data.value:
                    if packet.header.typ == SCUPacketType.DataEnd.value:
                        received_files_length[key] = packet.header.seq + 1

                    self.received_files_data[key][packet.header.seq] = packet.payload
                    rtr = self.calculate_rtr(key, packet.header.seq)
                    if rtr is not None: # Need to request resend
                        self.response(SCUPacketType.Rtr.value, from_addr, packet.header.id, rtr)
                    elif key in received_files_length and self.is_all_received(key, received_files_length[key]): #  File reception completed
                        received_files_flag[key] = True
                        self.response(SCUPacketType.Fin.value, from_addr, packet.header.id, 0)
                        self.file_received.put((key, received_files_length[key]))

            except KeyboardInterrupt:
                raise
            except Exception:  # recv or parse failed; log it and keep receiving
                import traceback
                traceback.print_exc()

    def calculate_rtr(self, key, seq):
        for sq in range(0, seq):
            if not self.received_files_data[key][sq]:
                return sq
        return None

    def is_all_received(self, key, length):
        for i in range(0, length):
            if not self.received_files_data[key][i]:
                return False
        return True

    def response(self, typ, addr, id, rtr):
        if self.mode == SCUMode.SendMode:
            raise Exception
        if typ == SCUPacketType.Rtr.value:
            header = SCUHeader()
            header.from_dict({ "typ": typ, "id": id, "seq": rtr, })
            packet = SCUPacket()
            packet.from_dict({ "header": header, "payload": b'', })
            self.socket.sendto(packet.raw(), addr)

        elif typ == SCUPacketType.Fin.value:
            header = SCUHeader()
            header.from_dict({ "typ": typ, "id": id, "seq": rtr, })
            packet = SCUPacket()
            packet.from_dict({ "header": header, "payload": b'', })
            self.socket.sendto(packet.raw(), addr)

    def recv(self):
        if self.mode == SCUMode.SendMode:
            raise Exception
        key, length = self.file_received.get()
        return utils.fold_data(self.received_files_data[key], length)
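
# Hedged usage sketch for the SCU class above; the addresses and filename are
# illustrative, and SCUMode/SCUPacket/SCUHeader/utils come from the
# surrounding module:
#
#   receiver = SCU()
#   receiver.bind_as_receiver(("0.0.0.0", 9000))
#   sender = SCU()
#   sender.bind_as_sender(("127.0.0.1", 9000))
#   sender.send("payload.bin", id=0)   # blocks until the receiver reports Fin
#   data = receiver.recv()             # reassembled file bytes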
Example #41
0
    def inference(self, input, target, beam_size=5):
        ########
        # TODO #
        ########
        # Implement beam search here.
        # This function runs with batch size = 1.
        # input  = [batch size, input len, vocab size]
        # target = [batch size, target len, vocab size]
        batch_size = input.shape[0]
        input_len = input.shape[1]  # maximum output length
        vocab_size = self.decoder.cn_vocab_size

        # Allocate a tensor to store the outputs.
        outputs = torch.zeros(batch_size, input_len,
                              vocab_size).to(self.device)
        # Feed the input through the encoder.
        encoder_outputs, hidden = self.encoder(input)
        # The encoder's final hidden state initializes the decoder;
        # encoder_outputs are used mainly for attention.
        # The encoder is a bidirectional RNN, so the two directions' hidden
        # states of each layer are concatenated.
        # hidden = [num_layers * directions, batch size, hid dim] --> [num_layers, directions, batch size, hid dim]
        hidden = hidden.view(self.encoder.n_layers, 2, batch_size, -1)
        hidden = torch.cat((hidden[:, -2, :, :], hidden[:, -1, :, :]), dim=2)
        # Take the <BOS> token.
        input = target[:, 0]
        q = Queue()
        q.put(beam_node(None, hidden, input, 0, 1, None, []))
        preds = []
        out = []
        while not q.empty():
            candi = []
            for i in range(q.qsize()):
                obj = q.get()
                if obj.length >= input_len:
                    out.append(obj)
                    continue
                k = obj.input
                #print(k.shape, k)
                output, hidden = self.decoder(obj.input, obj.hidden,
                                              encoder_outputs)
                r = nn.Softmax(dim=1)
                prob, top = torch.topk(r(output), beam_size)
                prob = prob.squeeze(0)
                top = top.squeeze(0)
                #print(prob.shape, top.shape)
                for j in range(beam_size):
                    #print(prob[j].item())
                    tmp_pred = torch.tensor([top[j]]).cuda()
                    tmp_pred = tmp_pred.unsqueeze(1)
                    candi.append(
                        beam_node(obj, hidden,
                                  torch.tensor([top[j]]).cuda(),
                                  obj.prob + log(prob[j].item()),
                                  obj.length + 1, output,
                                  obj.pred + [tmp_pred]))
            topk_candi = sorted(candi, key=lambda a: a.prob, reverse=True)
            for i in range(min(len(topk_candi), beam_size)):
                q.put(topk_candi[i])
        out = sorted(out, key=lambda a: a.prob, reverse=True)
        current = out[0]
        cnt = input_len - 1
        while cnt > 0:
            outputs[:, cnt] = current.output
            current = current.parent
            cnt -= 1
        return outputs, torch.cat(out[0].pred, 1)
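
# The beam_node class used above is defined elsewhere; a minimal sketch of the
# container it appears to be, judging from the seven constructor arguments and
# the attributes read in the loop:
class beam_node:
    def __init__(self, parent, hidden, input, prob, length, output, pred):
        self.parent = parent    # previous node in the hypothesis chain
        self.hidden = hidden    # decoder hidden state at this step
        self.input = input      # token tensor fed to the decoder next
        self.prob = prob        # accumulated log-probability of the hypothesis
        self.length = length    # number of decoded steps so far
        self.output = output    # decoder output that produced this node
        self.pred = pred        # list of predicted token tensors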
Example #42
0
class Manager:
    def __init__(self):
        self._dbagent = DBAgent(new=0)

        self.scanner_queue = Queue()
        self.scanner_conn, scanner_side = Pipe()
        self.scanner_process = Process(target=s.Scanner, args=(scanner_side, ))
        self.scanner_process.daemon = True
        self.scanner_process.start()

        self.app = QApplication(sys.argv)
        self.main_window = MainWindow(self.loop, self.scan)
        self.main_window.deviceSelected = self._device_selected
        self.main_window.initUi()
        self.scan()
        self._update_devices(True)
        sys.exit(self.app.exec_())

    def loop(self):
        readables = list(filter(lambda c: c.poll(), [self.scanner_conn]))
        for readable in readables:
            data = readable.recv()
            if isinstance(data, ScanResult):
                self._dbagent.add_scan_result(data)
                self._update_devices()
                self.main_window.dbNewDevices = self._dbagent.get_new_device_count()
                if self.main_window.deviceWindow.isVisible():
                    self._update_device_window(
                        self._dbagent.get_device_info(self.open_device_id))
        if not self.scanner_queue.empty():
            self.scanner_conn.send(self.scanner_queue.get())

    def scan(self):
        if self.scanner_queue.empty():
            self.command_scanner(
                ('scan', ('10.100.102.0/24', s.NAME + s.VENDOR)))

    def _update_devices(self, loading=False):
        devices = self._dbagent.get_devices_info()
        if not devices:
            return
        self.main_window.device_ids = list(zip(*devices))[0]
        self.main_window.devicesTable.setRowCount(0)
        for row, device in enumerate([x[1:] for x in devices]):
            self.main_window.devicesTable.insertRow(row)
            for col, item in enumerate(device):
                if col == len(device) - 1 and loading:
                    self.main_window.devicesTable.setItem(
                        row, col, QTableWidgetItem('scanning...'))
                elif col == 0:
                    if item == 1:
                        pixmap = QPixmap(SRC_INFO)
                        pixmap = pixmap.scaled(25, 25, Qt.KeepAspectRatio,
                                               Qt.SmoothTransformation)
                        label = QLabel()
                        label.setPixmap(pixmap)
                        self.main_window.devicesTable.setCellWidget(
                            row, col, label)
                else:
                    self.main_window.devicesTable.setItem(
                        row, col, QTableWidgetItem(str(item)))

    def _device_selected(self, item):
        self.open_device_id = self.main_window.device_ids[item.row()]
        self._update_device_window(
            self._dbagent.get_device_info(self.open_device_id))
        self.main_window.deviceWindow.show()

    def _update_device_window(self, data, scan_clicked=False):
        ports = ''
        if scan_clicked:
            if data[-1]:
                ports = data[-1] + ' '
            ports += '<label>(scanning...)</label>'
        elif data[-1]:
            ports = data[-1] + ' <a href="scan">(scan again)</a>'
        else:
            ports = '<a href="scan">(scan ports)</a>'
        self.main_window.deviceWindow.deviceLabel.setText('''<html>
                 <head>
                 </head>
                 <body>
                   <b>Name:</b> %s<br />
                   <b>IP address:</b> %s<br />
                   <b>MAC address:</b> %s<br />
                   <b>NIC vendor:</b> %s<br />
                   <b>First joined:</b> %s<br />
                   <b>Last detected:</b> %s<br />
                   <b>Open ports:</b> %s<br />
                 </body>
               </html>''' % (*data[:-1], ports))
        self.main_window.deviceWindow.deviceLabel.linkActivated.connect(
            lambda _: 0)
        self.main_window.deviceWindow.deviceLabel.linkActivated.disconnect()
        if not scan_clicked:
            self.main_window.deviceWindow.deviceLabel.linkActivated.connect(
                lambda _: self._device_scan_clicked(data[1], data[2]))

    def _device_scan_clicked(self, ip, mac):
        self.main_window.deviceWindow.deviceLabel.linkActivated.disconnect()
        self._update_device_window(
            self._dbagent.get_device_info(self.open_device_id), True)
        self.command_scanner(('scan_ports', (ip, mac)))

    def command_scanner(self, command):
        self.scanner_queue.put(command)
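
# Hedged sketch (not the real s.Scanner) of the command protocol implied above:
# the Manager pushes (name, args) tuples through the Pipe, and the scanner
# process answers with result objects that loop() picks up via poll()/recv().
def scanner_stub(conn):
    while True:
        name, args = conn.recv()   # e.g. ('scan', ('10.100.102.0/24', flags))
        conn.send((name, args))    # a real scanner would send a ScanResult here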
Example #43
0
class DatabaseThread(Thread):

    subs = []

    DB_FILENAME = "pyload.db"
    VERSION_FILENAME = "db.version"

    def __init__(self, core):
        super().__init__()
        self.daemon = True
        self.pyload = core
        self._ = core._

        datadir = os.path.join(self.pyload.userdir, "data")
        os.makedirs(datadir, exist_ok=True)

        self.db_path = os.path.join(datadir, self.DB_FILENAME)
        self.version_path = os.path.join(datadir, self.VERSION_FILENAME)

        self.jobs = Queue()

        self.setuplock = Event()

        style.set_db(self)

    def setup(self):
        self.start()
        self.setuplock.wait()

    def run(self):
        """
        main loop, which executes commands.
        """
        convert = self._check_version()  #: returns None or current version

        self.conn = sqlite3.connect(self.db_path, isolation_level=None)
        os.chmod(self.db_path, 0o600)

        self.c = self.conn.cursor()  #: compatibility

        if convert is not None:
            self._convert_db(convert)

        self._create_tables()
        self._migrate_user()

        self.conn.commit()

        self.setuplock.set()

        while True:
            j = self.jobs.get()
            if j == "quit":
                self.c.close()
                self.conn.close()
                break
            j.process_job()

    @style.queue
    def shutdown(self):
        self.conn.commit()
        self.jobs.put("quit")

    def _check_version(self):
        """
        check db version and delete it if needed.
        """
        if not os.path.exists(self.version_path):
            with open(self.version_path, mode="w") as fp:
                fp.write(str(__version__))
            return

        with open(self.version_path) as fp:
            v = int(fp.read().strip())

        if v < __version__:
            if v < 2:
                self.pyload.log.warning(
                    self._("Filedatabase was deleted due to incompatible version."))
                os.remove(self.version_path)
                shutil.move(self.db_path, "files.backup.db")
            with open(self.version_path, mode="w") as fp:
                fp.write(str(__version__))
            return v

    def _convert_db(self, v):
        try:
            getattr(self, f"_convertV{v}")()
        except Exception:
            self.pyload.log.error(
                self._("Filedatabase could NOT be converted."))

    # --convert scripts start

    def _convertV2(self):
        self.c.execute(
            'CREATE TABLE IF NOT EXISTS "storage" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "identifier" TEXT NOT NULL, "key" TEXT NOT NULL, "value" TEXT DEFAULT "")'
        )
        self.pyload.log.info(self._("Database was converted from v2 to v3."))
        self._convertV3()

    def _convertV3(self):
        self.c.execute(
            'CREATE TABLE IF NOT EXISTS "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "email" TEXT DEFAULT "" NOT NULL, "password" TEXT NOT NULL, "role" INTEGER DEFAULT 0 NOT NULL, "permission" INTEGER DEFAULT 0 NOT NULL, "template" TEXT DEFAULT "default" NOT NULL)'
        )
        self.pyload.log.info(self._("Database was converted from v3 to v4."))

    # --convert scripts end

    def _create_tables(self):
        """
        create tables for database.
        """
        self.c.execute(
            'CREATE TABLE IF NOT EXISTS "packages" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "folder" TEXT, "password" TEXT DEFAULT "", "site" TEXT DEFAULT "", "queue" INTEGER DEFAULT 0 NOT NULL, "packageorder" INTEGER DEFAULT 0 NOT NULL)'
        )
        self.c.execute(
            'CREATE TABLE IF NOT EXISTS "links" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "url" TEXT NOT NULL, "name" TEXT, "size" INTEGER DEFAULT 0 NOT NULL, "status" INTEGER DEFAULT 3 NOT NULL, "plugin" TEXT DEFAULT "DefaultPlugin" NOT NULL, "error" TEXT DEFAULT "", "linkorder" INTEGER DEFAULT 0 NOT NULL, "package" INTEGER DEFAULT 0 NOT NULL, FOREIGN KEY(package) REFERENCES packages(id))'
        )
        self.c.execute(
            'CREATE INDEX IF NOT EXISTS "p_id_index" ON links(package)')
        self.c.execute(
            'CREATE TABLE IF NOT EXISTS "storage" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "identifier" TEXT NOT NULL, "key" TEXT NOT NULL, "value" TEXT DEFAULT "")'
        )
        self.c.execute(
            'CREATE TABLE IF NOT EXISTS "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" TEXT NOT NULL, "email" TEXT DEFAULT "" NOT NULL, "password" TEXT NOT NULL, "role" INTEGER DEFAULT 0 NOT NULL, "permission" INTEGER DEFAULT 0 NOT NULL, "template" TEXT DEFAULT "default" NOT NULL)'
        )

        self.c.execute('CREATE VIEW IF NOT EXISTS "pstats" AS \
        SELECT p.id AS id, SUM(l.size) AS sizetotal, COUNT(l.id) AS linkstotal, linksdone, sizedone\
        FROM packages p JOIN links l ON p.id = l.package LEFT OUTER JOIN\
        (SELECT p.id AS id, COUNT(*) AS linksdone, SUM(l.size) AS sizedone \
        FROM packages p JOIN links l ON p.id = l.package AND l.status in (0,4,13) GROUP BY p.id) s ON s.id = p.id \
        GROUP BY p.id')

        # try to lower ids
        self.c.execute("SELECT max(id) FROM LINKS")
        fid = self.c.fetchone()[0]
        if fid:
            fid = int(fid)
        else:
            fid = 0
        self.c.execute("UPDATE SQLITE_SEQUENCE SET seq=? WHERE name=?",
                       (fid, "links"))

        self.c.execute("SELECT max(id) FROM packages")
        pid = self.c.fetchone()[0]
        if pid:
            pid = int(pid)
        else:
            pid = 0
        self.c.execute("UPDATE SQLITE_SEQUENCE SET seq=? WHERE name=?",
                       (pid, "packages"))

        self.c.execute("VACUUM")

    def _migrate_user(self):
        if os.path.exists("pyload.db"):
            self.pyload.log.info(self._("Converting old Django DB"))

            with sqlite3.connect("pyload.db", isolation_level=None) as conn:
                with closing(conn.cursor()) as c:
                    c.execute(
                        "SELECT username, password, email from auth_user WHERE is_superuser"
                    )
                    users = []
                    for r in c:
                        pw = r[1].split("$")
                        users.append((r[0], pw[1] + pw[2], r[2]))

            self.c.executemany(
                "INSERT INTO users(name, password, email) VALUES (?, ?, ?)",
                users)
            shutil.move("pyload.db", "pyload.old.db")

    def create_cursor(self):
        return self.conn.cursor()

    @style.async_
    def commit(self):
        self.conn.commit()

    @style.queue
    def sync_save(self):
        self.conn.commit()

    @style.async_
    def rollback(self):
        self.conn.rollback()

    def async_(self, f, *args, **kwargs):
        args = (self, ) + args
        job = DatabaseJob(f, *args, **kwargs)
        self.jobs.put(job)

    def queue(self, f, *args, **kwargs):
        args = (self, ) + args
        job = DatabaseJob(f, *args, **kwargs)
        self.jobs.put(job)
        job.wait()
        return job.result

    @classmethod
    def register_sub(cls, klass):
        cls.subs.append(klass)

    @classmethod
    def unregister_sub(cls, klass):
        cls.subs.remove(klass)

    def __getattr__(self, attr):
        for sub in DatabaseThread.subs:
            if hasattr(sub, attr):
                return getattr(sub, attr)

        raise AttributeError(
            f"'{self.__class__.__name__}' object has no attribute '{attr}'")
Example #44
0
def port_scan(task_id):
    # Connect to the database and fetch the task data: target domain or IP,
    # start port, end port, thread count, and scan method.
    result = get_task_info(task_id)
    target = result[0]
    start_port = result[1]
    end_port = result[2]
    thread_num = result[3]
    scan_method = result[4]

    # Try to resolve the target domain to an IP address
    target_ip = get_ip(target)

    # Queue holding results waiting to be stored
    data_queue = Queue()

    # Queue holding the ports to scan
    port_queue = Queue()
    for port in range(start_port, (end_port + 1)):
        port_queue.put(port)

    # List of started port-scanning threads
    thread_list = []

    # List of thread names
    thread_name_list = []
    for num in range(thread_num):
        thread_name = f"port_scan_thread_{num}"
        thread_name_list.append(thread_name)

    # Update the task's scan status in the database to "running"
    task_status = "running"
    update_task_status(task_id, task_status)

    # Create and start the port-scanning threads
    for thread_id in thread_name_list:
        thread = PortScanThread(
            thread_id=thread_id,
            target_ip=target_ip,
            scan_method=scan_method,
            port_queue=port_queue,
            data_queue=data_queue,
        )
        thread.start()
        thread_list.append(thread)

    # Wait for all scanning threads to finish
    for t in thread_list:
        t.join()

    # Drain the queued results into a list
    port_list = list(data_queue.queue)
    print(port_list)

    # Save the results and update the task's final status
    if port_list:
        save_data(task_id, target_ip, port_list)
        task_status = 'completed'
        update_task_status(task_id, task_status)
    else:
        task_status = 'failed'
        update_task_status(task_id, task_status)
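
# Hedged sketch of what a PortScanThread like the one above might do for a
# plain TCP connect scan; the real class is defined elsewhere, so treat this
# only as an illustration of the port_queue/data_queue protocol.
import socket
import threading
from queue import Empty

class ConnectScanThread(threading.Thread):
    def __init__(self, target_ip, port_queue, data_queue, timeout=1.0):
        super().__init__()
        self.target_ip = target_ip
        self.port_queue = port_queue   # ports still to probe
        self.data_queue = data_queue   # open ports found
        self.timeout = timeout

    def run(self):
        while True:
            try:
                port = self.port_queue.get(block=False)
            except Empty:
                break  # no ports left; join() in the caller then returns
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                s.settimeout(self.timeout)
                if s.connect_ex((self.target_ip, port)) == 0:
                    self.data_queue.put(port)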
Example #45
0
def evalvideo(net: Yolact, path: str):
    # If the path is a digit, parse it as a webcam index
    is_webcam = path.isdigit()

    if is_webcam:
        vid = cv2.VideoCapture(int(path))
    else:
        vid = cv2.VideoCapture(path)

    if not vid.isOpened():
        print('Could not open video "%s"' % path)
        exit(-1)

    net = CustomDataParallel(net).cuda()
    transform = torch.nn.DataParallel(FastBaseTransform()).cuda()
    frame_times = MovingAverage(100)
    fps = 0
    # Target one frame period of the source video
    frame_time_target = 1 / vid.get(cv2.CAP_PROP_FPS)
    running = True

    def cleanup_and_exit():
        print()
        pool.terminate()
        vid.release()
        cv2.destroyAllWindows()
        exit()

    def get_next_frame(vid):
        return [vid.read()[1] for _ in range(args.video_multiframe)]

    def transform_frame(frames):
        with torch.no_grad():
            frames = [
                torch.from_numpy(frame).cuda().float() for frame in frames
            ]
            return frames, transform(torch.stack(frames, 0))

    def eval_network(inp):
        with torch.no_grad():
            frames, imgs = inp
            return frames, net(imgs)

    def prep_frame(inp):
        with torch.no_grad():
            frame, preds = inp
            return prep_display(preds,
                                frame,
                                None,
                                None,
                                undo_transform=False,
                                class_color=True)

    frame_buffer = Queue()
    video_fps = 0

    # All this timing code is to keep playback at the video's native framerate
    def play_video():
        nonlocal frame_buffer, running, video_fps, is_webcam

        video_frame_times = MovingAverage(100)
        frame_time_stabilizer = frame_time_target
        last_time = None
        stabilizer_step = 0.0005

        while running:
            frame_time_start = time.time()

            if not frame_buffer.empty():
                next_time = time.time()
                if last_time is not None:
                    video_frame_times.add(next_time - last_time)
                    video_fps = 1 / video_frame_times.get_avg()
                cv2.imshow(path, frame_buffer.get())
                last_time = next_time

            if cv2.waitKey(1) == 27:  # Press Escape to close
                running = False

            buffer_size = frame_buffer.qsize()
            if buffer_size < args.video_multiframe:
                frame_time_stabilizer += stabilizer_step
            elif buffer_size > args.video_multiframe:
                frame_time_stabilizer -= stabilizer_step
                if frame_time_stabilizer < 0:
                    frame_time_stabilizer = 0

            new_target = frame_time_stabilizer if is_webcam else max(
                frame_time_stabilizer, frame_time_target)

            next_frame_target = max(
                2 * new_target - video_frame_times.get_avg(), 0)
            target_time = frame_time_start + next_frame_target - 0.001  # Let's just subtract a millisecond to be safe
            # This gives more accurate timing than if sleeping the whole amount at once
            while time.time() < target_time:
                time.sleep(0.001)

    extract_frame = lambda x, i: (
        x[0][i] if x[1][i] is None else x[0][i].to(x[1][i]['box'].device),
        [x[1][i]])

    # Prime the network on the first frame because I do some thread unsafe things otherwise
    print('Initializing model... ', end='')
    eval_network(transform_frame(get_next_frame(vid)))
    print('Done.')

    # For each frame the sequence of functions it needs to go through to be processed (in reversed order)
    sequence = [prep_frame, eval_network, transform_frame]
    pool = ThreadPool(processes=len(sequence) + args.video_multiframe + 2)
    pool.apply_async(play_video)

    active_frames = []

    print()
    while vid.isOpened() and running:
        start_time = time.time()

        # Start loading the next frames from the disk
        next_frames = pool.apply_async(get_next_frame, args=(vid, ))

        # For each frame in our active processing queue, dispatch a job
        # for that frame using the current function in the sequence
        for frame in active_frames:
            frame['value'] = pool.apply_async(sequence[frame['idx']],
                                              args=(frame['value'], ))

        # For each frame whose job was the last in the sequence (i.e. for all final outputs)
        for frame in active_frames:
            if frame['idx'] == 0:
                frame_buffer.put(frame['value'].get())

        # Remove the finished frames from the processing queue
        active_frames = [x for x in active_frames if x['idx'] > 0]

        # Finish evaluating every frame in the processing queue and advance its position in the sequence
        for frame in list(reversed(active_frames)):
            frame['value'] = frame['value'].get()
            frame['idx'] -= 1

            if frame['idx'] == 0:
                # Split this up into individual threads for prep_frame since it doesn't support batch size
                active_frames += [{
                    'value': extract_frame(frame['value'], i),
                    'idx': 0
                } for i in range(1, args.video_multiframe)]
                frame['value'] = extract_frame(frame['value'], 0)

        # Finish loading in the next frames and add them to the processing queue
        active_frames.append({
            'value': next_frames.get(),
            'idx': len(sequence) - 1
        })

        # Compute FPS
        frame_times.add(time.time() - start_time)
        fps = args.video_multiframe / frame_times.get_avg()

        print(
            '\rProcessing FPS: %.2f | Video Playback FPS: %.2f | Frames in Buffer: %d    '
            % (fps, video_fps, frame_buffer.qsize()),
            end='')

    cleanup_and_exit()
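
# Hedged sketch of the MovingAverage helper used above (the real one ships
# with YOLACT): a bounded window supporting add() and get_avg().
from collections import deque

class MovingAverageSketch:
    def __init__(self, window):
        self.values = deque(maxlen=window)

    def add(self, x):
        self.values.append(x)

    def get_avg(self):
        return sum(self.values) / max(len(self.values), 1)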
Example #46
0
class BitmexRestApi(object):
    """REST API"""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.apiKey = ''
        self.apiSecret = ''
        self.host = ''

        self.active = False
        self.reqid = 0
        self.queue = Queue()
        self.pool = None
        self.sessionDict = {}  # per-worker session objects

        self.header = {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Accept': 'application/json'
        }

    #----------------------------------------------------------------------
    def init(self, apiKey, apiSecret, testnet=False):
        """初始化"""
        self.apiKey = apiKey
        self.apiSecret = apiSecret

        if testnet:
            self.host = TESTNET_REST_HOST
        else:
            self.host = REST_HOST

    #----------------------------------------------------------------------
    def start(self, n=3):
        """启动"""
        if self.active:
            return

        self.active = True
        self.pool = Pool(n)
        self.pool.map_async(self.run, range(n))

    #----------------------------------------------------------------------
    def close(self):
        """关闭"""
        self.active = False

        if self.pool:
            self.pool.close()
            self.pool.join()

    #----------------------------------------------------------------------
    def addReq(self, method, path, callback, params=None, postdict=None):
        """添加请求"""
        self.reqid += 1
        req = (method, path, callback, params, postdict, self.reqid)
        self.queue.put(req)
        return self.reqid

    #----------------------------------------------------------------------
    def processReq(self, req, i):
        """处理请求"""
        method, path, callback, params, postdict, reqid = req
        url = self.host + path
        expires = int(time() + 5)

        rq = requests.Request(url=url, data=postdict)
        p = rq.prepare()

        header = copy(self.header)
        header['api-expires'] = str(expires)
        header['api-key'] = self.apiKey
        header['api-signature'] = self.generateSignature(method,
                                                         path,
                                                         expires,
                                                         params,
                                                         body=p.body)

        # Reuse a keep-alive session; roughly 80% faster than per-request connections
        session = self.sessionDict[i]
        resp = session.request(method,
                               url,
                               headers=header,
                               params=params,
                               data=postdict)

        #resp = requests.request(method, url, headers=header, params=params, data=postdict)

        code = resp.status_code
        d = resp.json()

        if code == 200:
            callback(d, reqid)
        else:
            self.onError(code, d)

    #----------------------------------------------------------------------
    def run(self, i):
        """连续运行"""
        self.sessionDict[i] = requests.Session()

        while self.active:
            try:
                req = self.queue.get(timeout=1)
                self.processReq(req, i)
            except Empty:
                pass

    #----------------------------------------------------------------------
    def generateSignature(self, method, path, expires, params=None, body=None):
        """生成签名"""
        # 对params在HTTP报文路径中,以请求字段方式序列化
        if params:
            query = urlencode(params.items())
            path = path + '?' + query

        if body is None:
            body = ''

        msg = method + '/api/v1' + path + str(expires) + body
        # hmac in Python 3 requires bytes for both key and message
        signature = hmac.new(self.apiSecret.encode(), msg.encode(),
                             digestmod=hashlib.sha256).hexdigest()
        return signature

    #----------------------------------------------------------------------
    def onError(self, code, error):
        """错误回调"""
        print('on error')
        print(code, error)

    #----------------------------------------------------------------------
    def onData(self, data, reqid):
        """通用回调"""
        print('on data')
        print(data, reqid)
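
# Hedged usage sketch for the REST wrapper above; the endpoint and params are
# illustrative and the keys are placeholders:
#
#   api = BitmexRestApi()
#   api.init("API_KEY", "API_SECRET", testnet=True)
#   api.start(n=3)                       # spin up three workers
#   api.addReq("GET", "/instrument", api.onData, params={"symbol": "XBTUSD"})
#   ...
#   api.close()                          # stop workers and join the pool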
Example #47
0
class ReplaceRobot(Bot):

    """A bot that can do text replacements.

    @param generator: generator that yields Page objects
    @type generator: generator
    @param replacements: a list of Replacement instances or sequences of
        length 2 with the original text (as a compiled regular expression)
        and replacement text (as a string).
    @type replacements: list
    @param exceptions: a dictionary which defines when not to change an
        occurrence. This dictionary can have these keys:

        title
            A list of regular expressions. All pages with titles that
            are matched by one of these regular expressions are skipped.
        text-contains
            A list of regular expressions. All pages with text that
            contains a part which is matched by one of these regular
            expressions are skipped.
        inside
            A list of regular expressions. All occurrences are skipped which
            lie within a text region which is matched by one of these
            regular expressions.
        inside-tags
            A list of strings. These strings must be keys from the
            dictionary in textlib._create_default_regexes() or must be
            accepted by textlib._get_regexes().

    @type exceptions: dict
    @param allowoverlap: when matches overlap, all of them are replaced.
    @type allowoverlap: bool
    @param recursive: Recurse replacement as long as possible.
    @type recursive: bool
    @warning: Be careful, this might lead to an infinite loop.
    @param addedCat: category to be added to every page touched
    @type addedCat: pywikibot.Category or str or None
    @param sleep: slow down between processing multiple regexes
    @type sleep: int
    @param summary: Set the summary message text bypassing the default
    @type summary: str
    @keyword always: the user won't be prompted before changes are made
    @type always: bool
    @keyword site: Site the bot is working on.
    @warning: site parameter should be passed to constructor.
        Otherwise the bot takes the current site and warns the operator
        about the missing site
    """

    @deprecated_args(acceptall='always')
    def __init__(self, generator, replacements, exceptions={},
                 allowoverlap=False, recursive=False, addedCat=None,
                 sleep=None, summary='', **kwargs):
        """Initializer."""
        super(ReplaceRobot, self).__init__(generator=generator,
                                           **kwargs)

        for i, replacement in enumerate(replacements):
            if isinstance(replacement, Sequence):
                if len(replacement) != 2:
                    raise ValueError('Replacement number {0} does not have '
                                     'exactly two elements: {1}'.format(
                                         i, replacement))
                # Replacement assumes it gets strings but it's already compiled
                replacements[i] = Replacement.from_compiled(replacement[0],
                                                            replacement[1])
        self.replacements = replacements
        self.exceptions = exceptions
        self.allowoverlap = allowoverlap
        self.recursive = recursive

        if addedCat:
            if isinstance(addedCat, pywikibot.Category):
                self.addedCat = addedCat
            else:
                self.addedCat = pywikibot.Category(self.site, addedCat)

        self.sleep = sleep
        self.summary = summary
        self.changed_pages = 0
        self._pending_processed_titles = Queue()

    def isTitleExcepted(self, title, exceptions=None):
        """
        Return True iff one of the exceptions applies for the given title.

        @rtype: bool
        """
        if exceptions is None:
            exceptions = self.exceptions
        if 'title' in exceptions:
            for exc in exceptions['title']:
                if exc.search(title):
                    return True
        if 'require-title' in exceptions:
            for req in exceptions['require-title']:
                if not req.search(title):
                    return True
        return False

    def isTextExcepted(self, original_text):
        """
        Return True iff one of the exceptions applies for the given text.

        @rtype: bool
        """
        if 'text-contains' in self.exceptions:
            for exc in self.exceptions['text-contains']:
                if exc.search(original_text):
                    return True
        return False

    def apply_replacements(self, original_text, applied, page=None):
        """
        Apply all replacements to the given text.

        @rtype: str, set
        """
        if page is None:
            pywikibot.warn(
                'You must pass the target page as the "page" parameter to '
                'apply_replacements().', DeprecationWarning, stacklevel=2)
        new_text = original_text
        exceptions = _get_text_exceptions(self.exceptions)
        skipped_containers = set()
        for replacement in self.replacements:
            if self.sleep is not None:
                time.sleep(self.sleep)
            if (replacement.container
                    and replacement.container.name in skipped_containers):
                continue
            elif page is not None and self.isTitleExcepted(
                    page.title(), replacement.exceptions):
                if replacement.container:
                    pywikibot.output(
                        'Skipping fix "{0}" on {1} because the title is on '
                        'the exceptions list.'.format(
                            replacement.container.name,
                            page.title(as_link=True)))
                    skipped_containers.add(replacement.container.name)
                else:
                    pywikibot.output(
                        'Skipping unnamed replacement ({0}) on {1} because '
                        'the title is on the exceptions list.'.format(
                            replacement.description, page.title(as_link=True)))
                continue
            old_text = new_text
            new_text = textlib.replaceExcept(
                new_text, replacement.old_regex, replacement.new,
                exceptions + replacement.get_inside_exceptions(),
                allowoverlap=self.allowoverlap, site=self.site)
            if old_text != new_text:
                applied.add(replacement)

        return new_text

    @deprecated('apply_replacements', since='20160816')
    def doReplacements(self, original_text, page=None):
        """Apply replacements to the given text and page."""
        if page is None:
            pywikibot.warn(
                'You must pass the target page as the "page" parameter to '
                'doReplacements().', DeprecationWarning, stacklevel=2)
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', DeprecationWarning)
            new_text = self.apply_replacements(original_text, set(), page=page)
        return new_text

    def _count_changes(self, page, err):
        """Count successfully changed pages; log changed titles for display."""
        # This is an async put callback
        if not isinstance(err, Exception):
            self.changed_pages += 1
            self._pending_processed_titles.put((page.title(
                as_link=True), True))
        else:  # unsuccessful pages
            self._pending_processed_titles.put((page.title(as_link=True),
                                                False))

    def _replace_async_callback(self, page, err):
        """Callback for asynchronous page edit."""
        self._count_changes(page, err)

    def _replace_sync_callback(self, page, err):
        """Callback for synchronous page edit."""
        self._count_changes(page, err)
        if isinstance(err, Exception):
            raise err

    def generate_summary(self, applied_replacements):
        """Generate a summary message for the replacements."""
        # all replacements which are merged into the default message
        default_summaries = set()
        # all message parts
        summary_messages = set()
        for replacement in applied_replacements:
            if replacement.edit_summary:
                summary_messages.add(replacement.edit_summary)
            elif replacement.default_summary:
                default_summaries.add((replacement.old, replacement.new))
        summary_messages = sorted(summary_messages)
        if default_summaries:
            if self.summary:
                summary_messages.insert(0, self.summary)
            else:
                comma = self.site.mediawiki_message('comma-separator')
                default_summary = comma.join(
                    '-{0} +{1}'.format(*default_summary)
                    for default_summary in default_summaries)
                summary_messages.insert(0, i18n.twtranslate(
                    self.site, 'replace-replacing',
                    {'description': ' ({0})'.format(default_summary)}
                ))
        semicolon = self.site.mediawiki_message('semicolon-separator')
        return semicolon.join(summary_messages)

    def run(self):
        """Start the bot."""
        # Run the generator which will yield Pages which might need to be
        # changed.
        for page in self.generator:
            if self.isTitleExcepted(page.title()):
                pywikibot.output(
                    'Skipping {0} because the title is on the exceptions list.'
                    .format(page.title(as_link=True)))
                continue
            try:
                # Load the page's text from the wiki
                original_text = page.get(get_redirect=True)
                if not page.canBeEdited():
                    pywikibot.output("You can't edit page "
                                     + page.title(as_link=True))
                    continue
            except pywikibot.NoPage:
                pywikibot.output('Page {0} not found'
                                 .format(page.title(as_link=True)))
                continue
            applied = set()
            new_text = original_text
            last_text = None
            context = 0
            while True:
                if self.isTextExcepted(new_text):
                    pywikibot.output('Skipping {0} because it contains text '
                                     'that is on the exceptions list.'
                                     .format(page.title(as_link=True)))
                    break
                while new_text != last_text:
                    last_text = new_text
                    new_text = self.apply_replacements(last_text, applied,
                                                       page)
                    if not self.recursive:
                        break
                if new_text == original_text:
                    pywikibot.output('No changes were necessary in '
                                     + page.title(as_link=True))
                    break
                if hasattr(self, 'addedCat'):
                    # Fetch only categories in wikitext, otherwise the others
                    # will be explicitly added.
                    cats = textlib.getCategoryLinks(new_text, site=page.site)
                    if self.addedCat not in cats:
                        cats.append(self.addedCat)
                        new_text = textlib.replaceCategoryLinks(new_text,
                                                                cats,
                                                                site=page.site)
                # Show the title of the page we're working on.
                # Highlight the title in purple.
                pywikibot.output(color_format(
                    '\n\n>>> {lightpurple}{0}{default} <<<', page.title()))
                pywikibot.showDiff(original_text, new_text, context=context)
                if self.getOption('always'):
                    break
                choice = pywikibot.input_choice(
                    'Do you want to accept these changes?',
                    [('Yes', 'y'), ('No', 'n'), ('Edit original', 'e'),
                     ('edit Latest', 'l'), ('open in Browser', 'b'),
                     ('More context', 'm'), ('All', 'a')],
                    default='N')
                if choice == 'm':
                    context = context * 3 if context else 3
                    continue
                if choice == 'e':
                    editor = editarticle.TextEditor()
                    as_edited = editor.edit(original_text)
                    # if user didn't press Cancel
                    if as_edited and as_edited != new_text:
                        new_text = as_edited
                    continue
                if choice == 'l':
                    editor = editarticle.TextEditor()
                    as_edited = editor.edit(new_text)
                    # if user didn't press Cancel
                    if as_edited and as_edited != new_text:
                        new_text = as_edited
                        # prevent changes from being applied again
                        last_text = new_text
                    continue
                if choice == 'b':
                    pywikibot.bot.open_webbrowser(page)
                    try:
                        original_text = page.get(get_redirect=True, force=True)
                    except pywikibot.NoPage:
                        pywikibot.output('Page {0} has been deleted.'
                                         .format(page.title()))
                        break
                    new_text = original_text
                    last_text = None
                    continue
                if choice == 'a':
                    self.options['always'] = True
                if choice == 'y':
                    page.text = new_text
                    page.save(summary=self.generate_summary(applied),
                              asynchronous=True,
                              callback=self._replace_async_callback,
                              quiet=True)
                while not self._pending_processed_titles.empty():
                    proc_title, res = self._pending_processed_titles.get()
                    pywikibot.output('Page {0}{1} saved'
                                     .format(proc_title,
                                             '' if res else ' not'))
                # choice must be 'N'
                break
            if self.getOption('always') and new_text != original_text:
                try:
                    page.text = new_text
                    page.save(summary=self.generate_summary(applied),
                              callback=self._replace_sync_callback, quiet=True)
                except pywikibot.EditConflict:
                    pywikibot.output('Skipping {0} because of edit conflict'
                                     .format(page.title(),))
                except pywikibot.SpamfilterError as e:
                    pywikibot.output(
                        'Cannot change {0} because of blacklist entry {1}'
                        .format(page.title(), e.url))
                except pywikibot.LockedPage:
                    pywikibot.output('Skipping {0} (locked page)'
                                     .format(page.title(),))
                except pywikibot.PageNotSaved as error:
                    pywikibot.output('Error putting page: {0}'
                                     .format(error.args,))
                if self._pending_processed_titles.qsize() > 50:
                    while not self._pending_processed_titles.empty():
                        proc_title, res = self._pending_processed_titles.get()
                        pywikibot.output('Page {0}{1} saved'
                                         .format(proc_title,
                                                 '' if res else ' not'))
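
# Hedged usage sketch for ReplaceRobot; the generator, regex, and summary are
# illustrative, and per the class docstring the patterns must be pre-compiled:
#
#   import re
#   replacements = [(re.compile(r'\bcolour\b'), 'color')]
#   exceptions = {'inside-tags': ['nowiki', 'comment']}
#   bot = ReplaceRobot(generator, replacements, exceptions=exceptions,
#                      summary='Spelling harmonisation')
#   bot.run()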
Example #48
0
class EventManager:
    def __init__(self):
        """初始化事件管理器"""
        # 事件对象列表
        self.__eventQueue = Queue()
        # 事件管理器开关
        self.__active = False
        # 事件处理线程
        self.__thread = Thread(target=self.__run, name='Synchronous')
        # 事件处理线程池
        self.__pool = ThreadPoolExecutor(3)
        # 阻塞函数列表
        self.__block = []

        # 这里的__handlers是一个字典,用来保存对应的事件的响应函数
        # 其中每个键对应的值是一个列表,列表中保存了对该事件监听的响应函数,一对多
        self.__handlers = {}

        self.__method = {}

    def __run(self):
        """引擎运行"""
        while self.__active is True:
            try:
                # 获取事件的阻塞时间设为1秒
                event = self.__eventQueue.get(block=True, timeout=1)
                self.__event_process(event)
            except Empty:
                pass

    def __event_process(self, event):
        """处理事件"""
        # 检查是否存在对该事件进行监听的处理函数
        if event.type_ in self.__handlers:
            # 若存在,则按顺序将事件传递给处理函数执行
            for handler in self.__handlers[event.type_]:
                if handler.__qualname__ in self.__block:
                    self.__pool.submit(handler, event)
                else:
                    handler(event)

    def start(self):
        """启动"""
        # 将事件管理器设为启动
        self.__active = True
        # 启动事件处理线程
        self.__thread.start()

    def stop(self):
        """停止"""
        # 将事件管理器设为停止
        self.__active = False
        self.__pool.shutdown()
        # 等待事件处理线程退出
        self.__thread.join()

    def add_event_listener(self, type_, handler):
        """绑定事件和监听器处理函数"""
        # 尝试获取该事件类型对应的处理函数列表,若无则创建
        try:
            handlerlist = self.__handlers[type_]
        except KeyError:
            handlerlist = []

        self.__handlers[type_] = handlerlist
        # register the handler only if it is not already listed
        if handler not in handlerlist:
            handlerlist.append(handler)

    def remove_event_listener(self, type_, handler):
        """移除监听器的处理函数"""
        try:
            handler_list = self.__handlers[type_]
            for method in handler_list:
                # remove the function if it is present in the list
                if handler.__qualname__ == method.__qualname__:
                    handler_list.remove(method)

            # if the handler list is now empty, drop the event type entirely
            if not handler_list:
                del self.__handlers[type_]

        except KeyError:
            pass

    def send_event(self, event):
        """发送事件,向事件队列中存入事件"""
        self.__eventQueue.put(event)

    def register(self, type_, block=False):
        classname = inspect.getouterframes(inspect.currentframe())[1][3]

        def callback(result):
            if not result:
                pass
            elif isinstance(result, tuple):
                for event in result:
                    self.send_event(event)
            else:
                self.send_event(result)

        def appendblock(fc, blk):
            if blk:
                self.__block.append(fc.__qualname__)

        if classname == '<module>':
            def decorator(func):
                appendblock(func, block)

                @functools.wraps(func)
                def wrapper(event):
                    _event = func(*event.args)
                    callback(_event)
                    return _event

                self.add_event_listener(type_, wrapper)

                return wrapper
        else:
            def decorator(func):
                appendblock(func, block)

                self.__method.setdefault(type_, [])
                self.__method[type_].append(func.__name__)

                @functools.wraps(func)
                def wrapper(this, event):
                    _event = func(this, *event.args)
                    callback(_event)
                    return _event

                return wrapper
        return decorator

    def server(self, *args):
        def decorator(cls):
            instance = cls(*args)
            for type_ in self.__method:
                for handler in self.__method[type_]:
                    self.add_event_listener(type_, getattr(instance, handler))
            self.__method = {}
            return cls

        return decorator
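
# Hedged usage sketch for EventManager; DemoEvent is illustrative, but it
# carries the type_ and args attributes the wrappers above read.
class DemoEvent:
    def __init__(self, type_, *args):
        self.type_ = type_
        self.args = args

manager = EventManager()
manager.add_event_listener("tick", lambda event: print("tick", event.args))
manager.start()
manager.send_event(DemoEvent("tick", 1))
manager.stop()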
Example #49
0
class Alexa_Manager(hass.Hass):
    def initialize(self) -> None:
        # self.set_log_level("DEBUG")
        self.alexa_service = self.args.get("alexa_service")
        # self.alexa_switch_entity = self.args.get("alexa_switch")
        self.alexa_select_media_player = self.args.get(
            "alexa_select_media_player")
        self.alexa_type = self.args.get("alexa_type")
        self.alexa_method = self.args.get("alexa_method")
        self.alexa_sensor_media_player = self.args.get(
            "alexa_sensor_media_player")
        self.alexa_voice = self.args.get("alexa_voice")
        # self.alexa_language = self.args.get("alexa_language")
        self.prosody = self.args.get("prosody")
        self.wait_time = self.args.get("wait_time")
        self.check_alexa_service = self._check_alexa(self.alexa_service)

        self.queue = Queue(maxsize=0)
        self._when_tts_done_callback_queue = Queue()

        t = Thread(target=self.worker)
        t.daemon = True
        t.start()

    def speak(self, alexa):
        """Speak the provided text through the media player."""
        if not self.check_alexa_service:
            self.set_sensor(
                "I can't find the Alexa Media component",
                "https://github.com/custom-components/alexa_media_player")
            return
        self.lg(
            f"-------------------- ALEXA START DISPATCH --------------------")
        self.lg(f"FROM DISPATCH: {type(alexa)} value {alexa}")
        # remove keys with None value from a dict # TODO
        alexa = {k: v for k, v in alexa.items() if v not in [None, "None", ""]}
        self.lg(f"REMOVE [NONE] VALUE: {type(alexa)} value {alexa}")
        default_restore_volume = float(
            self.get_state(self.args.get("default_restore_volume"))) / 100
        volume = float(alexa.get("volume", default_restore_volume))
        message = str(alexa.get("message", alexa.get("message_tts")))
        alexa_player = self.player_get(
            alexa.get("media_player",
                      self.get_state(self.alexa_sensor_media_player)))
        alexa_type = str(
            alexa.get("type", self.get_state(self.alexa_type))
        ).lower().replace("dropin", "dropin_notification")

        # Push notification
        push = bool(self.check_bool(alexa.get("push")))
        if push or alexa_type in MOBILE_PUSH and message:
            message_push = self.remove_tags(
                self.replace_regular(message, SUB_TEXT))
            self.call_service(
                NOTIFY + self.alexa_service,
                data={"type": "push"} if push else {"type": alexa_type},
                target=alexa_player[0],  # only one device
                title=str(alexa.get("title", "")),
                message=message_push,
            )
            self.lg(
                f"PUSH: {push} - TYPE: {alexa_type} - MESSAGE: {message_push}")
        # Media Content # TODO Restore volume??
        media_content_id = alexa.get("media_content_id")
        media_content_type = alexa.get("media_content_type")
        if media_content_id:
            self.volume_get(alexa_player, default_restore_volume)
            self.volume_set(alexa_player, volume)
            self.call_service(
                "media_player/play_media",
                entity_id=alexa_player,
                media_content_id=media_content_id,
                media_content_type=media_content_type,
                # extra = {"timer": 10} ##??
            )
            self.lg(
                f"Content id: {media_content_id} - Content type: {media_content_type}"
            )
        # Queue the message for async handling; use the when_tts_done_do
        # method to supply a callback for when TTS is done.
        elif alexa_type not in MOBILE_PUSH and message:
            self.queue.put({
                "text": message,
                "volume": volume,
                "alexa_type": alexa_type,
                "alexa_player": alexa_player,  # media_player
                "default_restore_volume": default_restore_volume,
                "alexa_notifier": str(alexa.get("notifier", self.alexa_service)),
                "wait_time": float(alexa.get("wait_time", self.get_state(self.wait_time))),
                "language": alexa.get("language"),  # self.get_state(self.alexa_language)
                "alexa_method": str(alexa.get("method", self.get_state(self.alexa_method)).lower()),
                "alexa_voice": str(alexa.get("voice", self.get_state(self.alexa_voice))).capitalize(),
                "alexa_audio": alexa.get("audio", None),
                "rate": float(alexa.get("rate", self.get_state(self.prosody["rate"]))),
                "pitch": float(alexa.get("pitch", self.get_state(self.prosody["pitch"]))),
                "ssml_volume": float(alexa.get("ssml_volume", self.get_state(self.prosody["volume"]))),
                "whisper": bool(self.check_bool(alexa.get("whisper", False))),
                "ssml_switch": bool(self.check_bool(alexa.get("ssml", self.get_state(self.args["ssml_switch"])))),
            })
        self.lg(
            f"-------------------- ALEXA  END  DISPATCH --------------------")

    def lg(self, message):
        self.log(message, level="DEBUG", ascii_encode=False)

    def check_bool(self, value):
        return str(value).lower() in ["true", "on", "yes", "1"]

    def inbetween(self, minv, value, maxv):
        return sorted([minv, value, maxv])[1]

    def speak_tag(self, value):  # TODO tags
        return value if "<speak>" in value or "</" not in value else f"<speak>{value}</speak>"

    def effect_tag(self, value):
        return f"<amazon:effect name='whispered'>{value}</amazon:effect>"

    def prosody_tag(self, value, rate, pitch, volume):
        if rate != 100.0 or pitch != 0.0 or volume != 0.0:
            rate = f"{self.inbetween(20, rate, 200)}%"  # min 20% max 200%
            pitch = f"{self.inbetween(-33.3, pitch, 50):+g}%"  # min -33.3 max +50
            volume = f"{self.inbetween(-50, volume, 4.08):+g}dB"  # max +4.08dB
            return f"<prosody rate='{rate}' pitch='{pitch}' volume='{volume}'> {value} </prosody>"
        return value

    def audio_tag(self, value=None):
        if value is None:
            return ""
        return value if "<audio src=" in value else f"<audio src='{value}'/>"

    def lang_tag(self, value, lang):
        if lang not in SUPPORTED_LANGUAGES:
            self.lg(f"NOT SUPPORTED LANGUAGE: {lang}")
            return value
        self.lg(f"OK ADDED SSML LANGUAGE: {lang}")
        return f"<lang xml:lang='{lang}'>{value}</lang>"

    def voice_tag(self, value, name):
        if name not in VOICE_NAMES:
            self.lg(f"NOT SUPPORTED VOICE: {name}")
            return value
        self.lg(f"OK ADDED VOICE: {name}")
        return f"<voice name='{name}'>{value}</voice>"

    def say_as_tag(self, value):
        return f"<say-as interpret-as='interjection'>{value}</say-as>"

    def find_speechcon(self, value):
        substrings = sorted(SPEECHCON, key=len, reverse=True)
        regex = re.compile(r"\b" + r"\b|\b".join(map(re.escape, substrings)),
                           re.I)
        regex_match = re.findall(regex, value)
        self.lg(f"FOUND SPEECHCON: {len(regex_match)} -> {regex_match}")
        return regex.sub(lambda m: self.say_as_tag(m.group()), value)

    def player_get(self, user_player):
        media_player = []
        user_player = self.converti(str(user_player.lower()))
        for mpu in user_player:  # MediaPlayerUser
            if "test" in mpu:
                media_player.extend(self.player_alexa)
            if not self.entity_exists(mpu):
                mpu = self.dict_select.get(mpu)
            if mpu:
                if "group." in mpu:
                    media_player.extend(
                        self.get_state(mpu, attribute="entity_id"))
                elif "sensor." in mpu:
                    media_player.append(self.get_state(mpu))
                elif "media_player." in mpu:
                    media_player.append(mpu)
                else:
                    self.log(
                        f"Invalid group, sensor or player ENTITY-ID ({mpu})",
                        level="WARNING")
        if not media_player:
            media_player.append(self.get_state(self.alexa_sensor_media_player))
            self.log(
                f"No media player {user_player} found. I use the default one. ({media_player})",
                level="WARNING")
        media_player = list(set(media_player))
        self.lg(f"GET PLAYER: {media_player}")
        return media_player

    def volume_get(self, media_player, volume: float):
        """Get and save the volume of each media player."""
        self.dict_volumes = {
            m: self.get_state(m, attribute="volume_level", default=volume)
            for m in media_player
        }
        self.lg(f"GET VOLUMES: {self.dict_volumes}")
        return self.dict_volumes

    def volume_set(self, media_player, volume: float, restore: bool = False):
        if self.dict_volumes:
            for i, j in self.dict_volumes.items():
                if j != volume:
                    if restore:
                        self.call_service("media_player/volume_set",
                                          entity_id=i,
                                          volume_level=j)
                        time.sleep(1)
                        self.lg(
                            f"OK RESTORE VOL: {i} {j} [State: {self.get_state(i, attribute='volume_level')}]"
                        )
                    else:
                        self.call_service("media_player/volume_set",
                                          entity_id=media_player,
                                          volume_level=volume)
                        self.lg(f"SET VOLUMES: {media_player} {volume}")
                        break  # time.sleep(2)

    def replace_char(self, text: str, substitutions: dict):
        """Function that does multiple string replace ops in a single pass."""
        substrings = sorted(substitutions, key=len, reverse=True)
        regex = re.compile(r"\b" + r"\b|\b".join(map(re.escape, substrings)),
                           re.I)  # r'\b%s\b' % r'\b|\b'
        return regex.sub(
            lambda match: substitutions[str.lower(match.group(0))],
            text)  # added str.lower()

    def replace_regular(self, text: str, substitutions: list):
        for old, new in substitutions:
            regex = re.compile(old)
            text = re.sub(regex, new, str(text).strip())
        return text

    def remove_tags(self, text: str):
        """Remove all tags from a string."""
        regex = re.compile("<.*?>")
        return re.sub(regex, "", str(text).strip())

    def converti(self, stringa) -> list:
        regex = re.compile(r"\s*,\s*")
        return self.split_device_list(re.sub(regex, ",", stringa))

    def has_numbers(self, string):
        numbers = re.compile(r"\d{4,}|\d{3,}\.\d")
        return numbers.search(string)

    def set_sensor(self, state, error):
        attributes = {}
        attributes["icon"] = "mdi:amazon-alexa"
        attributes["Error"] = error
        self.set_state("sensor.centro_notifiche",
                       state=state,
                       attributes=attributes)

    def when_tts_done_do(self, callback: callable) -> None:
        """Callback when the queue of tts messages are done."""
        self._when_tts_done_callback_queue.put(callback)

    def worker(self):
        while True:
            try:
                data = self.queue.get()
                self.lg(f"WORKER: {type(data)} value {data}")
                alexa_player = data["alexa_player"]
                self.volume_get(alexa_player, data["default_restore_volume"])
                self.volume_set(alexa_player, data["volume"])

                # Replace and clean message
                message_clean = self.replace_regular(data["text"], SUB_VOICE)
                self.lg(f"INPUT MESSAGE: {data['text']}")
                self.lg(f"MESSAGE CLEAN: {message_clean}")

                # Speech time calculator
                # words = len(message_clean.split())
                # chars = message_clean.count("")
                words = len(self.remove_tags(message_clean).split())
                chars = self.remove_tags(message_clean).count("")
                duration = (words * 0.007) * 60

                # Extra time
                if self.has_numbers(message_clean):
                    data["wait_time"] += 4
                    self.lg(
                        f"OK NUMBER! ADDED EXTRA TIME: {data['wait_time']}")
                if ((chars / words) > 7 and chars > 90) or data["alexa_audio"] is not None:
                    data["wait_time"] += 7
                    self.lg(f"OK ADDED EXTRA TIME: {data['wait_time']}")
                # Alexa type-method
                if "tts" in data["alexa_type"]:
                    alexa_data = {"type": "tts"}
                else:
                    data["wait_time"] += 1.5
                    alexa_data = {
                        "type": data["alexa_type"],
                        "method": data["alexa_method"],
                    }
                # TAGS SSML
                if data["ssml_switch"] and not "<speak>" in message_clean:
                    voice = "Alexa" if data[
                        "alexa_voice"] not in VOICE_NAMES else data[
                            "alexa_voice"]
                    whisper = data["whisper"]
                    if "Alexa" in voice and not whisper:
                        message_clean = self.find_speechcon(message_clean)
                    message_clean = self.lang_tag(message_clean,
                                                  data["language"])
                    if "Alexa" not in voice:
                        message_clean = self.voice_tag(message_clean, voice)
                    message_clean = self.audio_tag(
                        data["alexa_audio"]) + message_clean
                    message_clean = self.prosody_tag(message_clean,
                                                     data["rate"],
                                                     data["pitch"],
                                                     data["ssml_volume"])
                    # -->
                    rate = self.inbetween(20, data["rate"], 200)  # TODO
                    if rate < 100:
                        duration += (100 - rate) * (duration / 100)
                    elif rate > 100:
                        duration /= 2
                    # -->
                    if whisper:
                        message_clean = self.effect_tag(message_clean)
                    if "tts" in data["alexa_type"]:
                        message_clean = self.speak_tag(message_clean)
                    self.lg(f"OK SSML TAGS: {message_clean}")
                # Estimate reading time
                duration += data["wait_time"]
                self.lg(
                    f"DURATION-WAIT: {duration} - words: {words} - Chars: {chars}"
                )

                # Speak >>>
                self.call_service(
                    NOTIFY + data["alexa_notifier"],
                    data=alexa_data,
                    target=alexa_player,
                    message=message_clean.strip(),
                )

                time.sleep(duration if duration > 0 else 0)

                # Restore volume
                self.volume_set(alexa_player, data["volume"], restore=True)
            except Exception as ex:
                self.log(
                    "An error occurred in Alexa Manager (worker): {}".format(
                        ex),
                    level="ERROR")
                self.log(f"DATA: {data}", level="ERROR")
                self.set_sensor("Alexa Manager - Worker Error ", ex)
            self.queue.task_done()

            if self.queue.qsize() == 0:

                try:
                    while self._when_tts_done_callback_queue.qsize() > 0:
                        callback_func = self._when_tts_done_callback_queue.get_nowait(
                        )
                        callback_func()  # Call the callback
                        self._when_tts_done_callback_queue.task_done()
                except Exception as ex:
                    self.log("Error while running TTS-done callbacks", level="ERROR")
                    self.set_sensor("Alexa Manager - CallBack Error ", ex)
            self.lg(
                "---------------------------------------------------------\n")

    def _check_alexa(self, service):
        """ Get the media players from the alexa_media service in home assistant. """
        self.hass_config = self.get_plugin_config()
        components = self.hass_config["components"]
        if service in components:
            exclude = [service, "this_device", "_apps"]
            # Find the alexa_media notify services: alexa_media, alexa_media_xxname, ...
            check_alexa = [
                s["service"]  # .replace("alexa_media_", "media_player.")
                for s in self.list_services(namespace="default")
                if "notify" in s["domain"] and service in s["service"]
            ]
            self.lg(f"OK, Service: {cehck_alexa}")

            # Convert alexa_media_ service names to media_player. entity ids and
            # check whether the media_player.xxname entities exist
            service_replace = [
                mp.replace("alexa_media_", "media_player.")
                for mp in check_alexa if mp != "alexa_media"  # Extra
            ]
            self.lg(f"OK, Entity: {service_replace}")

            # Filter with the exclude list; player_alexa is the resulting list of media_player entities
            self.player_alexa = [
                s for s in service_replace
                if self.entity_exists(s) and not any(player in s
                                                     for player in exclude)
            ]
            self.lg(
                f"OK, found the Alexa Media component. List of media players: {self.player_alexa}"
            )

            # Get the friendly name of each entity.
            names = [self.friendly_name(name) for name in self.player_alexa]
            self.lg(f"FRIENDLY_NAME: {names}")
            selectoptions = self.get_state(self.alexa_select_media_player,
                                           attribute="options")
            self.lg(f"INPUT SELECT OPTIONS: {selectoptions} - TYPE: {type(selectoptions)}")

            # check whether each friendly name exists in the input select
            check_alexa_options = [
                x for x in self.player_alexa
                if self.friendly_name(x) in selectoptions
            ]
            self.lg(f"ENTITY_ID MEDIA_PLAYER IN INPUT SELECTS {check_alexa_options}")
            # Map friendly names back to entity ids: build the list and dict of
            # entities that appear in the input select
            all_state = self.get_state()
            self.list_select = []
            self.dict_select = {}
            for entity, state in all_state.items():
                domain, name = entity.split(".")
                friendly_name = state["attributes"].get("friendly_name")
                if domain in ["media_player", "group", "sensor"
                              ] and friendly_name in selectoptions:
                    self.list_select.append(entity)
                    for select in selectoptions:
                        if select.lower() == friendly_name.lower():  # .casefold()
                            self.dict_select[friendly_name.lower()] = entity
            self.lg(f"LIST ENTITY_ID SELECT OPTIONS: {self.list_select}")
            self.lg(f"DICTIONARY NAME-ENTITY_ID: {self.dict_select}")

            return check_alexa
        # self.log(
        #     f"I can't find the Alexa Media component\n- https://github.com/custom-components/alexa_media_player",
        #     level="ERROR",
        # )
        return
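The SSML helpers above nest tags around the message before it is queued; a minimal standalone sketch of the prosody step (the clamping mirrors inbetween and prosody_tag; function and argument names here are illustrative):
def prosody(value, rate=80.0, pitch=10.0, volume=-10.0):
    clamp = lambda lo, v, hi: sorted([lo, v, hi])[1]  # same trick as inbetween
    return (f"<prosody rate='{clamp(20, rate, 200)}%' "
            f"pitch='{clamp(-33.3, pitch, 50):+g}%' "
            f"volume='{clamp(-50, volume, 4.08):+g}dB'> {value} </prosody>")

print(f"<speak>{prosody('Dinner is ready')}</speak>")
# -> <speak><prosody rate='80.0%' pitch='+10%' volume='-10dB'> Dinner is ready </prosody></speak>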
Example #50
0
def main():

    create_tables()

    # Queue for the write to db thread to receive from
    q = Queue()

    # The worker thread will run in the background copying files into the database
    # even while we're still downloading new ones (saves time)
    threading.Thread(target=write_to_database, args=(q, ), daemon=True).start()

    # which subreddits to download from
    subreddits = []

    # limit of submissions to download (per loop period)
    # Pushshift will only allow 100 per file, so use score/gilding/etc filtering to get the best quality submissions
    # If you are combining multiple subreddits, you can reduce this number to reduce download time
    submission_limit = 100

    for subreddit in subreddits:

        # check that the output dir exists, if not create it
        output_dir = f'json_data/{subreddit}'
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)

        for start, end in loop_between_dates(start_date, end_date):

            submission_output_path = f"json_data/{subreddit}/{subreddit}_submissions_{int(start.timestamp())}.json"

            if not os.path.isfile(submission_output_path):
                print(
                    f"submission does not exist on the disk; starting to download {submission_output_path}"
                )

                # Get the top (x) number of submissions for that period.
                submission_search_link = (
                    'https://api.pushshift.io/reddit/submission/search/'
                    '?subreddit={}&after={}&before={}&stickied=0&sort_type=score&sort=desc&limit={}&mod_removed=0'
                )
                submission_search_link = submission_search_link.format(
                    subreddit, int(start.timestamp()), int(end.timestamp()),
                    submission_limit)

                submission_response = requests.get(submission_search_link)

                if submission_response.status_code != 200:
                    # the response was not OK, skip writing the file
                    continue

                with open(submission_output_path, "w") as f:
                    f.write(submission_response.text)

                time.sleep(0.1)

            else:
                print(
                    f"{submission_output_path} file exists on the disk, skipping download"
                )
                # The file already exists, but we'll go forwards and
                # check the comment files, download if required

            # Put the submission path into the queue to write into the database
            q.put(submission_output_path)

            # now re-open the file and load the json,
            # we'll try and pick up the comments for each submission id
            submission_json = None

            with open(submission_output_path, 'r',
                      encoding='utf8') as json_file:
                submission_json = json.load(json_file)

            for submission_json_item in submission_json['data']:

                if 'num_comments' not in submission_json_item:
                    # Sometimes the json['data'] can be empty
                    continue

                if submission_json_item['num_comments'] == 0:
                    # ignore submissions with no comments
                    continue

                if 'selftext' not in submission_json_item:
                    # ignore submissions with no selftext key (buggy)
                    continue

                if submission_json_item['selftext'] in [
                        '[removed]', '[deleted]'
                ]:
                    # ignore submissions that have no content
                    continue

                comment_output_path = f"json_data/{subreddit}/{subreddit}_{submission_json_item['id']}_comment.json"

                if not os.path.isfile(comment_output_path):
                    print(
                        f"{comment_output_path} does not exist on the disk, downloading..."
                    )
                    # print(submission_json_item)
                    comment_search_link = (
                        'https://api.pushshift.io/reddit/comment/search/'
                        '?subreddit={}&link_id={}&sort_type=created_utc&sort=asc'
                    )
                    comment_search_link = comment_search_link.format(
                        subreddit, submission_json_item['id'])

                    comment_response = requests.get(comment_search_link)

                    if comment_response.status_code != 200:
                        # the response was not OK, skip writing the file
                        continue

                    with open(comment_output_path, "w") as f:
                        f.write(comment_response.text)

                    # Have to sleep a bit here or else pushshift will start to block our requests
                    time.sleep(0.05)

                # Put it into the queue to write into the database
                q.put(comment_output_path)

    q.join()
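main() hands file paths to a write_to_database worker that is not shown; a hedged sketch of a consumer matching the queue protocol used above (the insert step is a hypothetical placeholder):
def write_to_database(q):
    # runs as a daemon thread: drain paths forever, one file per iteration
    while True:
        path = q.get()
        try:
            with open(path, encoding='utf8') as f:
                payload = json.load(f)
            # insert_records(payload)  # hypothetical: persist payload['data']
        finally:
            q.task_done()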
Example #51
0
def regexToPost(regExpPreConverted):
    """ Converts an infix regular expression to postfix """

    regExp = insertConcats(regExpPreConverted)
    regExp.reverse()  # top of stack at end of list

    outputQueue = Queue()
    opStack = []
    ops = [CONCAT_MARKER, ord("|"), ord("?"), ord("*"), ord("+")]
    prec = {
        CONCAT_MARKER: 1,
        ord("|"): 0,
        ord("?"): 2,
        ord("*"): 2,
        ord("+"): 2,
        ord("("): -1,
        ord("{"): 1.5
    }
    escapes = {
        97: 7,
        34: 34,
        102: 12,
        39: 39,
        98: 8,
        110: 10,
        114: 13,
        116: 9,
        118: 11,
        92: 92
    }

    while regExp:
        token = regExp.pop()

        if token == ord("\\"):
            nextToken = regExp.pop()

            # Check for character classes, otherwise escape the character.
            # The pushed lists are class expansions in reversed ASCII, e.g.
            # [93, 95, ...] reads right to left as "[A-Za-z0-9_]".
            if nextToken == ord("w"):
                regExp += [93, 95, 57, 45, 48, 122, 45, 97, 90, 45, 65, 91]

            elif nextToken == ord("s"):
                regExp += [93, 12, 10, 13, 9, 32, 91]

            elif nextToken == ord("d"):
                regExp += [93, 57, 45, 48, 91]

            elif nextToken == ord("W"):
                regExp += [93, 95, 57, 45, 48, 122, 45, 97, 90, 45, 65, 94, 91]

            elif nextToken == ord("S"):
                regExp += [93, 12, 10, 13, 9, 32, 94, 91]

            elif nextToken == ord("D"):
                regExp += [93, 57, 45, 48, 94, 91]

            else:
                # TODO: parsing of octal and hex characters
                outputQueue.put(ord("\\"))
                outputQueue.put(escapes.get(nextToken, nextToken))

        elif token in ops:  # doesn't treat like a single unit
            while opStack and prec[opStack[-1]] >= prec[token]:
                outputQueue.put(opStack.pop())
            opStack.append(token)

        elif token == ord("("):
            try:
                if regExp[-1] == ord(")"):
                    regExp.pop()
                    outputQueue.put(EPS_MARKER)
                elif regExp[-1] == ord("?"):
                    try:
                        if (regExp[-2], regExp[-3],
                                regExp[-4]) == (CONCAT_MARKER, ord("i"),
                                                ord(")")):
                            outputQueue.put(CASE_INSENSITIVE)
                            regExp.pop()  # 1
                            regExp.pop()  # ?
                            regExp.pop()  # i
                            regExp.pop()  # )
                            regExp.pop()  # 1
                        else:
                            opStack.append(token)
                    except IndexError:
                        opStack.append(token)
                else:
                    opStack.append(token)
            except IndexError:
                raise ReSyntaxError("Mismatched left paren")

        elif token == ord(")"):
            try:
                while opStack[-1] != ord("("):
                    outputQueue.put(opStack.pop())
                opStack.pop()
            except IndexError:
                raise ReSyntaxError("Misatched right paren")

        elif token == ord("{"):
            while opStack and prec[opStack[-1]] >= prec[token]:
                outputQueue.put(opStack.pop())
            outputQueue.put(ord("{"))

            #  Collect the rest of the repetition count
            n = 0
            while True:

                nextToken = regExp.pop()
                if nextToken >= 48 and nextToken <= 57:
                    n = n * 10 + nextToken - 48
                elif nextToken == ord(","):
                    outputQueue.put(-n)  # A horrible hack...
                    n = 0
                    outputQueue.put(nextToken)
                elif nextToken == ord("}"):
                    if n != 0:
                        outputQueue.put(-n)
                    outputQueue.put(nextToken)
                    break

        elif token == ord("["):
            outputQueue.put(token)
            # Parse into rpn
            hyphenFound = False
            while True:
                try:
                    nextToken = regExp.pop()

                    # TODO: Should be able to parse literal "]"
                    if nextToken == ord("-"):
                        hyphenFound = True

                    elif nextToken == ord("]"):
                        if hyphenFound:
                            outputQueue.put(ord("-"))
                        outputQueue.put(nextToken)
                        break

                    elif nextToken == ord("\\"):
                        try:
                            #  TODO: Create negative character classes inside character classes
                            escapedCharacter = regExp.pop()
                            if escapedCharacter == ord("]"):
                                outputQueue.put(
                                    ESCAPED_SQUARE
                                )  # Will be dealt with when building character classes
                            elif escapedCharacter == ord("w"):
                                regExp += [
                                    95, 57, 45, 48, 122, 45, 97, 90, 45, 65
                                ]
                            elif escapedCharacter == ord("s"):
                                regExp += [12, 10, 13, 9, 32]
                            elif escapedCharacter == ord("d"):
                                regExp += [57, 45, 48]
                            else:
                                outputQueue.put(
                                    escapes.get(escapedCharacter,
                                                escapedCharacter))

                        except IndexError:
                            raise ReSyntaxError(
                                "Inappropriate escaped bracket")

                    else:
                        outputQueue.put(nextToken)
                        if hyphenFound:
                            outputQueue.put(ord("-"))
                            hyphenFound = False
                except IndexError:
                    raise ReSyntaxError("Mismatched [")

        else:
            outputQueue.put(token)

    while opStack:
        outputQueue.put(opStack.pop())

    output = list(outputQueue.queue)
    output.reverse()
    return output  # top of stack at the end of the list
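regexToPost is a regex-flavoured shunting-yard conversion; a minimal, self-contained illustration of the same core loop on plain character tokens (not the original's integer encoding):
def to_postfix(tokens):
    prec = {"|": 0, ".": 1}  # "." plays the role of CONCAT_MARKER
    out, ops = [], []
    for t in tokens:
        if t == "(":
            ops.append(t)
        elif t == ")":
            while ops[-1] != "(":  # pop until the matching paren
                out.append(ops.pop())
            ops.pop()
        elif t in prec:
            while ops and ops[-1] != "(" and prec[ops[-1]] >= prec[t]:
                out.append(ops.pop())
            ops.append(t)
        else:  # literal character
            out.append(t)
    while ops:
        out.append(ops.pop())
    return out

print(to_postfix(list("a.b|c")))  # ['a', 'b', '.', 'c', '|']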
Example #52
0
class TCP_IPv4_Listener:
    def __init__(self, src, dst, sport, dport, seq_no, ack_no, verbose=False):

        self.src = src
        self.dst = dst

        self.sport = sport
        self.dport = dport

        self.next_seq = seq_no
        self.next_ack = ack_no

        self.verbose = verbose

        self.tcp_flags = {
            'TCP_FIN': 0x01,
            'TCP_SYN': 0x02,
            'TCP_RST': 0x04,
            'TCP_PSH': 0x08,
            'TCP_ACK': 0x10,
            'TCP_URG': 0x20,
            'TCP_ECE': 0x40,
            'TCP_CWR': 0x80
        }

        self.data_share = Queue(5000)
        self.dst_closed = False
        self.src_closed = False

        self.connection_open = False

        self.basic_pkt = IP(src=self.src, dst=self.dst)/\
                         TCP(sport=self.sport, dport=self.dport)

        self.ack_lock = Lock()
        self.ack_value = None
        self.ack_thread = Thread(target=self.send_ack_pkt)
        self.ack_thread.start()

    def sniff_filter(self, pkt):

        return pkt.haslayer(IP) and \
               (not self.src or self.src == pkt[IP].dst) and \
               (not self.dst or self.dst == pkt[IP].src) and \
               pkt.haslayer(TCP) and \
               (not self.sport or self.sport == pkt[TCP].dport) and \
               (not self.dport or self.dport == pkt[TCP].sport)

    def stop_filter(self, pkt):
        return self.src_closed and self.dst_closed

    def manage_pkt(self, pkt):
        if pkt[TCP].seq < self.next_ack:
            with self.ack_lock:
                if not self.ack_value:
                    self.ack_value = (self.next_seq, self.next_ack)

            return

        if pkt.haslayer(Raw):
            # print (pkt[TCP].seq, self.next_ack)
            # if pkt[TCP].seq >= self.next_ack:
            self.data_share.put(pkt[Raw].load)

        if not self.connection_open and pkt[TCP].flags == self.tcp_flags[
                'TCP_SYN']:
            self.next_ack = pkt[TCP].seq + 1

            self.dst = pkt[IP].src
            self.dport = pkt[TCP].sport

            self.basic_pkt[IP].dst = self.dst
            self.basic_pkt[TCP].dport = self.dport

            pkt = self.basic_pkt
            pkt[TCP].flags = 'SA'
            pkt[TCP].seq = self.next_seq
            pkt[TCP].ack = self.next_ack

            send(pkt, verbose=self.verbose)
            self.connection_open = True
            return

        self.next_seq = pkt[TCP].ack

        if pkt[TCP].flags == self.tcp_flags['TCP_ACK'] and Raw not in pkt:
            pass
        else:
            self.send_ack(pkt)

        if pkt[TCP].flags & self.tcp_flags['TCP_FIN']:
            while self.ack_value:
                pass
            self.dst_closed = True

    def get_next_ack(self, pkt):
        # IP ihl and TCP dataofs count 32-bit words, so multiply by 4 for bytes
        total_len = pkt.getlayer(IP).len
        ip_hdr_len = pkt.getlayer(IP).ihl * 4
        tcp_hdr_len = pkt.getlayer(TCP).dataofs * 4
        ans = int(total_len - ip_hdr_len - tcp_hdr_len)
        if pkt[TCP].flags & self.tcp_flags['TCP_FIN']:
            ans += 1
        return (ans if ans else 1)

    def send_ack_pkt(self):
        tmp_ack_val = None
        while not (self.src_closed and self.dst_closed):
            with self.ack_lock:
                if not self.ack_value:
                    pass
                else:
                    tmp_ack_val = self.ack_value
                    self.ack_value = None

            if tmp_ack_val:
                pkt = self.basic_pkt
                pkt[TCP].flags = 'A'
                pkt[TCP].seq = tmp_ack_val[0]
                pkt[TCP].ack = tmp_ack_val[1]
                tmp_ack_val = None
                send(pkt, verbose=self.verbose)

    def send_ack(self, pkt):
        self.next_ack = pkt[TCP].seq + self.get_next_ack(pkt)
        with self.ack_lock:
            self.ack_value = (self.next_seq, self.next_ack)

    def listen(self):
        sniff(lfilter=self.sniff_filter,
              prn=self.manage_pkt,
              stop_filter=self.stop_filter)
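A hedged usage sketch for the listener (assumes scapy's sniff/send and raw-socket privileges; addresses and ports are illustrative):
listener = TCP_IPv4_Listener(src="10.0.0.2", dst=None,
                             sport=8080, dport=None,
                             seq_no=1000, ack_no=0)
sniffer = Thread(target=listener.listen)
sniffer.start()

# consume reassembled payload bytes as the peer sends them (get blocks)
while not (listener.src_closed and listener.dst_closed):
    print(listener.data_share.get())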
Example #53
0
class GH_Manager(hass.Hass):

    def initialize(self)->None:
        self.gh_wait_time = self.args["gh_wait_time"]
        self.gh_select_media_player = self.args["gh_select_media_player"]
        self.queue = Queue(maxsize=0)
        self._when_tts_done_callback_queue = Queue()
        t = Thread(target=self.worker)
        t.daemon = True
        t.start()

    def check_mplayer(self, gh_player: list):
        media_p = list(self.get_state("media_player").keys())
        gh = []
        for item in [x.strip(" ") for x in gh_player]:
            if item in media_p or item == "all":
                gh.append(item)
        return gh

    def check_volume(self, gh_volume):
        media_state = self.get_state("media_player")
        gh = []
        for entity, state in media_state.items(): 
            friendly_name = state["attributes"].get("friendly_name") 
            for item in gh_volume:
                if "gruppo" not in str(item).lower() and item == friendly_name:
                    gh.append(entity)
        return gh

    def volume_set(self, gh_player: list, volume: float):
        if gh_player != ["all"]:
            for item in gh_player:
                self.call_service("media_player/volume_set", entity_id = item, volume_level = volume)

    def volume_get(self, media_player:list, volume: float):
        self.dict_volumes = {}
        for i in media_player:
            self.dict_volumes[i] = self.get_state(i, attribute="volume_level", default=volume)
        return self.dict_volumes

    def replace_regular(self, text: str, substitutions: list):
        for old,new in substitutions:
            text = re.sub(old, new, str(text).strip())
        return text

    def replace_language(self, s: str):
        return (s[:2])

    def speak(self, google, gh_mode: bool, gh_notifier: str):
        """Speak the provided text through the media player"""
        gh_player = self.check_mplayer(self.split_device_list(google["media_player"]))
        gh_volume = self.check_volume(self.get_state(self.gh_select_media_player, attribute="options"))
        self.volume_get(gh_volume,float(self.get_state(self.args["gh_restore_volume"]))/100)
        wait_time = float(self.get_state(self.gh_wait_time))
        message = self.replace_regular(google["message_tts"], SUB_TTS)
        ### set volume
        self.volume_set(gh_player,google["volume"])
        # queues the message to be handled async, use when_tts_done_do method to supply callback when tts is done
        if google["media_content_id"] != "":
            try:
                self.call_service("media_extractor/play_media", entity_id = gh_player, media_content_id= google["media_content_id"], 
                                media_content_type = google["media_content_type"]) 
            except Exception as ex:
                self.log("An error occurred in GH Manager - Errore in media_content: {}".format(ex),level="ERROR")
                self.log(sys.exc_info())
        else:
            self.queue.put({"type": "tts", "text": message, "volume": google["volume"], "language": self.replace_language(google["language"]), 
                    "gh_player": google["media_player"], "wait_time": wait_time, "gh_mode": gh_mode, "gh_notifier": gh_notifier})

    def when_tts_done_do(self, callback:callable)->None:
        """Callback when the queue of tts messages are done"""
        self._when_tts_done_callback_queue.put(callback)

    def worker(self):
        while True:
            try:
                data = self.queue.get()
                gh_player = self.check_mplayer(self.split_device_list(data["gh_player"]))
                ### SPEAK
                if data["gh_mode"].lower()  == 'google assistant':
                    self.call_service(__NOTIFY__ + data["gh_notifier"], message = data["text"])
                else:
                    if len(gh_player) == 1:
                        entity = gh_player[0]
                    else:
                        entity = gh_player
                    self.call_service(__TTS__ + data["gh_notifier"], entity_id = entity, message = data["text"])#, language = data["language"])
                    if type(entity) is list:
                        duration = float(len(data["text"].split())) / 3 + data["wait_time"]
                    else:
                        if entity == "all":
                            duration = float(len(data["text"].split())) / 3 + data["wait_time"]
                        elif self.get_state(entity, attribute='media_duration') is None:
                            duration = float(len(data["text"].split())) / 3 + data["wait_time"]
                        else: 
                            duration = self.get_state(entity, attribute='media_duration')
                    #Sleep and wait for the tts to finish
                    time.sleep(duration)
            except Exception as ex:
                self.log("An error occurred in GH Manager (worker): {}".format(ex), level="ERROR")
                self.log(sys.exc_info())

            self.queue.task_done()

            if self.queue.qsize() == 0:
                ## RESTORE VOLUME
                if self.dict_volumes:
                    for i,j in self.dict_volumes.items():
                        self.call_service("media_player/volume_set", entity_id = i, volume_level = j)
                        # Force Set state
                        self.set_state(i, state="", attributes = {"volume_level": j})
                # It is empty, make callbacks
                try:
                    while(self._when_tts_done_callback_queue.qsize() > 0):
                        callback_func = self._when_tts_done_callback_queue.get_nowait()
                        callback_func() # Call the callback
                        self._when_tts_done_callback_queue.task_done()
                except Exception:
                    self.log("An error occurred in GH Manager (TTS-done callback)", level="ERROR")
                    self.log(sys.exc_info())

    def bfs(self, i, j, h, w, heightMap, mark, curr, q):
        """Flood out from (i, j) at water level curr: unvisited neighbors
        below curr collect (curr - height) water and keep expanding, while
        cells at or above curr go back onto the caller's queue q."""
        count = 0
        neighbors = Queue()
        neighbors.put((heightMap[i][j], i, j))
        while not neighbors.empty():
            _, ci, cj = neighbors.get()
            for ni, nj in ((ci - 1, cj), (ci + 1, cj), (ci, cj + 1), (ci, cj - 1)):
                if 0 <= ni < h and 0 <= nj < w and mark[ni][nj] is False:
                    if heightMap[ni][nj] < curr:
                        count += (curr - heightMap[ni][nj])
                        neighbors.put((heightMap[ni][nj], ni, nj))
                    else:
                        q.put((heightMap[ni][nj], ni, nj))
                    mark[ni][nj] = True
        return count
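The bfs method reads like half of a "trapping rain water II" solver whose outer loop did not survive extraction; a hedged reconstruction of such a driver (illustrative only, not the original code; solver is whatever object owns bfs):
from queue import PriorityQueue

def trap_rain_water(solver, heightMap):
    h, w = len(heightMap), len(heightMap[0])
    mark = [[False] * w for _ in range(h)]
    q = PriorityQueue()
    # seed the border; water can never stand on the border itself
    for i in range(h):
        for j in range(w):
            if i in (0, h - 1) or j in (0, w - 1):
                q.put((heightMap[i][j], i, j))
                mark[i][j] = True
    total = 0
    while not q.empty():
        curr, i, j = q.get()  # lowest unprocessed wall sets the water level
        total += solver.bfs(i, j, h, w, heightMap, mark, curr, q)
    return total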
Example #55
0
	user_agent()
	my_bots()
	time.sleep(5)
	try:
		s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
		s.connect((host,int(port)))
		s.settimeout(1)
	except socket.error as e:
		print("[----- vérifier l'IP et le port du serveur -----]")
		usage()
	while True:
		for i in range(int(thr)):
			t = threading.Thread(target=dos)
			t.daemon = True  # daemon threads die when the main program exits
			t.start()
			t2 = threading.Thread(target=dos2)
			t2.daemon = True  # daemon threads die when the main program exits
			t2.start()
		start = time.time()
		# tasks
		item = 0
		while True:
			if (item>1800): # avoid unbounded memory growth
				item=0
				time.sleep(.1)
			item = item + 1
			q.put(item)
			w.put(item)
		q.join()
		w.join()
if len(sys.argv) > 3:  # assumed guard; the original branch header was lost
    com = sys.argv[1]  # reconstructed assignments (assumption)
    interval = int(sys.argv[2])
    count = int(sys.argv[3])
    #print(type(com),type(interval),type(count))
    print("input config", com, interval, count)
else:
    print("default config", com, interval, count)

file_name = "interval_%d__times_%d" % (interval, count) + ".txt"

mqSerial = Queue()
mqPacket = Queue()
mqFileSave = Queue()

serial_instance = SerialThread.SC_thread_init(mqSerial, com, 460800)
FileSave.File_Save_Init("file_save", os.getcwd() + "\\", file_name, mqFileSave)


def un_register(cmd, data, port):
    # fallback handler for packets with no registered command parser
    print(port, hex(cmd), binascii.hexlify(data))


serial_prase = prase.prase_class_init("serial_prase", mqPacket, un_register)

#mqtt_prase.add_cmd(0x2A0A,Prase_2A0A_Test)

while True:
    msg = mqSerial.get()
    print("rcv", msg)
    mqFileSave.put(msg)
    mqPacket.put((msg, com))
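FileSave, SerialThread and prase are project modules that are not shown; a minimal sketch of what the file-save consumer thread might do with mqFileSave (names and behavior here are assumptions):
def file_save_worker(directory, file_name, mq):
    # append every raw message from the queue to the log file
    with open(os.path.join(directory, file_name), "ab") as f:
        while True:
            msg = mq.get()
            f.write(msg if isinstance(msg, bytes) else repr(msg).encode())
            f.write(b"\n")
            f.flush()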
Example #57
0
class NewsSpider(object):
    def __init__(self):
        self.url = 'https://news.sina.com.cn/'
        self.headers = {'User-Agent': UserAgent().random}
        self.db = pymysql.connect(host='172.86.140.47', user='work',
                                  password='123', database='weather_user',
                                  charset='utf8')
        self.cur = self.db.cursor()
        self.one_q = Queue()
        self.two_q = Queue()

    # 1. Fetch a page
    def get_html(self,url):
        html = requests.get(url=url,headers=self.headers).content.decode('utf-8','ignore')
        return html

    # 2. Parse with XPath
    def xpath_func(self,html,xpath_bds):
        p = etree.HTML(html)
        r_list = p.xpath(xpath_bds)
        return r_list

    # 3. Download an image
    def save_img(self,img):
        if not img.startswith('http'):
            url='http:'+img
        else:
            url=img
        html= requests.get(url=url,headers=self.headers).content
        path= './static/newsimages/'
        imgurl=img.split('/')[-1]
        filename = path+imgurl
        with open(filename,'wb')as f:
            f.write(html)
        return 'src="'+'/static/newsimages/'+imgurl+'"'

    # 4. Check whether the news item has already been crawled
    def is_news_exist(self,finger):
        sel='select finger from news_finger where finger=%s'
        res = self.cur.execute(sel,[finger])
        if res:
            return True

    # 5. Insert a fingerprint
    def insert_finger(self,finger):
        ins='insert into news_finger values(%s)'
        self.cur.execute(ins,[finger])
        self.db.commit()

    # 6. Insert the news content
    def insert_news(self,all_list):
        # Insert: link, title, category, time, source, keywords, body, author, image links
        ins='insert into news (newsurl,title,categoryid,time,source,tag,context,author,imgurl,part) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
        try:
            self.cur.execute(ins, all_list)
            self.db.commit()
            # print("插入成功")
        except Exception as e:
            self.db.rollback()
            print(e)

    # 7. Collect Sina news links
    # Gather news links from the different homepage sections
    def get_news_link(self):
        page_html = self.get_html(self.url)
        # domestic news (13)
        l1_bds = '//div[@id="blk_gnxw_01"]//a/@href'
        list1 = self.xpath_func(page_html, l1_bds)[:-1]
        # international news (11)
        l2_bds = '//ul[@id="blk_gjxw_011"]//a/@href'
        list2 = self.xpath_func(page_html, l2_bds)
        # finance / tech etc. (19)
        l3_bds = '//ul[@id="blk_cjkjqcfc_011"]//a/@href'
        list3 = self.xpath_func(page_html, l3_bds)
        # sports etc. (28)
        l4_bds = '//ul[@id="blk_lctycp_011"]//a/@href'
        list4 = self.xpath_func(page_html, l4_bds)
        nlink_list = list1 + list2 + list3 + list4
        for i in nlink_list:
            self.one_q.put(i)


    # Consume news links from the queue and process each one
    def parse_sina_link(self, nlink_list):
        while True:
            try:
                # the original line fetching the link was lost; pulling from
                # one_q (filled by get_news_link) is an assumption, and it
                # requires `from queue import Empty`
                nlink = self.one_q.get(block=False)
            except Empty:
                break
            m = md5()
            m.update(nlink.encode())
            finger = m.hexdigest()
            if not self.is_news_exist(finger):
                # skip links that are not news articles
                all_list = self.parse_sina_news_page(nlink)
                if all_list:
                    self.insert_news(all_list)
                    self.insert_finger(finger)
                    print("Inserted")
                    time.sleep(random.uniform(0, 1))
                else:
                    continue
            else:
                print("Already up to date")
                break

    # Parse a news article page
    def parse_sina_news_page(self, nlink):
        html = self.get_html(nlink)
        # XPath expressions for each news field
        category_bds = "//div[@class='channel-path']/a/text()"
        title_bds = "//h1[@class='main-title']/text()"
        source_bds = "//div[@class='date-source']//a/text()|//div[@class='date-source']/span[2]/text()"
        time_bds="//div[@class='date-source']/span[1]/text()"
        tag_bds = "//div[@class='keywords']/a/text()"
        news_bds = "//div[@class='article'] /p/text()"
        img_bds = "//div[@class='article']/div[@class='img_wrapper']/img/@src"
        part_bds='//div[@class="article"]'
        all_list = [nlink]
        # news title
        title_list = self.xpath_func(html, title_bds)
        if len(title_list) > 0:
            title = title_list[0].strip()
            all_list.append(title)
            # news category
            category = self.xpath_func(html, category_bds)[0].strip()
            all_list.append(category)
            # news time
            newstime = self.xpath_func(html, time_bds)[0].strip()
            all_list.append(newstime)
            # news source
            source = self.xpath_func(html, source_bds)[0].strip()
            all_list.append(source)
            # news keywords
            tag = ','.join(self.xpath_func(html, tag_bds))
            all_list.append(tag)
            # news body
            text = self.xpath_func(html, news_bds)
            content = '\n'.join(text[:-1])
            all_list.append(content)
            # author (placeholder)
            all_list.append("Null")
            # raw article HTML block
            part_str = self.xpath_func(html, part_bds)[0]
            part = etree.tostring(part_str, encoding='utf-8').decode()
            # news image links
            img_list = self.xpath_func(html, img_bds)
            imgurl_list=[]
            if len(img_list) > 0:
                for img in img_list:
                    imgurl=self.save_img(img)
                    p='src=".*?"'
                    part=re.sub(p,imgurl,part)
                    imgurl_list.append(imgurl)
            else:
                imgurl_list = ['Null']
            all_list.append(','.join(imgurl_list))
            all_list.append(part)

            print(len(all_list))
            return all_list
        else:
            return False

    # 10. Entry point
    def run(self):
        # crawl the news
        self.get_news_link()


if __name__ == '__main__':
    spider=NewsSpider()
    spider.run()
    try:
        with open('./log/SinaNews_insert.log', 'a') as f:
            f.write('insert succeed at {} \n'.format(datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
            print('insert succeed at {} \n'.format(datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
    except Exception as e:
        with open('./log/SinaNews_insert.log', 'a') as f:
            f.write('error:{} {} \n'.format(e, datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
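run() only fills one_q; parse_sina_link (and two_q) are never wired up in the code shown. A hedged sketch of how the link queue was likely meant to be consumed (illustrative, not the original; assumes `from threading import Thread`):
def run_threaded(spider, workers=3):
    # fill the queue, then drain it from a few parser threads
    spider.get_news_link()
    threads = [Thread(target=spider.parse_sina_link, args=(None,))
               for _ in range(workers)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()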
Example #58
0
class KeyClipWriter:
    def __init__(self, bufSize=64, timeout=1.0):
        # store the maximum buffer size of frames to be kept
        # in memory along with the sleep timeout during threading
        self.bufSize = bufSize
        self.timeout = timeout
        # initialize the buffer of frames, queue of frames that
        # need to be written to file, video writer, writer thread,
        # and boolean indicating whether recording has started or not
        self.frames = deque(maxlen=bufSize)
        self.Q = None
        self.writer = None
        self.thread = None
        self.recording = False

    def update(self, frame):
        # update the frames buffer
        self.frames.appendleft(frame)
        # if we are recording, update the queue as well
        if self.recording:
            self.Q.put(frame)

    def start(self, outputPath, fourcc, fps):
        # indicate that we are recording, start the video writer,
        # and initialize the queue of frames that need to be written
        # to the video file
        self.recording = True
        self.writer = cv2.VideoWriter(
            outputPath, fourcc, fps,
            (self.frames[0].shape[1], self.frames[0].shape[0]), True)
        self.Q = Queue()
        # loop over the frames in the deque structure and add them
        # to the queue
        for i in range(len(self.frames), 0, -1):
            self.Q.put(self.frames[i - 1])
        # start a thread to write frames to the video file
        self.thread = Thread(target=self.write, args=())
        self.thread.daemon = True
        self.thread.start()

    def write(self):
        # keep looping
        while True:
            # if we are done recording, exit the thread
            if not self.recording:
                return
            # check to see if there are entries in the queue
            # check to see if there are entries in the queue
            if not self.Q.empty():
                # grab the next frame in the queue and write it
                # to the video file
                frame = self.Q.get()
                self.writer.write(frame)
            else:
                # otherwise, the queue is empty, so sleep for a bit
                # so we don't waste CPU cycles
                time.sleep(self.timeout)

    def flush(self):
        # empty the queue by flushing all remaining frames to file
        while not self.Q.empty():
            frame = self.Q.get()
            self.writer.write(frame)

    def finish(self):
        # indicate that we are done recording, join the thread,
        # flush all remaining frames in the queue to file, and
        # release the writer pointer
        self.recording = False
        self.thread.join()
        self.flush()
        self.writer.release()
        return True
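A hedged usage sketch for KeyClipWriter (the capture source and trigger are illustrative; motion_detected is a hypothetical function):
kcw = KeyClipWriter(bufSize=32)
cap = cv2.VideoCapture(0)
while True:
    grabbed, frame = cap.read()
    if not grabbed:
        break
    kcw.update(frame)  # keep the rolling pre-event buffer fresh
    if motion_detected(frame) and not kcw.recording:  # hypothetical trigger
        kcw.start("clip.avi", cv2.VideoWriter_fourcc(*"MJPG"), 20)
    elif not motion_detected(frame) and kcw.recording:
        kcw.finish()
cap.release()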
def worker(job):
    time.sleep(0.5)
    with print_lock:
        print(threading.current_thread().name, 'Task' + str(job))


def worker_thread():
    while True:
        job = q.get()
        worker(job)
        q.task_done()


q = Queue()
num_jobs = 20
num_workers = 10

for wk in range(num_workers):
    t = threading.Thread(target=worker_thread)
    t.name = 'Worker-' + str(wk)
    t.daemon = True
    t.start()

start = time.time()

for job in range(num_jobs):
    q.put(job)

q.join()

print('Entire job took:', time.time() - start)
class ConnectionPool:
    """A simple pool to hold connections."""
    def __init__(self, endpoints, username, password, med_class, min_size,
                 max_size):
        self.endpoints = endpoints
        self.username = username
        self.password = password
        self.med_class = med_class
        self.endpoint_key = settings.ENDPOINTS_SEPARATOR.join(endpoints)

        self.current_size = 0
        self.max_size = max_size
        self.channel = Queue(max_size)
        self.lock = RLock()

        for _ in range(min_size):
            self.put(self.get())

    def __del__(self):
        # delete the free clients in queue, and wait for outside ones.
        with self.lock:
            while self.current_size:
                item = self.get()
                item.disconnect()
                self.current_size -= 1

    def create(self):
        logger.debug("Creating a new connection for endpoint {}".format(
            self.endpoint_key))
        return self.med_class(self.username, self.password, self.endpoints)

    def get(self, block=True, timeout=None):
        """
        Return an item from the pool, when one is available.

        This may cause the calling thread to block. Check if a connection is
        active before returning it. For dead connections, create and return a new connection.

        If optional args *block* is true and *timeout* is ``None`` (the default),
        block if necessary until an item is available. If *timeout* is a positive number,
        it blocks at most *timeout* seconds and raises the :class:`Empty` exception
        if no item was available within that time. Otherwise (*block* is false), return
        an item if one is immediately available, else raise the :class:`Empty` exception
        (*timeout* is ignored in that case).
        """

        # if there is a free and active item in the channel, return it directly.
        while True:
            try:
                item = self.channel.get(block=False)
                if item.is_active():
                    return item
                with self.lock:
                    self.current_size -= 1
                try:
                    logger.debug(
                        "The connection for storage {} is inactive, close it".
                        format(self.endpoint_key))
                    item.disconnect()
                except Exception as ex:
                    # failed to disconnect the mediator, delete the stale client.
                    logger.error(
                        "Failed to disconnect the connection for storage {} before use, "
                        "reason is {}".format(self.endpoint_key, ex))
                    del item
            except Empty:
                break

        # If there is no free items, and current_size is not full, create a new item.
        with self.lock:
            is_full = self.current_size >= self.max_size
            if not is_full:
                self.current_size += 1

        if not is_full:
            try:
                created = self.create()
            except Exception as ex:
                logger.error("Failed to create array connection")
                logger.exception(ex)
                with self.lock:
                    self.current_size -= 1
                raise ex
            return created

        # If current_size is full, waiting for an available one.
        return self.channel.get(block, timeout)

    def put(self, item):
        """
        Put an item back into the pool when done. If the pool is already
        full, the returned connection is discarded rather than blocking.
        """
        with self.lock:
            discard = self.current_size > self.max_size
            if discard:
                self.current_size -= 1

        if not discard:
            try:
                self.channel.put(item, block=False)
                return
            except Full:
                discard = True

        if discard:
            try:
                item.disconnect()
            except Exception as ex:
                # failed to disconnect the mediator, delete the stale client.
                logger.error(
                    "Failed to disconnect the connection for storage {} after use, "
                    "reason is {}".format(self.endpoint_key, ex))
                del item
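A hedged usage sketch for the pool (endpoint values and the mediator class are illustrative):
pool = ConnectionPool(endpoints=["10.0.0.5", "10.0.0.6"],
                      username="admin", password="secret",
                      med_class=MyMediator,  # hypothetical mediator class
                      min_size=2, max_size=8)

conn = pool.get(block=True, timeout=30)
try:
    conn.do_work()  # hypothetical call on the mediator
finally:
    pool.put(conn)  # always hand the connection back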