except socket.timeout:
                if lector_thread.isAlive():
                    pass
                else:
                    break
            except socket.error:
                break
        if mensaje == 'exit\n':
            break
        i = mensaje.find('\n')
        numero = mensaje[:i]
        mensaje = mensaje[i + 1:]
        lista_numeros.append(str(numero))


# --- Main chat-client setup: shared state plus the reader worker thread ---
cola = Queue()          # work queue shared with the lector/receptor threads
lista_oraciones = []    # sentences collected by the worker threads
lista_numeros = []      # line numbers collected by the worker threads
# NOTE(review): `s` is a connected socket created elsewhere in this file;
# this first recv() reads the server's greeting banner.
mensaje = s.recv(1024)
print(mensaje.decode('utf-8'))
lector_thread = threading.Thread(target=lector,
                                 args=(
                                     cola,
                                     lista_oraciones,
                                     lista_numeros,
                                 ))
lector_thread.start()
receptor_thread = threading.Thread(target=receptor,
                                   args=(
                                       cola,
                                       lista_numeros,
from threading import Thread
from queue import Queue
import random

q = Queue(5)  # bounded buffer shared by Producer/Consumer; put() blocks once 5 items are queued
class Producer(Thread):
    """Worker thread that endlessly pushes random integers onto a shared queue."""

    def __init__(self, queue):
        super().__init__()
        self.queue = queue  # bounded queue shared with the consumer

    def run(self):
        # Produce forever; put() blocks whenever the queue is at capacity,
        # which is what throttles this loop.
        while True:
            value = random.randint(0, 99)
            self.queue.put(value)
            print('生产者生产%s' % value)

class Consumer(Thread):
    """Worker thread that endlessly drains items from a shared queue."""

    def __init__(self, queue):
        super().__init__()
        self.queue = queue  # queue fed by the producer

    def run(self):
        # get() blocks until the producer has put something on the queue.
        while True:
            value = self.queue.get()
            print('消费者消费%s' % value)
            # self.queue.task_done() would acknowledge the item for queue
            # join()ers; the original keeps it disabled, so we do too.

# Wire both workers to the shared bounded queue and let them run forever.
p = Producer(q)
c = Consumer(q)
p.start()
c.start()
Example #3
0
#Queue

from queue import Queue, Full
q = Queue(maxsize=3)

# A fresh bounded queue: size 0, empty, not full.
print(q.qsize())
print(q.queue)
print(q.empty())
print(q.full())
q.put('A')
q.put('B')
q.put('C')
# BUG FIX: the queue already holds 3 items (maxsize=3), so put_nowait()
# raises queue.Full and crashed the demo mid-script; catch it so the
# remaining inspection calls still run.
try:
    q.put_nowait('D')
except Full:
    print("queue.Full: 'D' was not added")
print(q.qsize())
print(q.queue)
print(q.empty())
print(q.full())

#var = q.get()
#print(var)
#print(q.queue)
#var = q.get()
#var = q.get()
#var = q.get()
#print('処理終了')
Example #4
0
    def __init__(self):
        """Build the mpv-backed player: state, mpv instance, key bindings and event hooks."""
        mpv_config = conffile.get(APP_NAME,"mpv.conf", True)
        input_config = conffile.get(APP_NAME,"input.conf", True)
        extra_options = {}
        self._video = None
        self._lock = RLock()
        self._finished_lock = Lock()
        self.last_update = Timer()
        self.__part = 1
        self.timeline_trigger = None
        self.action_trigger = None
        self.external_subtitles = {}
        self.external_subtitles_rev = {}
        self.url = None
        self.evt_queue = Queue()
        self.is_in_intro = False
        self.intro_has_triggered = False

        if is_using_ext_mpv:
            extra_options = {
                "start_mpv": settings.mpv_ext_start,
                "ipc_socket": settings.mpv_ext_ipc,
                "mpv_location": settings.mpv_ext_path,
                "player-operation-mode": "cplayer"
            }
        # todo figure out how to put these in a file
        # BUG FIX: the original reassigned `extra_options = {...}` here,
        # silently discarding the external-mpv options built just above;
        # merge the extra key instead of replacing the dict.
        extra_options['script-opts'] = 'osc-layout=slimbox,osc-deadzonesize=.9,osc-valign=1.05'
        self._player = mpv.MPV(input_default_bindings=True, input_vo_keyboard=True,
                               input_media_keys=True, include=mpv_config, input_conf=input_config,
                               log_handler=mpv_log_handler, loglevel=settings.mpv_log_level,
                               **extra_options)
        self.menu = OSDMenu(self)
        self.auto_insert = False

        # Mirror each new subtitle line to the clipboard when auto-insert is on.
        def on_new_sub(name, text):
            if not self.auto_insert:
                return
            if not text or not text.strip():
                return
            pyperclip.copy(text.replace('\n', ' '))

        self._player.observe_property('sub-text', on_new_sub)

        if hasattr(self._player, 'osc'):
            self._player.osc = settings.enable_osc
        else:
            log.warning("This mpv version doesn't support on-screen controller.")

        # Wrapper for on_key_press that ignores None.
        def keypress(key):
            def wrapper(func):
                if key is not None:
                    self._player.on_key_press(key)(func)
                return func
            return wrapper

        @self._player.on_key_press('CLOSE_WIN')
        @self._player.on_key_press('STOP')
        @keypress(settings.kb_stop)
        def handle_stop():
            self.stop()
            self.timeline_handle()

        @keypress(settings.kb_prev)
        def handle_prev():
            self.put_task(self.play_prev)

        @keypress(settings.kb_next)
        def handle_next():
            self.put_task(self.play_next)

        @self._player.on_key_press('PREV')
        @self._player.on_key_press('XF86_PREV')
        def handle_media_prev():
            if settings.media_key_seek:
                self._player.command("seek", -15)
            else:
                self.put_task(self.play_prev)

        @self._player.on_key_press('NEXT')
        @self._player.on_key_press('XF86_NEXT')
        def handle_media_next():
            if settings.media_key_seek:
                if self.is_in_intro:
                    self.skip_intro()
                else:
                    self._player.command("seek", 30)
            else:
                self.put_task(self.play_next)

        @keypress(settings.kb_watched)
        def handle_watched():
            self.put_task(self.watched_skip)

        @keypress(settings.kb_unwatched)
        def handle_unwatched():
            self.put_task(self.unwatched_quit)

        @keypress(settings.kb_menu)
        def menu_open():
            if not self.menu.is_menu_shown:
                self.menu.show_menu()
            else:
                self.menu.hide_menu()

        @keypress(settings.kb_menu_esc)
        def menu_back():
            if self.menu.is_menu_shown:
                self.menu.menu_action('back')
            else:
                self._player.command('set', 'fullscreen', 'no')

        @keypress(settings.kb_menu_ok)
        def menu_ok():
            self.menu.menu_action('ok')

        @keypress(settings.kb_menu_left)
        def menu_left():
            if self.menu.is_menu_shown:
                self.menu.menu_action('left')
            else:
                self._player.command("seek", settings.seek_left)

        @keypress(settings.kb_menu_right)
        def menu_right():
            if self.menu.is_menu_shown:
                self.menu.menu_action('right')
            else:
                if self.is_in_intro:
                    self.skip_intro()
                else:
                    self._player.command("seek", settings.seek_right)

        @keypress(settings.kb_menu_up)
        def menu_up():
            if self.menu.is_menu_shown:
                self.menu.menu_action('up')
            else:
                if self.is_in_intro:
                    self.skip_intro()
                else:
                    self._player.command("seek", settings.seek_up)

        @keypress(settings.kb_menu_down)
        def menu_down():
            if self.menu.is_menu_shown:
                self.menu.menu_action('down')
            else:
                self._player.command("seek", settings.seek_down)

        @keypress(settings.kb_pause)
        def handle_pause():
            if self.menu.is_menu_shown:
                self.menu.menu_action('ok')
            else:
                self.toggle_pause()

        # This gives you an interactive python debugger prompt.
        @keypress(settings.kb_debug)
        def handle_debug():
            import pdb
            pdb.set_trace()

        @self._player.on_key_press('ctrl+c')
        def copy_current_sub():
            try:
                sub = self._player.sub_text
                pyperclip.copy(sub)
            except AttributeError:
                pass  # no subtitle available.

        # Screenshot the current frame (optionally burned-in subtitles) to the
        # Windows clipboard as a DIB (the 14-byte BMP file header is stripped).
        def copy_screenshot(subtitles=True):
            includes = 'subtitles' if subtitles else 'video'
            from io import BytesIO
            import win32clipboard
            image = self._player.screenshot_raw(includes=includes)
            output = BytesIO()
            image.convert("RGB").save(output, "BMP")
            data = output.getvalue()[14:]
            output.close()
            win32clipboard.OpenClipboard()
            win32clipboard.EmptyClipboard()
            win32clipboard.SetClipboardData(win32clipboard.CF_DIB, data)
            win32clipboard.CloseClipboard()

        @self._player.on_key_press('ctrl+s')
        def copy_current_image():
            copy_screenshot(subtitles=True)

        # FIX: renamed from the duplicate `copy_current_image` so the two
        # handlers no longer shadow each other's names (both were already
        # registered via the decorator, so behavior is unchanged).
        @self._player.on_key_press('ctrl+shift+s')
        def copy_current_image_no_subs():
            copy_screenshot(subtitles=False)

        @self._player.on_key_press('ctrl+v')
        def output_audio():
            import subprocess
            import string
            import unicodedata
            sub_delay = round(self._player.sub_delay, 4)  # round b/c of weird mpv precision
            sub_start = self._player.sub_start + sub_delay
            if sub_start:
                print("Outputting current subtitle...")
                valid_fn_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
                fn_dirty = "%s - %s" % (self._player.media_title, str(int(sub_start * 1000)))
                fn = unicodedata.normalize('NFKD', fn_dirty).encode('ASCII', 'ignore')
                fn = ''.join(chr(c) for c in fn if chr(c) in valid_fn_chars)
                aid = [x for x in self._player.track_list
                       if x.get("type") == "audio" and x.get("selected")][0].get("id")
                subprocess.Popen([
                    'mpv',
                    self.url,
                    '-o',
                    '%s.mp3' % fn,
                    '--no-video',
                    '--start=%s' % sub_start,
                    '--end=%s' % (self._player.sub_end + sub_delay),
                    '--aid=%s' % aid,
                ])
                self._player.screenshot_to_file("%s.png" % fn, includes='video')
                with open('%s.txt' % fn, 'w+', encoding='utf-8') as f:
                    f.write(self._player.sub_text)

        @self._player.on_key_press('ctrl+a')
        def toggle_auto_insert():
            self.auto_insert = not self.auto_insert
            self._player.show_text('Auto insert %s' % ("on" if self.auto_insert else "off"))

        # Fires between episodes.
        @self._player.property_observer('eof-reached')
        def handle_end(_name, reached_end):
            if self._video and reached_end:
                has_lock = self._finished_lock.acquire(False)
                self.put_task(self.finished_callback, has_lock)

        # Fires at the end.
        @self._player.event_callback('idle')
        def handle_end_idle(event):
            if self._video:
                has_lock = self._finished_lock.acquire(False)
                self.put_task(self.finished_callback, has_lock)
Example #5
0
# num_pass: number of pygame modules that initialized successfully
# num_fail: number of pygame modules that failed to initialize
num_pass, num_fail = pygame.init()

# The window is 640x480
world_width, world_height = 640, 480
screen = pygame.display.set_mode((world_width, world_height))

# Key state flags, in order [W, A, S, D]
keys = [False, False, False, False]

# Player position (x, y)
player_pos = [100, 100]

# In-flight arrows
arrow_infos = Queue()

# Arrow accuracy: [number of hits, number of shots fired]
accuracy = [0, 0]

# Enemy spawn timing (spawn every `badguy_appear` ticks)
timer = 0
badguy_appear = 100

# Enemy spawn positions; seed one enemy at the right edge
badguy_infos = Queue()
badguy_infos.put([640, 100])

# Player health
player_health = 194
Example #6
0
class GuiView(ABC):
    """Base class for GUI views built on a window/event loop.

    Subclasses declare themselves via ``class MyView(GuiView, view_name=...)``;
    ``__init_subclass__`` captures per-subclass handler tables and config.
    Iterating an instance yields (event, data) pairs read from its window.
    """

    primary: bool
    name: str = None
    log = logging.getLogger(__name__)
    permissive_handler_names: bool = True
    allow_no_handler: bool = True
    # Class-level shared state: only one view is active at a time.
    active_view: Optional['GuiView'] = None
    window: Optional[Window] = None
    pending_prompts = Queue()
    config = GuiConfig(auto_save=True, defaults=DEFAULT_SETTINGS)
    default_handler: Optional[Callable] = None
    # {view name: {compiled fnmatch pattern: handler}} — built lazily per class.
    wildcard_handlers: dict[str, dict[Callable, Callable]] = {}
    event_handlers = {}
    _event_handlers = {}
    _primary_kwargs = {}
    _counter = count()
    _ele_event_match = re.compile(r'^(.*?):::([a-zA-Z_]+)$').match
    _window_size: tuple[Optional[int], Optional[int]] = (None, None)  # width, height
    _window_pos: tuple[Optional[int], Optional[int]] = (None, None)  # x, y
    _log_clicks: bool = False

    # noinspection PyMethodOverriding
    def __init_subclass__(
        cls,
        view_name: str,
        primary: bool = True,
        defaults: Mapping[str, Any] = None,
        permissive_handler_names: bool = None,
        allow_no_handler: bool = None,
        config_path: Union[str, 'Path'] = None,
    ):
        """Register per-subclass name, logger, handler table, and config overrides."""
        cls.name = view_name
        cls.log = ViewLoggerAdapter(cls)
        cls.primary = primary
        # Merge handlers registered during class body execution into a fresh
        # per-class table, then clear the staging dict for the next subclass.
        cls.event_handlers = cls.event_handlers.copy() | {k: v[0] for k, v in cls._event_handlers.items()}
        cls._event_handlers.clear()
        cls.default_handler = getattr(cls, '_default_handler', None)
        if cls.default_handler is not None:
            del cls._default_handler  # noqa
        if config_path:  # The latest class to set this wins - does not support multiple paths within the same run
            cls.config.path = config_path
        if defaults:
            cls.config.defaults.update(defaults)
        if permissive_handler_names is not None:
            cls.permissive_handler_names = permissive_handler_names
        if allow_no_handler is not None:
            cls.allow_no_handler = allow_no_handler
        # print(f'Initialized subclass={cls.__name__!r}')

    def __init__(self, binds: Mapping[str, str] = None, read_timeout_ms: int = None, **kwargs):
        """Set per-instance state; non-primary views remember the view they were opened from."""
        self._init_event = kwargs.get('init_event')
        self.parent: Optional[GuiView] = None if self.primary else GuiView.active_view
        self._monitor = None
        self._view_num = next(self._counter)
        self.binds = binds or {}
        self.read_timeout_ms = read_timeout_ms
        # self.log.debug(f'{self} initialized with handlers: {", ".join(sorted(self.event_handlers))}')
        if self.name not in self.wildcard_handlers:  # Populate/compile wildcard patterns once per class
            self.wildcard_handlers[self.name] = wildcard_handlers = {}
            for key, handler in self.event_handlers.items():
                if isinstance(key, str) and any(c in key for c in '*?[]'):
                    wildcard_handlers[_compile_pattern(key)] = handler

    def __repr__(self):
        return f'<{self.__class__.__name__}[{self.name}][{self.primary=!r}][handlers: {len(self.event_handlers)}]>'

    def __iter__(self):
        return self

    def __next__(self) -> tuple[Event, EventData]:
        """Read one (event, data) pair from the window; StopIteration on exit/close."""
        # self.log.debug(f'[View#{self._view_num}] Calling self.window.read...', extra={'color': 11})
        event, data = self.window.read(self.read_timeout_ms)
        # self.log.debug(f'[View#{self._view_num}] Read {event=}', extra={'color': 10})
        if event == 'Exit' or event == WIN_CLOSED:
            raise StopIteration
        return event, data

    def run(self):
        """Dispatch window events until the window is closed or a handler stops iteration."""
        for event, data in self:
            try:
                self.handle_event(event, data)
            except StopIteration:
                break

        self.window.close()

    @classmethod
    def start(cls, cls_kwargs=None, init_event: tuple[Event, EventData] = None, interactive: bool = False, **kwargs):
        """Create, render, and (unless interactive) run this view's event loop."""
        if cls.active_view is not None:
            raise RuntimeError(f'{cls.active_view!r} is already active - only one view may be active at a time')
        theme(cls.config['theme'])
        cls._primary_kwargs.update(kwargs)
        if size := kwargs.get('size'):
            GuiView._window_size = size

        obj = cls(init_event=init_event, **(cls_kwargs or {}))
        obj.render()
        if init_event:
            obj.window.write_event_value(*init_event)  # Note: data[event] => the EventData value passed here

        if not interactive:
            # Drive active_view (not obj) since handlers may swap the active view.
            while True:
                try:
                    event, data = next(cls.active_view)  # noqa
                    cls.active_view.handle_event(event, data)  # noqa
                except StopIteration:
                    break

            # NOTE(review): closes the class-level `window` attribute —
            # presumably set by obj.render(); confirm it is not meant to be
            # cls.active_view.window.close().
            cls.window.close()
        else:
            Window.unregister_sigint_handler()
    def __init__(self,
                 token=None,
                 base_url=None,
                 workers=4,
                 bot=None,
                 private_key=None,
                 private_key_password=None,
                 user_sig_handler=None,
                 request_kwargs=None,
                 persistence=None,
                 defaults=None,
                 use_context=False,
                 dispatcher=None,
                 base_file_url=None):
        """Build the updater either from scratch (token/bot) or around an
        existing ``dispatcher``, wiring bot, queues, and job queue.

        Raises ValueError for mutually exclusive argument combinations.
        """
        # Argument validation: the two construction modes accept disjoint args.
        if dispatcher is None:
            if (token is None) and (bot is None):
                raise ValueError('`token` or `bot` must be passed')
            if (token is not None) and (bot is not None):
                raise ValueError('`token` and `bot` are mutually exclusive')
            if (private_key is not None) and (bot is not None):
                raise ValueError('`bot` and `private_key` are mutually exclusive')
        else:
            if bot is not None:
                raise ValueError('`dispatcher` and `bot` are mutually exclusive')
            if persistence is not None:
                raise ValueError('`dispatcher` and `persistence` are mutually exclusive')
            # NOTE(review): `workers` defaults to 4, so passing `dispatcher`
            # without explicitly setting workers=None always raises here —
            # confirm this is the intended contract.
            if workers is not None:
                raise ValueError('`dispatcher` and `workers` are mutually exclusive')
            if use_context != dispatcher.use_context:
                raise ValueError('`dispatcher` and `use_context` are mutually exclusive')

        self.logger = logging.getLogger(__name__)

        if dispatcher is None:
            # Build everything ourselves: bot, queues, and a new Dispatcher.
            con_pool_size = workers + 4

            if bot is not None:
                self.bot = bot
                if bot.request.con_pool_size < con_pool_size:
                    self.logger.warning(
                        'Connection pool of Request object is smaller than optimal value (%s)',
                        con_pool_size)
            else:
                # we need a connection pool the size of:
                # * for each of the workers
                # * 1 for Dispatcher
                # * 1 for polling Updater (even if webhook is used, we can spare a connection)
                # * 1 for JobQueue
                # * 1 for main thread
                if request_kwargs is None:
                    request_kwargs = {}
                if 'con_pool_size' not in request_kwargs:
                    request_kwargs['con_pool_size'] = con_pool_size
                self._request = Request(**request_kwargs)
                self.bot = Bot(token,
                               base_url,
                               base_file_url=base_file_url,
                               request=self._request,
                               private_key=private_key,
                               private_key_password=private_key_password,
                               defaults=defaults)
            self.update_queue = Queue()
            self.job_queue = JobQueue()
            self.__exception_event = Event()
            self.persistence = persistence
            self.dispatcher = Dispatcher(self.bot,
                                         self.update_queue,
                                         job_queue=self.job_queue,
                                         workers=workers,
                                         exception_event=self.__exception_event,
                                         persistence=persistence,
                                         use_context=use_context)
            self.job_queue.set_dispatcher(self.dispatcher)
        else:
            # Reuse the caller-supplied dispatcher and everything hanging off it.
            con_pool_size = dispatcher.workers + 4

            self.bot = dispatcher.bot
            if self.bot.request.con_pool_size < con_pool_size:
                self.logger.warning(
                    'Connection pool of Request object is smaller than optimal value (%s)',
                    con_pool_size)
            self.update_queue = dispatcher.update_queue
            self.__exception_event = dispatcher.exception_event
            self.persistence = dispatcher.persistence
            self.job_queue = dispatcher.job_queue
            self.dispatcher = dispatcher

        self.user_sig_handler = user_sig_handler
        self.last_update_id = 0
        self.running = False
        self.is_idle = False
        self.httpd = None
        self.__lock = Lock()
        self.__threads = []

        # Just for passing to WebhookAppClass
        self._default_quote = defaults.quote if defaults else None
Example #8
0
 def __init__(self):
     """Give this object its own message queue and the next sequential id."""
     self.message_queue = Queue()
     # NOTE(review): the += below reads the class-level counter but writes an
     # *instance* attribute, so the class counter never advances — confirm
     # this is intended before relying on ids being unique.
     self.id = self.instance_id
     self.instance_id += 1
 def use_queues(self):
     """Demonstrate round-tripping one message through a fresh Queue."""
     demo_queue = Queue()
     print(demo_queue)  # show the Queue object's repr
     demo_queue.put('Message from a queue')
     print(demo_queue.get())  # immediately drain the single message
Example #10
0
 def __init__(self):
     # Private FIFO buffer backing this object's stream of items.
     self.__stream = Queue()
Example #11
0
 def register(self, signal_id):
     """Ensure a message queue exists for signal_id; first registration wins."""
     table = self.signal_message_table
     if signal_id not in table:
         table[signal_id] = Queue()
Example #12
0
        port = 80
    else:
        port = opts.port
    if opts.turbo is None:
        thr = 135
    else:
        thr = opts.turbo


# reading headers
# FIX: use a context manager so the file handle is closed even if read()
# raises; the original's module-level `global data` statement was a no-op
# (global declarations only matter inside a function body) and is removed.
with open("headers.txt", "r") as headers:
    data = headers.read()
#task queue are q,w
q = Queue()
w = Queue()

if __name__ == '__main__':
    if len(sys.argv) < 2:
        usage()
    get_parameters()
    print("\033[92m", host, " port: ", str(port), " turbo: ", str(thr),
          "\033[0m")
    print("\033[94mPlease wait...\033[0m")
    user_agent()
    my_bots()
    time.sleep(5)
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((host, int(port)))
def test_get_completed_trackers(db_manager, gatekeeper, carrier,
                                block_processor):
    """Check that get_completed_trackers only reports trackers whose penalty
    tx has reached 100 confirmations.

    NOTE(review): statement order matters throughout — blocks are mined
    between the two broadcast rounds to stagger confirmation counts.
    """
    responder = Responder(db_manager, gatekeeper, carrier, block_processor)
    chain_monitor = ChainMonitor(Queue(), responder.block_queue,
                                 block_processor, bitcoind_feed_params)
    chain_monitor.monitor_chain()

    # A complete tracker is a tracker which penalty transaction has been irrevocably resolved (i.e. has reached 100
    # confirmations)
    # We'll create 3 type of txs: irrevocably resolved, confirmed but not irrevocably resolved, and unconfirmed
    trackers_ir_resolved = {
        uuid4().hex:
        create_dummy_tracker(penalty_rawtx=create_dummy_transaction().hex())
        for _ in range(10)
    }

    trackers_confirmed = {
        uuid4().hex:
        create_dummy_tracker(penalty_rawtx=create_dummy_transaction().hex())
        for _ in range(10)
    }

    trackers_unconfirmed = {}
    for _ in range(10):
        tracker = create_dummy_tracker(
            penalty_rawtx=create_dummy_transaction().hex())
        responder.unconfirmed_txs.append(tracker.penalty_txid)
        trackers_unconfirmed[uuid4().hex] = tracker

    all_trackers = {}
    all_trackers.update(trackers_ir_resolved)
    all_trackers.update(trackers_confirmed)
    all_trackers.update(trackers_unconfirmed)

    # Let's add all to the Responder
    for uuid, tracker in all_trackers.items():
        responder.trackers[uuid] = tracker.get_summary()

    # Broadcast the first group now so it accumulates the most confirmations.
    for uuid, tracker in trackers_ir_resolved.items():
        bitcoin_cli(bitcoind_connect_params).sendrawtransaction(
            tracker.penalty_rawtx)

    generate_block_w_delay()

    # The second group is broadcast one block later than the first.
    for uuid, tracker in trackers_confirmed.items():
        bitcoin_cli(bitcoind_connect_params).sendrawtransaction(
            tracker.penalty_rawtx)

    # ir_resolved have 100 confirmations and confirmed have 99
    generate_blocks_w_delay(99)

    # Let's check
    completed_trackers = responder.get_completed_trackers()
    ended_trackers_keys = list(trackers_ir_resolved.keys())
    assert set(completed_trackers) == set(ended_trackers_keys)

    # Generating 1 additional blocks should also include confirmed
    generate_block_w_delay()

    completed_trackers = responder.get_completed_trackers()
    ended_trackers_keys.extend(list(trackers_confirmed.keys()))
    assert set(completed_trackers) == set(ended_trackers_keys)
def test_do_watch(temp_db_manager, gatekeeper, carrier, block_processor):
    """Exercise Responder.do_watch end to end: rebroadcast of unconfirmed
    penalty txs and tracker/gatekeeper cleanup as confirmations accumulate.

    NOTE(review): the block counts below are tuned against
    CONFIRMATIONS_BEFORE_RETRY and the 100-confirmation completion rule;
    the mining steps must happen in this exact order.
    """
    # Create a fresh responder to simplify the test
    responder = Responder(temp_db_manager, gatekeeper, carrier,
                          block_processor)
    chain_monitor = ChainMonitor(Queue(), responder.block_queue,
                                 block_processor, bitcoind_feed_params)
    chain_monitor.monitor_chain()

    trackers = [
        create_dummy_tracker(penalty_rawtx=create_dummy_transaction().hex())
        for _ in range(20)
    ]
    subscription_expiry = responder.block_processor.get_block_count() + 110

    # Let's set up the trackers first
    for tracker in trackers:
        uuid = uuid4().hex

        # Simulate user registration so trackers can properly expire
        responder.gatekeeper.registered_users[tracker.user_id] = UserInfo(
            available_slots=10, subscription_expiry=subscription_expiry)

        # Add data to the Responder
        responder.trackers[uuid] = tracker.get_summary()
        responder.tx_tracker_map[tracker.penalty_txid] = [uuid]
        responder.missed_confirmations[tracker.penalty_txid] = 0
        responder.unconfirmed_txs.append(tracker.penalty_txid)
        # Assuming the appointment only took a single slot
        responder.gatekeeper.registered_users[
            tracker.user_id].appointments[uuid] = 1

        # We also need to store the info in the db
        responder.db_manager.create_triggered_appointment_flag(uuid)
        responder.db_manager.store_responder_tracker(uuid, tracker.to_dict())

    # Let's start to watch
    Thread(target=responder.do_watch, daemon=True).start()

    # And broadcast some of the transactions
    broadcast_txs = []
    for tracker in trackers[:5]:
        bitcoin_cli(bitcoind_connect_params).sendrawtransaction(
            tracker.penalty_rawtx)
        broadcast_txs.append(tracker.penalty_txid)

    # Mine a block
    generate_block_w_delay()

    # The transactions we sent shouldn't be in the unconfirmed transaction list anymore
    assert not set(broadcast_txs).issubset(responder.unconfirmed_txs)

    # CONFIRMATIONS_BEFORE_RETRY+1 blocks after, the responder should rebroadcast the unconfirmed txs (15 remaining)
    generate_blocks_w_delay(CONFIRMATIONS_BEFORE_RETRY + 1)
    assert len(responder.unconfirmed_txs) == 0
    assert len(responder.trackers) == 20

    # Generating 100 - CONFIRMATIONS_BEFORE_RETRY -2 additional blocks should complete the first 5 trackers
    generate_blocks_w_delay(100 - CONFIRMATIONS_BEFORE_RETRY - 2)
    assert len(responder.unconfirmed_txs) == 0
    assert len(responder.trackers) == 15
    # Check they are not in the Gatekeeper either
    for tracker in trackers[:5]:
        assert len(responder.gatekeeper.registered_users[
            tracker.user_id].appointments) == 0

    # CONFIRMATIONS_BEFORE_RETRY additional blocks should complete the rest
    generate_blocks_w_delay(CONFIRMATIONS_BEFORE_RETRY)
    assert len(responder.unconfirmed_txs) == 0
    assert len(responder.trackers) == 0
    # Check they are not in the Gatekeeper either
    for tracker in trackers[5:]:
        assert len(responder.gatekeeper.registered_users[
            tracker.user_id].appointments) == 0
Example #15
0
from flask_socketio import SocketIO
from queue import Queue
from flask_cors import CORS

app = Flask(__name__)

# Allow cross-origin requests from any origin on every route.
cors = CORS(app, resources={r"/*": {"origins": "*"}})

# Connection settings come from the environment; None if unset.
postgres_DB = os.getenv('postgres_DB', None)
postgres_DB_users = os.getenv('postgres_DB_users', None)

PORT = os.getenv('PORT', 5000)
# Threading async mode so the background emitter task below can run.
socketio = SocketIO(app, async_mode="threading")
thread = None

# Cross-thread buffer of (event, payload) pairs to emit over Socket.IO.
logs_queue = Queue()

def background_thread():
    """Forever drain logs_queue, emitting each (event, payload) pair over Socket.IO.

    Sleeps 200 ms between drain passes to yield control to the server.
    """
    # FIX: removed the original's unused `count` accumulator.
    while True:
        socketio.sleep(0.2)
        while not logs_queue.empty():
            message = logs_queue.get()
            print("emitting obj", message)
            socketio.emit(message[0], message[1])

socketio.start_background_task(background_thread)

@app.route("/Test", methods=['POST', 'GET'])
@requires_auth(["admin"])
def test():
Example #16
0
 def add_queue(self, qname):
     """Attach a fresh unbounded Queue as this object's I/O queue.

     `qname` is kept for interface compatibility but is currently unused:
     the earlier queue_manager-based lookup it named is disabled.
     """
     self.IOqueue = Queue()
 def __init__(self, args, pipelinecommit, startingtime, scriptpath):
     """
     Validate paths, copy run parameters off `args`, build the worker
     queues, and immediately launch the analyses via self.runner().

     :param args: command line arguments
     :param pipelinecommit: pipeline commit or version
     :param startingtime: time the script was started
     :param scriptpath: home path of the script
     """
     import multiprocessing
     from queue import Queue
     # Initialise variables
     self.commit = str(pipelinecommit)
     self.starttime = startingtime
     self.homepath = scriptpath
     self.analysistype = args.analysistype
     # Define variables based on supplied arguments
     self.path = os.path.join(args.path, '')
     assert os.path.isdir(
         self.path
     ), u'Supplied path is not a valid directory {0!r:s}'.format(self.path)
     self.sequencepath = os.path.join(args.sequencepath, '')
     assert os.path.isdir(self.sequencepath), u'Sequence path  is not a valid directory {0!r:s}' \
         .format(self.sequencepath)
     self.targetpath = os.path.join(args.targetpath, self.analysistype, '')
     # Older argument sets may lack reportpath; default under the main path.
     try:
         self.reportpath = args.reportpath
     except AttributeError:
         self.reportpath = os.path.join(self.path, 'reports')
     assert os.path.isdir(self.targetpath), u'Target path is not a valid directory {0!r:s}' \
         .format(self.targetpath)
     self.bcltofastq = args.bcltofastq
     self.miseqpath = args.miseqpath
     self.miseqfolder = args.miseqfolder
     self.fastqdestination = args.fastqdestination
     self.forwardlength = args.forwardlength
     self.reverselength = args.reverselength
     # Paired-end if a reverse read length was supplied, else single-end.
     self.numreads = 2 if self.reverselength != 0 else 1
     self.customsamplesheet = args.customsamplesheet
     # Set the custom cutoff value
     self.cutoff = args.cutoff
     # Use the argument for the number of threads to use, or default to the number of cpus in the system
     self.cpus = int(
         args.cpus if args.cpus else multiprocessing.cpu_count())
     self.threads = int()
     self.runmetadata = args.runmetadata
     self.pipeline = args.pipeline
     self.copy = args.copy
     self.devnull = open(os.path.devnull, 'w')
     # Worker queues bounded by the cpu count so producers can't run ahead.
     self.samplequeue = Queue(maxsize=self.cpus)
     self.fastaqueue = Queue(maxsize=self.cpus)
     self.blastqueue = Queue(maxsize=self.cpus)
     self.baitfile = str()
     # Genus -> expected species for the supported organisms.
     self.taxonomy = {
         'Escherichia': 'coli',
         'Listeria': 'monocytogenes',
         'Salmonella': 'enterica'
     }
     # Fields used for custom outfmt 6 BLAST output:
     self.fieldnames = [
         'query_id', 'subject_id', 'positives', 'mismatches', 'gaps',
         'evalue', 'bit_score', 'subject_length', 'alignment_length',
         'query_start', 'query_end', 'query_sequence', 'subject_start',
         'subject_end', 'subject_sequence'
     ]
     # Run the analyses
     self.runner()
Example #18
0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2019 Random.Zebra (https://github.com/random-zebra/)
# Distributed under the MIT software license, see the accompanying
# file LICENSE.txt or http://www.opensource.org/licenses/mit-license.php.

import os
from queue import Queue

# Global work queue of messages (strings) shared across threads.
wqueue = Queue()  # type: Queue[str]

# BIP-44 derivation path prefixes for the supported devices/networks.
MPATH_LEDGER = "44'/77'/"
MPATH_TREZOR = "44'/119'/"
MPATH_TESTNET = "44'/1'/"
# Mainnet prefix bytes.
WIF_PREFIX = 212  # 212 = d4
MAGIC_BYTE = 30
STAKE_MAGIC_BYTE = 63
# Testnet counterparts.
TESTNET_WIF_PREFIX = 239
TESTNET_MAGIC_BYTE = 139
TESTNET_STAKE_MAGIC_BYTE = 73
DEFAULT_PROTOCOL_VERSION = 70915
MINIMUM_FEE = 0.0001  # minimum PIV/kB
# Default main-window geometry (pixels).
starting_width = 933
starting_height = 666
# Per-user application data locations under the home directory.
APPDATA_DIRNAME = ".SecurePivxMasternodeTool"
home_dir = os.path.expanduser('~')
user_dir = os.path.join(home_dir, APPDATA_DIRNAME)
log_File = os.path.join(user_dir, 'debug.log')
database_File = os.path.join(user_dir, 'application.db')
# Block heights at which the new signature scheme activates.
NEW_SIGS_HEIGHT_MAINNET = 2153200
NEW_SIGS_HEIGHT_TESTNET = 1347000
Example #19
0
# coding:utf-8
import time
from queue import Queue, Empty
from threading import Event, Thread
import slavewg

# Global stop flag and the queue of tasks the run() loop executes.
stoped = Event()
tasks_queue = Queue()

# Scripts to run (enable as needed):
# ads = slavewg.Ads(stoped, tasks_queue)
# ads.start()

# lbl = slavewg.LootBlackLotus(stoped, tasks_queue)
# lbl.start(lbl.position['东泉谷山顶黑花'])

# Press key '9' every 30 seconds -- presumably a heal hotkey; confirm in slavewg.Tabinterval.
heal = slavewg.Tabinterval(stoped, tasks_queue)
heal.start('9', 30)

# Press key '2' every 4 minutes.
fw = slavewg.Tabinterval(stoped, tasks_queue)
fw.start('2', 60 * 4)


def run():
    """Execute queued tasks until the global ``stoped`` event is set.

    Polls ``tasks_queue`` for callables and invokes them one at a time;
    returns as soon as ``stoped`` is set.
    """
    # Grace period before the task loop starts -- presumably to let the
    # helper threads above spin up; confirm.
    time.sleep(5)
    while not stoped.wait(0.1):
        try:
            func = tasks_queue.get(timeout=1)
        except Empty:
            continue
        # BUG FIX: call the task outside the try block so an Empty raised
        # by the task itself is no longer silently swallowed.
        func()
    def fit(self):
        """Run DBSCAN clustering over the points in ``self.df``.

        Appends a label column to ``self.df`` (initialised to -1,
        "unclassified"), then labels every point with either a cluster id
        (``self.cluster_label`` is incremented per cluster) or
        ``self.noise``.

        BUG FIX: border points were detected with ``label == 0`` instead of
        ``label == self.noise``, which broke whenever ``self.noise != 0``.
        """
        # Append the label column initialised to -1 (unclassified).
        labels = np.array([[-1] * len(self.df)]).reshape(-1, 1)
        self.df = np.append(self.df, labels, axis=1)

        for row in range(len(self.df)):
            # Skip points that already carry a label.
            if self.df[row, 2] != -1:
                continue

            # Find this point's neighbourhood.
            neighbors = self.rangeQuery(self.df[row, :2])

            # Too few neighbours: mark as noise (may become a border point
            # later if it turns out to be reachable from a core point).
            if len(neighbors) < self.min_points:
                self.df[row, 2] = self.noise
                continue

            # Start a new cluster seeded at this core point.
            self.cluster_label += 1
            self.df[row, 2] = self.cluster_label

            # Breadth-first expansion over the cluster's reachable points.
            # ``seen`` gives O(1) membership tests (was an O(n) list scan).
            seen = set(neighbors)
            frontier = Queue()
            for idx in neighbors:
                frontier.put(idx)

            while not frontier.empty():
                current = frontier.get()

                # Noise reachable from a core point becomes a border point.
                if self.df[current, 2] == self.noise:
                    self.df[current, 2] = self.cluster_label

                # Anything already classified (including the border relabel
                # just above) is not expanded further.
                if self.df[current, 2] != -1:
                    continue

                # Label the neighbour and look at its own neighbourhood.
                self.df[current, 2] = self.cluster_label
                extended = self.rangeQuery(self.df[current, :2])

                # Core point: enqueue its not-yet-seen neighbours.
                if len(extended) >= self.min_points:
                    for idx in extended:
                        if idx not in seen:
                            seen.add(idx)
                            frontier.put(idx)
def main():
    """Crawl job listings: fetch pages into a shared queue, then parse them."""
    # Bounded queue so the fetcher cannot run arbitrarily far ahead of the parser.
    jobs = Queue(450)

    request_page(jobs)
    parse_detail(jobs)
Example #22
0
def get_question_yggk():
    """Scrape admission Q&A threads from gaokao.chsi.com.cn for 985/211
    universities and write one CSV per university under ``Data/``.

    Universities (name, forum_id) are read from the pickled
    ``university_info`` file; pages are downloaded in batches of up to 10
    worker threads, with a single saver thread per batch draining
    ``record_queue`` (a ``-1`` sentinel ends each batch).
    """
    file_path = "Data"
    university_formid = []
    with open("university_info", "rb") as p_file:
        university_infos = pickle.load(p_file)
    # Keep only 985/211 universities that actually have a forum.
    for info in university_infos:
        if "985" in info["院校特性"] or "211" in info["院校特性"]:
            if info["forum_id"] != "":
                university_formid.append([info["院校名称"], info["forum_id"]])
    print("共有%d所985、211大学" % len(university_formid))
    for university in university_formid:
        begin = time.time()
        print("开始抓取" + university[0] + "的招生问题数据...")
        main_page_url = "https://gaokao.chsi.com.cn/zxdy/forum--method-listDefault,year-2005,forumid-" + university[
            1] + ",start-0.dhtml"
        try:
            main_page_source = request_url(main_page_url)
            main_page_source.encoding = main_page_source.apparent_encoding
            main_page_soup = BeautifulSoup(main_page_source.content, "lxml")
            # Total page count: the pager either contains an ellipsis
            # element ("lip dot") or it does not -- two lookup strategies.
            if main_page_soup.find("li", class_="lip dot"):
                page_count = main_page_soup.find(
                    "li", class_="lip dot").next_sibling.a.string
            else:
                page_count = main_page_soup.find(
                    "ul",
                    class_="ch-page clearfix").find_all("li")[-2].a.string
            # Number of pinned questions on the first page.
            top_question_count = len(
                main_page_soup.find("table",
                                    class_="ch-table zx-table").find_all(
                                        "span", class_="question_top_txt"))
            print("页面总数:%d 置顶问题个数:%d" %
                  (int(page_count), int(top_question_count)))
        except Exception as e:
            # A few universities have no consulting data at all.
            print("%s咨询界面没有数据,页面链接为:%s" % (university[0], main_page_url))
            print("错误信息:%s" % e)
            continue
        total_pages = int(page_count)
        table_head = ["标题", "来源", "时间", "问题", "回答"]
        # BUG FIX: open the CSV via a context manager so it is closed even if
        # a batch raises; the old explicit truncate() was redundant with "w".
        with open(file_path + "/" + university[0] + "常用问题集.csv",
                  "w",
                  newline="",
                  encoding='utf-8') as csvfile:
            writer = csv.writer(csvfile)
            writer.writerow(table_head)
            record_queue = Queue()
            # Download/store in batches of up to 10 pages (one thread each).
            start_index = 0
            while start_index <= total_pages:
                # BUG FIX: clamp every batch -- including the first -- to the
                # real page count (previously the first batch was always
                # range(0, 10), requesting nonexistent pages when fewer than
                # 10 existed).
                end_index = min(start_index + 10, total_pages)
                dThread = [
                    DownloadPageInfo(university[1], page_id, total_pages,
                                     top_question_count, record_queue)
                    for page_id in range(start_index, end_index)
                ]
                sThread = SavePageInfo(record_queue, writer)
                for d in dThread:
                    d.start()
                sThread.start()
                for d in dThread:
                    d.join()
                # Sentinel tells the saver thread this batch is finished.
                record_queue.put(-1)
                sThread.join()
                start_index += 10

        print("抓取%s的信息用时:%ds" % (university[0], time.time() - begin))
    def load_from_xml(self, filename):
        """Build the argumentation graph from an arggraph XML file.

        Creates all EDU, joint and ADU nodes first, then processes edges
        through a work queue: an edge whose target node does not exist yet
        (the target of an 'und'/'add' edge is a relation node created while
        processing a later edge) is deferred and retried.

        BUG FIX: the retry loop previously never terminated on malformed or
        cyclic input; a no-progress guard now aborts with an error instead.
        """
        xml = etree.parse(filename)

        # graph id
        text_id = xml.xpath('/arggraph')[0].get('id')
        self.graph['id'] = text_id

        # add all EDU
        for elm in xml.xpath('/arggraph/edu'):
            self.add_edu(elm.get('id'), elm.text)
        # add all EDU-JOINS
        for elm in xml.xpath('/arggraph/joint'):
            self.add_edu_joint(elm.get('id'))
        # add all ADU
        for elm in xml.xpath('/arggraph/adu'):
            self.add_adu(elm.get('id'), elm.get('type'))

        # add all edges
        q = Queue()
        for elm in xml.xpath('/arggraph/edge'):
            q.put(elm)
        # Count of consecutive deferrals; once it exceeds the number of
        # pending edges, a full pass made no progress and we must stop.
        deferred = 0
        while not q.empty():
            elm = q.get()

            edge_src = elm.get('src')
            if edge_src not in self.nodes():
                print("Error: source unknown\n", etree.tostring(elm))

            edge_trg = elm.get('trg')
            if edge_trg not in self.nodes():
                # target node (of 'und' or 'add' relations) not there yet:
                # postpone to later -- unless every remaining edge has
                # already been deferred once, i.e. no progress is possible.
                deferred += 1
                if deferred > q.qsize():
                    print("Error: unresolvable edge targets\n",
                          etree.tostring(elm))
                    break
                q.put(elm)
                continue
            deferred = 0

            edge_type = elm.get('type')
            edge_id = elm.get('id')
            if edge_type == 'seg':
                # Segmentation edges only connect edu->adu, edu->joint or
                # joint->adu.
                src_trg_type_pair = (self.node[edge_src]['type'],
                                     self.node[edge_trg]['type'])
                if src_trg_type_pair in [('edu', 'adu'), ('edu', 'joint'),
                                         ('joint', 'adu')]:
                    self.add_seg_edge(edge_src, edge_trg)
                else:
                    print("Error: malformed segmentation edge\n", \
                        etree.tostring(elm))

            elif edge_type in ['sup', 'exa', 'reb']:
                # Direct argumentative relations between two ADUs.
                if (self.node[edge_src]['type'] == 'adu'
                        and self.node[edge_trg]['type'] == 'adu'):
                    self.add_edge_with_relation_node(edge_id, edge_src,
                                                     edge_trg, edge_type)
                else:
                    print("Error: malformed direct edge\n",
                          etree.tostring(elm))

            elif edge_type == 'und':
                # Undercut: an ADU attacking an existing relation node.
                if (self.node[edge_src]['type'] == 'adu'
                        and self.node[edge_trg]['type'] == 'rel'):
                    self.add_edge_with_relation_node(edge_id, edge_src,
                                                     edge_trg, edge_type)
                else:
                    print(("Error: malformed undercutting edge\n",
                           etree.tostring(elm)))

            elif edge_type == 'add':
                # Additional source joining an existing relation node.
                if (self.node[edge_src]['type'] == 'adu'
                        and self.node[edge_trg]['type'] == 'rel'):
                    self.add_edge(elm.get('src'), elm.get('trg'), type='src')
                else:
                    print("Error: malformed adding edge\n",
                          etree.tostring(elm))

            else:
                print("Error: unknown edge type\n", etree.tostring(elm))

        # update adu short names
        self.update_adu_labels()
Example #24
0
import numpy
from queue import Queue
import re
import requests
import json
import base64
from PyQt5.QtWidgets import *
import threading,time,os
from PyQt5.QtCore import QTimer


carre = "^(京[A-HJ-NPQY]|沪[A-HJ-N]|津[A-HJ-NPQR]|渝[A-DFGHN]|冀[A-HJRST]|晋[A-FHJ-M]|蒙[A-HJKLM]|辽[A-HJ-NP]|吉[A-HJK]|黑[A-HJ-NPR]|苏[A-HJ-N]|浙[A-HJKL]|皖[A-HJ-NP-S]|闽[A-HJK]|赣[A-HJKLMS]|鲁[A-HJ-NP-SUVWY]|豫[A-HJ-NP-SU]|鄂[A-HJ-NP-S]|湘[A-HJ-NSU]|粤[A-HJ-NP-Y]|桂[A-HJ-NPR]|琼[A-F]|川[A-HJ-MQ-Z]|贵[A-HJ]|云[AC-HJ-NP-SV]|藏[A-HJ]|陕[A-HJKV]|甘[A-HJ-NP]|青[A-H]|宁[A-E]|新[A-HJ-NP-S])([0-9A-HJ-NP-Z]{4}[0-9A-HJ-NP-Z挂试]|[0-9]{4}学|[A-D0-9][0-9]{3}警|[DF][0-9A-HJ-NP-Z][0-9]{4}|[0-9]{5}[DF])$|^WJ[京沪津渝冀晋蒙辽吉黑苏浙皖闽赣鲁豫鄂湘粤桂琼川贵云藏陕甘青宁新]?[0-9]{4}[0-9JBXTHSD]$|^(V[A-GKMORTV]|K[A-HJ-NORUZ]|H[A-GLOR]|[BCGJLNS][A-DKMNORVY]|G[JS])[0-9]{5}$|^[0-9]{6}使$|^([沪粤川渝辽云桂鄂湘陕藏黑]A|闽D|鲁B|蒙[AEH])[0-9]{4}领$|^粤Z[0-9A-HJ-NP-Z][0-9]{3}[港澳]$"
#↑ regular expression used to validate Chinese vehicle license-plate numbers

# Hand-rolled queue of the 100 most recently recognised plates; every new
# result is looked up here so a single plate is not triggered repeatedly.
carpailist=[]
# Recognition results awaiting processing -- presumably consumed by the worker
# thread; confirm against lprclass.
datalist = Queue()
num=0

def cv2ImgAddText(img, text, left, top, textColor=(0, 255, 0), textSize=20):
    """Draw (possibly CJK) text onto an image and return a BGR ndarray.

    The image is round-tripped through PIL because it is rendered with a
    TrueType font: BGR ndarray -> RGB PIL image, draw, convert back.
    """
    pil_img = img
    if isinstance(pil_img, numpy.ndarray):
        # OpenCV images are BGR; PIL expects RGB.
        pil_img = Image.fromarray(cv2.cvtColor(pil_img, cv2.COLOR_BGR2RGB))
    font = ImageFont.truetype("font/simsun.ttc", textSize, encoding="utf-8")
    ImageDraw.Draw(pil_img).text((left, top), text, textColor, font=font)
    return cv2.cvtColor(numpy.asarray(pil_img), cv2.COLOR_RGB2BGR)

class lprclass:
    """License-plate-recognition worker (only partially shown in this excerpt)."""

    def __init__(self):
        # Loop-control flag; presumably cleared elsewhere to stop the worker -- confirm.
        self.running=True
Example #25
0
 def __init__(self):
     """Set up unit-test fixtures: a tag queue, test config and a DB mock."""
     # Queue of tags produced while the component under test runs.
     self.tag_queue = Queue()
     self.config = get_config_for_testing()
     # Mocked database backend so tests avoid a real database.
     self.db_backend_service = DatabaseMock(None)
Example #26
0
def read_input():
    """Read the node count from stdin and seed a BST build queue.

    Returns a tuple of (number of nodes, tree produced by ``build_tree``
    from a queue containing a single root node with value 1).
    """
    node_count = int(input())
    pending = Queue()
    pending.put(BSTNode(1))
    return node_count, build_tree(pending)
Example #27
0
from datetime import datetime, date, timezone
from numpy.random import randint, uniform
from math import floor
from queue import Queue
from subprocess import Popen
from time import sleep

import threading, sys, os
sys.path.append(
    os.path.abspath(__file__).rsplit('tests/', 1)[0] + '/pysqream/')
import dbapi

q = Queue()
# Column lengths used when generating varchar / nvarchar test data.
varchar_length = 10
nvarchar_length = 10
# On Windows builds the C long backing bigint is capped at 2**31 - 1.
max_bigint = sys.maxsize if sys.platform not in ('win32',
                                                 'cygwin') else 2147483647


def generate_varchar(length):
    """Return a random string of *length* printable ASCII chars (codes 32-127)."""
    codes = randint(32, 128, length)
    return ''.join(map(chr, codes))


# Column types exercised by the tests (a set, so iteration order is arbitrary).
col_types = {
    'bool', 'tinyint', 'smallint', 'int', 'bigint', 'real', 'double', 'date',
    'datetime', 'varchar({})'.format(varchar_length),
    # BUG FIX: format nvarchar with nvarchar_length (was varchar_length;
    # identical today at 10, but would silently diverge if either changed).
    'nvarchar({})'.format(nvarchar_length)
}

pos_test_vals = {
    'bool': (0, 1, True, False, 2, 3.6, 'test', (1997, 5, 9), (1997, 12, 12,
Example #28
0
 def __init__(self, event_factory):
     """Initialise the event-dispatch state around the given event factory."""
     self._event_factory = event_factory
     # Work queue connecting the producer to the consumer threads.
     self._queue = Queue()
     self._producer = None
     self._consumers = []
     # Maps an event key to the list of registered handler callables.
     self._event_handlers = defaultdict(list)
Example #29
0
import threading
from queue import Queue
from spider import Spider
from domain import *
from general import *

# Crawler configuration: fill in the project name and start page before running.
PROJECT_NAME=''
HOMEPAGE=''
DOMAIN_NAME=get_domain_name(HOMEPAGE)
# On-disk persistence of the crawl frontier and the visited set.
QUEUE_FILE=PROJECT_NAME+'/queue.txt'
CRAWLED_FILE=PROJECT_NAME+'/crawled.txt'
NUMBER_OF_THREADS=8

# Shared work queue feeding the crawler threads.
queue=Queue()
# First Spider instance bootstraps the project files -- presumably; confirm Spider.__init__.
Spider(PROJECT_NAME,HOMEPAGE,DOMAIN_NAME)

#creating threads
def create_workers():
    """Spawn NUMBER_OF_THREADS daemon threads that each run ``work`` forever."""
    for _ in range(NUMBER_OF_THREADS):
        # Daemon threads die with the main program, so no join is needed.
        worker = threading.Thread(target=work, daemon=True)
        worker.start()

#do the next job in the queue
def work():
    """Continuously pull URLs off the shared queue and crawl them."""
    while True:
        next_url = queue.get()
        Spider.crawl_page(threading.current_thread().name, next_url)
        # Mark the job done so queue.join() callers can track completion.
        queue.task_done()
Example #30
0
 def __init__(self, name: str, accepted_types: list):
     self.name = name
     self.accepted_types = accepted_types
     self.tasks_queue = Queue()
     self.active = False