Example #1
def on_server_loaded(server_context):
    messages['workers'] = {'interval': 1000,
                           'deque': deque(maxlen=1000),
                           'times': deque(maxlen=1000),
                           'condition': Condition()}
    server_context.add_periodic_callback(lambda: http_get('workers'), 1000)

    messages['tasks'] = {'interval': 100,
                         'deque': deque(maxlen=1000),
                         'times': deque(maxlen=1000),
                         'condition': Condition()}
    server_context.add_periodic_callback(lambda: http_get('tasks'), 100)
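Here each topic gets a Condition so consumers can sleep until http_get() deposits fresh data and notifies. A minimal consumer sketch under that assumption (the coroutine name ws_send and its websocket argument are hypothetical):

    from tornado import gen

    @gen.coroutine
    def ws_send(messages, name, ws):
        """Push new payloads for one topic to a websocket as they arrive."""
        while True:
            yield messages[name]['condition'].wait()   # woken by http_get()
            while messages[name]['deque']:
                ws.write_message(messages[name]['deque'].popleft())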
Example #2
    def __init__(self, child, loop=None):
        self.condition = Condition()
        self.next = []

        Stream.__init__(self, child, loop=loop)

        self.loop.add_callback(self.cb)
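Constructors like this one (and Examples #4 and #11 below) schedule a cb coroutine that sleeps on the Condition until update() stores values in self.next. A sketch of what such a cb typically looks like, with _emit assumed from the Stream base class:

    from tornado import gen

    @gen.coroutine
    def cb(self):
        while True:
            yield self.condition.wait()   # update() calls condition.notify()
            pending = self.next
            self.next = []
            for x in pending:
                yield self._emit(x)       # pass each value downstream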
Example #3
    def __init__ (self, device_server, stream, address):
        self.recv_msg_cond = Condition()
        self.recv_msg = {}
        self.send_msg_sem = Semaphore(1)
        self.pending_request_cnt = 0
        self.device_server = device_server
        self.stream = stream
        self.address = address
        self.stream.set_nodelay(True)
        self.idle_time = 0
        self.killed = False
        self.sn = ""
        self.private_key = ""
        self.node_id = 0
        self.name = ""
        self.iv = None
        self.cipher = None

        #self.state_waiters = []
        #self.state_happened = []

        self.event_waiters = []
        self.event_happened = []

        self.ota_ing = False
        self.ota_notify_done_future = None
        self.post_ota = False
        self.online_status = True
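The recv_msg_cond/recv_msg pair is a keyed rendezvous: a reader loop stores each decoded reply and calls notify_all(), while request coroutines wait until their key appears. A hypothetical helper showing the waiting side:

    from tornado import gen

    @gen.coroutine
    def wait_for_reply(self, msg_id):
        # Loop: notify_all() wakes every waiter, not only the matching one.
        while msg_id not in self.recv_msg:
            yield self.recv_msg_cond.wait()
        raise gen.Return(self.recv_msg.pop(msg_id))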
Example #4
    def __init__(self, upstream, **kwargs):
        self.condition = Condition()
        self.next = []

        Stream.__init__(self, upstream, ensure_io_loop=True, **kwargs)

        self.loop.add_callback(self.cb)
Example #5
    def prepare(self):
        self.id = str(uuid.uuid4())
        self.final_hyp = ""
        self.worker_done = Condition()
        self.user_id = self.request.headers.get("device-id", "none")
        self.content_id = self.request.headers.get("content-id", "none")
        logging.info("%s: OPEN: user='******', content='%s'" % (self.id, self.user_id, self.content_id))
        self.worker = None
        self.error_status = 0
        self.error_message = None
        #Waiter thread for final hypothesis:
        # self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) 
        try:
            self.worker = self.application.available_workers.pop()
            self.application.send_status_update()
            logging.info("%s: Using worker %s" % (self.id, self.__str__()))
            self.worker.set_client_socket(self)
            
            content_type = self.request.headers.get("Content-Type", None)
            if content_type:
                content_type = content_type_to_caps(content_type)
                logging.info("%s: Using content type: %s" % (self.id, content_type))

            self.worker.write_message(json.dumps(dict(id=self.id, content_type=content_type, user_id=self.user_id, content_id=self.content_id)))
        except KeyError:
            logging.warn("%s: No worker available for client request" % self.id)
            self.set_status(503)
            self.finish("No workers available")
Example #6
    def __init__ (self, device_server, stream, address, conn_pool):
        self.fw_version = 0.0
        self.recv_msg_cond = Condition()
        self.recv_msg = {}
        self.send_msg_sem = Semaphore(1)
        self.pending_request_cnt = 0
        self.device_server = device_server
        self.device_server_conn_pool = conn_pool
        self.stream = stream
        self.address = address
        self.stream.set_nodelay(True)
        self.stream.set_close_callback(self.on_close)
        self.timeout_handler_onlinecheck = None
        self.timeout_handler_offline = None
        self.killed = False
        self.is_junk = False
        self.sn = ""
        self.private_key = ""
        self.node_id = ""
        self.user_id = ""
        self.iv = None
        self.cipher_down = None
        self.cipher_up = None

        self.event_waiters = []
        self.event_happened = []

        self.ota_ing = False
        self.ota_notify_done_future = None
        self.post_ota = False
        self.online_status = True
Example #7
 async def refresh(self, fetch_packages=True):
     # TODO: Use python-apt python lib rather than command line for updates
     if self.refresh_condition is None:
         self.refresh_condition = Condition()
     else:
         await self.refresh_condition.wait()
         return
     try:
         if fetch_packages:
             await self.execute_cmd(f"{APT_CMD} update",
                                    timeout=300.,
                                    retries=3)
         res = await self.execute_cmd_with_response("apt list --upgradable",
                                                    timeout=60.)
         pkg_list = [p.strip() for p in res.split("\n") if p.strip()]
         if pkg_list:
             pkg_list = pkg_list[2:]
             self.available_packages = [
                 p.split("/", maxsplit=1)[0] for p in pkg_list
             ]
         pkg_list = "\n".join(self.available_packages)
         logging.info(
             f"Detected {len(self.available_packages)} package updates:"
             f"\n{pkg_list}")
     except Exception:
         logging.exception("Error Refreshing System Packages")
     self.init_evt.set()
     self.refresh_condition.notify_all()
     self.refresh_condition = None
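This refresh() is a "single flight" guard: the first caller creates the Condition and does the work, while concurrent callers simply wait for notify_all(). Stripped to its skeleton (class and helper names hypothetical, with the notify moved into a finally so waiters are released even on error):

    from tornado.locks import Condition

    class Refresher:
        def __init__(self):
            self.refresh_condition = None

        async def refresh(self):
            if self.refresh_condition is not None:
                await self.refresh_condition.wait()   # piggyback on the refresh in flight
                return
            self.refresh_condition = Condition()
            try:
                await self.do_refresh()               # the actual work, assumed elsewhere
            finally:
                self.refresh_condition.notify_all()   # wake every waiter
                self.refresh_condition = None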
Example #8
    def clear(self):
        """Reset this PeerGroup.

        This closes all connections to all known peers and forgets about
        these peers.

        :returns:
            A Future that resolves with a value of None when the operation
            has finished
        """
        if self._resetting:
            # If someone else is already resetting the PeerGroup, just block
            # on them to be finished.
            yield self._reset_condition.wait()
            raise gen.Return(None)

        self._resetting = True
        if self._reset_condition is None:
            self._reset_condition = Condition()

        try:
            for peer in self._peers.values():
                peer.close()
        finally:
            self._peers = {}
            self._resetting = False
            self._reset_condition.notify_all()
Example #9
 def __init__(self,
              core_pool_size,
              queue,
              reject_handler,
              coroutine_pool_name=None):
     self._core_pool_size = core_pool_size
     self._queue = queue
     self._reject_handler = reject_handler
     self._coroutine_pool_name = coroutine_pool_name or \
         'tornado-coroutine-pool-%s' % uuid.uuid1().hex
     self._core_coroutines_condition = Condition()
     self._core_coroutines = {}
     self._core_coroutines_wait_condition = Condition()
     self._shutting_down = False
     self._shuted_down = False
     self._initialize_core_coroutines()
Example #10
    def get_data(cls, account, source_filter, limit=100, skip=0):
        """
        Gathers card information from Google Sheets
        GET https://spreadsheets.google.com/feeds/list/[spreadsheet]/[worksheet]/private/full
        """
        if not account or not account.enabled:
            raise ValueError('cannot gather information without an account')
        client = AsyncHTTPClient()

        if source_filter.spreadsheet is None:
            raise ValueError('required parameter spreadsheet missing')
        if source_filter.worksheet is None:
            raise ValueError('required parameter worksheet missing')
        uri = "https://docs.google.com/spreadsheets/d/{}/export?format=csv&gid={}".format(
            source_filter.spreadsheet, source_filter.worksheet)

        app_log.info("Start retrieval of worksheet {}/{} for {}".format(
            source_filter.spreadsheet, source_filter.worksheet, account._id))

        lock = Condition()
        oauth_client = account.get_client()
        uri, headers, body = oauth_client.add_token(uri)
        req = HTTPRequest(uri,
                          headers=headers,
                          body=body,
                          streaming_callback=lambda c: cls.write(c))

        client.fetch(req, callback=lambda r: lock.notify())
        yield lock.wait(timeout=timedelta(seconds=MAXIMUM_REQ_TIME))

        app_log.info("Finished retrieving worksheet for {}".format(
            account._id))
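Note that Condition.wait(timeout=...) resolves to False when the deadline passes rather than raising, so the result is worth checking. A minimal sketch of the same callback-to-coroutine bridge (pre-6.0 Tornado, where fetch() still accepts a callback argument; the function name and deadline are hypothetical):

    from datetime import timedelta
    from tornado import gen
    from tornado.httpclient import AsyncHTTPClient
    from tornado.locks import Condition

    @gen.coroutine
    def fetch_when_done(url, deadline=30):
        cond = Condition()
        responses = []

        def on_done(response):
            responses.append(response)
            cond.notify()                # wake the coroutine parked below

        AsyncHTTPClient().fetch(url, callback=on_done)
        notified = yield cond.wait(timeout=timedelta(seconds=deadline))
        if not notified:
            raise gen.TimeoutError()     # wait() resolved False: deadline hit
        raise gen.Return(responses[0])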
Example #11
    def __init__(self, upstream, loop=None):
        loop = loop or upstream.loop or IOLoop.current()
        self.condition = Condition()
        self.next = []

        Stream.__init__(self, upstream, loop=loop)

        self.loop.add_callback(self.cb)
Example #12
 def __init__(self, application, request, **kwargs):
     super(PingHandler, self).__init__(application, request, **kwargs)
     self.callback_queue = None
     self.condition = Condition()
     self.response = None
     self.corr_id = str(uuid.uuid4())
     self.in_channel = self.application.get_app_component().rabbitmq[
         'client'].channels['in']
Example #13
    def __init__(self, child, limit=10, client=None):
        self.client = client or default_client()
        self.queue = Queue(maxsize=limit)
        self.condition = Condition()

        Stream.__init__(self, child)

        self.client.loop.add_callback(self.cb)
Example #14
 def __init__(self, address):
     """
     @brief      Construct new instance
     """
     self._address = address
     self._ioloop = IOLoop.current()
     self._stop_event = Event()
     self._is_stopped = Condition()
     self._socket = None
Example #15
 def send_req(self, req):
     req["uuid"] = str(uuid.uuid4())
     self.send_msg(json.dumps(req, default=json_util.default, indent=4))
     self.reqLocks[req["uuid"]] = Condition()
     yield self.reqLocks[req["uuid"]].wait()
     res = self.resList[req["uuid"]]
     # delete record
     self.reqLocks.pop(req["uuid"], None)
     self.resList.pop(req["uuid"], None)
     raise gen.Return(res)
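The other half of this rendezvous (not shown above) stores the reply and notifies the matching per-request Condition. A hypothetical sketch of that receive path, reusing the reqLocks/resList names from the snippet:

    import json

    def on_response(self, raw):
        res = json.loads(raw)
        uid = res["uuid"]
        self.resList[uid] = res
        lock = self.reqLocks.get(uid)
        if lock is not None:
            lock.notify()   # resumes the coroutine parked in send_req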
Example #16
 async def refresh(self):
     if self.refresh_condition is None:
         self.refresh_condition = Condition()
     else:
         await self.refresh_condition.wait()
         return
     try:
         await self._check_version()
     except Exception:
         logging.exception("Error Refreshing git state")
     self.init_evt.set()
     self.refresh_condition.notify_all()
     self.refresh_condition = None
Example #17
    def __init__(self, buf=None, auto_close=True):
        """In-Memory based stream

        :param buf: the buffer for the in memory stream
        """
        self._stream = deque()
        if buf:
            self._stream.append(buf)
        self.state = StreamState.init
        self._condition = Condition()
        self.auto_close = auto_close

        self.exception = None
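A matching read path would park on _condition until a writer appends data or closes the stream. A rough sketch, assuming StreamState has a terminal completed state and that writers call _condition.notify_all():

    from tornado import gen

    @gen.coroutine
    def read(self):
        while not self._stream and self.state != StreamState.completed:
            yield self._condition.wait()      # a writer or closer will notify
        if self._stream:
            raise gen.Return(self._stream.popleft())
        raise gen.Return(b"")                 # stream completed: EOF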
Example #18
    def __init__(self, *upstreams, **kwargs):
        self.maxsize = kwargs.pop('maxsize', 10)
        self.condition = Condition()
        self.literals = [(i, val) for i, val in enumerate(upstreams)
                         if not isinstance(val, Stream)]

        self.buffers = {upstream: deque()
                        for upstream in upstreams
                        if isinstance(upstream, Stream)}

        upstreams2 = [upstream for upstream in upstreams if isinstance(upstream, Stream)]

        Stream.__init__(self, upstreams=upstreams2, **kwargs)
Example #19
 async def refresh(self):
     if self.refresh_condition is None:
         self.refresh_condition = Condition()
     else:
         await self.refresh_condition.wait()
         return
     try:
         self._get_local_version()
         await self._get_remote_version()
     except Exception:
         logging.exception("Error Refreshing Client")
     self.init_evt.set()
     self.refresh_condition.notify_all()
     self.refresh_condition = None
Example #20
    def prepare(self):
        self.worker = None
        self.filePath = None
        self.uuid = str(uuid.uuid4())
        self.set_status(200, "Initial status")
        self.waitResponse = Condition()
        self.waitWorker = Condition()

        if self.request.method != 'POST':
            logging.debug("Received a non-POST request")
            self.set_status(
                403, "Wrong request, server handles only POST requests")
            self.finish()
        #File Retrieval
        # TODO: Adapt input to existing controller API
        if 'wavFile' not in self.request.files.keys():
            self.set_status(
                403, "POST request must contain a 'file_to_transcript' field.")
            self.finish()
            logging.debug(
                "POST request does not contain 'wavFile' field.")
        temp_file = self.request.files['wavFile'][0]['body']
        self.temp_file = temp_file

        #Writing file
        try:
            f = open(TEMP_FILE_PATH + self.uuid + '.wav', 'wb')
        except IOError:
            logging.error("Could not write file.")
            self.set_status(
                500, "Server error: Counldn't write file on server side.")
            self.finish()
        else:
            f.write(temp_file)
            self.filePath = TEMP_FILE_PATH + self.uuid + '.wav'
            logging.debug("File correctly received from client")
Example #21
    def __init__(self, *upstreams, **kwargs):
        self.maxsize = kwargs.pop('maxsize', 10)
        self.buffers = [deque() for _ in upstreams]
        self.condition = Condition()
        self.literals = [(i, val) for i, val in enumerate(upstreams)
                         if not isinstance(val, Stream)]
        self.pack_literals()

        self.buffers_by_stream = {upstream: buffer
                    for upstream, buffer in builtins.zip(upstreams, self.buffers)
                    if isinstance(upstream, Stream)}

        upstreams2 = [upstream for upstream in upstreams if isinstance(upstream, Stream)]

        Stream.__init__(self, upstreams=upstreams2, **kwargs)
Example #22
 def __init__(self, address, record_dest):
     """
     @brief      Construct new instance
                 If record_dest is not empty, create a folder named record_dest and record the received packages there.
     """
     self._address = address
     self.__record_dest = record_dest
     if record_dest:
         if not os.path.isdir(record_dest):
             os.makedirs(record_dest)
     self._ioloop = IOLoop.current()
     self._stop_event = Event()
     self._is_stopped = Condition()
     self._socket = None
     self.__last_package = 0
Example #23
    def __init__(self, *children, **kwargs):
        self.maxsize = kwargs.pop('maxsize', 10)
        self.buffers = [deque() for _ in children]
        self.condition = Condition()
        self.literals = [(i, val) for i, val in enumerate(children)
                         if not isinstance(val, Stream)]
        self.pack_literals()

        self.buffers_by_stream = {
            child: buffer
            for child, buffer in builtins.zip(children, self.buffers)
            if isinstance(child, Stream)
        }

        children2 = [child for child in children if isinstance(child, Stream)]

        Stream.__init__(self, children=children2, **kwargs)
Example #24
    def test_future_close_callback(self):
        # Regression test for interaction between the Future read interfaces
        # and IOStream._maybe_add_error_listener.
        rs, ws = yield self.make_iostream_pair()
        closed = [False]
        cond = Condition()

        def close_callback():
            closed[0] = True
            cond.notify()

        rs.set_close_callback(close_callback)
        try:
            ws.write(b"a")
            res = yield rs.read_bytes(1)
            self.assertEqual(res, b"a")
            self.assertFalse(closed[0])
            ws.close()
            yield cond.wait()
            self.assertTrue(closed[0])
        finally:
            rs.close()
            ws.close()
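Worth remembering when reading tests like this: a Condition keeps no state, so a notify() issued while nobody is waiting is simply dropped. A runnable toy demonstrating the lost wakeup:

    from datetime import timedelta
    from tornado import gen, ioloop
    from tornado.locks import Condition

    @gen.coroutine
    def lost_wakeup():
        cond = Condition()
        cond.notify()                                     # no waiter yet: dropped
        woke = yield cond.wait(timeout=timedelta(seconds=0.1))
        assert woke is False                              # the notify was lost

    ioloop.IOLoop.current().run_sync(lost_wakeup)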
Example #25
File: files.py Project: fstfwd/apps
    def get_data(cls, account, source_filter, limit=100, skip=0):
        source_filter = OneDriveFileFilter(source_filter)

        if source_filter.file is None:
            raise ValueError('required parameter file missing')

        app_log.info("Starting to retrieve file for {}".format(account._id))

        client = AsyncHTTPClient()
        uri = "https://api.onedrive.com/v1.0/drive/items/{}/content".format(
            source_filter.file)
        lock = Condition()

        def crawl_url(url):
            # some yummy regex
            location_header_regex = re.compile(
                r'^Location:\s?(?P<uri>http:/{2}\S+)')
            http_status_regex = re.compile(r'^HTTP/[\d\.]+\s(?P<status>\d+)')
            receiving_file = False

            # define our callbacks
            def header_callback(header):
                m = http_status_regex.match(header)
                if m is not None:
                    # process our HTTP status header
                    status = m.group('status')
                    if int(status) == 200:
                        # if we're 200, we're receiving the file, not just a redirect
                        app_log.info("Receiving file {} for account {}".format(
                            source_filter.file, account._id))
                        nonlocal receiving_file   # bound in crawl_url's scope
                        receiving_file = True
                m = location_header_regex.match(header)
                if m is not None:
                    # process our location header
                    uri = m.group('uri')
                    # and grab _that_ url
                    app_log.info("Following redirect for file {}".format(
                        source_filter.file))
                    crawl_url(uri)

            def stream_callback(chunk):
                # only dump out chunks that are of the file we're looking for
                nonlocal receiving_file   # bound in crawl_url's scope
                if receiving_file:
                    app_log.info("Writing chunk of {}B".format(
                        chunk.__len__()))
                    cls.write(chunk)

            def on_completed(resp):
                if 200 <= resp.code <= 299:
                    lock.notify()

            oauth_client = account.get_client()
            uri, headers, body = oauth_client.add_token(url)
            req = HTTPRequest(uri,
                              headers=headers,
                              body=body,
                              header_callback=header_callback,
                              streaming_callback=stream_callback)
            client.fetch(req, callback=on_completed)

        crawl_url(uri)
        # wait for us to complete; Condition.wait() resolves False on timeout
        notified = yield lock.wait(timeout=timedelta(seconds=MAXIMUM_REQ_TIME))
        if notified:
            app_log.info("File {} retrieved successfully".format(
                source_filter.file))
        else:
            app_log.error("Request for file {} => {} timed out!".format(
                source_filter.file, account._id))
Example #26
 def __init__(self):
     self.cond = Condition()
     self.cache = []  # all messages added here
     self.cache_size = 200  # holds 200 newest msgs
Example #27
 async def acquire_url_lock(self, url):
     if url not in BaseHandler.url_locks:
         BaseHandler.url_locks[url] = Condition()
     else:
         return await BaseHandler.url_locks[url].wait()
Example #28
 def acquire_url_lock(self, url):
     if url not in BaseHandler.url_locks:
         BaseHandler.url_locks[url] = Condition()
     else:
         yield BaseHandler.url_locks[url].wait()
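Both variants above only take the wait side of the lock; the matching release (not shown) would notify waiters and drop the entry, e.g. this hypothetical counterpart:

    def release_url_lock(self, url):
        cond = BaseHandler.url_locks.pop(url, None)
        if cond is not None:
            cond.notify_all()   # wake every coroutine parked on this URL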
Example #29
    async def initialize(self, need_fetch=True):
        if self.init_condition is not None:
            # No need to initialize multiple requests
            await self.init_condition.wait()
            return
        self.init_condition = Condition()
        try:
            await self.update_repo_status()
            self._verify_repo()
            if not self.head_detached:
                # lookup remote via git config
                self.git_remote = await self.get_config_item(
                    f"branch.{self.git_branch}.remote")

            # Populate list of current branches
            blist = await self.list_branches()
            self.branches = []
            for branch in blist:
                branch = branch.strip()
                if branch[0] == "*":
                    branch = branch[2:]
                if branch[0] == "(":
                    continue
                self.branches.append(branch)

            if need_fetch:
                await self.fetch()

            self.upstream_url = await self.remote("get-url")
            self.current_commit = await self.rev_parse("HEAD")
            self.upstream_commit = await self.rev_parse(
                f"{self.git_remote}/{self.git_branch}")
            current_version = await self.describe(
                "--always --tags --long --dirty")
            upstream_version = await self.describe(
                f"{self.git_remote}/{self.git_branch} "
                "--always --tags --long")

            # Parse GitHub Owner from URL
            owner_match = re.match(r"https?://[^/]+/([^/]+)",
                                   self.upstream_url)
            self.git_owner = "?"
            if owner_match is not None:
                self.git_owner = owner_match.group(1)
            self.dirty = current_version.endswith("dirty")

            # Parse Version Info
            versions = []
            for ver in [current_version, upstream_version]:
                tag_version = "?"
                ver_match = re.match(r"v\d+\.\d+\.\d-\d+", ver)
                if ver_match:
                    tag_version = ver_match.group()
                versions.append(tag_version)
            self.current_version, self.upstream_version = versions
            self.log_repo_info()
        except Exception:
            logging.exception(f"Git Repo {self.alias}: Initialization failure")
            raise
        finally:
            self.init_condition.notify_all()
            self.init_condition = None
Example #30
 def __init__(self):
     self.condition = Condition()
     self.counter = 0
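A Condition next to a counter like this typically signals observers whenever the count changes. A minimal runnable sketch of that pattern (class and method names are hypothetical):

    from tornado import gen, ioloop
    from tornado.locks import Condition

    class Counter:
        def __init__(self):
            self.condition = Condition()
            self.counter = 0

        @gen.coroutine
        def watcher(self):
            yield self.condition.wait()        # parked until notify()
            print("counter is now", self.counter)

        def increment(self):
            self.counter += 1
            self.condition.notify()            # wake one parked watcher

    @gen.coroutine
    def main():
        c = Counter()
        fut = c.watcher()                      # starts, then parks, the watcher
        c.increment()
        yield fut

    ioloop.IOLoop.current().run_sync(main)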