Example #1
async def stream_results(api_name, url, response, stream_state):
    limit = stream_state['limit']
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as r:
            data = await r.json()
            objects = data['results']

            # objects = aiojson.items(r.content, 'results.item')
            # async for obj in objects:
            # Wrap the already-parsed list in an async generator so the
            # commented-out incremental aiojson parser above could be
            # swapped back in without changing the loop below.
            async def iter_objects():
                for obj in objects:
                    yield obj

            async for obj in iter_objects():
                # Tag each result with the API it came from
                obj['_source'] = {
                    "name": api_name,
                    "url": url,
                }
                # Count every object, but skip the ones before the requested offset
                stream_state['total'] += 1
                if stream_state['total'] < stream_state['offset']:
                    continue
                # Emit at most `limit` objects, comma-separated so that together
                # they form the body of a single JSON array
                if stream_state['count'] < limit:
                    if stream_state['count'] != 0:
                        response.write(',')
                    stream_state['count'] += 1
                    response.write(json.dumps(obj))
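stream_results is not a complete handler on its own: Example #2 below runs one instance per configured upstream API, all sharing the same response and stream_state, so the merged results are written out as a single JSON array.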
Example #2
    async def streaming_fn(response):
        loop = asyncio.get_event_loop()
        urls = []

        for api_name, endpoint in app.config.OMI_ENDPOINTS.items():
            url = urllib.parse.urljoin(endpoint, api_resource)
            # url = f"{url}/;limit={limit};offset={offset}?{query}"
            url = f"{url}/;limit={limit}?{query}"
            urls.append((api_name, url))

        response.write('{"results":[')
        stream_state = {
            'offset': offset,
            'limit': limit,
            'total': 0,
            'count': 0,
        }
        # Query every endpoint concurrently; each stream_results call appends
        # its matching objects to the shared response and stream_state
        outcomes = await asyncio.gather(
            *(stream_results(api_name, url, response, stream_state)
              for api_name, url in urls))
        count = stream_state['count']
        total = stream_state['total']
        response.write('],')
        response.write('"count":{},"total":{},"offset":{}'.format(
            count, total, offset))
        response.write('}')
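None of these snippets shows how the handler is attached to a route. A minimal sketch of the usual wiring, assuming an older Sanic release where sanic.response.stream is still available; the app name, route path, stand-in handler body, and content type are illustrative and not taken from the original code:

from sanic import Sanic
from sanic.response import stream

app = Sanic("streaming_example")

@app.route("/search")
async def search(request):
    async def streaming_fn(response):
        # Stand-in for the aggregating handler above: the body is written in pieces.
        response.write('{"results":[],"count":0,"total":0,"offset":0}')

    # Sanic calls streaming_fn(response) after the headers have been sent;
    # each response.write() reaches the client as one chunk.
    return stream(streaming_fn, content_type="application/json")

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000)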
Example #3
 async def streaming_fn(response):
     try:
         while True:
             print('subscription queue started')
             file_event = await q.get()
             src_path = file_event['src_path']
             if src_path.startswith(os.path.join(config.Args.logdir, file_path)) and path_match(file_path, query):
                 file_event['src_path'] = src_path[len(config.Args.logdir):]
             print("=>>", file_event)
             response.write(f"data: {json.dumps(file_event)}\r\n\r\n".encode())
             # Yield to the event loop; a blocking sleep() here would stall all other requests
             await asyncio.sleep(0.1)
     # todo: this timeout doesn't really work.
     # todo: also add handling of stream is terminated logic (separate from above).
     except RequestTimeout:
         subscriptions.remove(q)
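The "data: ..." lines written here follow the Server-Sent Events wire format; wired up as in the sketch under Example #2, such a handler would normally be returned with content_type="text/event-stream" so that browsers keep the connection open and deliver each message through EventSource.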
Example #4
        async def streaming_response(response):
            """
                Here I could iterate over a generator so I don't need to
                save the file in my hard disk
            """
            def range_file():
                # Watch out for this number (range number)! You don't want to crash you computer.
                # Once the file is fully streamed,
                # it will load itself fully on your browser (ALL THE FILE ON THE MEMORY OF YOUR COMPUTER)
                # So... again.. Watch out!
                # I don't want to say: "I told you so..."
                for i in range(1, 1000000):
                    yield "Line {}\n".format(str(i))

            for line in range_file():
                response.write(line)
Example #5
 async def streaming_fn(response):
     # `bytes` is a variable captured from the enclosing scope
     # (it shadows the builtin of the same name)
     response.write(bytes)
Example #6
 async def streaming_fn(response):
     response.write('foo')
     response.write('bar')
Example #7
 async def streaming_fn(response):
     response.write(data)
Example #8
 async def streaming_fn(response):
     # Read the whole file up front and send it as a single chunk;
     # the context manager makes sure the file handle is closed.
     with open(audio_path, 'rb') as fs:
         data = fs.read()
     response.write(data)
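fs.read() buffers the entire file in memory before anything is sent, which works against streaming for large files. A minimal chunked variant, assuming the same audio_path free variable and the same non-awaited response.write() as above; the 64 KiB chunk size is an arbitrary choice:

 async def streaming_fn(response):
     # Send the file in fixed-size chunks so it is never held in memory whole.
     with open(audio_path, 'rb') as fs:
         while True:
             chunk = fs.read(64 * 1024)  # chunk size is illustrative
             if not chunk:
                 break
             response.write(chunk)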
Example #9
 async def streaming_fn(response):
     response.write('foo')
     response.write('bar')
Example #10
 async def streaming_fn(response):
     if 'node' in request.raw_args:
         nodes = get_nodes_by_path(request.raw_args['node'])
         response.write(json.dumps(nodes))