async def wait_converged(tmp_path, agreements):
    """Poll the three node journals until they all agree on one of *agreements*.

    Retries up to 500 times with a 0.1 s pause (~50 s total). On success the
    coroutine simply returns; on timeout it logs each node's journal and
    raises ``RuntimeError``.

    Args:
        tmp_path: base directory containing ``node1``/``node2``/``node3``
            subdirectories, each with a ``journal`` file of JSON-array lines.
        agreements: iterable of acceptable journal contents (lists of tuples).

    Raises:
        RuntimeError: if the journals never converge on a valid agreement.
    """
    async def _read_journal(node):
        # Each journal line is a JSON array; store it as a tuple so whole
        # journals can be compared against the agreement lists.
        rows = []
        async with AIOFile(tmp_path / node / "journal", "r") as afp:
            async for row in LineReader(afp):
                rows.append(tuple(json.loads(row)))
        return rows

    for _ in range(500):
        # Previously this read-and-parse loop was copy-pasted three times;
        # the helper keeps the three reads identical by construction.
        result1 = await _read_journal("node1")
        result2 = await _read_journal("node2")
        result3 = await _read_journal("node3")
        all_matching = result1 == result2 == result3
        match_one_agreement = any(result1 == agreement for agreement in agreements)
        if all_matching and match_one_agreement:
            break
        await asyncio.sleep(0.1)
    else:
        logger.critical("node 1: %s", result1)
        logger.critical("node 2: %s", result2)
        logger.critical("node 3: %s", result3)
        raise RuntimeError("Did not converge on a valid agreement")
async def pylive_run(action):
    """Run user-supplied code according to *action* and return its output.

    Actions:
        'cherrypy'/'sanic'/'quart': (re)start a server process, return its PID.
        'live':  run the code as Python, return the single-run log contents.
        'shell': run the code as a shell command, return the shell log.
        other:   write the code to run_shell.sh and execute it via ``sh``.

    Uses module-level PID/SPID/BPID to kill any previous process of the same
    kind before starting a new one.
    """
    global PID
    global SPID
    global BPID

    data = await request.get_json()
    code = data['data']

    async def _wait_and_read_log(pid, log_path):
        # Wait for the process to finish, give the log writer a moment to
        # flush, then return the whole log file as one string. This stanza
        # was previously duplicated in three branches.
        while api.pyll.pylive.isActive(pid):
            await asyncio.sleep(0.1)
        await asyncio.sleep(0.25)
        payload = ""
        async with AIOFile(log_path, 'r') as asp:
            async for line in LineReader(asp):
                payload += line
        return payload

    if action in ['cherrypy', 'sanic', 'quart']:
        if SPID:
            api.pyll.pylive.kill(SPID)
        SPID = await api.pyll.pylive.server(code)
        return str(SPID)
    elif action == 'live':
        if PID:
            api.pyll.pylive.kill(PID)
        PID = await api.pyll.pylive.run(code)
        return await _wait_and_read_log(PID, f"{ ROOT_PATH }/logs/py_log_single_run.txt")
    elif action == 'shell':
        if BPID:
            api.pyll.pylive.kill(BPID)
        BPID = await api.pyll.pylive.cmd(code)
        return await _wait_and_read_log(BPID, f"{ ROOT_PATH }/logs/sh_log.txt")
    else:
        # Fallback: persist the code as a shell script and run it via sh.
        cmd_path = f"{ ROOT_PATH }/coderun/run_shell.sh"
        if BPID:
            api.pyll.pylive.kill(BPID)
        with open(cmd_path, 'w+') as file:
            file.write(code)
        await asyncio.sleep(0.1)
        BPID = await api.pyll.pylive.cmd(f'sh { cmd_path }')
        return await _wait_and_read_log(BPID, f"{ ROOT_PATH }/logs/sh_log.txt")
    # NOTE: the original trailing `return ''` was unreachable (every branch
    # returns) and has been removed.
async def help(name):
    # NOTE(review): shadows the `help` builtin; renaming would change the
    # module's public interface, so it is left as-is.
    async with AIOFile(name, "r") as file:
        # Prints the AIOFile object itself, not its contents — presumably
        # leftover debug output; confirm before removing.
        print(file)
        async for line in LineReader(file):
            print(line)
            # NOTE(review): a fresh ClientSession per line, and the task is
            # scheduled without being awaited — the session's context manager
            # may close before `text()` runs. Looks like a fire-and-forget
            # bug; verify against `text()`'s implementation.
            async with aiohttp.ClientSession() as session:
                asyncio.ensure_future(text(session, line))
async def read_lines(
    path: Paths,
    line_sep: str = SEP,
    chunk_size: int = CHUNK_SIZE,
    offset: int = BEGINNING,
    encoding: str = ENCODING,
    errors: str = ERRORS,
    **kwargs
) -> AsyncIterable[str]:
    """Asynchronously yield decoded lines from the file at *path*.

    *path* may be a plain string or any object exposing ``resolve()``
    (sync or async); it is resolved to a string before opening. Bytes are
    read with ``LineReader`` and decoded with *encoding*/*errors*.
    """
    if hasattr(path, 'resolve'):
        resolve = path.resolve
        # resolve() may itself be a coroutine function (async path types).
        if iscoroutinefunction(resolve):
            path = str(await resolve())
        else:
            path = str(resolve())
    path = cast(str, path)

    async with AIOFile(path, 'rb') as handle:
        reader = LineReader(
            handle,
            line_sep=line_sep,
            chunk_size=chunk_size,
            offset=offset,
        )
        raw = await reader.readline()
        while raw:
            yield raw.decode(encoding, errors=errors)
            raw = await reader.readline()
async def handle(filename: str, response: Response, websocket: WebSocket, n: int = 1):
    """Stream a file's lines (from line *n* onward) over a websocket, then
    keep pushing newly appended lines whenever the file is modified.

    Responds 404 (without accepting the websocket) if the file does not
    exist in the current working directory.
    """
    # Check file exists — Path(filename).name strips any directory
    # components, so clients cannot reference files outside the cwd.
    cleanfn = Path(filename).name
    fnpath = Path().cwd() / cleanfn
    if not fnpath.is_file():
        response.status_code = HTTP_404_NOT_FOUND
        return
    await websocket.accept()
    # Create watcher for file
    with closing(Watcher()) as watcher:
        watcher.watch(path=str(fnpath), flags=Flags.MODIFY)
        await watcher.setup(get_event_loop())
        async with AIOFile(fnpath, mode='r', encoding='utf-8') as afd:
            reader = LineReader(afd)
            i = 0
            # Initial pass: send everything from line n to EOF.
            async for line in reader:
                i += 1
                # print(line, end='')
                if i >= n:
                    await websocket.send_text(line)
            # Follow mode: the same reader is reused, so each MODIFY event
            # picks up reading where the previous pass stopped.
            while True:
                event = await watcher.get_event()
                print('Got event: {} {}'.format(filename, event))
                async for line in reader:
                    # print(line, end='')
                    await websocket.send_text(line)
async def pylive_view():
    """Return the whole server-side Python log file as a single string."""
    log_path = f"{ ROOT_PATH }/logs/py_log_from_server.txt"
    pieces = []
    async with AIOFile(log_path, 'r') as asp:
        async for chunk in LineReader(asp):
            pieces.append(chunk)
    return "".join(pieces)
async def main():
    """Print the first comma-separated field of every line in file.csv."""
    async with AIOFile("file.csv", 'r') as afp:
        async for record in LineReader(afp):
            # partition never fails: with no comma the whole line is field 0,
            # matching split(',')[0].
            first_field, _, _ = record.partition(',')
            print(first_field)
async def write_result(self, file_path: str, view_adapter: AdapterBase) -> None:
    """Stream the file at *file_path* line by line into *view_adapter*.

    Args:
        file_path: path of the (binary) file to read.
        view_adapter: adapter whose async ``write`` receives each raw line.

    Raises:
        PathNotFoundError: if *file_path* does not exist.
    """
    try:
        async with AIOFile(file_path, 'rb') as f:
            async for line in LineReader(f):
                await view_adapter.write(line)
    except FileNotFoundError as err:
        # Bug fix: the message previously reported self.images_path, which is
        # unrelated to the file actually being opened here. Also chain the
        # original exception for debuggability.
        raise PathNotFoundError(f"Not found {file_path}") from err
async def get_list(filename):
    """Read *filename* into a set of stripped lines.

    A missing file is not an error: it simply yields an empty set.

    Args:
        filename: path of the text file to read, one item per line.

    Returns:
        set[str]: the unique, right-stripped lines.
    """
    alist = set()
    try:
        async with AIOFile(filename, "r") as In:
            async for line in LineReader(In):
                alist.add(line.rstrip())
    except FileNotFoundError:
        pass
    # Fix/consistency: log the actual file name (the sibling get_skiplist()
    # logs its path the same way) instead of the literal "(unknown)".
    log.debug(f"{filename} contains {len(alist)} items")
    return alist
async def unwatch(self, dafp, fid):
    """Stop monitoring a watched file, draining any unread tail first.

    ``dafp`` is a mapping with an 'afp' (async file handle) and the last
    read 'offset'; ``fid`` is its key in ``self._files_map``.
    """
    # File no longer exists. If it has been renamed try to read it
    # for the last time in case we're dealing with a rotating log file.
    self.log("un-watching logfile %s" % dafp['afp'].name)
    del self._files_map[fid]
    async with dafp['afp'] as afp:
        # go through the rest of the lines before stop monitoring the file.
        # does the same as the "readlines" function with the difference
        # that it does not update the * offset * as in the "readlines" function
        async for line in LineReader(afp, offset=dafp['offset'], chunk_size=self._sizehint):
            await self._callback(dafp['afp'].name, line)
async def load_file(file, queue):
    """Read *file* line by line, pushing each line (newline stripped) onto *queue*.

    Reports how long the load took once the whole file has been queued.
    """
    start = time()
    async with AIOFile(file) as afp:
        async for raw in LineReader(afp):
            # Drop the trailing newline before queueing.
            await queue.put(raw[:-1])
    end = time()
    print(
        f"[QUEUE] Added {queue.qsize()} items to queue in {(end - start):.2f} seconds."
    )
async def help_func(name, name2):
    """Fetch each URL listed (one per line) in file *name*, print response
    lines starting with '<a ', and append those lines to file *name2*.
    """
    async with AIOFile(name, 'r') as file:
        async for line in LineReader(file):
            # line[:-2:] — presumably strips a trailing '\r\n' for display;
            # TODO confirm the input file uses CRLF line endings.
            print(f'I\'m here: {line[:-2:]}')
            # NOTE(review): `line` is passed to session.get() unmodified —
            # it may still carry its newline; verify aiohttp tolerates that.
            async with aiohttp.ClientSession() as session:
                async with session.get(line) as response:
                    text_file = await response.text()
                    #print(text_file)
                    text_file = re.split(r'[\r\n]', str(text_file))
                    print(f'Lines that start with <a :')
                    for el in text_file:
                        if el.strip().startswith('<a '):
                            print(el)
                            # NOTE(review): reopening name2 in append mode per
                            # match (and shadowing the outer `file` name) —
                            # works, but confirm this is intentional.
                            async with AIOFile(name2, 'a') as file:
                                writer = Writer(file)
                                await writer(f'{el}\n')
    print('I finished')
def __init__(self, afp, **kwargs):
    """Build an async CSV-dict reader over *afp*.

    Reader-specific options (line_sep, chunk_size, offset, encoding, errors)
    are popped out of **kwargs; whatever remains is forwarded to DictReader.
    """
    # Separate the LineReader / TextIOWrapper options from DictReader's.
    line_sep = kwargs.pop('line_sep', '\n')
    chunk_size = kwargs.pop('chunk_size', 4096)
    offset = kwargs.pop('offset', 0)
    encoding = kwargs.pop('encoding', 'utf-8')
    errors = kwargs.pop('errors', 'replace')

    self.buffer = io.BytesIO()
    self.file_reader = LineReader(
        afp,
        line_sep=line_sep,
        chunk_size=chunk_size,
        offset=offset,
    )
    text_stream = io.TextIOWrapper(self.buffer, encoding=encoding, errors=errors)
    self.reader = DictReader(text_stream, **kwargs)
    self.line_num = 0
async def log_get_handler(request):
    """Serve the contents of log.json as a JSON array (one entry per line).

    Any failure — missing file, unreadable JSON — is logged and answered
    with a 400 error payload.
    """
    try:
        entries = []
        async with AIOFile("log.json", "r") as afp:
            async for raw in LineReader(afp):
                entries.append(loads(raw))
        await write_log(request, ("No data", 200))
        return web.json_response(entries, status=200)
    except Exception:
        await write_log(request, ("No data", 400))
        return web.json_response({"Error": "Log.json is not read"}, status=400)
async def chunk_stories_from_file(file: str, batch_size: int = 100) -> "AsyncIterator[Tuple[List[str], List[int]]]":
    """Async yield batches of line-separated stories from *file*.

    Each line is decoded (bad bytes ignored) and literal "<newline>" markers
    are removed. Yields ``(lines, story_nums)`` pairs of at most *batch_size*
    items, where ``story_nums`` are the 1-based line numbers.

    Fixes vs. the original:
    - return annotation was ``Tuple[...]`` but this is an async generator;
    - the final yield no longer emits an empty batch when the line count is
      an exact multiple of *batch_size* (or the file is empty).
    """
    line_count = 1
    lines = []
    story_nums = []
    async with AIOFile(file, mode="rb") as f:
        async for line in LineReader(f):
            line = line.decode('utf-8', errors="ignore")
            line = line.replace("<newline>", "")
            lines.append(line)
            story_nums.append(line_count)
            line_count += 1
            if len(lines) == batch_size:
                yield lines, story_nums
                lines = []
                story_nums = []
    # Flush the remainder only if there is one.
    if lines:
        yield lines, story_nums
async def _tail(cls, filename: str, num_lines: int) -> list:
    """Collect up to *num_lines* lines from *filename*.

    NOTE(review): despite the name, this reads from the *start* of the file
    (LineReader begins at offset 0) — confirm callers expect head-style
    behavior.

    Args:
        filename (str): file name
        num_lines (int): maximum number of lines to return

    Returns:
        list: the collected lines
    """
    # Windows logs use CRLF separators; everything else uses LF.
    sep = '\r\n' if os.name == 'nt' else '\n'
    collected = []
    async with AIOFile(filename, 'r') as afp:
        async for row in LineReader(afp, line_sep=sep):
            collected.append(row)
            if len(collected) == num_lines:
                break
    return collected
async def get_skiplist():
    """Load skiplist.txt into a ``{debug_id: lowercased debug_file}`` dict.

    Blank lines and lines without exactly two space-separated fields are
    ignored; a missing file yields an empty dict.
    """
    skiplist = {}
    path = "skiplist.txt"
    try:
        async with AIOFile(path, "r") as In:
            async for raw in LineReader(In):
                entry = raw.strip()
                if not entry:
                    continue
                parts = entry.split(" ", maxsplit=1)
                if len(parts) == 2:
                    debug_id, debug_file = parts
                    skiplist[debug_id] = debug_file.lower()
    except FileNotFoundError:
        pass
    log.debug(f"{path} contains {len(skiplist)} items")
    return skiplist
def __init__(self, aio_file: aiofile.AIOFile, csv_reader,
             on_empty_line: OnError = OnError.skip_and_warn,
             on_wrong_length: OnError = OnError.skip_and_warn,
             **kwargs):
    """Build an async CSV reader over *aio_file*.

    ``csv_reader`` is the reader factory (e.g. csv.reader / csv.DictReader);
    ``on_empty_line`` / ``on_wrong_length`` select how malformed rows are
    handled. Reader-plumbing options (line_sep, chunk_size, offset,
    encoding, errors) are popped out of **kwargs; the remainder is
    forwarded to *csv_reader*.
    """
    self.on_empty_line = on_empty_line
    self.on_wrong_length = on_wrong_length
    # line_sep is popped (not just read) so it is NOT forwarded to csv_reader.
    self.line_sep = kwargs.pop('line_sep', '\n')
    self.file_reader = LineReader(aio_file,
                                  line_sep=self.line_sep,
                                  chunk_size=kwargs.pop(
                                      'chunk_size', 4096),
                                  offset=kwargs.pop('offset', 0))
    self.buffer = io.BytesIO()
    # encoding/errors are popped while building the wrapper, before **kwargs
    # is unpacked for csv_reader — argument evaluation order guarantees this.
    self.csv_reader = csv_reader(
        io.TextIOWrapper(
            self.buffer,
            encoding=kwargs.pop('encoding', 'utf-8'),
            errors=kwargs.pop('errors', 'replace'),
        ),
        **kwargs)
    self.line_num = 0
    # -1 marks "not yet determined"; presumably set from the first row read —
    # confirm in the class's iteration method.
    self.expected_num_fields = -1
async def write_result(self, file_path, response):
    """Copy *file_path* into *response* line by line, without loading it whole."""
    async with AIOFile(file_path, 'rb') as source:
        reader = LineReader(source)
        async for chunk in reader:
            await response.write(chunk)
async def _read_from_file(self, path):
    """Feed every line of the file at *path* into ``self._add_field``."""
    async with AIOFile(path, "r") as handle:
        reader = LineReader(handle)
        async for entry in reader:
            await self._add_field(entry)