async def token_producer(out_q: asyncio.Queue):
    """Tokenize the file named by sys.argv[2] and feed every token into
    *out_q*, then enqueue the OES sentinel to signal end-of-stream."""
    async with aiofile.AIOFile(sys.argv[2], 'rb') as source:
        async for tok in tokenize(source):
            await out_q.put(tok)
    await out_q.put(OES)
async def async_do_one(self, data):
    """Write *data* to the file located by self.get_file_path()."""
    target = self.get_file_path()
    async with aiofile.AIOFile(str(target), 'w') as out_file:
        await out_file.write(data)
async def __dump_text(self) -> None:
    """Persist the raw text of this document into its file under the
    words folder."""
    # and words into /words
    target_path = f"{self.__dir}/{WORDS_FOLDER_NAME}/{self.__name}"
    async with aiofile.AIOFile(target_path, "w+") as out_file:
        text_writer = aiofile.Writer(out_file)
        await text_writer(self.__raw_text)
async def write_chunks():
    """Stream the wav file to the audio input stream in 16 KiB chunks,
    then signal end-of-stream.

    Relies on `wavfile` and `stream` from the enclosing scope.
    """
    async with aiofile.AIOFile(wavfile, 'rb') as audio_file:
        chunked = aiofile.Reader(audio_file, chunk_size=1024 * 16)
        async for block in chunked:
            await stream.input_stream.send_audio_event(audio_chunk=block)
        await stream.input_stream.end_stream()
async def load(self):
    """Read the file at self._path and return the parsed YAML document.

    Fix: plain `yaml.load` without an explicit Loader is deprecated
    (PyYAML >= 5.1) and can construct arbitrary Python objects from the
    document; `yaml.safe_load` parses the same standard YAML safely.
    """
    async with aiofile.AIOFile(self._path.absolute().as_posix()) as afp:
        raw = await afp.read()
    return yaml.safe_load(raw)
async def main():
    """Health-monitor pipeline entry point.

    Reads ./config.json, then wires a URL-polling Producer through Kafka
    (SSL) into a Postgres-backed consumer, and shuts everything down on
    exit.
    """
    config_data = None
    config = None
    # read the configuration from file
    try:
        async with aiofile.AIOFile("./config.json", 'rb') as afp:
            config_data = await afp.read()
            #print(config_data)
            config = json.loads(config_data)
    except Exception as e:
        # config is mandatory: report and abort with a non-zero exit code
        print(f"the attempt to read the config file failed with: {e}", file=sys.stderr)
        sys.exit(1)
    # SSL context for AIOKafka
    context = create_ssl_context(cafile="./ssl_kafka/ca.pem", certfile="./ssl_kafka/service.cert", keyfile="./ssl_kafka/service.key")
    kproducer = AIOKafkaProducer(bootstrap_servers=[config["kafka_bootstrap"]], security_protocol='SSL', ssl_context=context)
    kconsumer = AIOKafkaConsumer(config["kafka_topic"], auto_offset_reset="earliest", bootstrap_servers=[config["kafka_bootstrap"]], security_protocol='SSL', ssl_context=context)
    # work queue shared between the Producer and the Consumer tasks
    q = asyncio.Queue()
    # postgres
    # note:
    # cur.execute("CREATE TABLE health_data (id serial PRIMARY KEY, timestamp varchar, data varchar, status integer, response_time varchar);")
    # NOTE(review): psycopg2 is a blocking driver used inside an async app —
    # DB calls in KConsumer presumably block the event loop; confirm.
    pg_con = psycopg2.connect(config["pg_uri"])
    pg_cur = pg_con.cursor()
    producers = [
        asyncio.create_task(
            Producer(config["url_to_monitor"], q, config["sleep_duration"]))
    ]
    consumers = [
        asyncio.create_task(Consumer(q, kproducer)),
        asyncio.create_task(KConsumer(kconsumer, pg_con, pg_cur))
    ]
    try:
        # only the producers are awaited; consumer tasks run until cancelled
        await asyncio.gather(*producers)
        await q.join()
    # handle Control+C and stop gracefully
    # NOTE(review): KeyboardInterrupt is usually delivered to the outer
    # asyncio.run()/loop, not raised inside this coroutine — this handler
    # may never fire; verify against the caller.
    except KeyboardInterrupt:
        for p in producers:
            p.cancel()
        for c in consumers:
            c.cancel()
    finally:
        # always release Kafka clients and the Postgres connection
        await kproducer.stop()
        await kconsumer.stop()
        pg_cur.close()
        pg_con.close()
async def read(self):
    """Return the full contents of the wrapped file (opened in the
    aiofile default read mode)."""
    file_path = self._f.absolute().as_posix()
    async with aiofile.AIOFile(file_path) as handle:
        contents = await handle.read()
    return contents
async def write(self, data):
    """Overwrite the wrapped file with *data* and return *data*.

    Fix: the file was opened without a mode; `aiofile.AIOFile` defaults
    to read-only 'r', so the subsequent write failed. Open in 'w'
    (create/truncate) so the write succeeds, mirroring the sibling
    `read` method which relies on the read default.
    """
    async with aiofile.AIOFile(self._f.absolute().as_posix(), 'w') as afp:
        await afp.write(data)
    return data
async def main():
    """Read test.torrent as raw bytes and dump them to stdout."""
    async with aiofile.AIOFile("test.torrent", "rb") as torrent_file:
        raw_bytes = await torrent_file.read()
    print(raw_bytes)
async def test_aiofile(chunks: list):
    """Write each chunk of *chunks* to its own numbered .txt file inside
    the AIOFILE benchmark directory."""
    target_dir = Dirs.AIOFILE.value
    for index, piece in enumerate(chunks):
        async with aiofile.AIOFile(f'./{target_dir}/{index}.txt', 'w+') as handle:
            await handle.write(piece)
async def writeState():
    """Stamp the global `state` dict with the current time and persist
    it to state.json as JSON."""
    async with aiofile.AIOFile('state.json', 'w') as state_file:
        state['time'] = time.time()
        await state_file.write(json.dumps(state))
async def seeker(file):
    """Read URLs from *file* (one per line) and fetch them all
    concurrently via the module-level `fetch` coroutine."""
    async with aiofile.AIOFile(file, 'r') as url_file:
        pending = [fetch(url) async for url in aiofile.LineReader(url_file)]
        await asyncio.gather(*pending)
async def find_line(text):
    """Write every line of *text* that starts with "<a >" to found.txt.

    Fix: the original split on the raw string r'\n' — a literal
    backslash followed by 'n' — which does not occur in normal text, so
    the whole input was treated as a single line and nothing matched.
    Split on the actual newline character instead.
    """
    lines = text.split('\n')
    async with aiofile.AIOFile("found.txt", 'w') as found:
        for line in lines:
            if line.startswith("<a >"):
                await found.write(line)
async def performLongFileOperation(self, evt):
    """Read the installer binary asynchronously and report its size.

    Fixes: `aiofile.AIOFile` is an async context manager and must be
    entered with `async with`, and its `read()` coroutine must be
    awaited — the original used a plain `with` and never awaited the
    read, so `bts` was a coroutine object rather than bytes.
    """
    async with aiofile.AIOFile("c:\\tmp\\NTFSUndelete_setup.exe", 'rb') as f:
        bts = await f.read()
    print("Got", len(bts), "bytes")
async def write_user_data(self):
    """Serialize this user's game map and write it to
    DATA_DIR/<username>.json."""
    payload = {'data': self.game_map}
    async with aiofile.AIOFile(f'{settings.DATA_DIR}/{self.username}.json', 'w') as out_file:
        await out_file.write(encode_json(payload))