async def file_iterator(path):
    async with curio.aopen(path, 'rb') as reader:
        while True:
            data = await reader.read(4096)
            if not data:
                break
            yield data
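# A minimal usage sketch, not from the original source: it consumes the async
# generator above under curio and copies a file chunk by chunk. The helper name
# copy_file and the example paths are hypothetical.
import curio

async def copy_file(src, dst):
    async with curio.aopen(dst, 'wb') as out:
        async for chunk in file_iterator(src):
            await out.write(chunk)

if __name__ == '__main__':
    curio.run(copy_file, 'input.bin', 'output.bin')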
async def streamSave(self, bytechunk):
    fileName = randomFileName()
    folder = getTempDir()
    if folder[-1] not in ('\\', '/'):
        folder = folder + '/'
    path = folder + fileName
    async with curio.aopen(path, 'ab') as out_file:
        await out_file.write(bytechunk)
    self.taskList[taskid]['result'] = fileName
async def post_record(request, response):
    record = json.loads(str(request.stream.data, encoding='utf8'))
    update_record(record, game_record)
    response.set_header('Access-Control-Allow-Origin', '*')
    await response.send_status_code(200)
    # write records to disk
    async with aopen(game_record_path, mode='w') as f:
        game_record_string = json.dumps(game_record, indent='\t')
        await f.write(game_record_string)
async def _read(self, length=1):
    async with curio.aopen(self._path_to_file, "rb") as file:
        lines = await file.read()
        if len(lines) < length:
            await curio.sleep(0)
            return bytes()
        ret_val = lines[self._last_index:(self._last_index + length)]
        self._last_index += length
        return ret_val
async def send_file(self, file_path):
    async with aopen(file_path, mode='rb') as f:
        data = await f.read()
        self.headers['content-length'] = str(len(data))
        content_type, content_encoding = mimetypes.guess_type(file_path)
        if content_type:
            self.headers['content-type'] = content_type
        if content_encoding:
            self.headers['content-encoding'] = content_encoding
        await self.send(data)
async def create_devices(path):
    """
    Create device instances and their IO devices from the given path to a JSON file.
    """
    # Read the JSON file
    async with curio.aopen(path) as file:
        content = await file.read()
    content = json.loads(content)
    device_classes_dict = create_devices_dict()
    device_instance_dict = {}
    for name in content:
        try:
            device_dict = content[name]
            device_type = device_classes_dict[device_dict["type"]]
            if "auto_flush" not in device_dict:
                device_dict["auto_flush"] = None
            device_io_dict = device_dict["device_io"]
            device_io_type = device_io_dict.pop("type")
            device_io_class = getattr(device_io, device_io_type)
            # Pass named parameters to the objects being created; parameters
            # whose value is None are dropped so the class defaults apply.
            device_io_instance = device_io_class(
                **{k: v for k, v in device_io_dict.items() if v is not None})
            device_instance = device_type(**device_dict, name=name, io_device=device_io_instance)
            logger.info(
                f"Instantiated Device {name} of type: {device_type.__name__}, IO {device_io_class.__name__}"
            )
            device_instance_dict[name] = device_instance
        except Exception:
            logger.error(f"Error in configuration of device {name}")
            raise
    for observable in device_instance_dict.values():
        observable_name = observable.get_name()
        observable_instance = device_instance_dict[observable_name]
        for observer in content[observable_name]["observers"]:
            observer_instance = device_instance_dict[observer]
            observable_instance.set_observer(observer_instance)
    return device_instance_dict.values()
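# A hedged sketch of the JSON layout that create_devices() appears to expect,
# reconstructed from the parsing code above. The device and IO type names and
# their parameters are hypothetical; only the keys "type", "device_io",
# "observers", and the optional "auto_flush" are taken from the code.
EXAMPLE_DEVICE_CONFIG = """
{
    "sensor_a": {
        "type": "Sensor",
        "auto_flush": null,
        "device_io": {"type": "SerialIO", "port": "/dev/ttyUSB0"},
        "observers": ["logger_a"]
    },
    "logger_a": {
        "type": "Logger",
        "device_io": {"type": "FileIO", "path": "/tmp/devices.log"},
        "observers": []
    }
}
"""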
async def write(self, data: Any, expand: bool = True):
    '''Write the given data as a JSON element to the stream.

    Strings will be written assuming they are already valid JSON; this
    could result in malformed JSON, so care must be taken. Other data
    types are passed to json.dumps for serialization.

    Args:
        data: Data to coerce to JSON.
        expand: If True, iterables will be expanded into the stream
            rather than appended as a single item.
    '''
    async with curio.aopen(self.filename, 'a') as fp:
        if self.n_writes != 0:
            await fp.write(',\n')
        await fp.write(self._dumps_data(data, expand=expand))
        self.n_writes += 1
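# A minimal usage sketch under assumptions: write() above belongs to some
# stream-writer class exposing self.filename, self.n_writes and
# self._dumps_data(). The JSONStreamWriter name and its constructor are
# hypothetical, shown only to illustrate the intended call pattern.
async def record_results():
    writer = JSONStreamWriter('results.json')        # hypothetical constructor
    await writer.write({'step': 1, 'loss': 0.52})    # single object
    await writer.write([{'step': 2}, {'step': 3}])   # iterable, expanded by default

curio.run(record_results)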
async def follower(self, file: Path):
    '''Monitor a file.

    Spawn new instances of the registered event handlers, then tail the
    file; whenever new lines come in, use queues to dispatch them to the
    prompter tasks for each handler.
    '''
    log.print(term.cyan('new file:'), f' {file.name}')
    async with curio.TaskGroup() as handlergroup:
        handlers = dict()
        queues = dict()

        ### create handler tasks
        for trigger, handler in self._events.items():
            log.print(term.dcyan(f'spawn line handler for {file.name}:'), f' {trigger}')
            queue = curio.Queue()
            queues[trigger] = queue
            prompter = self.Prompter(queue, trigger, file)

            ### supported parameters for event handlers:
            kwargs = dict()
            kwargs['prompter'] = prompter

            handlers[trigger] = await handlergroup.spawn(handler, kwargs)

        ### process the file
        async with curio.aopen(file, 'r') as fstream:
            ### TODO: fast-forward through already-scanned lines
            self._scannedcount.setdefault(file, 0)

            ### follow the file and push new lines to prompter queues
            while True:
                line = await fstream.readline()
                if line:
                    log.print(term.dpink(file.name), ' put ', term.dyellow(line.strip()))
                    self._scannedcount[file] += 1
                    for trigger, queue in queues.items():
                        await queue.put((line, self._scannedcount[file]))
async def touch(p):
    new_path = p / 'xxx'
    async with curio.aopen(new_path, 'a'):
        pass
    return new_path
async def _write(self, data):
    async with curio.aopen(self._path_to_file, "wb") as file:
        # Overwrite!
        return await file.write(data)
async def _read(self, length=1):
    async with curio.aopen(self._path_to_file, "rb") as file:
        lines = await file.read()  # Read as much as we can
        return lines[:length]
async def send_file(self, file_path):
    # Not sure whether this context manager handles a missing file (file not found)
    async with aopen(file_path, mode='rb') as f:
        data = await f.read()
        self.headers['content-length'] = str(len(data))
        await self.send(data)
async def echo(msg):
    mode = 'w' if self.echo_file in ['/dev/stdout', '/dev/stderr'] else 'a'
    async with curio.aopen(self.echo_file, mode) as fp:
        await fp.write(f'{msg.to_yaml()}\n')