Example #1
    async def __init__(self, _guild_id: int) -> None:
        self.guild_id = _guild_id

        async with async_open(
                dirname(abspath(__file__)) + "/../data/settings.json",
                "r") as f:
            self.settings = json.loads(await f.read())
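
Note on Example #1: Python never awaits __init__, so the coroutine body above would not actually run when the class is instantiated. A minimal sketch of the usual workaround, an async classmethod factory; the class name GuildSettings is an assumption, not taken from the original project:

import json
from os.path import abspath, dirname

from aiofile import async_open


class GuildSettings:
    # NOTE: illustrative class name; only the loading pattern is the point.
    def __init__(self, guild_id: int, settings: dict) -> None:
        self.guild_id = guild_id
        self.settings = settings

    @classmethod
    async def create(cls, guild_id: int) -> "GuildSettings":
        # Do the async file I/O here, then build the instance synchronously.
        async with async_open(
                dirname(abspath(__file__)) + "/../data/settings.json",
                "r") as f:
            settings = json.loads(await f.read())
        return cls(guild_id, settings)


# usage (inside another coroutine): settings = await GuildSettings.create(guild_id)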
Example #2
async def write_file_block_in_fs(file_path, buffer):
    async with write_file_block_in_fs_lock:
        filename = get_filename(file_path)
        logger.info('%s: writeblock into server %d', file_path, settings.server_id)
        file_path = os.path.join(settings.data_dir, filename)
        async with aiofile.async_open(file_path, 'wb') as f:
            await f.write(buffer)
Example #3
 async def ng_remove(self, ctx, message):
     if message not in self.content:
         return await ctx.send("そのIDはリストにありません。")  # "That ID is not in the list."
     self.content.remove(message)
     async with aiofile.async_open(self.path, "w", encoding="utf_8") as f:
         await f.write(json.dumps(self.content))
     await ctx.send("完了。")  # "Done."
Example #4
    async def fetch_player(self, uuid_: uuid.UUID) -> Player:
        try:
            return self.cache[int(uuid_)]
        except KeyError:
            file = os.path.join(self.data_dir,
                                f"{uuid_}.dat")  # filename of the player

            if not os.path.isfile(file):  # create new player if needed
                level_data = self.server.worlds["minecraft:overworld"].data

                player = Player.new(
                    self.server.api.eid(),
                    uuid_,
                    (level_data["SpawnX"], level_data["SpawnY"],
                     level_data["SpawnZ"]),
                    "minecraft:overworld",
                )
                self.cache[int(player.uuid)] = player

                return player

            async with aiofile.async_open(
                    file, "rb") as player_file:  # load preexisting
                player = Player(
                    self.server.eid(),
                    nbt.TAG_Compound.unpack(Buffer(await player_file.read())))
                self.cache[int(player.uuid)] = player  # match the int(uuid_) key used by the cache lookup

                return player
Example #5
 async def ensure_file_has_data():
     while True:
         async with aiofile.async_open(async_write_tmp_file, 'r') as r:
             data = await r.read()
         if data:
             return data
         await asyncio.sleep(0.5)
Example #6
File: pixiv.py  Project: kuttakke/CUAVbot
 async def _url_to_path(self, urls: List[str]) -> \
         Tuple[List[str], List[List[Union[int, str]]], List[List[Union[int, BytesIO]]]]:
     """
     Convert the URL list into a list of local storage paths, and rewrite each
     URL to use the reverse-proxy host.
     :param urls: url_list
     :return: paths: List[str], url_list: List[List[Union[int, str]]],
             downloaded_img: List[List[Union[int, BytesIO]]]
     """
     if isinstance(urls, str):
         urls = [urls]
     paths = []
     num_to_urls = list(enumerate(urls))
     downloaded_img = []
     url_list = []
     for url in num_to_urls:
         path = os.path.join(self.save_path, url[1].split("/")[-1])
         if os.path.exists(path):
             async with aiofile.async_open(path, "rb") as f:
                 downloaded_img.append([url[0], BytesIO(await f.read())])
         else:
             url_list.append(
                 [url[0], url[1].replace("i.pximg.net", "i.pixiv.cat")])
             # url_list.append([url[0], url[1]])
             paths.append(path)
     return paths, url_list, downloaded_img
Example #7
async def read_file(file_path: str, background_tasks: BackgroundTasks, stats: bool = False):
    try:
        tasks = []
        start = time.time()
        for i in range(settings.primary + settings.parity):
            tasks.append(receive_file_block(i, file_path))
        pieces = await asyncio.gather(*tasks)
        transfer_time = time.time() - start
        start = time.time()
        file, piece_map = decode_data(pieces)
        temp_file = tempfile.NamedTemporaryFile(delete=False)
        async with aiofile.async_open(temp_file.name, 'wb') as f:
            await f.write(file.buffer)
        decode_time = time.time() - start
        background_tasks.add_task(delete_temp_file, temp_file.name)
        background_tasks.add_task(rebuild_redundancy, file, piece_map)
        if stats:
            return {
                'delay': [transfer_time, decode_time]
            }
        else:
            return FileResponse(temp_file.name, media_type='application/octet-stream')

    except Exception as e:
        logger.exception(e)
        raise HTTPException(400, str(e))
Example #8
async def main():
    # Entering the session block creates a connection pool that can be
    # reused for efficiency. On exiting the block, aiohttp checks that all
    # allocated resources have been released.

    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(
            ssl=False)) as session:
        # An HTTP request is an asynchronous I/O operation, so it is handed to
        # the event loop until the response arrives.
        async with session.get(
                "https://foodish-api.herokuapp.com/api") as response:
            # The request stops after reading only the headers, so the response
            # status can be read synchronously, but getting the response body
            # requires awaiting the corresponding method.
            print(f"HTTP Response Status: {response.status}")
            json_data = await response.json()
            image_url = json_data["image"]
            extension = image_url.split(".")[-1]
            print(f"Image URL: {image_url}")

        async with session.get(image_url) as response:
            total_bytes, wrote_bytes = response.headers["Content-Length"], 0
            print("Open file...")
            async with async_open(f"random_food_image.{extension}", "wb") as f:
                print("Writing image bytes...")
                # Read the chunks of data in the order and size in which the
                # server sends them. Between receiving and writing chunks,
                # the interpreter is free to run other work.
                async for data, _ in response.content.iter_chunks():
                    wrote_bytes += len(data)
                    await f.write(data)
                    print(f"Wrote {wrote_bytes}/{total_bytes}")
        global started
        started = False
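
Example #8 shows only the coroutine; as a usage note (not part of the original snippet, which also defines the started flag elsewhere), such a script is typically driven with asyncio.run:

import asyncio

if __name__ == "__main__":
    asyncio.run(main())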
Example #9
File: pixiv.py  Project: kuttakke/CUAVbot
 async def change_json(cls, data: dict) -> None:
     """
     Modify the stored JSON data (overwrite the file with the given dict).
     :param data: dict
     :return: None
     """
     async with aiofile.async_open(cls._path, "w") as f:
         await f.write(json.dumps(data, indent=4, ensure_ascii=True))
Example #10
async def asynchronous(name_file):
    async with aiofile.async_open(name_file, 'r') as ai_as_o:  # open the urls.txt file
        async with aiohttp.ClientSession() as client:  # a single session reused for all requests
            tasks = []
            async for line in aiofile.LineReader(ai_as_o):  # read the web links line by line
                tasks.append(asyncio.create_task(fetch(client, line)))
            # keep the session open until every fetch has completed
            await asyncio.gather(*tasks)
        print('done')
Example #11
async def read(path):
    try:
        async with async_open(path, "r", encoding="utf-8") as f:
            return yes(await f.read())
    except Exception:
        return none

    return ()
Example #12
async def appendBytes(path, content):
    try:
        async with async_open(path, "ab+") as f:
            await f.write(bytes([c % 256 for c in content]))
    except Exception:
        pass

    return ()
Example #13
    async def load_level_data(self):
        file = os.path.join(self.path, "level.dat")

        if os.path.isfile(file):
            async with aiofile.async_open(file, "rb") as level_data_file:
                return nbt.TAG_Compound.unpack(Buffer(await level_data_file.read()))

        return new_level_nbt(
            (2586, self.server.meta.version, 19133),
            self.name,
            (0, 100, 0),
            self.server.conf["seed"],
        )["Data"]
Example #14
    async def upload_decompressed_file(self, blob_service_client, file,
                                       dest_container, metadata_json):
        blob_name = os.path.basename(file)

        blob_client = blob_service_client.get_blob_client(
            container=dest_container, blob=blob_name)

        async with async_open(file, "rb") as afp:
            await blob_client.upload_blob(await afp.read(), overwrite=True)
Example #15
 async def _parse_token_file(self, path):
     # type: (str) -> AccessToken
     async with async_open(path, "r") as file:
         data = json.loads(await file.read())  # the async file object must be read with await
         expires_on = int(
             data["expires_on"]
         ) / 1000  # Convert ms to seconds, since python time.time only handles epoch time in seconds
         token = AccessToken(data["access_token"], expires_on)
         return token
Example #16
async def save_file(file_path: Path, target_file: UploadFile) -> None:
    target_dir = os.path.dirname(file_path)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)
        logger.debug(file_path)
    await target_file.seek(0)
    async with async_open(file_path, "wb") as dest_file:
        while chunk := await target_file.read(1000):
            await dest_file.write(chunk)
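
A minimal sketch of how save_file might be wired into a FastAPI route, since the UploadFile type suggests FastAPI; the app, route, and uploads directory here are illustrative assumptions, not from the original project:

from pathlib import Path

from fastapi import FastAPI, UploadFile

app = FastAPI()


@app.post("/files/{name}")
async def upload(name: str, file: UploadFile) -> dict:
    # Stream the uploaded file to disk in 1000-byte chunks via save_file above.
    # "uploads" is an assumed target directory for this sketch.
    await save_file(Path("uploads") / name, file)
    return {"saved": name}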
Example #17
async def main(in_file, out_file):
    with open(in_file, 'r') as f:
        urls = [line.strip() for line in f.readlines()]  # readlines(), not readline()
    async with aiofile.async_open(out_file, 'a') as outfi:
        async with aiohttp.ClientSession() as session:
            tasks = [
                asyncio.create_task(func(url, session, outfi)) for url in urls
            ]
            # gather inside the session block so it stays open while the tasks run
            await asyncio.gather(*tasks)
Example #18
async def write(path, content):
    try:
        if not os.path.exists(os.path.split(os.path.abspath(path))[0]):
            os.mkdir(os.path.split(os.path.abspath(path))[0])
        async with async_open(path, "w+", encoding="utf-8") as f:
            await f.write(content)
    except Exception:
        pass

    return ()
Example #19
async def writeBytes(path, content):
    try:
        if not os.path.exists(os.path.split(os.path.abspath(path))[0]):
            os.mkdir(os.path.split(os.path.abspath(path))[0])
        async with async_open(path, "wb+") as f:
            await f.write(bytes([c % 256 for c in content]))
    except Exception:
        pass

    return ()
Example #20
    async def write_term(self, term):
        async with self._commit_lock:
            if term <= self.current_term:
                return

            async with async_open(self._term_path, "w") as fp:
                await fp.write(json.dumps(term))
                await fp.file.fsync()

            self.current_term = term
Example #21
async def set_current_offset(offset: int) -> None:
    """
    Sets the last successful offset. This allows the application to checkpoint
    or restore state based on its last successful parquet dump.

    :return: None
    """
    async with async_open(str(app.pickle_path), 'wb') as f:
        pickled = pickle.dumps(offset)
        await f.write(pickled)
    app.current_offset = offset
Example #22
async def rebuild_redundancy_all():
    for f in os.listdir(settings.data_dir):
        tasks = []
        async with aiofile.async_open(os.path.join(settings.data_dir, f), 'rb') as fp:
            buffer = await fp.read()
            piece = pickle.loads(buffer)
            file_path = piece.path
            for i in range(settings.primary + settings.parity):
                tasks.append(receive_file_block(i, file_path))
            pieces = await asyncio.gather(*tasks)
            file, piece_map = decode_data(pieces)
            await rebuild_redundancy(file, piece_map)
Example #23
async def main(in_file, out_file):

    with open(in_file, 'r', encoding='utf-8') as f:
        urls_to_crawl = [line.strip() for line in f.readlines()]

    with open(out_file, 'w') as f:
        pass

    async with async_open(out_file, 'a', encoding='utf-8') as afp:
        async with ClientSession() as session:
            tasks = [asyncio.create_task(fetch_links_and_pass(url, session, afp))
                    for url in urls_to_crawl]
            await asyncio.gather(*tasks)
Example #24
async def fetch(client, service):
    async with client.get(service) as resp:
        a = await resp.text()  # the response body must be awaited
        a = re.split(r'[\r\n]', a)  # split the text into lines
        for line in a:
            if line.startswith(
                    '<a >'
            ):  # if the line matches the pattern given in the task -
                async with aiofile.async_open(
                        'found.txt', 'a') as found:  # append so earlier matches are not overwritten
                    await found.write(
                        'Next special line has sent from {}. Look at this: {} \n'
                        .format(service, line))
Example #25
async def read_data(file_path, start_index, end_index):
    async with async_open(file_path, "rb") as f:
        ref_start_index = int(np.frombuffer(await f.read(4), dtype="<f")[0])
        si = max(ref_start_index, start_index)
        if si > end_index:
            return pd.Series(dtype=np.float32)
        # calculate offset
        f.seek(4 * (si - ref_start_index) + 4)
        # read nbytes
        count = end_index - si + 1
        data = np.frombuffer(await f.read(4 * count), dtype="<f")
        series = pd.Series(data, index=pd.RangeIndex(si, si + len(data)))
    return series
Example #26
File: pixiv.py  Project: kuttakke/CUAVbot
 async def _save_from_downloader(self, pages_path: list,
                                 img_bytes: List[List[Union[int,
                                                            BytesIO]]]):
     """
     Save the downloaded images to their corresponding paths.
     :param pages_path: list
     :param img_bytes: List[List[Union[int, BytesIO]]]
     :return:
     """
     img_len = len(pages_path)
     for i in range(img_len):
         async with aiofile.async_open(pages_path[i], "wb") as f:
             await f.write(img_bytes[i][1].getvalue())
     self.logger.info("{} images saved successfully".format(str(img_len)))
Example #27
async def VkFriends(id, chat_id):
	'''
	fetch the user's VK friends
	'''
	try:
		friends = (vk.friends.get(user_id=id, fields='nickname').get('items'))
		fname = f'data/VkTxt/{chat_id}_{id}.txt'
		async with async_open(fname, 'w+', encoding="utf-8") as f:
			for friend in friends:
				await f.write(
					f'{friend.get("id")}|{friend.get("first_name")}|{friend.get("last_name")}\n')
		await bot.send_document(chat_id, open(fname, 'rb'))
	except Exception as e:
		await bot.send_message(chat_id, 'Друзья не найдены.')
Example #28
    async def open(self):
        self.read_term()
        await self.read_log()

        if not self._path.parent.exists():
            os.makedirs(self._path.parent)

        await self.read_session()
        self.session += 1
        await self.write_session()

        self._path.touch()
        self._fp = async_open(self._path, "a+")
        await self._fp.file.open()
Example #29
    async def check_status(self, f_name):
        try:
            async with async_open(f_name, "r") as f:
                raw_timestamp = await f.read()
                current_timestamp = int(raw_timestamp,
                                        base=10) if raw_timestamp else 0
        except FileNotFoundError:
            current_timestamp = 0

        while True:
            LOG.info("Checking leaderboard status")

            leaderboard = await self.lookup_leaderboard()
            events = [
                event for event in leaderboard.events
                if event.ts > current_timestamp
            ]

            if events:
                LOG.info("Found %d new event(s)", len(events))
            else:
                LOG.debug("Found 0 new event(s)")

            for event in events:
                await self.seabird.send_message(channel_id=self.channel,
                                                text=str(event))

                # It's arguably worse to cause a write on every message sent,
                # but this will make it possible to properly handle things if we
                # fail to send a message without having to start over.
                async with async_open(f_name, "w") as f:
                    await f.write(str(event.ts))

                current_timestamp = event.ts

            LOG.info("Sleeping for %d seconds", CHECK_STATUS_DELAY)
            await asyncio.sleep(CHECK_STATUS_DELAY)
Example #30
async def get_current_offset() -> int:
    """
    Gets the last stored offset. This allows the application to checkpoint
    or restore state based on its last successful parquet dump.

    :return: int
    """
    # TODO: Not loading checkpoint
    if app.pickle_path.is_file():
        async with async_open(str(app.pickle_path), 'rb') as f:
            pickled = await f.read()
        offset = pickle.loads(pickled)
    else:
        offset = 0
    return offset
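
Examples #21 and #30 form a pair; a short sketch of the checkpoint round trip, assuming app.pickle_path and app.current_offset are configured as in those snippets, and with a hypothetical process_batch coroutine:

async def process_batch(records: list) -> None:
    # process_batch is an illustrative name, not from the original project.
    offset = await get_current_offset()  # restore the last checkpoint
    # ... write the parquet dump for `records` here ...
    await set_current_offset(offset + len(records))  # persist the new checkpoint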