async def write(store, media_dir):
    # Persist user state (current book/language plus per-book page number and
    # bookmarks) as TOML files, writing only what changed since `prev`.
    global prev
    state = store.state
    user_state = state['app']['user']
    books = user_state['books']
    selected_book = user_state['current_book']
    selected_lang = user_state['current_language']
    if selected_book != prev['current_book']:
        # book paths are stored relative to the media dir, except the built-in manual
        if not selected_book == manual_filename:
            selected_book = os.path.relpath(selected_book, media_dir)
        s = toml.dumps({'current_book': selected_book, 'current_language': selected_lang})
        path = os.path.join(media_dir, 'sd-card', USER_STATE_FILE)
        async with aiofiles.open(path, 'w') as f:
            await f.write(s)
    for filename in books:
        book = books[filename]
        if filename in prev['books']:
            prev_book = prev['books'][filename]
        else:
            # unseen book: compare against a fresh default so it always writes
            prev_book = BookFile()
        if book.page_number != prev_book.page_number or book.bookmarks != prev_book.bookmarks:
            path = to_state_file(book.filename)
            if book.filename == manual_filename:
                path = os.path.join(media_dir, path)
            # bookmarks are persisted 1-based; entries marked 'deleted' are skipped
            bms = [bm + 1 for bm in book.bookmarks if bm != 'deleted']
            # remove start-of-book and end-of-book bookmarks
            bms = bms[1:-1]
            # ordered to make sure current_page comes before bookmarks
            d = OrderedDict([['current_page', book.page_number + 1], ['bookmarks', bms]])
            s = toml.dumps(d)
            async with aiofiles.open(path, 'w') as f:
                await f.write(s)
    prev = user_state
async def testfile(request):
    """Write the XML payload for *id* to data/<id>.txt, read it back and serve it."""
    id = request.match_info['id']
    # os.path.join instead of the '\'-joined literal: '%s\data\%s.txt' only
    # worked on Windows, and '\d' is an invalid escape in future Pythons.
    path = os.path.join(sys.path[0], 'data', '%s.txt' % id)
    async with aiofiles.open(path, 'w') as f:
        await f.write(str(await readxml()))
    async with aiofiles.open(path, 'r') as f:
        text = 'file content - %s: \r\n%s' % (id, await f.read())
    return web.Response(body=text.encode('utf-8'), content_type='text/html')
async def timestamp(request):
    """Write the current timestamp to data/<ts>.log, read it back and serve it."""
    t = datetime.now().timestamp()
    # portable path instead of the Windows-only '%s\data\%s.log' literal
    path = os.path.join(sys.path[0], 'data', '%s.log' % t)
    async with aiofiles.open(path, 'w') as f:
        await f.write(str(t))
    async with aiofiles.open(path, 'r') as f:
        text = 'Timestamp: %s' % await f.read()
    return web.Response(body=text.encode('utf-8'), content_type='text/html')
async def add(self, ctx, gain: int):
    """ Adds XP. Mod only. """
    async with aiofiles.open('./data/sw_xp.json') as f:
        totals = json.loads(await f.read())
    # update running total, remember the gain and when it happened
    totals[0] += gain
    totals[1] = gain
    totals[2] = dt.date.today().strftime("%B %d")
    async with aiofiles.open('./data/sw_xp.json', mode='w') as f:
        await f.write(json.dumps(totals))
    await ctx.send("**{1}** XP added. New total: {0}.".format(totals[0], totals[1]))
async def remove(self, ctx, *, tag):
    """Removes a tag from the alias database."""
    async with aiofiles.open("./data/aliases.json") as f:
        alias_map = json.loads(await f.read())
    removed = alias_map.pop(tag, None)
    if not removed:
        await ctx.send("{} is not in the alias database.".format(tag))
        return
    async with aiofiles.open("./data/aliases.json", mode="w") as f:
        await f.write(json.dumps(alias_map))
    await ctx.send("Removed {} -> {} from the database.".format(tag, removed))
def initialise(self):
    """Create the image folder and write CSV header rows to both log files."""
    if not os.path.isdir(self.image_folder):
        os.makedirs(self.image_folder)
    # truncate each log file and write its header row
    for target, header in ((self.pinor_file, 'timestamp,lat,lon,alt,img,dist\n'),
                           (self.location_file, 'img,lat,lon,alt\n')):
        handle = yield from aiofiles.open(target, mode='w')
        try:
            yield from handle.write(header)
        finally:
            yield from handle.close()
async def apply(self, recipe: Recipe) -> Recipe:
    # Reload the recipe from its file through the pipeline and mark the
    # result as the original. The semaphore appears to bound concurrent
    # loads — NOTE(review): confirm it is meant to stay held through
    # run_sp(), not just the file read.
    async with self.sem, \
            aiofiles.open(recipe.path, encoding="utf-8") as fdes:
        recipe_text = await fdes.read()
        recipe = await self.pipeline.run_sp(recipe.load_from_string, recipe_text)
        recipe.set_original()
        return recipe
async def dump(self, loop=None):
    """Pickle the markov database to self.filename; failures are logged, never raised."""
    try:
        async with aiofiles.open(self.filename, "wb", loop=loop) as f:
            payload = pickle.dumps(self.db)  # was `bytes` — don't shadow the builtin
            await f.write(payload)
    except Exception:  # narrowed from bare `except:` so SystemExit/KeyboardInterrupt escape
        logger.exception("Unable to dump markov database.")
async def write_file(request, response):
    # Stream the HTTP request body into a file inside the project directory,
    # translating filesystem errors into HTTP status responses.
    controller = Controller.instance()
    project = await controller.get_loaded_project(request.match_info["project_id"])
    path = request.match_info["path"]
    path = os.path.normpath(path).strip("/")
    # Raise error if user try to escape
    # NOTE(review): an empty path makes path[0] raise IndexError — confirm
    # upstream routing guarantees a non-empty path.
    if path[0] == ".":
        raise aiohttp.web.HTTPForbidden()
    path = os.path.join(project.path, path)
    response.set_status(200)
    try:
        async with aiofiles.open(path, 'wb+') as f:
            while True:
                try:
                    chunk = await request.content.read(CHUNK_SIZE)
                except asyncio.TimeoutError:
                    raise aiohttp.web.HTTPRequestTimeout(text="Timeout when writing to file '{}'".format(path))
                if not chunk:
                    break
                await f.write(chunk)
    except FileNotFoundError:
        raise aiohttp.web.HTTPNotFound()
    except PermissionError:
        raise aiohttp.web.HTTPForbidden()
    except OSError as e:
        raise aiohttp.web.HTTPConflict(text=str(e))
async def create(self):
    """
    Create the snapshot

    Streams the exported project as an uncompressed zip straight to
    self.path; raises HTTPConflict if the snapshot already exists or
    the export fails.
    """
    if os.path.exists(self.path):
        raise aiohttp.web.HTTPConflict(text="The snapshot file '{}' already exists".format(self.name))
    snapshot_directory = os.path.join(self._project.path, "snapshots")
    try:
        os.makedirs(snapshot_directory, exist_ok=True)
    except OSError as e:
        raise aiohttp.web.HTTPInternalServerError(text="Could not create the snapshot directory '{}': {}".format(snapshot_directory, e))
    try:
        begin = time.time()
        with tempfile.TemporaryDirectory() as tmpdir:
            # Do not compress the snapshots
            with aiozipstream.ZipFile(compression=zipfile.ZIP_STORED) as zstream:
                await export_project(zstream, self._project, tmpdir, keep_compute_id=True, allow_all_nodes=True)
                # write the zip stream to disk chunk by chunk
                async with aiofiles.open(self.path, 'wb') as f:
                    async for chunk in zstream:
                        await f.write(chunk)
        log.info("Snapshot '{}' created in {:.4f} seconds".format(self.name, time.time() - begin))
    except (ValueError, OSError, RuntimeError) as e:
        raise aiohttp.web.HTTPConflict(text="Could not create snapshot file '{}': {}".format(self.path, e))
async def import_project(request, response):
    # Import a project from an uploaded zip archive, buffering it to a
    # temporary file first.
    controller = Controller.instance()
    if request.get("path"):
        # writing to an arbitrary path is only allowed on local servers
        config = Config.instance()
        if config.get_section_config("Server").getboolean("local", False) is False:
            response.set_status(403)
            return
    path = request.json.get("path")
    name = request.json.get("name")
    # We write the content to a temporary location and after we extract it all.
    # It could be more optimal to stream this but it is not implemented in Python.
    try:
        begin = time.time()
        with tempfile.TemporaryDirectory() as tmpdir:
            temp_project_path = os.path.join(tmpdir, "project.zip")
            async with aiofiles.open(temp_project_path, 'wb') as f:
                while True:
                    chunk = await request.content.read(CHUNK_SIZE)
                    if not chunk:
                        break
                    await f.write(chunk)
            with open(temp_project_path, "rb") as f:
                # NOTE(review): this call shares the handler's own name —
                # confirm it resolves to the controller-level import helper
                # rather than recursing into this function.
                project = await import_project(controller, request.match_info["project_id"], f, location=path, name=name)
        log.info("Project '{}' imported in {:.4f} seconds".format(project.name, time.time() - begin))
    except OSError as e:
        raise aiohttp.web.HTTPInternalServerError(text="Could not import the project: {}".format(e))
    response.json(project)
    response.set_status(201)
async def sw_xp(self, ctx):
    """ Displays XP from the shipwrecked campaign. """
    if ctx.invoked_subcommand is not None:
        return
    async with aiofiles.open('./data/sw_xp.json') as f:
        raw = await f.read()
    xp = json.loads(raw)
    await ctx.send("The current XP total is **{0}**. The last XP gain was {1} on {2}.".format(xp[0], xp[1], xp[2]))
async def get(self):
    # TODO For testing purposes only a simple file based template
    template_path = os.path.normpath(HERE + '/../template/create_account.html')
    async with aiofiles.open(template_path, encoding='utf-8') as template:
        body = await template.read()
    return web.Response(body=body, content_type='text/html')
async def add(self, ctx, user: discord.Member):
    """Add *user* to the blacklist and persist it, unless already present."""
    if user.id in checks.blacklist:
        await ctx.send("That user is already blacklisted.")
        return
    checks.blacklist.add(user.id)
    async with aiofiles.open("./data/blacklist.json", mode="w") as f:
        await f.write(json.dumps(list(checks.blacklist)))
    await ctx.send("{} added to the blacklist.".format(user.name))
async def remove(self, ctx, user: discord.Member):
    """Remove *user* from the blacklist and persist it, if present."""
    if user.id not in checks.blacklist:
        await ctx.send("That user isn't even blacklisted!")
        return
    checks.blacklist.remove(user.id)
    async with aiofiles.open("./data/blacklist.json", mode="w") as f:
        await f.write(json.dumps(list(checks.blacklist)))
    await ctx.send("Removed {} from the blacklist.".format(user.name))
async def data_generator(self, path):
    # Stream the file at *path* in self._chunksize'd binary chunks.
    # `yield_` appears to come from the async_generator compatibility
    # library (pre-3.6-style async generators) — TODO confirm.
    async with aiofiles.open(path, "rb") as f:
        while True:
            part = await f.read(self._chunksize)
            if not part:
                break
            await yield_(part)
    return
def save(self, url, html):
    """Write *html* to <save_dir>/<netloc>.html and report the saved URL."""
    parsed = urlparse(url)
    target = os.path.join(self.save_dir, parsed.netloc + ".html")
    handle = yield from aiofiles.open(target, 'wb')
    try:
        yield from handle.write(html)
        print("Saved: " + url)
    finally:
        yield from handle.close()
async def load():
    """Load the pickled ping groups from the 'pingdb' file into the module global."""
    global pingGroups
    logger.info("Loading ping groups.")
    async with aiofiles.open("pingdb", "rb") as f:
        raw = await f.read()
    try:
        pingGroups = pickle.loads(raw)
    except EOFError:
        # empty file: keep the current groups and start a fresh database
        logger.warning("Making new pingdb.")
async def fetch_file(self, client, filename):
    """Download one file into self.target, bounded by the semaphore."""
    destination = os.path.join(self.target, filename)
    with (await self.semaphore):  # legacy acquire idiom; limits concurrent fetches
        async with client.get(self.url(filename), timeout=None) as resp:
            contents = await resp.read()
        async with aiofiles.open(destination, 'wb') as fh:
            await fh.write(contents)
        self.progress.update(len(contents))
async def load(self):
    """Load the pickled markov database; fall back to an empty table if unreadable."""
    async with aiofiles.open(self.filename, "rb") as f:
        try:
            data = await f.read()  # was `bytes` — don't shadow the builtin
            self.db = pickle.loads(data)
        except EOFError:
            logger.exception("Unable to load markov database.")
    # NOTE(review): assumes self.db already exists (e.g. set in __init__)
    # when the pickle is empty — otherwise this raises AttributeError.
    if not self.db:
        self.db = defaultdict(zero_dict)
async def unblacklist(self, message, *args, **kwargs):
    '''
    Unblacklists the user by removing their 'uid' from the currently
    maintained list of blacklisted users and removes it from the file.
    '''
    users = message.split(' ')
    blacklisted_users = kwargs['blacklisted_users']
    # only act on uids that are actually blacklisted right now
    users = [user for user in users if user in blacklisted_users]
    for user in users:
        del blacklisted_users[blacklisted_users.index(user)]
    # file entries are newline-terminated, so match them that way
    users = [user + '\n' for user in users]
    async with aiofiles.open(self.blacklist_file, mode='r') as f:
        saved_users = await f.readlines()
    # NOTE(review): index() raises ValueError if a uid is in memory but
    # missing from the file — confirm the two are always kept in sync.
    for user in users:
        del saved_users[saved_users.index(user)]
    async with aiofiles.open(self.blacklist_file, mode='w') as f:
        await f.writelines(saved_users)
def downloadFundData(fund):
    # Download (or incrementally update) the value history for *fund*,
    # scraping pages 1..15 and writing the result to folder+fund.
    if not os.path.exists(folder+fund):
        # no local file yet: fetch all pages concurrently, write them in order
        myFile = yield from aiofiles.open(folder+fund, "w")
        try:
            coros = [getWebPage(fund, page) for page in range(1, 16)]
            tabela = yield from asyncio.gather(*coros)
            for PAGE_NR in range(1,16):
                yield from myFile.write(getValues(tabela[PAGE_NR-1]))
                print ("Finished:", fund, PAGE_NR)
        finally:
            yield from myFile.close()
    else:
        # file exists, whole download unnecessary, only update
        # read first line to know how much needs to be downloaded
        with open(folder+fund, "r") as myFile:
            newestValueInFile = myFile.readline()
            values = ""
            for page in range(1,16):
                webPage = yield from getWebPage(fund, page)
                values += getValues(webPage)
                # locate the newest stored value inside the freshly downloaded data
                index = values.find(newestValueInFile)
                if index == 0:
                    print("{} is up to date".format(fund))
                    break
                elif index == -1:
                    print("{} newest value from file not found on downloaded page, another needs to be downloaded".format(fund))
                    continue
                else:
                    # prepend everything newer than the stored head to the old content
                    print("{} needs to be updated".format(fund))
                    newContent = values[:index]
                    myFile.seek(0)
                    oldContentList = myFile.readlines()
                    oldContentString = ""
                    for line in oldContentList:
                        oldContentString += line
                    # close explicitly before removing/rewriting the same path
                    myFile.close()
                    os.remove(folder+fund)
                    with open(folder+fund, "w") as myFile:
                        myFile.write(newContent + oldContentString)
                    break
async def add(self, ctx):
    """
    Adds aliases. `!help alias add` for usage info.
    Usage: &alias add tag aliased_to
    """
    TIMEOUT = 60
    async with aiofiles.open("./data/aliases.json") as f:
        aliases = json.loads(await f.read())
    def check(msg):
        # only accept replies from the invoking user in the same channel
        return msg.author == ctx.author and msg.channel == ctx.channel
    await ctx.send("Please name the tag you want to alias **to**.")
    try:
        msg = await self.bot.wait_for("message", check=check, timeout=TIMEOUT)
    except asyncio.TimeoutError:
        await ctx.send("Timeout. Exiting.")
        return
    target_tag = msg.content.lower()
    await ctx.send("Please name the tag or tags you want to alias **from**, separated by commas.")
    try:
        msg = await self.bot.wait_for("message", check=check, timeout=TIMEOUT)
    except asyncio.TimeoutError:
        await ctx.send("Timeout. Exiting.")
        return
    aliased_by = [tag.strip() for tag in msg.content.lower().split(",")]
    for tag in aliased_by:
        await ctx.send(tag)
        # map each source tag onto the target
        aliases.update([(tag, target_tag)])
    async with aiofiles.open("./data/aliases.json", mode="w") as f:
        await f.write(json.dumps(aliases))
    await ctx.send("Added {} -> {} to aliases.".format(", ".join(aliased_by), target_tag))
async def data_generator(self, src, src_type):
    """Yield data chunks from *src*: a stream ('s') is passed through as-is,
    a file path ('f') is read in self.chunksize-sized binary pieces."""
    if src_type == 's':
        async for chunk in src:
            yield chunk
    elif src_type == 'f':
        async with aiofiles.open(src, "rb") as fh:
            piece = await fh.read(self.chunksize)
            while piece:
                yield piece
                piece = await fh.read(self.chunksize)
async def serve_file(path: str) -> web.Response:
    """Serve the file at *path* inline, guessing its MIME type from the name."""
    async with aiofiles.open(path, mode='rb') as f:
        content = await f.read()
    filename = os.path.basename(path)
    content_type, charset = mimetypes.guess_type(filename)
    return web.Response(
        body=content,
        content_type=content_type,
        charset=charset,
        headers={
            # interpolate the real basename; the computed `filename` was
            # otherwise unused in this header
            'Content-Disposition': f'inline; filename="{filename}"'
        },
    )
def sendpic(bot, event):
    """Upload ~/picture.png and post it to the conversation the event came from."""
    filename = "~/picture.png"
    # NOTE(review): the path is not expanduser()'d — confirm a literal "~"
    # directory is intended.
    reader = yield from aiofiles.open(filename, mode='rb')
    try:
        raw = yield from reader.read()
    finally:
        yield from reader.close()
    image_data = io.BytesIO(raw)
    image_id = yield from bot._client.upload_image(image_data, filename=filename)
    yield from bot.coro_send_message(event.conv.id_, None, image_id=image_id)
def readfiles(self, path):
    # Recursively read a directory tree into a dict: files map to their
    # base64-encoded contents, subdirectories map to nested dicts.
    op = {}
    for f in os.listdir(path):
        if os.path.isfile(os.path.join(path, f)):
            fl = yield from aiofiles.open(os.path.join(path, f), mode='rb')
            try:
                contents = yield from fl.read()
            finally:
                yield from fl.close()
            op[f] = base64.b64encode(contents).decode('utf-8')
        else:
            # recurse into subdirectory
            op[f] = yield from self.readfiles(os.path.join(path, f))
    return op
def startup(self):
    # Main receive loop: wait for 'pinor' messages, persist the attached
    # image and detection data, then rebroadcast the detections.
    while True:
        msg = yield from self.messagedispatcher.wait_for_message('direct', 'pinor')
        timestamp = str(time.time())
        timestr = time.strftime('%Y%m%d%H%M%S')
        location = self.telemetry.get_location()
        # Write image to file
        f = yield from aiofiles.open(self.image_folder + timestr + '.jpg', mode='wb')
        try:
            # msg.img carries the image as base64 text
            yield from f.write(base64.decodestring(msg.img.encode()))
        finally:
            yield from f.close()
        # Write image locations
        f = yield from aiofiles.open(self.location_file, mode='a')
        try:
            yield from f.write(timestr + '.jpg,' + str(location.latitude) + ',' + str(location.longitude) + ',' + str(location.altitude) + '\n')
        finally:
            yield from f.close()
        if msg.pinor:
            # Write co-ords to file
            f = yield from aiofiles.open(self.pinor_file, mode='a')
            try:
                for pinor in msg.pinor:
                    point = pinor.to_json()
                    yield from f.write(timestamp + ',' + str(point['lat']) + ',' + str(point['lon']) + ',' + str(point['alt']) + ',' + timestr + '.jpg' + ',' + str(location.distance_to(pinor)) + '\n')
            finally:
                yield from f.close()
        # forward the detections to the mesh
        yield from self.communicator.send_message(PinorMesh(self.uuid, self.uuid, msg.pinor))
async def blacklist(self, message, *args, **kwargs):
    '''
    Blacklists the user by adding their 'uid' to the currently
    maintained list of blacklisted users and updates the file.
    '''
    blacklisted_users = kwargs['blacklisted_users']
    # keep only uids that are not already blacklisted
    new_uids = [uid for uid in message.split(' ') if uid not in blacklisted_users]
    blacklisted_users.extend(new_uids)
    async with aiofiles.open(self.blacklist_file, mode='a') as f:
        await f.writelines([uid + '\n' for uid in new_uids])
async def pre_refresh_callback(self, authorizer):
    """Load the refresh token from the file."""
    if authorizer.refresh_token is not None:
        return
    async with aiofiles.open(self._filename) as fp:
        contents = await fp.read()
    authorizer.refresh_token = contents.strip()
async def download_page(self, filepath, content):
    """Write *content* to *filepath* (binary or text mode by type) and bump the counter."""
    mode = 'wb' if isinstance(content, bytes) else 'w'
    async with aiofiles.open(filepath, mode=mode) as fw:
        # the explicit `await fw.close()` was removed: the async context
        # manager already closes the file on exit
        await fw.write(content)
    self.download += 1
async def pegar_links():
    """Read links.txt and return one stripped link per line."""
    async with aiofiles.open('links.txt') as arquivo:
        return [linha.strip() async for linha in arquivo]
async def from_filename(cls, filename):
    """Build an instance from a YAML API-spec file."""
    async with aiofiles.open(filename) as f:
        raw = await f.read()
    return cls(api_spec=yaml.safe_load(raw))
async def _get_team_config(self, force_update=False):
    """Return the cached team config, reloading from TEAM_CONFIG_YML when forced or unset."""
    if self._team_config is None or force_update:
        async with aiofiles.open(TEAM_CONFIG_YML) as f:
            raw = await f.read()
        self._team_config = yaml.load(raw, Loader=yaml.FullLoader)
    return self._team_config
async def carbon_(message: Message):
    """Render text/code as a carbon.now.sh-style image: via the
    @CarbonNowShBot conversation when no local Chrome binary is configured,
    otherwise by driving headless Chrome against the carbon URL."""
    if Config.GOOGLE_CHROME_BIN is None:
        # Bot path: forward the text to CarbonNowShBot and relay its image.
        replied = message.reply_to_message
        if replied:
            text = replied.text
        else:
            text = message.text
        if not text:
            await message.err("need input text!")
            return
        await message.edit("`Creating a Carbon...`")
        async with userge.conversation("CarbonNowShBot", timeout=30) as conv:
            try:
                await conv.send_message(text)
            except YouBlockedUser:
                await message.edit('first **unblock** @CarbonNowShBot')
                return
            response = await conv.get_response(mark_read=True)
            # wait for the message carrying the inline keyboard
            while not response.reply_markup:
                response = await conv.get_response(mark_read=True)
            # pick a random theme/colour button
            await response.click(x=random.randint(0, 2), y=random.randint(0, 8))
            response = await conv.get_response(mark_read=True)
            while not response.media:
                response = await conv.get_response(mark_read=True)
            caption = "\n".join(response.caption.split("\n")[0:2])
            file_id = response.document.file_id
            await asyncio.gather(
                message.delete(),
                userge.send_document(chat_id=message.chat.id,
                                     document=file_id,
                                     caption='`' + caption + '`',
                                     reply_to_message_id=replied.message_id if replied else None))
    else:
        # Selenium path: build the carbon URL and export a PNG via Chrome.
        input_str = message.filtered_input_str
        replied = message.reply_to_message
        theme = 'seti'
        lang = 'auto'
        # background colour from -r/-g/-b/-a flags, random where unset
        red = message.flags.get('r', random.randint(0, 255))
        green = message.flags.get('g', random.randint(0, 255))
        blue = message.flags.get('b', random.randint(0, 255))
        alpha = message.flags.get('a', random.randint(0, 100))
        bg_ = f"rgba({red}, {green}, {blue}, {alpha})"
        if replied and (replied.text or (replied.document
                                         and 'text' in replied.document.mime_type)):
            # code comes from the replied-to message (text or text document)
            message_id = replied.message_id
            if replied.document:
                await message.edit("`Downloading File...`")
                path_ = await message.client.download_media(
                    replied, file_name=Config.DOWN_PATH)
                async with aiofiles.open(path_) as file_:
                    code = await file_.read()
                os.remove(path_)
            else:
                code = replied.text
            if input_str:
                # optional "theme | lang" override when replying
                if '|' in input_str:
                    args = input_str.split('|')
                    if len(args) == 2:
                        theme = args[0].strip()
                        lang = args[1].strip()
                else:
                    theme = input_str
        elif input_str:
            # "theme | lang | code", "theme | code", or just code
            message_id = message.message_id
            if '|' in input_str:
                args = input_str.split('|')
                if len(args) == 3:
                    theme = args[0].strip()
                    lang = args[1].strip()
                    code = args[2].strip()
                elif len(args) == 2:
                    theme = args[0].strip()
                    code = args[1].strip()
            else:
                code = input_str
        else:
            await message.err("need input text!")
            return
        await message.edit("`Creating a Carbon...`")
        code = quote_plus(code)
        await message.edit("`Processing... 20%`")
        carbon_path = os.path.join(Config.DOWN_PATH, "carbon.png")
        if os.path.isfile(carbon_path):
            os.remove(carbon_path)
        url = CARBON.format(theme=theme, lang=lang, code=code, bg=bg_)
        if len(url) > 2590:
            await message.err("input too large!")
            return
        chrome_options = webdriver.ChromeOptions()
        chrome_options.binary_location = Config.GOOGLE_CHROME_BIN
        chrome_options.add_argument("--headless")
        chrome_options.add_argument("--window-size=1920x1080")
        chrome_options.add_argument("--disable-dev-shm-usage")
        chrome_options.add_argument("--no-sandbox")
        chrome_options.add_argument("--disable-gpu")
        prefs = {'download.default_directory': Config.DOWN_PATH}
        chrome_options.add_experimental_option('prefs', prefs)
        driver = webdriver.Chrome(chrome_options=chrome_options)
        driver.get(url)
        await message.edit("`Processing... 40%`")
        # allow Chrome downloads via the DevTools protocol
        driver.command_executor._commands["send_command"] = (  # pylint: disable=protected-access
            "POST", '/session/$sessionId/chromium/send_command')
        params = {
            'cmd': 'Page.setDownloadBehavior',
            'params': {
                'behavior': 'allow',
                'downloadPath': Config.DOWN_PATH
            }
        }
        driver.execute("send_command", params)
        # driver.find_element_by_xpath("//button[contains(text(),'Export')]").click()
        driver.find_element_by_id("export-menu").click()
        await asyncio.sleep(1)
        await message.edit("`Processing... 60%`")
        driver.find_element_by_xpath("//button[contains(text(),'4x')]").click()
        await asyncio.sleep(1)
        driver.find_element_by_id("export-png").click()
        await message.edit("`Processing... 80%`")
        # poll until Chrome finishes the download
        while not os.path.isfile(carbon_path):
            await asyncio.sleep(0.5)
        await message.edit("`Processing... 100%`")
        await message.edit("`Uploading Carbon...`")
        await asyncio.gather(
            message.delete(),
            message.client.send_photo(chat_id=message.chat.id,
                                      photo=carbon_path,
                                      reply_to_message_id=message_id))
        os.remove(carbon_path)
        driver.quit()
async def readFile(self, filePath):
    """Read and return the entire UTF-8 text file at *filePath*."""
    async with aiofiles.open(filePath, "r", encoding="utf-8") as f:
        print("read file ", filePath)
        contents = await f.read()
    return contents
async def aload_datafile(count, fpath):
    """Return (count, first 10 bytes of the file at *fpath*)."""
    async with aiofiles.open(fpath, 'rb') as file_:
        head = (await file_.read())[0:10]
    return (count, head)
async def collect_case(suite, file_path):
    """Parse the file at *file_path* into a Case bound to that path."""
    from . import Case
    async with aiofiles.open(file_path, mode="r") as f:
        raw = await f.read()
    return Case.loads(raw, file_path=file_path)
async def write_playlists():
    """Serialize the global `playlists` to data/playlists.json."""
    serialized = json.dumps(playlists)
    async with aiofiles.open('data/playlists.json', mode='w+') as f:
        await f.write(serialized)
async def writeJson(p, info):
    """Dump *info* as JSON (via ujson) to path *p*; return SUCCESS."""
    payload = ujson.dumps(info)
    async with aiofiles.open(p, 'w', encoding='utf-8') as f:
        await f.write(payload)
    return SUCCESS
async def post_refresh_callback(self, authorizer):
    """Update the saved copy of the refresh token."""
    token = authorizer.refresh_token
    async with aiofiles.open(self._filename, "w") as fp:
        await fp.write(token)
async def get_checksum(data=b'', path=None, chunk_size=32768):
    # Feed the file at *path* through BLAKE2b in chunk_size'd pieces.
    # NOTE(review): as visible here the digest is never returned and the
    # `data` argument is unused — confirm whether a trailing
    # `return h.hexdigest()` / data-only branch was lost from this view.
    if path:
        async with aiofiles.open(path, 'rb') as f:
            h = hashlib.blake2b()
            while chunk := await f.read(chunk_size):
                h.update(chunk)
async def upload_google_photos(message: Message):
    """Upload a local/remote/replied-to media file to Google Photos via the
    resumable upload protocol, then post the resulting product URL."""
    creds = await check_creds(message)
    if not creds:
        await message.edit_text("😏 <code>gpsetup</code> first 😡😒😒", parse_mode="html")
        return
    # resolve the source: URL download, local path, or replied-to media
    path_ = ""
    if message.input_str:
        if re.search(r"(?:https?|ftp)://[^|\s]+\.[^|\s]+", message.input_str):
            path_, _ = await url_download(message, message.input_str)
        elif os.path.exists(message.input_str):
            path_ = message.input_str
    elif message.reply_to_message and message.reply_to_message.media:
        path_, _ = await tg_download(message, message.reply_to_message)
    if not path_:
        await message.err("what should i upload ?")
        return
    await message.edit("`proccesing ...`")
    service = build("photoslibrary", "v1", http=creds.authorize(Http()))
    file_name, mime_type, file_size = file_ops(path_)
    await message.edit_text("file downloaded, gathering upload informations ")
    async with aiohttp.ClientSession() as session:
        headers = {
            "Content-Length": "0",
            "X-Goog-Upload-Command": "start",
            "X-Goog-Upload-Content-Type": mime_type,
            "X-Goog-Upload-File-Name": file_name,
            "X-Goog-Upload-Protocol": "resumable",
            "X-Goog-Upload-Raw-Size": str(file_size),
            "Authorization": "Bearer " + creds.access_token,
        }
        # Step 1: Initiating an upload session
        step_one_response = await session.post(f"{PHOTOS_BASE_URI}/v1/uploads", headers=headers)
        if step_one_response.status != 200:
            await message.edit_text((await step_one_response.text()))
            return
        step_one_resp_headers = step_one_response.headers
        # Step 2: Saving the session URL
        real_upload_url = step_one_resp_headers.get("X-Goog-Upload-URL")
        upload_granularity = int(
            step_one_resp_headers.get("X-Goog-Upload-Chunk-Granularity"))
        # full-granularity chunk count; the remainder is sent with "finalize"
        # https://t.me/c/1279877202/74
        number_of_req_s = int(file_size / upload_granularity)
        loop = asyncio.get_event_loop()
        async with aiofiles.open(path_, mode="rb") as f_d:
            for i in range(number_of_req_s):
                current_chunk = await f_d.read(upload_granularity)
                offset = i * upload_granularity
                part_size = len(current_chunk)
                headers = {
                    "Content-Length": str(part_size),
                    "X-Goog-Upload-Command": "upload",
                    "X-Goog-Upload-Offset": str(offset),
                    "Authorization": "Bearer " + creds.access_token,
                }
                response = await session.post(real_upload_url, headers=headers,
                                              data=current_chunk)
                # progress reporting is fire-and-forget
                loop.create_task(
                    progress(offset + part_size, file_size, message, "uploading(gphoto)🧐?"))
            # final (possibly short) chunk, sent with the "finalize" command
            current_chunk = await f_d.read(upload_granularity)
            headers = {
                "Content-Length": str(len(current_chunk)),
                "X-Goog-Upload-Command": "upload, finalize",
                "X-Goog-Upload-Offset": str(number_of_req_s * upload_granularity),
                "Authorization": "Bearer " + creds.access_token,
            }
            response = await session.post(real_upload_url, headers=headers,
                                          data=current_chunk)
        # the finalize response body is the upload token
        final_response_text = await response.text()
    await message.edit_text("uploaded to Google Photos, getting FILE URI 🤔🤔")
    response_create_album = (service.mediaItems().batchCreate(
        body={
            "newMediaItems": [{
                "description": "uploaded using @UniBorg v7",
                "simpleMediaItem": {
                    "fileName": file_name,
                    "uploadToken": final_response_text,
                },
            }]
        }).execute())
    try:
        photo_url = (response_create_album.get("newMediaItemResults")[0].get(
            "mediaItem").get("productUrl"))
        await message.edit_text(photo_url)
    except Exception as e:  # pylint: disable=broad-except
        await message.edit_text(str(e))
async def read_file_lines(path, mode: READ_MODES = 'rb'):
    """Return all lines of the file at *path* opened with *mode*."""
    async with aiofiles.open(path, mode) as f:
        lines = await f.readlines()
    return lines
async def store(self, add: Dict[str, Dict]):
    """Merge *add* into the in-memory state and persist it as indented JSON.

    Does nothing when no path is configured.
    """
    if self.path is None:
        return
    self.state = {**self.state, **add}
    async with aiofiles.open(self.path, mode='w') as f:
        await f.write(json.dumps(self.state, indent=self.__INDENT))
async def read_file_chunks(path, chunk_size=32768, mode: READ_MODES = 'rb'):
    """Asynchronously yield chunk_size'd pieces of the file at *path*."""
    async with aiofiles.open(path, mode) as f:
        chunk = await f.read(chunk_size)
        while chunk:
            yield chunk
            chunk = await f.read(chunk_size)
async def _crawl(self, day):
    """Fetch the story links for *day* and write them newline-separated to the output dir."""
    links = await self._get_story_links(day=day)
    destination = os.path.join(self._out_dir, day)
    async with aiofiles.open(destination, "w") as f:
        await f.write('\n'.join(links))
        await f.flush()
async def write_binary_buffer_to_file(filename: str, data):
    """Write the binary buffer *data* to *filename*, logging start and end."""
    # interpolate the target filename: the f-strings previously carried a
    # literal "(unknown)" placeholder and no placeholder expression
    print(f"start write file {filename}")
    async with aiofiles.open(filename, 'wb') as f:
        await f.write(data)
    print(f"end write file {filename}")
async def download_file(self, bucket, key, file_name):
    """Download s3://bucket/key into the local file *file_name*."""
    response = await self.s3.get_object(Bucket=bucket, Key=key)
    async with response['Body'] as stream, aiofiles.open(file_name, "wb") as f:
        contents = await stream.read()
        await f.write(contents)
async def append_to_file(arr, filename: str, separator='\n'):
    """Append each element of *arr* to *filename*, each followed by *separator*."""
    async with aiofiles.open(filename, "a") as f:
        for item in arr:
            await f.write(item + separator)
async def main_broadcast_handler(m, db):
    # Broadcast m.reply_to_message to every user in the DB, logging failures
    # to broadcast-logs.txt and reporting a summary when done.
    all_users = await db.get_all_users()
    broadcast_msg = m.reply_to_message
    # pick a short random id not already used by a running broadcast
    while True:
        broadcast_id = ''.join([random.choice(string.ascii_letters) for i in range(3)])
        if not broadcast_ids.get(broadcast_id):
            break
    out = await m.reply_text(
        text=f"**Broadcasting has Started !** You'll be notified with logs after finishing 😁!"
    )
    start_time = time.time()
    total_users = await db.total_users_count()
    done = 0
    failed = 0
    success = 0
    broadcast_ids[broadcast_id] = dict(
        total=total_users,
        current=done,
        failed=failed,
        success=success
    )
    async with aiofiles.open('broadcast-logs.txt', 'w') as broadcast_log_file:
        async for user in all_users:
            sts, msg = await send_msg(
                user_id=int(user['id']),
                message=broadcast_msg
            )
            if msg is not None:
                await broadcast_log_file.write(msg)
            if sts == 200:
                success += 1
            else:
                failed += 1
                if sts == 400:
                    # 400: user unreachable — drop them from the DB
                    await db.delete_user(user['id'])
            done += 1
            if broadcast_ids.get(broadcast_id) is None:
                # broadcast cancelled externally
                break
            else:
                broadcast_ids[broadcast_id].update(
                    dict(
                        current=done,
                        failed=failed,
                        success=success
                    )
                )
    if broadcast_ids.get(broadcast_id):
        broadcast_ids.pop(broadcast_id)
    completed_in = datetime.timedelta(seconds=int(time.time() - start_time))
    await asyncio.sleep(3)
    await out.delete()
    if failed == 0:
        await m.reply_text(
            text=f"Broadcasting Completed ✅! \n**Completed In:** `{completed_in}` \n\n**Total Users:** `{total_users}` \n**Total Done:** `{done}` \n**Total Success:** `{success}` \n**Total Failed:** `{failed}`",
            quote=True
        )
    else:
        # failures occurred: attach the log file to the summary
        await m.reply_document(
            document='broadcast-logs.txt',
            caption=f"Broadcasting Completed ✅! \n**Completed In:** `{completed_in}`\n\n**Total Users:** `{total_users}` \n**Total Done:** `{done}` \n**Total Success:** `{success}` \n**Total Failed:** `{failed}`",
            quote=True
        )
    os.remove('broadcast-logs.txt')
async def process_source(filename, session: ClientSession):
    """
    Process single source file

    Parameters
    ----------
    filename : str
        Path to source file
    session : ClientSession
        aiohttp ClientSession object

    Returns
    -------
    dict
        Check results keyed by field (filename, directory, name, type, id,
        license_url, privacy_policy_url, category, imagery).
        NOTE(review): an earlier version apparently returned three message
        lists — the docstring said so, but the code returns this dict.
    """
    result = {}
    path_split = filename.split(os.sep)
    sources_index = path_split.index("sources")
    result["filename"] = path_split[-1]
    result["directory"] = path_split[sources_index + 1:-1]
    async with aiofiles.open(filename, mode="r") as f:
        contents = await f.read()
        source = json.loads(contents)
    result["name"] = source["properties"]["name"]
    result["type"] = source["properties"]["type"]
    source_id = source["properties"]["id"]
    result["id"] = source_id
    # Check licence url
    if "license_url" not in source["properties"]:
        result["license_url"] = create_result(ResultStatus.ERROR, "No license_url set!")
    else:
        licence_url = source["properties"]["license_url"]
        licence_url_status = await test_url(licence_url, session)
        result["license_url"] = licence_url_status
    # Check privacy url
    if "privacy_policy_url" not in source["properties"]:
        result["privacy_policy_url"] = create_result(
            ResultStatus.ERROR, "No privacy_policy_url set!")
    else:
        privacy_policy_url = source["properties"]["privacy_policy_url"]
        privacy_policy_url_status = await test_url(privacy_policy_url, session)
        result["privacy_policy_url"] = privacy_policy_url_status
    # Check category
    if "category" not in source["properties"]:
        result["category"] = ""
    else:
        result["category"] = source["properties"]["category"]
    # Check imagery
    # Check imagery only for recent imagery
    if "end_date" in source["properties"]:
        age = datetime.date.today().year - int(
            source["properties"]["end_date"].split("-")[0])
        if age > 30:
            result["imagery"] = create_result(
                ResultStatus.WARNING,
                "Not checked due to age: {} years".format(age))
    if "imagery" not in result:
        if source_id in imagery_ignore:
            info_msgs = error_msgs = []
            warning_msgs = ["Ignored: {}".format(imagery_ignore[source_id])]
        elif "User-Agent" in source["properties"]["url"]:
            info_msgs = error_msgs = []
            warning_msgs = ["Not checked, URL includes User-Agent"]
        else:
            # dispatch to the checker matching the imagery type
            if source["properties"]["type"] == "tms":
                info_msgs, warning_msgs, error_msgs = await check_tms(
                    source, session)
            elif source["properties"]["type"] == "wms":
                info_msgs, warning_msgs, error_msgs = await check_wms(
                    source, session)
            elif source["properties"]["type"] == "wms_endpoint":
                info_msgs, warning_msgs, error_msgs = await check_wms_endpoint(
                    source, session)
            elif source["properties"]["type"] == "wmts":
                info_msgs, warning_msgs, error_msgs = await check_wmts(
                    source, session)
            else:
                info_msgs = error_msgs = []
                warning_msgs = [
                    "{} is currently not checked.".format(
                        source["properties"]["type"])
                ]
        # fold the three message lists into one labelled list
        messages = ["Error: {}".format(m) for m in error_msgs]
        messages += ["Warning: {}".format(m) for m in warning_msgs]
        messages += ["Info: {}".format(m) for m in info_msgs]
        if len(error_msgs) > 0:
            result["imagery"] = create_result(ResultStatus.ERROR,
                                              message=messages)
        elif len(error_msgs) == 0 and len(warning_msgs) == 0:
            result["imagery"] = create_result(ResultStatus.GOOD,
                                              message=messages)
        else:
            result["imagery"] = create_result(ResultStatus.WARNING,
                                              message=messages)
    # default any still-unchecked fields
    if "license_url" not in result:
        result["license_url"] = create_result(ResultStatus.WARNING,
                                              "Not checked")
    if "privacy_policy_url" not in result:
        result["privacy_policy_url"] = create_result(ResultStatus.WARNING,
                                                     "Not checked")
    if "imagery" not in result:
        result["imagery"] = create_result(ResultStatus.WARNING, "Not checked")
    return result
async def get_video():
    """Download video_url and append it chunk-by-chunk to video_filename."""
    async with session.get(video_url) as resp:
        # open the output once instead of re-opening (append mode) per chunk;
        # the written bytes are identical, without the per-chunk open/close cost
        async with aiofiles.open(video_filename, "ba") as f:
            async for data in resp.content.iter_any():
                await f.write(data)
async def to_file(url, data):
    """Persist *data* as JSON under data/, using the slash-flattened url as the file name."""
    slug = url[1:].replace('/', '-').strip()
    async with aiofiles.open('data/%s.json' % slug, mode='w') as f:
        await f.write(json.dumps(data, ensure_ascii=False))
async def save_model(fpath: str, data: str):
    """Write the serialized model *data* to *fpath*."""
    async with aiofiles.open(fpath, "w") as out_file:
        await out_file.write(data)
async def handle_index_page(request):
    """Serve index.html as the landing page."""
    async with aiofiles.open('index.html', mode='r') as index_file:
        contents = await index_file.read()
    return web.Response(text=contents, content_type='text/html')
async def get_script(self):
    """Return the contents of the script file at self.path/self.filename."""
    script_path = os.path.join(self.path, self.filename)
    async with aiofiles.open(script_path, 'r') as handle:
        contents = await handle.read()
    return contents
async def _fetch_from_cache(self) -> str:
    """Return the cached text, or '' when no cache file exists."""
    cache_filename: str = self._cache_filename()
    if not os.path.isfile(cache_filename):
        return ''  # no cache yet
    async with aiofiles.open(cache_filename, mode='r') as f:
        return await f.read()
async def _save_to_cache(self, content: str) -> None:
    """Overwrite the cache file with *content*."""
    target: str = self._cache_filename()
    async with aiofiles.open(target, mode='w') as f:
        await f.write(content)
async def unixstamp(request):
    """Write the current unix timestamp to a log file, read it back and serve it."""
    t = datetime.now().timestamp()
    log_path = '/var/www/test/data/%s.log' % t
    async with aiofiles.open(log_path, 'w') as f:
        await f.write(str(t))
    async with aiofiles.open(log_path, 'r') as f:
        text = 'timestamp: %s' % await f.read()
    return web.Response(body=text.encode('utf-8'), content_type='text/html')