async def download(self, loop: asyncio.BaseEventLoop = None, load=False):
    """Download the JSON feed asynchronously and return JSONFeed object.

    If the feed was downloaded before and its sha256 matches the remote
    metadata, the download is skipped (the file is optionally loaded instead).

    :param loop: event loop to use; defaults to ``asyncio.get_event_loop()``
    :param load: if True, also parse the downloaded JSON into ``self._data``
    :return: self, to allow call chaining
    :raises IOError: if the feed URL does not answer with HTTP 200
    """
    # Refresh remote metadata first so the sha256 comparison below is current.
    await self._metadata.fetch(loop)
    self._metadata.parse()

    if self._is_downloaded:
        # Local copy exists: skip the download when it already matches upstream.
        data_sha256 = utils.compute_sha256(self.path)
        if data_sha256 == self._metadata.sha256:
            _LOGGER.info(f"Feed `{self._name}` is already up to date.")
            if load:
                await self.load(loop)
            return self

    await self._metadata.save()

    loop = loop or asyncio.get_event_loop()

    status = "Updating" if self._is_downloaded else "Downloading"
    _LOGGER.info(f"{status} feed `{self._name}` url `{self._data_url}`...")

    data: bytes
    timeout = aiohttp.ClientTimeout(total=config.FEED_DOWNLOAD_TIMEOUT)
    async with aiohttp.ClientSession(loop=loop, timeout=timeout) as session:
        async with session.get(self._data_url) as response:
            if response.status != 200:
                raise IOError(f'Unable to download {self._name} feed.')
            data = await response.read()

    # The feed is served gzip-compressed; decompress in one shot instead of
    # round-tripping through a BytesIO + GzipFile (which was never closed).
    json_stream = gzip.decompress(data)

    _LOGGER.info(f"Writing feed `{self._name}`...")
    async with aiofiles.open(self._data_path, 'wb', loop=loop) as f:
        # Exclusive non-blocking lock guards against concurrent writers.
        # NOTE(review): flock receives the aiofiles wrapper, not a raw fd —
        # confirm the installed aiofiles version exposes a usable fileno().
        fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
        await f.write(json_stream)
        await f.flush()
        fcntl.flock(f, fcntl.LOCK_UN)

    if load:
        self._data = ujson.loads(json_stream)
        self._is_loaded = True

    self._is_downloaded = True
    _LOGGER.info(f"Finished downloading feed `{self._name}`")
    return self
def sha256(self):
    """Return the sha256 hash of the on-disk feed data file."""
    digest = utils.compute_sha256(self._data_path)
    return digest
def sha256(self):
    """Return the sha256 hash of ``self._data_path`` (the feed data file)."""
    return utils.compute_sha256(self._data_path)