Example No. 1
    async def download_curl_version(self, session, version):
        """Download the curl vulnerability table for `version` and cache it as JSON."""
        async with session.get(
            f"https://curl.haxx.se/docs/vuln-{version}.html"
        ) as response:
            html = await response.text()
        soup = BeautifulSoup(html, "html.parser")
        table = soup.find("table")
        if not table:
            return
        # Use the lower-cased table headers as the keys for each row
        headers = [th.text.strip().lower() for th in table.find_all("th")]
        self.LOGGER.debug(headers)
        rows = table.find_all("tr")
        json_data = []
        for row in rows:
            cols = row.find_all("td")
            values = (ele.text.strip() for ele in cols)
            data = dict(zip(headers, values))
            if data:
                json_data.append(data)
        filepath = os.path.abspath(
            os.path.join(self.cachedir, f"curlcve-{version}.json")
        )
        async with FileIO(filepath, "w") as f:
            await f.write(json.dumps(json_data, indent=4))
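The scraping step above boils down to: take the <th> texts as keys and zip each row's <td> texts against them. Below is a minimal, standalone sketch of that pattern, assuming BeautifulSoup is installed; the HTML fragment and CVE values are invented for illustration and are not taken from the real curl vulnerability pages.

import json

from bs4 import BeautifulSoup

html = """
<table>
  <tr><th>CVE</th><th>From</th><th>To</th></tr>
  <tr><td>CVE-2021-0000</td><td>7.73.0</td><td>7.78.0</td></tr>
</table>
"""

soup = BeautifulSoup(html, "html.parser")
table = soup.find("table")
headers = [th.text.strip().lower() for th in table.find_all("th")]
json_data = []
for row in table.find_all("tr"):
    values = (td.text.strip() for td in row.find_all("td"))
    data = dict(zip(headers, values))
    if data:  # the header row has no <td> cells, so it yields an empty dict
        json_data.append(data)
print(json.dumps(json_data, indent=4))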
Example No. 2
    async def extract_file_rpm(self, filename, extraction_path):
        """Extract rpm packages."""
        if sys.platform.startswith("linux"):
            if not await aio_inpath("rpm2cpio") or not await aio_inpath("cpio"):
                await rpmextract("-xC", extraction_path, filename)
            else:
                # Convert the rpm into a cpio archive, then unpack it
                stdout, stderr = await aio_run_command(["rpm2cpio", filename])
                if stderr or not stdout:
                    return 1
                cpio_path = os.path.join(extraction_path, "data.cpio")
                async with FileIO(cpio_path, "wb") as f:
                    await f.write(stdout)
                stdout, stderr = await aio_run_command(
                    ["cpio", "-idm", "--file", cpio_path]
                )
                # cpio reports its block count on stderr, so an empty stderr
                # (or anything on stdout) signals a failed extraction
                if stdout or not stderr:
                    return 1
        else:
            if not await aio_inpath("7z"):
                with ErrorHandler(mode=self.error_mode, logger=self.logger):
                    raise Exception("7z is required to extract rpm files")
            else:
                # The first 7z pass unpacks the rpm, leaving a cpio archive behind
                stdout, stderr = await aio_run_command(["7z", "x", filename])
                if stderr or not stdout:
                    return 1
                filenames = await aio_glob(
                    os.path.join(extraction_path, "*.cpio")
                )
                filename = filenames[0]

                # The second 7z pass unpacks that cpio archive
                stdout, stderr = await aio_run_command(["7z", "x", filename])
                if stderr or not stdout:
                    return 1
        return 0
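On Linux this is essentially the classic rpm2cpio package.rpm | cpio -idm pipeline, split into two commands so the intermediate archive can be written out as data.cpio first. A rough synchronous sketch of the same idea, assuming rpm2cpio and cpio are on PATH; extract_rpm_sync is a hypothetical helper that pipes through stdin instead of a temporary file.

import subprocess

def extract_rpm_sync(filename, extraction_path):
    # rpm2cpio writes the embedded cpio archive to stdout
    cpio_data = subprocess.run(
        ["rpm2cpio", filename], capture_output=True, check=True
    ).stdout
    # cpio -idm unpacks the archive into the working directory,
    # creating directories and preserving modification times
    subprocess.run(
        ["cpio", "-idm"], input=cpio_data, cwd=extraction_path, check=True
    )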
Example No. 3
    async def cache_update(self, session, url, sha, chunk_size=16 * 1024):
        """
        Update the cache for a single year of NVD data.
        """
        filename = url.split("/")[-1]
        # Ensure we only write to files within the cachedir
        filepath = os.path.abspath(os.path.join(self.cachedir, filename))
        if not filepath.startswith(os.path.abspath(self.cachedir)):
            with ErrorHandler(mode=self.error_mode, logger=self.LOGGER):
                raise AttemptedToWriteOutsideCachedir(filepath)
        # Validate the contents of the cached file
        if os.path.isfile(filepath):
            # Compute the SHA-256 of the existing cached file
            sha = sha.upper()
            calculate = hashlib.sha256()
            async with GzipFile(filepath, "rb") as f:
                chunk = await f.read(chunk_size)
                while chunk:
                    calculate.update(chunk)
                    chunk = await f.read(chunk_size)
            # Validate the sha and exit if it is correct, otherwise update
            gotsha = calculate.hexdigest().upper()
            if gotsha != sha:
                os.unlink(filepath)
                self.LOGGER.warning(
                    f"SHA mismatch for {filename} (have: {gotsha}, want: {sha})"
                )
            else:
                self.LOGGER.debug(f"Correct SHA for {filename}")
                return
        self.LOGGER.debug(f"Updating CVE cache for {filename}")

        async with session.get(url) as response:
            # Raise better error message on ratelimit by NVD
            if response.status == 403:
                with ErrorHandler(mode=self.error_mode, logger=self.LOGGER):
                    raise NVDRateLimit(
                        f"{url} : download failed, you may have been rate limited."
                    )
            # Raise for any other HTTP error status
            response.raise_for_status()
            gzip_data = await response.read()
        json_data = gzip.decompress(gzip_data)
        gotsha = hashlib.sha256(json_data).hexdigest().upper()
        async with FileIO(filepath, "wb") as filepath_handle:
            await filepath_handle.write(gzip_data)
        # Raise an error if the downloaded data does not match the expected sha
        if gotsha != sha:
            # Remove the corrupt file before reporting the mismatch
            os.unlink(filepath)
            with ErrorHandler(mode=self.error_mode, logger=self.LOGGER):
                raise SHAMismatch(f"{url} (have: {gotsha}, want: {sha})")
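The validation at the top hashes the decompressed contents of the cached feed file in 16 KiB chunks and compares the digest against the SHA-256 that NVD publishes for that feed. A standalone, synchronous sketch of the same check using only the standard library; verify_sha256 is a hypothetical name, and expected_sha is assumed to be a hex digest.

import gzip
import hashlib

def verify_sha256(filepath, expected_sha, chunk_size=16 * 1024):
    calculate = hashlib.sha256()
    # Hash the decompressed JSON, matching how gotsha is computed for downloads
    with gzip.open(filepath, "rb") as f:
        chunk = f.read(chunk_size)
        while chunk:
            calculate.update(chunk)
            chunk = f.read(chunk_size)
    return calculate.hexdigest().upper() == expected_sha.upper()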
Example No. 4
    async def aio_parse(self):
        """Collect printable character runs from self.filename, like the strings tool."""
        async with FileIO(self.filename, "rb") as f:
            tmp = []
            async for line in f:
                for char in line:
                    # remove all unprintable characters
                    if char in Strings.PRINTABLE:
                        tmp.append(chr(char))
                    elif tmp:
                        self.output += "".join(tmp) + "\n"
                        tmp = []
        return self.output
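This mirrors what the Unix strings tool does: bytes are collected while they are printable, and each completed run is flushed as one line of output. A synchronous sketch of the same idea, where PRINTABLE stands in for Strings.PRINTABLE (assumed here to be the printable ASCII byte values) and extract_strings is a hypothetical helper.

import string

PRINTABLE = set(string.printable.encode("ascii"))

def extract_strings(filename):
    output = ""
    tmp = []
    with open(filename, "rb") as f:
        for line in f:
            for char in line:  # iterating over bytes yields integers
                if char in PRINTABLE:
                    tmp.append(chr(char))
                elif tmp:
                    output += "".join(tmp) + "\n"
                    tmp = []
    return output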
Example No. 5
async def read_signature(filename, length=4):
    """ Read the signature, first length bytes, from filename."""
    async with FileIO(filename, "rb") as file_handle:
        return await file_handle.read(length)
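A small usage sketch: read the first bytes of a file and compare them with a known magic number. GZIP_MAGIC, is_gzip, and the example path are illustrative names, not part of the code above.

import asyncio

GZIP_MAGIC = b"\x1f\x8b"  # first two bytes of every gzip stream

async def is_gzip(path):
    signature = await read_signature(path, length=2)
    return signature.startswith(GZIP_MAGIC)

# asyncio.run(is_gzip("example.tar.gz"))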