Exemplo n.º 1
0
def setup_tls_certs() -> None:
    """Set up full TLS chain for Compass.

    Compass currently (as of 14/11/20) doesn't pass the Intermediate certificate it uses.
    This is at time of writing the 'Thawte RSA CA 2018', which is in turn signed by DigiCert Global Root CA.

    This function includes the Thawte CA cert in the Certifi chain to allow certificate verification to pass.

    Yes, it's horrid. TSA plz fix.

    Raises:
        requests.HTTPError: if the CA certificate download returns a non-2xx status.

    """
    thawte_ca_cert_url = "https://thawte.tbs-certificats.com/Thawte_RSA_CA_2018.crt"

    certifi_path = Path(certifi.where())
    certifi_contents = certifi_path.read_text("UTF-8")

    # Check for contents of Thawte CA, if not add
    if "Thawte RSA CA 2018" not in certifi_contents:

        logger.info(
            "Intermediate Certificate for Compass not found - Installing")

        # Fetch Thawte CA from known URL, rather than including PEM.
        # A timeout bounds the request so a hung server cannot block start-up
        # forever, and raise_for_status() ensures an HTTP error page is never
        # silently appended to the CA bundle as if it were a certificate.
        ca_request = requests.get(thawte_ca_cert_url, allow_redirects=False,
                                  timeout=30)
        ca_request.raise_for_status()

        # Write to certifi PEM
        try:
            with certifi_path.open("a", encoding="utf-8") as f:
                f.write('\n# Label: "Thawte RSA CA 2018"\n')
                f.write(ca_request.text)
        except IOError as e:
            logger.error(
                f"Unable to write to certifi PEM: {e.errno} - {e.strerror}")
Exemplo n.º 2
0
    def get_members_in_units(
            self, parent_id: int, compass_ids: Iterable
    ) -> list[Union[gamih_pydantic, gamih_native]]:
        """Get members (with roles) for each unit in ``compass_ids``.

        Results are cached on disk in ``all-members-{parent_id}.json``; a
        non-empty cache file short-circuits the (slow) scrape entirely.

        Args:
            parent_id: id of the parent unit, used only to key the cache file.
            compass_ids: unit ids to fetch members for (de-duplicated).

        Returns:
            A list of per-unit member records; validated pydantic models when
            ``self.validate`` is set, raw dicts otherwise.

        """
        cache_file = f"all-members-{parent_id}.json"

        # Attempt to see if the members dict has been fetched already and is on the local system
        with contextlib.suppress(FileNotFoundError):
            with open(cache_file, "r", encoding="utf-8") as f:
                all_members = json.load(f)
            if all_members:
                # Fix: the cached data previously bypassed validation even when
                # self.validate was set, unlike the freshly-fetched path below
                # (and unlike get_hierarchy's cache path).
                if self.validate:
                    return schema.HierarchyUnitMembersList.parse_obj(
                        all_members).__root__
                return all_members

        # Fetch all members (set() de-duplicates repeated unit ids)
        all_members = []
        for compass_id in set(compass_ids):
            logger.debug(f"Getting members for {compass_id}")
            all_members.append(
                dict(compass_id=compass_id,
                     member=self._scraper.get_members_with_roles_in_unit(
                         compass_id)))

        # Try and write to a file for caching; a failed write is logged but
        # never fatal — the fetched data is still returned.
        try:
            with open(cache_file, "w", encoding="utf-8") as f:
                json.dump(all_members, f, ensure_ascii=False, indent=4)
        except IOError as e:
            logger.error(
                f"Unable to write cache file: {e.errno} - {e.strerror}")

        if self.validate:
            return schema.HierarchyUnitMembersList.parse_obj(
                all_members).__root__
        return all_members
Exemplo n.º 3
0
    def get_hierarchy(
        self,
        unit_level: Optional[schema.HierarchyLevel] = None,
        unit_id: Optional[int] = None,
        level: Optional[str] = None,
        use_default: bool = False,
    ) -> Union[dict, schema.UnitData]:
        """Gets all units at given level and below, including sections.

        Unit data can be specified as a pre-constructed model, by passing literals, or
        by signalling to use the data from the user's current role. If all three
        options are unset an exception is raised.

        There is a strict priority ordering as follows:
            1. pre-constructed pydantic model
            2. literals
            3. default data

        Raises:
            ValueError:
                When no unit data information has been provided

        """
        unit_level = self.get_unit_data(unit_level, unit_id, level,
                                        use_default)

        filename = Path(f"hierarchy-{unit_level.id}.json")
        # Attempt to see if the hierarchy has been fetched already and is on the local system
        with contextlib.suppress(FileNotFoundError):
            out = json.loads(filename.read_text(encoding="utf-8"))
            if out:
                return schema.UnitData.parse_obj(out) if self.validate else out

        # Fetch the hierarchy
        out = self._get_descendants_recursive(unit_level.id,
                                              hier_level=unit_level.level)

        # Parse once and reuse for both the cache write and the return value
        # (previously the pydantic model was constructed twice when validating).
        unit_data = schema.UnitData.parse_obj(out) if self.validate else None

        # Try and write to a file for caching; a failed write is logged but
        # never fatal — the fetched data is still returned.
        try:
            if unit_data is not None:
                filename.write_text(unit_data.json(ensure_ascii=False),
                                    encoding="utf-8")
            else:
                filename.write_text(json.dumps(out, ensure_ascii=False),
                                    encoding="utf-8")
        except IOError as e:
            logger.error(
                f"Unable to write cache file: {e.errno} - {e.strerror}")

        return unit_data if unit_data is not None else out
Exemplo n.º 4
0
 def download_report_streaming(self, url: str, params: dict, filename: str):
     """Stream a report export from `url` directly to `filename` on disk.

     Downloads in 1 MiB chunks so the full report is never held in memory.
     Write failures are logged rather than raised.
     NOTE(review): requests' exceptions subclass IOError, so a failed HTTP
     status here is also swallowed into this log message — confirm intended.
     """
     one_mebibyte = 1024 * 1024
     try:
         with self._get(url, params=params, stream=True) as response:
             response.raise_for_status()
             with open(filename, "wb") as out_file:
                 for block in response.iter_content(chunk_size=one_mebibyte):
                     out_file.write(block)
     except IOError as e:
         logger.error(
             f"Unable to write report export: {e.errno} - {e.strerror}")
Exemplo n.º 5
0
    def download_report_normal(self, url: str, params: dict,
                               filename: str) -> bytes:
        """Fetch a report export in a single (non-streaming) request.

        The whole response body is held in memory, saved to `filename`, and
        returned so the caller can process it further. A failed disk write is
        logged but the downloaded bytes are still returned.
        """
        started = time.time()
        response = self._get(url, params=params)
        logger.debug(f"Exporting took {time.time() - started}s")
        logger.info("Saving report")
        try:
            # Best-effort save: logged on failure, never fatal.
            Path(filename).write_bytes(response.content)  # TODO Debug check
        except IOError as e:
            logger.error(
                f"Unable to write report export: {e.errno} - {e.strerror}")
        logger.info("Report Saved")

        logger.debug(len(response.content))

        return response.content