Example #1
import datetime
import logging
import os
from typing import Iterable

# Cache, ConfluenceRepository, ContentCrawlHistory, SecretFinder,
# TextExtractor, and VersionSecrets come from this project's own
# modules; their imports were omitted in the original listing.


class App:
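    """Crawl a Confluence domain for secrets appearing in content
    versions, caching crawl progress between runs in a SQLite file."""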
    def __init__(self, domain, api_user, api_token, blacklist_file,
                 max_attachment_size, cache_location,
                 start_date: datetime.date):
        self._cache_location = cache_location
        self._start_date = start_date
        self._domain = domain
        self._text_extractor = TextExtractor()
        self._repository = ConfluenceRepository(
            domain, api_user, api_token, max_attachment_size,
            self._text_extractor.supported_mime_types)
        self._secret_finder = SecretFinder(blacklist_file)

    def __enter__(self):
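        # Default to a cache.sqlite file next to this module when no
        # explicit cache location was configured.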
        if self._cache_location:
            cache_path = self._cache_location
        else:
            current_folder = os.path.dirname(os.path.realpath(__file__))
            cache_path = os.path.join(current_folder, "cache.sqlite")
        self._cache = Cache(cache_path, self._domain)
        return self

    def __exit__(self, *args):
        self._cache.close()

    def get_secrets_from_versions(self, content,
                                  start_version) -> Iterable[VersionSecrets]:
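        # Walk every stored version of the content, skipping versions
        # at or below start_version (those were already crawled).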
        for version in self._repository.get_versions(content):
            if version.id <= start_version:
                continue

            version_content = self._text_extractor.extract_text_from_version(
                content, version)
            secrets = set(
                self._secret_finder.find_secrets(version_content))
            if secrets:
                yield VersionSecrets(content, version, secrets)

    def find_secrets_from_date(
            self, date: datetime.date) -> Iterable[VersionSecrets]:
        today = datetime.date.today()
        while date <= today:
            logging.info(f"Fetching changes for {date}...")
            for content in self._repository.get_content_for_date(date):
                crawl_history = self._cache.get_crawl_history(content.id)
                if crawl_history:
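                    # Seen before: fetch only the versions added since
                    # the previous crawl (nothing to do when the latest
                    # version is unchanged).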
                    new_version_secrets = []
                    if crawl_history.latest_version != content.latest_version:
                        logging.info(
                            f"Fetching versions {crawl_history.latest_version}-{content.latest_version} from {content}..."
                        )
                        new_version_secrets = list(
                            self.get_secrets_from_versions(
                                content, crawl_history.latest_version))
                else:
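                    # Never crawled: fetch every version from the start.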
                    logging.info(
                        f"Fetching {content.latest_version} versions from {content}..."
                    )
                    new_version_secrets = list(
                        self.get_secrets_from_versions(content, 0))
                    crawl_history = ContentCrawlHistory()

                # Drop secrets already reported by an earlier crawl and
                # remember the new ones so each is reported only once.
                for version_secrets in new_version_secrets:
                    version_secrets.secrets = [
                        s for s in version_secrets.secrets
                        if s not in crawl_history.secrets
                    ]
                    crawl_history.secrets.extend(version_secrets.secrets)

                crawl_history.latest_version = content.latest_version
                self._cache.set_crawl_history(content.id, crawl_history)
                # Yield only versions that still contain new secrets.
                for s in new_version_secrets:
                    if s.secrets:
                        yield s

            # Record progress so a later run can resume from this date.
            self._cache.set_last_crawl_date(date)
            date += datetime.timedelta(days=1)

    def find_secrets(self) -> Iterable[VersionSecrets]:
        yield from self.find_secrets_from_date(self._get_start_date())

    def _get_start_date(self) -> datetime.date:
        # Precedence: explicit start date, then the last crawl date
        # from the cache, then the oldest content creation date.
        if self._start_date:
            return self._start_date
        cached_date = self._cache.get_last_crawl_date()
        if cached_date:
            return cached_date
        return self._repository.get_oldest_content_creation_date()
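
A minimal usage sketch follows. It is a hedged illustration only: the __main__ wiring and every argument value below are placeholders, not part of the original example.

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    # All values here are hypothetical; real ones would come from
    # configuration or the command line.
    with App(domain="example.atlassian.net",
             api_user="bot@example.com",
             api_token="<api-token>",
             blacklist_file="blacklist.txt",
             max_attachment_size=10 * 1024 * 1024,
             cache_location=None,  # defaults to cache.sqlite next to the module
             start_date=None) as app:  # falls back to cache, then oldest content
        for version_secrets in app.find_secrets():
            print(version_secrets)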