def start(self) -> "PyFuncebleConfigUpdater":
    """
    Starts the (re)generation of the local PyFunceble configuration file.

    Layering order (later wins): upstream production config
    < launcher defaults < per-repository custom config
    < launcher persistent overrides.
    """
    # Start from PyFunceble's own production configuration file and work
    # on its flattened (dotted-keys) representation.
    with importlib.resources.path(
            "PyFunceble.data.infrastructure",
            ".PyFunceble_production.yaml") as file_path:
        local_version = DictHelper(DictHelper().from_yaml_file(
            str(file_path))).flatten()

    # Overlay the launcher's default configuration.
    local_version = Merge(
        dead_hosts.launcher.defaults.pyfunceble.CONFIGURATION).into(
            local_version, strict=True)

    if self.info_manager.custom_pyfunceble_config and isinstance(
            self.info_manager.custom_pyfunceble_config, dict):
        logging.info("Custom PyFunceble configuration given, "
                     "appending them to the local configuration file.")

        local_version = Merge(
            self.info_manager.custom_pyfunceble_config).into(local_version,
                                                             strict=True)

    if self.info_manager.ping:
        logging.info(
            "Ping names given, appending them to the commit message.")

        local_version[
            "cli_testing.ci.end_commit_message"] = self.get_commit_message(
                local_version["cli_testing.ci.end_commit_message"],
                ping=self.info_manager.get_ping_for_commit(),
            )

    # Persistent overrides are merged last so that they always win,
    # even over the per-repository custom configuration.
    local_version = Merge(
        dead_hosts.launcher.defaults.pyfunceble.PERSISTENT_CONFIG).into(
            local_version, strict=True)

    # Switch PyFunceble's CI mode off when the example info file is
    # present — presumably this marks an example/test repository
    # (TODO confirm against the launcher's docs).
    if FileHelper(
            os.path.join(
                self.info_manager.WORKSPACE_DIR,
                dead_hosts.launcher.defaults.paths.EXAMPLE_INFO_FILENAME,
            )).exists():
        local_version["cli_testing.ci.active"] = False

    # Default behavior of PyFunceble since 4.0.0b12.
    local_version["cli_testing.autocontinue"] = False

    # Write the (unflattened) result where PyFunceble expects it.
    local_version = DictHelper(local_version).unflatten()

    DictHelper(local_version).to_yaml_file(
        self.pyfunceble_config_file_instance.path)

    logging.debug("Configuration:\n%s",
                  self.pyfunceble_config_file_instance.read())

    return self
def get_content(self) -> Optional[dict]:
    """
    Provides the cached or the real content of the dataset (after caching).

    :raise FileNotFoundError:
        When the declared file does not exist.
    """
    # Serve from the storage-level cache when it was already filled.
    if (
        bool(self.STORAGE_INDEX)
        and hasattr(PyFunceble.storage, self.STORAGE_INDEX)
        and bool(getattr(PyFunceble.storage, self.STORAGE_INDEX))
    ):
        return getattr(PyFunceble.storage, self.STORAGE_INDEX)

    file_helper = FileHelper(self.source_file)

    # When the source file is missing, try to (re)download it first.
    if not file_helper.exists() and bool(
        self.DOWNLOADER
    ):  # pragma: no cover ## This is just a safety endpoint.
        self.DOWNLOADER.start()

    if not file_helper.exists():
        raise FileNotFoundError(file_helper.path)

    content = DictHelper().from_json_file(
        self.source_file, return_dict_on_error=False
    )

    # Cache the parsed content for the next call.
    setattr(PyFunceble.storage, self.STORAGE_INDEX, content)

    return content
def start(self) -> "ConfigLoader":
    """
    Starts the loading process.
    """
    config = self.get_config_file_content()

    # A custom (runtime-given) configuration overrides the file content.
    if self.custom_config:
        config = Merge(self.custom_config).into(config)

    config = self.conditional_switch(config)

    # Publish the loaded configuration into the shared storage.
    PyFunceble.storage.CONFIGURATION = Box(
        config,
    )
    PyFunceble.storage.FLATTEN_CONFIGURATION = DictHelper(
        PyFunceble.storage.CONFIGURATION
    ).flatten()
    PyFunceble.storage.HTTP_CODES = Box(
        config["http_codes"],
    )

    # "collection" is optional in the configuration file.
    if "collection" in config:
        PyFunceble.storage.COLLECTION = Box(config["collection"])

    PyFunceble.storage.LINKS = Box(config["links"])

    return self
def setUp(self) -> None:
    """
    Provides everything needed by the tests.
    """
    self.helper = DictHelper()
    self.test_subject = {
        "Hello": "world",
        "World": {"world": "hello"},
        "funilrys": ["Fun", "Ilrys"],
        "Py": "Funceble",
        "pyfunceble": ["funilrys"],
    }
def start(self, max_workers: Optional[int] = None) -> "IanaDBGenerator":
    """
    Starts the generation of the dataset file.

    :param max_workers:
        The maximal number of workers we are allowed to use.
    """
    # Split the upstream page on the HTML marker which precedes each
    # TLD entry; each chunk is then parsed by a worker.
    raw_data = (
        DownloadHelper(self.UPSTREAM_LINK)
        .download_text()
        .split('<span class="domain tld">')
    )

    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        for extension, whois_server in executor.map(
            self.get_extension_and_referrer_from_block, raw_data
        ):
            # Chunks without an extension are skipped.
            if extension:
                self.database[extension] = whois_server

                PyFunceble.facility.Logger.debug(
                    "Got: extension: %r ; whois server: %r", extension, whois_server
                )

    DictHelper(self.database).to_json_file(self.destination)

    return self
def start(self, max_workers: Optional[int] = None):
    """
    Starts the generation of the dataset file.

    :param max_workers:
        The maximal number of workers we are allowed to use.
    """
    # One line per upstream entry; each line is parsed by a worker.
    raw_data = DownloadHelper(
        self.UPSTREAM_LINK).download_text().split("\n")

    with concurrent.futures.ThreadPoolExecutor(
            max_workers=max_workers) as executor:
        for result in executor.map(self.parse_line, raw_data):
            for extension, suffixes in result.items():
                if extension not in self.database:
                    self.database[extension] = suffixes
                else:
                    self.database[extension].extend(suffixes)

                PyFunceble.facility.Logger.debug(
                    "Got: extension: %r ; suffixes: %r.", extension, suffixes)

    # Post-process: deduplicate, drop empties and sort each suffix list.
    for extension, suffixes in self.database.items():
        self.database[extension] = (ListHelper(
            suffixes).remove_duplicates().remove_empty().sort().subject)

    DictHelper(self.database).to_json_file(self.destination)

    return self
def __save_description(self) -> "FilePreloader":
    """
    Saves the description at its destination.

    The protocol of the current session is merged into the matching
    description entry before the whole description is written back.
    """
    self.__description[self.__matching_index].update(self.protocol)

    DictHelper(self.__description).to_json_file(self.__description_file)

    # BUGFIX: the return annotation promised a "FilePreloader" but the
    # method returned None; return self like the sibling methods do.
    return self
def save_dataset(self) -> "FilesystemCounter":
    """
    Dumps the current dataset into its final destination.
    """
    helper = DictHelper(self.dataset)
    helper.to_json_file(self.source_file)

    return self
def save(self) -> None:
    """
    Saves the loaded content of the administration file.

    Nothing is written when the administration file does not exist.
    """
    if not self.info_file_helper.exists():
        return

    to_write = self.convert_data_for_file(self.__our_info)
    DictHelper(to_write).to_json_file(self.info_file_location)
def __init__(self) -> None:
    """
    Loads the administration file (an empty dataset when it is missing),
    then normalizes and persists its content.
    """
    self.info_file_instance = FileHelper(self.INFO_FILE)

    if self.info_file_instance.exists():
        self.content = DictHelper().from_json_file(self.info_file_instance.path)
    else:
        self.content = {}

    logging.debug("Administration file path: %r", self.INFO_FILE)
    logging.debug(
        "Administration file exists: %r", self.info_file_instance.exists()
    )
    logging.debug("Administration file content:\n%r", self.content)

    # Normalize the dataset, fill the missing indexes, drop the
    # obsolete ones and write the result back.
    self.update()
    self.create_missing_index()
    self.clean()
    self.store()
def get_backup_data(self) -> dict:
    """
    Provides the data of the backup file.

    .. note::
        The previous docstring ("Stores the backup …") was a copy-paste
        error: this method only reads.

    On Windows, the directory keys are normalized
    (cf. :py:func:`os.path.normpath`) before being returned.
    """
    data = DictHelper().from_json_file(self.source_file)

    if PlatformUtility.is_windows():
        # Normalize the (slash-separated) directory keys to the
        # platform's separator.
        result = {}
        for directory, files in data.items():
            result[os.path.normpath(directory)] = files

        PyFunceble.facility.Logger.debug("Backup (read) data:\n%r", result)
        return result

    PyFunceble.facility.Logger.debug("Backup (read) data:\n%r", data)
    return data
def store_backup(self) -> "DirectoryStructureBackup":
    """
    Stores the backup at the current destination.
    """
    backup_data = self.get_backup_data()
    DictHelper(backup_data).to_json_file(self.source_file)

    PyFunceble.facility.Logger.info("Stored backup into: %r", self.source_file)

    return self
def load(self) -> dict:
    """
    Loads and returns the content of the administration file.

    An empty :py:class:`dict` is returned when the file is empty or
    could not be read.
    """
    # BUGFIX: the file used to be read twice (once for the truthiness
    # check, once for the parsing); read it only once.
    content = self.info_file_helper.read()

    if content:
        logging.debug("Administration file content:\n%s", content)

        return self.convert_data_for_system(
            DictHelper().from_json(content, return_dict_on_error=False)
        )

    return dict()
def get_upstream_version() -> Box:
    """
    Provides the state of the upstream version.
    """
    dump_link = InternalUrlConverter(
        PyFunceble.cli.storage.VERSION_DUMP_LINK
    ).get_converted()

    raw_dump = DownloadHelper(dump_link).download_text()

    return Box(DictHelper().from_yaml(raw_dump), frozen_box=True)
def __load_description(self) -> "FilePreloader":
    """
    Loads the description into the interface.

    Looks for the dataset entry matching the current protocol; when none
    is found (or no description file exists yet) a fresh default entry
    is created.
    """

    def set_new_dataset(*, append: bool = False, new_index: int = 0) -> None:
        """
        Sets the default dataset into the given index.

        :param append:
            Append instead of writing at :code:`new_index`.
        :param new_index:
            The index to write into.
        """
        new_dataset = copy.deepcopy(self.protocol)
        new_dataset["previous_hash"] = None
        new_dataset["hash"] = None
        new_dataset["line_number"] = 1

        if append:
            self.__description.append(new_dataset)
        elif self.__description:
            self.__description[new_index] = new_dataset
        else:
            self.__description = [new_dataset]

    if self.does_preloader_description_file_exists():
        dataset = DictHelper().from_json_file(self.__description_file)

        # Older description files stored a single dict; normalize to a list.
        if not isinstance(dataset, list):
            dataset = [dataset]

        found = False

        # An entry matches when it carries every key/value of the protocol.
        for index, descr in enumerate(dataset):
            if all(x in descr and descr[x] == y
                   for x, y in self.protocol.items()):
                self.__matching_index = index
                found = True
                break

        self.__description = dataset

        if not found:
            # No matching entry: append a fresh one and point at it.
            set_new_dataset(append=True)
            self.__matching_index = len(self.__description) - 1
    else:
        set_new_dataset()

    return self
def test_set_subject_through_init(self) -> None:
    """
    Tests that the subject can be (over)written through the class
    constructor.
    """
    expected = dict(self.test_subject)

    actual = DictHelper(self.test_subject).subject

    self.assertEqual(expected, actual)
def get_local_version() -> Box:
    """
    Provides the state of the local version file.
    """
    version_file = os.path.join(
        PyFunceble.storage.CONFIG_DIRECTORY,
        PyFunceble.cli.storage.DISTRIBUTED_VERSION_FILENAME,
    )

    return Box(DictHelper().from_yaml_file(version_file), frozen_box=True)
def fetch_dataset(self) -> "FilesystemCounter":
    """
    Fetches the source file into the current instance.

    Falls back to a copy of the standard dataset when the source file
    does not exist yet.
    """
    source = FileHelper(self.source_file)

    if not source.exists():
        self.dataset = copy.deepcopy(self.STD_DATASET)
    else:
        self.dataset = DictHelper().from_json_file(source.path)

    return self
def store(self) -> "InfoManager":
    """
    Stores the current state into the administration file.

    Datetime objects are serialized on the fly: :code:`*_timestamp`
    indexes as POSIX timestamps, :code:`*_datetime` indexes as ISO
    strings; everything else is deep-copied as-is.
    """
    serialized = {}

    for key, data in self.content.items():
        if isinstance(data, datetime) and key.endswith("_timestamp"):
            serialized[key] = data.timestamp()
        elif isinstance(data, datetime) and key.endswith("_datetime"):
            serialized[key] = data.isoformat()
        else:
            serialized[key] = copy.deepcopy(data)

    DictHelper(serialized).to_json_file(self.info_file_instance.path)

    return self
def inject_into_config(self) -> "SystemIntegrator":
    """
    Injects the configuration variables into the configuration after
    comparing each value with the current one.
    """
    PyFunceble.facility.Logger.info(
        "Started to inject arguments info configuration.")

    dict_helper = DictHelper(PyFunceble.storage.CONFIGURATION)
    flatten_config = dict_helper.flatten()

    to_update = {}

    for key, value in vars(self.args).items():
        # Skip arguments that were not given.
        # NOTE(review): `0 in (False, None)` is True (0 == False), so a
        # legitimate 0 value would be skipped too — confirm intended.
        if value in (False, None):
            continue

        # Argparse destinations use `__` where the (flattened)
        # configuration uses dots.
        if "__" in key:
            key = key.replace("__", ".")

        if key in flatten_config:
            if isinstance(flatten_config[key], bool) and isinstance(
                    value, bool):
                # Boolean CLI switches toggle the currently configured value.
                to_update[key] = not flatten_config[key]
            else:
                to_update[key] = value

    dict_helper.set_subject(to_update)
    unflatten_to_update = dict_helper.unflatten()

    # We assume that the configuration was already loaded.
    PyFunceble.facility.ConfigLoader.custom_config = unflatten_to_update

    PyFunceble.facility.Logger.debug("Injected into config: %r",
                                     unflatten_to_update)

    PyFunceble.facility.Logger.info(
        "Finished to inject arguments info configuration.")

    return self
class DownloaderBase:
    """
    Provides the base of all downloader classes.

    The interface is actually simple, but the part which may be hard to
    understand is the "downtime" part.

    What we do, is that we save the download time inside a JSON file, so
    this class provides the base around the download mechanism but also
    the generation or update of that JSON file.
    """

    DOWNTIME_INDEX: Optional[str] = None
    """
    Used to set/track the download time of the current file.
    """

    DOWNLOAD_FREQUENCY: int = 1
    """
    The download frequency (in day).

    Example: if 1 is given, it's once every 24 hours.

    .. warning::
        A frequency of :code:`0` or a negative number will force the
        download every hour.
    """

    all_downtimes: Optional[dict] = {}
    """
    Stores the download time of all files (self managed).
    """

    _destination: Optional[str] = None
    _download_link: Optional[str] = None

    dict_helper: DictHelper = DictHelper()

    def __init__(self) -> None:
        self.downtimes_file = FileHelper(
            os.path.join(PyFunceble.storage.CONFIG_DIRECTORY,
                         PyFunceble.storage.DOWN_FILENAME))

        self.all_downtimes.update(self.get_all_downtimes())

    @property
    def authorized(self) -> bool:
        """
        Provides the authorization to start the download.
        """

        raise NotImplementedError()

    @property
    def destination(self) -> Optional[str]:
        """
        Provides the current state of the :code:`_destination` attribute.
        """

        return self._destination

    @destination.setter
    def destination(self, value: str) -> None:
        """
        Sets the destination.

        :param value:
            The value to set.

        :raise TypeError:
            When value is not a :py:class:`str`.
        """

        if not isinstance(value, str):
            raise TypeError(f"<value> should be {str}, {type(value)} given.")

        self._destination = value

    def set_destination(self, value: str) -> "DownloaderBase":
        """
        Sets the destination.

        :param value:
            The value to set.
        """

        self.destination = value

        return self

    @property
    def download_link(self) -> Optional[str]:
        """
        Provides the current state of the :code:`_download_link` attribute.
        """

        return self._download_link

    @download_link.setter
    def download_link(self, value: str) -> None:
        """
        Sets the link to download.

        :param value:
            The value to set.

        :raise TypeError:
            When value is not a :py:class:`str`.
        """

        if not isinstance(value, str):
            raise TypeError(f"<value> should be {str}, {type(value)} given.")

        self._download_link = value

    def set_download_link(self, value: str) -> "DownloaderBase":
        """
        Sets the link to download.

        :param value:
            The value to set.
        """

        self.download_link = value

        return self

    def get_all_downtimes(self) -> dict:
        """
        Provides the download time of all files.
        """

        return self.dict_helper.from_json_file(self.downtimes_file.path)

    def is_downtime_set(self) -> bool:
        """
        Checks if the download time of the current object exists.
        """

        return (self.DOWNTIME_INDEX in self.all_downtimes
                and self.all_downtimes[self.DOWNTIME_INDEX]
                and all(x in self.all_downtimes[self.DOWNTIME_INDEX]
                        and self.all_downtimes[self.DOWNTIME_INDEX][x]
                        for x in ["iso", "timestamp"]))

    def get_current_downtime(self):
        """
        Provides the download times of the current :code:`DOWNTIME_INDEX`.
        """

        if self.is_downtime_set():
            return self.all_downtimes[self.DOWNTIME_INDEX]

        return None

    def set_current_downtime(self) -> "DownloaderBase":
        """
        Sets the current datetime into our registry.
        """

        current_datetime = datetime.datetime.utcnow()

        self.all_downtimes[self.DOWNTIME_INDEX] = {
            "iso": current_datetime.isoformat(),
            "timestamp": current_datetime.timestamp(),
        }

        return self

    def save_all_downtimes(self) -> None:
        """
        Saves the current state of the all downtimes.
        """

        self.dict_helper.set_subject(self.all_downtimes).to_json_file(
            self.downtimes_file.path)

    def is_last_download_expired(self) -> bool:
        """
        Checks if the last downloaded file is expired (if exists).
        """

        if not FileHelper(
                self.destination).exists() or not self.is_downtime_set():
            return True

        last_downloaded_time = datetime.datetime.fromtimestamp(
            self.get_current_downtime()["timestamp"])

        # BUGFIX: this check used `timedelta.seconds`, which only holds the
        # seconds *component* (0-86399) and therefore wraps every day —
        # downloads were wrongly skipped once more than a day (plus less
        # than an hour) had elapsed. `total_seconds()` is the real elapsed
        # time.
        if (self.DOWNLOAD_FREQUENCY <= 0
                and (datetime.datetime.utcnow() -
                     last_downloaded_time).total_seconds() < 3600):
            return False

        if (last_downloaded_time +
                datetime.timedelta(days=self.DOWNLOAD_FREQUENCY) <=
                datetime.datetime.utcnow()):
            return True

        return False

    def start(self) -> None:
        """
        Starts the download process.
        """

        if self.authorized and self.is_last_download_expired():
            if not hasattr(self, "destination") or not self.destination:
                raise PyFunceble.downloader.exceptions.\
                    NoDownloadDestinationGiven()

            if not hasattr(self, "download_link") or not self.download_link:
                raise PyFunceble.downloader.exceptions.NoDownloadLinkGiven()

            if DownloadHelper(self.download_link).download_text(
                    destination=self.destination):
                # Only register a new downtime when the download succeeded.
                self.set_current_downtime()
                self.save_all_downtimes()
class TestDictHelper(unittest.TestCase):
    """
    Provides the test of our dictionary helper.
    """

    def setUp(self) -> None:
        """
        Setups everything needed for the tests.
        """
        self.test_subject = {
            "Hello": "world",
            "World": {"world": "hello"},
            "funilrys": ["Fun", "Ilrys"],
            "Py": "Funceble",
            "pyfunceble": ["funilrys"],
        }

        self.helper = DictHelper()

    def tearDown(self) -> None:
        """
        Destroys everything needed by the tests.
        """
        del self.test_subject
        del self.helper

    def test_set_subject_return(self) -> None:
        """
        Tests the response from the method which let us set the subject
        to work with.
        """
        actual = self.helper.set_subject(self.test_subject)

        self.assertIsInstance(actual, DictHelper)

    def test_set_subject_method(self) -> None:
        """
        Tests the method which let us set the subject to work with.
        """
        given = self.test_subject
        expected = dict(self.test_subject)

        self.helper.set_subject(given)

        actual = self.helper.subject

        self.assertEqual(expected, actual)

    def test_set_subject_attribute(self) -> None:
        """
        Tests the overwriting of the :code:`subject` attribute.
        """
        given = self.test_subject
        expected = dict(self.test_subject)

        self.helper.subject = given

        actual = self.helper.subject

        self.assertEqual(expected, actual)

    def test_set_subject_through_init(self) -> None:
        """
        Tests the overwriting of the subject to work through the class
        constructor.
        """
        given = self.test_subject
        expected = dict(self.test_subject)

        helper = DictHelper(given)
        actual = helper.subject

        self.assertEqual(expected, actual)

    def test_has_same_key_as(self) -> None:
        """
        Tests the method which let us know if the keys of 2 dicts are
        the same.
        """
        origin = {"a": 1, "b": 1}
        target = {"a": 1, "b": 2, "c": {"a": 1, "b": 3, "c": {"x": "x"}}}

        expected = True
        actual = self.helper.set_subject(target).has_same_keys_as(origin)

        self.assertEqual(expected, actual)

        expected = False
        actual = self.helper.set_subject(origin).has_same_keys_as(target)

        self.assertEqual(expected, actual)

        origin["c"] = {"a": 1, "b": 3, "c": {"x": "x"}}

        expected = True
        actual = self.helper.set_subject(target).has_same_keys_as(origin)

        self.assertEqual(expected, actual)

        actual = self.helper.set_subject(origin).has_same_keys_as(target)

        self.assertEqual(expected, actual)

        del origin["c"]["c"]

        expected = False
        actual = self.helper.set_subject(origin).has_same_keys_as(target)

        self.assertEqual(expected, actual)

    def test_remove_key_not_dict(self) -> None:
        """
        Tests the method which let us remove a key from a given dict for
        the case that the given subject is not a dict.
        """
        given = "Hello"
        expected = "Hello"

        actual = self.helper.set_subject(given).remove_key("Py")

        self.assertEqual(expected, actual)

    def test_remove_key(self) -> None:
        """
        Tests the method which let us remove a key from a given dict.
        """
        given = copy.deepcopy(self.test_subject)
        expected = {
            "Hello": "world",
            "World": {"world": "hello"},
            "funilrys": ["Fun", "Ilrys"],
            "pyfunceble": ["funilrys"],
        }

        actual = self.helper.set_subject(given).remove_key("Py")

        self.assertEqual(expected, actual)

        actual = self.helper.set_subject(given).remove_key(["Py", "test"])

        self.assertEqual(expected, actual)

    def test_remove_multiple_key(self) -> None:
        """
        Tests the method which let us remove a key with multiple key to
        remove.
        """
        given = copy.deepcopy(self.test_subject)
        expected = {
            "Hello": "world",
            "World": {"world": "hello"},
            "pyfunceble": ["funilrys"],
        }

        actual = self.helper.set_subject(given).remove_key(["funilrys", "Py"])

        self.assertEqual(expected, actual)

    def test_remove_key_not_exists(self) -> None:
        """
        Tests the method which let us remove a key for the case that the
        key to remove does not exist.
        """
        given = copy.deepcopy(self.test_subject)
        expected = copy.deepcopy(self.test_subject)

        actual = self.helper.set_subject(given).remove_key("xxx.")

        self.assertEqual(expected, actual)

    def test_rename_key_not_dict(self) -> None:
        """
        Tests the method which let us rename a key of a dict for the case
        that the given subject is not a dict.
        """
        given = "Hello, World!"
        expected = "Hello, World!"

        actual = self.helper.set_subject(given).rename_key(
            {"Py": "PyFunceble"})

        self.assertEqual(expected, actual)

    def test_rename_key_strict_single(self) -> None:
        """
        Tests the method which let us rename a key for the case that we
        only want to strictly rename one key.
        """
        given = copy.deepcopy(self.test_subject)
        expected = {
            "Hello": "world",
            "World": {"world": "hello"},
            "funilrys": ["Fun", "Ilrys"],
            "PyFunceble": "Funceble",
            "pyfunceble": ["funilrys"],
        }

        actual = self.helper.set_subject(given).rename_key(
            {"Py": "PyFunceble"}, strict=True)

        self.assertEqual(expected, actual)

    def test_rename_key_not_strict_single(self) -> None:
        """
        Tests the method which let us rename a key for the case that we
        only want to rename all occurrences of the given key.
        """
        given = copy.deepcopy(self.test_subject)
        expected = {
            "Hello": "world",
            "World": {"world": "hello"},
            "nuilrys": ["Fun", "Ilrys"],
            "Py": "Funceble",
            "nuceble": ["funilrys"],
        }

        actual = self.helper.set_subject(given).rename_key({"fun": "nuf"},
                                                           strict=False)

        self.assertEqual(expected, actual)

    def test_to_and_from_json_file(self) -> None:
        """
        Tests the method which let us save and load a dict into/from a
        JSON file.
        """
        output_file = tempfile.NamedTemporaryFile("w", delete=False)

        given = copy.deepcopy(self.test_subject)
        expected = copy.deepcopy(self.test_subject)

        self.helper.set_subject(given).to_json_file(output_file.name)

        output_file.seek(0)

        actual = self.helper.from_json_file(output_file.name)

        self.assertEqual(expected, actual)

        output_file.close()
        os.remove(output_file.name)

    def test_from_json_file_not_json(self) -> None:
        """
        Tests the method which let us load a JSON file for the case that
        no JSON file is given.
        """
        output_file = tempfile.NamedTemporaryFile("wb", delete=False)

        output_file.write(b"Hello, World!")
        output_file.seek(0)

        expected = dict()  # pylint: disable=use-dict-literal
        actual = self.helper.from_json_file(output_file.name)

        self.assertEqual(expected, actual)

        output_file.close()
        os.remove(output_file.name)

    def test_to_json(self) -> None:
        """
        Tests the method which let us convert a dict to a JSON and
        vice-versa.
        """
        given = copy.deepcopy(self.test_subject)
        expected = """{
    "Hello": "world",
    "Py": "Funceble",
    "World": {
        "world": "hello"
    },
    "funilrys": [
        "Fun",
        "Ilrys"
    ],
    "pyfunceble": [
        "funilrys"
    ]
}"""

        actual = self.helper.set_subject(given).to_json()

        self.assertIsInstance(actual, str)
        self.assertEqual(expected, actual)

        actual = self.helper.from_json(expected)
        expected = copy.deepcopy(self.test_subject)

        self.assertEqual(expected, actual)

    def test_from_json_not_json(self) -> None:
        """
        Tests the method which let us convert a JSON string into a dict
        for the case that no JSON is given.
        """
        given = "Hello, World!"
        expected = dict()  # pylint: disable=use-dict-literal

        actual = self.helper.from_json(given)

        self.assertEqual(expected, actual)

    def test_from_yaml_file(self) -> None:
        """
        Tests the method which let us save and load a dict into/from a
        YAML file.
        """
        output_file = tempfile.NamedTemporaryFile("w", delete=False)

        given = copy.deepcopy(self.test_subject)
        expected = copy.deepcopy(self.test_subject)

        self.helper.set_subject(given).to_yaml_file(output_file.name)

        output_file.seek(0)

        actual = self.helper.from_yaml_file(output_file.name)

        self.assertEqual(expected, actual)

        output_file.close()
        os.remove(output_file.name)

    def test_to_yaml(self) -> None:
        """
        Tests the method which let us convert a dict into a YAML and
        vice-versa.
        """
        expected = """Hello: world
Py: Funceble
World:
  world: hello
funilrys:
- Fun
- Ilrys
pyfunceble:
- funilrys
"""
        given = copy.deepcopy(self.test_subject)

        actual = self.helper.set_subject(given).to_yaml()

        self.assertEqual(expected, actual)

        actual = self.helper.from_yaml(expected)
        expected = copy.deepcopy(self.test_subject)

        self.assertEqual(expected, actual)

    def test_flatten(self) -> None:
        """
        Tests the method which let us flatten a dict.
        """
        expected = {
            "Hello": "world",
            "World.world": "hello",
            "funilrys": ["Fun", "Ilrys"],
            "Py": "Funceble",
            "pyfunceble": ["funilrys"],
        }

        actual = self.helper.set_subject(self.test_subject).flatten()

        self.assertEqual(expected, actual)

    def test_deeper_flatten(self) -> None:
        """
        Tests the method which let us flatten a dict with more level.
        """
        given = {
            "Hello": "world",
            "World": {"world": "hello"},
            "funilrys": ["Fun", "Ilrys"],
            "Py": "Funceble",
            "pyfunceble": ["funilrys"],
            "this": {
                "is": {
                    "a": {
                        "test": {
                            "id": 1,
                            "deep": {"hello": {"world": ["Hello!"]}},
                            "response": "World",
                        }
                    },
                    "b": 1,
                    "c": [{"hello": {"this": {"is": "a test"}}}],
                }
            },
            "": {"hello-fun": "world", "": "hehe"},
        }
        expected = {
            "Hello": "world",
            "World.world": "hello",
            "funilrys": ["Fun", "Ilrys"],
            "Py": "Funceble",
            "pyfunceble": ["funilrys"],
            "this.is.a.test.deep.hello.world": ["Hello!"],
            "this.is.a.test.id": 1,
            "this.is.a.test.response": "World",
            "this.is.b": 1,
            "this.is.c": [{"hello": {"this": {"is": "a test"}}}],
            "..": "hehe",
            ".hello-fun": "world",
        }

        actual = self.helper.set_subject(given).flatten()

        self.assertEqual(expected, actual)

    def test_unflatten(self) -> None:
        """
        Tests the method which let us unflatten a dict.
        """
        given = {
            "Hello": "world",
            "World.world": "hello",
            "funilrys": ["Fun", "Ilrys"],
            "Py": "Funceble",
            "pyfunceble": ["funilrys"],
        }
        expected = dict(self.test_subject)

        actual = self.helper.set_subject(given).unflatten()

        self.assertEqual(expected, actual)

    def test_deeper_unflatten(self) -> None:
        """
        Tests the method which let us unflatten a dict with more level.
        """
        given = {
            "Hello": "world",
            "World.world": "hello",
            "funilrys": ["Fun", "Ilrys"],
            "Py": "Funceble",
            "pyfunceble": ["funilrys"],
            "this.is.a.test.deep.hello.world": ["Hello!"],
            "this.is.a.test.id": 1,
            "this.is.a.test.response": "World",
            "this.is.b": 1,
            "this.is.c": [{"hello": {"this": {"is": "a test"}}}],
            "..": "hehe",
            ".hello-fun": "world",
        }
        expected = {
            "Hello": "world",
            "World": {"world": "hello"},
            "funilrys": ["Fun", "Ilrys"],
            "Py": "Funceble",
            "pyfunceble": ["funilrys"],
            "this": {
                "is": {
                    "a": {
                        "test": {
                            "id": 1,
                            "deep": {"hello": {"world": ["Hello!"]}},
                            "response": "World",
                        }
                    },
                    "b": 1,
                    "c": [{"hello": {"this": {"is": "a test"}}}],
                }
            },
            "": {"hello-fun": "world", "": "hehe"},
        }

        actual = self.helper.set_subject(given).unflatten()

        self.assertEqual(expected, actual)
def update(self) -> "InfoManager":
    """
    Updates and filters the new content of the administration file.
    """
    # pylint: disable=too-many-branches

    self.content["name"] = dead_hosts.launcher.defaults.paths.GIT_BASE_NAME
    logging.debug("Updated the `name` index of the administration file.")

    # Legacy files which should no longer live in the workspace.
    to_delete = [
        FileHelper(os.path.join(self.WORKSPACE_DIR, ".administrators")),
        FileHelper(os.path.join(self.WORKSPACE_DIR, "update_me.py")),
        FileHelper(os.path.join(self.WORKSPACE_DIR, "admin.py")),
    ]

    if "list_name" in self.content:
        to_delete.append(
            FileHelper(
                os.path.join(self.WORKSPACE_DIR, self.content["list_name"]))
        )

    if "ping" in self.content:
        # Normalize every ping entry to the `@username` format.
        local_ping_result = []

        for username in self.content["ping"]:
            if username.startswith("@"):
                local_ping_result.append(username)
            else:
                local_ping_result.append(f"@{username}")

        self.content["ping"] = local_ping_result

        logging.debug(
            "Updated the `ping` index of the administration file, "
            "the format has to stay the same everywhere."
        )

    if (
        "raw_link" in self.content
        and isinstance(self.content["raw_link"], str)
        and not self.content["raw_link"]
    ):
        self.content["raw_link"] = None

        logging.debug(
            "Updated the `raw_link` index of the administration file, "
            "empty string not accepted."
        )

    if "custom_pyfunceble_config" in self.content:
        if self.content["custom_pyfunceble_config"]:
            if not isinstance(self.content["custom_pyfunceble_config"], dict):
                self.content["custom_pyfunceble_config"] = {}
            else:
                # Keep the custom configuration in its flattened
                # (dotted-keys) representation.
                self.content["custom_pyfunceble_config"] = DictHelper(
                    self.content["custom_pyfunceble_config"]
                ).flatten()
        else:
            self.content["custom_pyfunceble_config"] = {}

        logging.debug(
            "Updated the `custom_pyfunceble_config` index of the "
            "administration file, it should be a %r.",
            dict,
        )

    if (
        "custom_pyfunceble_config" in self.content
        and self.content["custom_pyfunceble_config"]
        and not isinstance(self.content["custom_pyfunceble_config"], dict)
    ):
        self.content["custom_pyfunceble_config"] = {}

        logging.debug(
            "Updated the `custom_pyfunceble_config` index of the "
            "administration file, it should be a %r.",
            dict,
        )

    for index in ["currently_under_test"]:
        if index in self.content and not isinstance(self.content[index], bool):
            self.content[index] = bool(int(self.content[index]))

            logging.debug(
                "Updated the %r index of the administration file, "
                "it should be a %r.",
                index,
                bool,
            )

    # BUGFIX: the middle entries of this list used to be written without
    # separating commas, so Python's implicit string concatenation merged
    # them into one bogus index and the real indexes were never converted.
    # `last_download_datetime` was also dropped from this list: it holds
    # an ISO-formatted datetime and is handled by the `_datetime` loop
    # below, not a float conversion.
    # NOTE(review): the `lastest_*` spelling is kept as-is for dataset
    # compatibility, even though `create_missing_index` uses `latest_*`
    # — confirm which one the datasets actually carry.
    for index in [
        "days_until_next_test",
        "finish_timestamp",
        "last_download_timestamp",
        "lastest_part_finish_timestamp",
        "lastest_part_start_timestamp",
        "start_timestamp",
    ]:
        if index in self.content and not isinstance(self.content[index], float):
            self.content[index] = float(self.content[index])

            logging.debug(
                "Updated the %r index of the administration file, "
                "it should be a %r.",
                index,
                float,
            )

    for index in [
        "finish_timestamp",
        "last_download_timestamp",
        "lastest_part_finish_timestamp",
        "lastest_part_start_timestamp",
        "start_timestamp",
    ]:
        if index in self.content and not isinstance(self.content[index], datetime):
            self.content[index] = datetime.fromtimestamp(self.content[index])

            logging.debug(
                "Updated the %r index of the administration file, "
                "the system understands %r only."
                " (JSON => %s).",
                index,
                datetime,
                dict,
            )

    for index in [
        "finish_datetime",
        "last_download_datetime",
        "lastest_part_finish_datetime",
        "lastest_part_start_datetime",
        "start_datetime",
    ]:
        if index in self.content:
            # BUGFIX: the old `if value and not isinstance(...)` / `else`
            # structure sent already-converted datetime values into the
            # "not previously set" branch, resetting them to the epoch.
            if not self.content[index]:
                # Not set: default to the UNIX epoch.
                self.content[index] = datetime.fromtimestamp(0)

                logging.debug(
                    "Set the %r index of the administration file, "
                    "it was not previously set.",
                    # BUGFIX: was `repr(index)` which, combined with %r,
                    # double-quoted the index name in the log output.
                    index,
                )
            elif not isinstance(self.content[index], datetime):
                self.content[index] = datetime.fromisoformat(self.content[index])

                logging.debug(
                    "Updated the %r index of the administration file, "
                    "the system understands %r only."
                    " (JSON => %r.",
                    index,
                    datetime,
                    dict,
                )

    for file in to_delete:
        if file.exists():
            file.delete()

            logging.debug(
                "Deleted the %r file, it is not needed anymore.",
                file.path,
            )

    # BUGFIX: honor the declared `-> "InfoManager"` return annotation.
    return self
class InfoManager:
    """
    Provides an interface for the management of the :code:`info.json` file.

    .. warning::
        Keep in mind that this interface provides everything that may be
        needed by other interfaces.
    """

    WORKSPACE_DIR: str = dead_hosts.launcher.defaults.envs.WORKSPACE_DIR
    PYFUNCEBLE_CONFIG_DIR: str = (
        dead_hosts.launcher.defaults.paths.PYFUNCEBLE_CONFIG_DIRECTORY
    )
    GHA_WORKFLOWS_DIR: str = os.path.join(
        WORKSPACE_DIR, dead_hosts.launcher.defaults.paths.GHA_WORKFLOW_DIR
    )
    INFO_FILE = os.path.join(
        WORKSPACE_DIR, dead_hosts.launcher.defaults.paths.INFO_FILENAME
    )

    def __init__(self) -> None:
        self.info_file_instance = FileHelper(self.INFO_FILE)

        if self.info_file_instance.exists():
            self.content = DictHelper().from_json_file(self.info_file_instance.path)
        else:
            self.content = {}

        logging.debug("Administration file path: %r", self.INFO_FILE)
        logging.debug(
            "Administration file exists: %r", self.info_file_instance.exists()
        )
        logging.debug("Administration file content:\n%r", self.content)

        # Normalize, complete, prune and finally persist the file.
        self.update()
        self.create_missing_index()
        self.clean()
        self.store()

    def __getattr__(self, index: str) -> Any:
        if index in self.content:
            return self.content[index]

        raise AttributeError(index)

    def __getitem__(self, index: str) -> Any:
        if index in self.content:
            return self.content[index]

        # NOTE(review): a KeyError would be more conventional for item
        # access, but the exception type is kept because callers may
        # already catch AttributeError.
        raise AttributeError(index)

    def __setitem__(self, index: str, value: Any) -> None:
        self.content[index] = value

    def __del__(self) -> None:
        # Best-effort persistence of the latest state at finalization.
        self.store()

    def store(self) -> "InfoManager":
        """
        Stores (writes) the current state into the administration file.

        Serialization follows the index suffix: ``*_timestamp`` indexes
        holding a :py:class:`~datetime.datetime` are written as POSIX
        timestamps, ``*_datetime`` ones as ISO format strings.
        """

        local_copy = {}

        for index, value in self.content.items():
            if index.endswith("_timestamp") and isinstance(value, datetime):
                local_copy[index] = value.timestamp()
            elif index.endswith("_datetime") and isinstance(value, datetime):
                local_copy[index] = value.isoformat()
            else:
                local_copy[index] = copy.deepcopy(value)

        DictHelper(local_copy).to_json_file(self.info_file_instance.path)

        return self

    def clean(self) -> "InfoManager":
        """
        Cleans the unneeded (legacy) indexes.
        """

        for index in [
            "arguments",
            "clean_list_file",
            "clean_original",
            "commit_autosave_message",
            "last_test",
            "list_name",
            "stable",
        ]:
            if index in self.content:
                del self.content[index]

                logging.debug(
                    "Deleted the %r index of the administration file, "
                    "it is not needed anymore.",
                    index,
                )

        return self

    def create_missing_index(self) -> "InfoManager":
        """
        Creates the missing indexes with sensible defaults.
        """

        # 15 days in the past so that a freshly initialized repository is
        # considered "due for a test" right away.
        default_datetime = datetime.utcnow() - timedelta(days=15)

        indexes = {
            "currently_under_test": False,
            "custom_pyfunceble_config": {},
            "days_until_next_test": 2,
            "finish_datetime": default_datetime,
            "finish_timestamp": default_datetime.timestamp(),
            "last_download_datetime": default_datetime,
            "last_download_timestamp": default_datetime.timestamp(),
            "latest_part_finish_timestamp": default_datetime.timestamp(),
            "latest_part_start_timestamp": default_datetime.timestamp(),
            "latest_part_finish_datetime": default_datetime,
            "latest_part_start_datetime": default_datetime,
            "name": dead_hosts.launcher.defaults.paths.GIT_BASE_NAME,
            "own_management": False,
            "ping": [],
            "raw_link": None,
            "start_datetime": default_datetime,
            "start_timestamp": default_datetime.timestamp(),
            "live_update": True,
        }

        for index, value in indexes.items():
            if index not in self.content:
                self.content[index] = value

                logging.debug(
                    "Created the %r index of the administration file, "
                    "it was not found.",
                    index,
                )

        # BUGFIX: the method is annotated as returning the instance but
        # previously returned None.
        return self

    def update(self) -> "InfoManager":
        """
        Updates and filters the (new) content: normalizes the `ping` and
        `raw_link` indexes, flattens the custom configuration, coerces
        timestamp/datetime indexes into their canonical types and deletes
        obsolete files.
        """

        # pylint: disable=too-many-branches

        self.content["name"] = dead_hosts.launcher.defaults.paths.GIT_BASE_NAME
        logging.debug("Updated the `name` index of the administration file.")

        to_delete = [
            FileHelper(os.path.join(self.WORKSPACE_DIR, ".administrators")),
            FileHelper(os.path.join(self.WORKSPACE_DIR, "update_me.py")),
            FileHelper(os.path.join(self.WORKSPACE_DIR, "admin.py")),
        ]

        if "list_name" in self.content:
            to_delete.append(
                FileHelper(os.path.join(self.WORKSPACE_DIR, self.content["list_name"]))
            )

        if "ping" in self.content:
            # Every username has to carry the "@" prefix so that the commit
            # message actually mentions the user.
            local_ping_result = []

            for username in self.content["ping"]:
                if username.startswith("@"):
                    local_ping_result.append(username)
                else:
                    local_ping_result.append(f"@{username}")

            self.content["ping"] = local_ping_result

            logging.debug(
                "Updated the `ping` index of the administration file, "
                "the format has to stay the same everywhere."
            )

        if (
            "raw_link" in self.content
            and isinstance(self.content["raw_link"], str)
            and not self.content["raw_link"]
        ):
            self.content["raw_link"] = None

            logging.debug(
                "Updated the `raw_link` index of the administration file, "
                "empty string not accepted."
            )

        if "custom_pyfunceble_config" in self.content:
            if self.content["custom_pyfunceble_config"]:
                if not isinstance(self.content["custom_pyfunceble_config"], dict):
                    self.content["custom_pyfunceble_config"] = {}
                else:
                    self.content["custom_pyfunceble_config"] = DictHelper(
                        self.content["custom_pyfunceble_config"]
                    ).flatten()
            else:
                self.content["custom_pyfunceble_config"] = {}

            logging.debug(
                "Updated the `custom_pyfunceble_config` index of the "
                "administration file, it should be a %r.",
                dict,
            )

        # NOTE: a second `custom_pyfunceble_config` normalization block used
        # to follow here; it was dead code because the block above already
        # guarantees the index is a dict.

        for index in ["currently_under_test"]:
            if index in self.content and not isinstance(self.content[index], bool):
                self.content[index] = bool(int(self.content[index]))

                logging.debug(
                    "Updated the %r index of the administration file, "
                    "it should be a %r.",
                    index,
                    bool,
                )

        for index in [
            "days_until_next_test",
            "finish_timestamp",
            # BUGFIX: missing commas previously concatenated the next keys
            # (together with a stray "last_download_datetime") into a single
            # string literal that could never match an index. The datetime
            # index is intentionally NOT listed here: it holds ISO strings,
            # which float() would reject; it is handled further below.
            "last_download_timestamp",
            "lastest_part_finish_timestamp",
            "lastest_part_start_timestamp",
            "start_timestamp",
        ]:
            if index in self.content and not isinstance(self.content[index], float):
                self.content[index] = float(self.content[index])

                logging.debug(
                    "Updated the %r index of the administration file, "
                    "it should be a %r.",
                    index,
                    float,
                )

        # NOTE(review): the "lastest_*" spellings below do not match the
        # "latest_*" indexes written by create_missing_index — confirm which
        # spelling the stored administration files actually use before
        # unifying them.
        for index in [
            "finish_timestamp",
            "last_download_timestamp",
            "lastest_part_finish_timestamp",
            "lastest_part_start_timestamp",
            "start_timestamp",
        ]:
            if index in self.content and not isinstance(self.content[index], datetime):
                self.content[index] = datetime.fromtimestamp(self.content[index])

                logging.debug(
                    "Updated the %r index of the administration file, "
                    "the system understands %r only."
                    " (JSON => %s).",
                    index,
                    datetime,
                    dict,
                )

        for index in [
            "finish_datetime",
            "last_download_datetime",
            "lastest_part_finish_datetime",
            "lastest_part_start_datetime",
            "start_datetime",
        ]:
            if index in self.content:
                # BUGFIX: the previous flat condition sent an index that was
                # already a datetime into the "reset to epoch" branch. Only a
                # missing/empty value should be reset.
                if self.content[index]:
                    if not isinstance(self.content[index], datetime):
                        self.content[index] = datetime.fromisoformat(
                            self.content[index]
                        )

                        logging.debug(
                            "Updated the %r index of the administration file, "
                            "the system understands %r only."
                            " (JSON => %r).",
                            index,
                            datetime,
                            dict,
                        )
                else:
                    self.content[index] = datetime.fromtimestamp(0)

                    logging.debug(
                        "Set the %r index of the administration file, "
                        "it was not previously set.",
                        # BUGFIX: was repr(index), which double-repr-ed with %r.
                        index,
                    )

        for file in to_delete:
            if file.exists():
                file.delete()

                logging.debug(
                    "Deleted the %r file, it is not needed anymore.",
                    file.path,
                )

        # BUGFIX: annotated as returning the instance but previously
        # returned None.
        return self

    def get_ping_for_commit(self) -> str:
        """
        Provides the string to append in order to mention the users to ping.
        """

        if "ping" in self.content:
            return " ".join(self.content["ping"])

        return ""
class ConfigLoader:
    """
    Provides the interface which loads and updates the configuration
    (if needed).

    :param merge_upstream:
        Authorizes the merging of the upstream configuration.

        .. note::
            If value is set to :py:class:`None` (default), we fallback to the
            :code:`PYFUNCEBLE_AUTO_CONFIGURATION` environment variable.
    """

    # Paths are resolved once at construction time.
    path_to_config: Optional[str] = None
    path_to_default_config: Optional[str] = None
    path_to_overwrite_config: Optional[str] = None

    # NOTE(review): mutable class-level defaults; shared across instances
    # until an instance assigns its own value through the setters below.
    _custom_config: dict = {}
    _merge_upstream: bool = False

    file_helper: FileHelper = FileHelper()
    dict_helper: DictHelper = DictHelper()

    def __init__(self, merge_upstream: Optional[bool] = None) -> None:
        # The default configuration ships inside the package; resolve its
        # on-disk location through importlib.resources.
        with package_resources.path(
            "PyFunceble.data.infrastructure",
            PyFunceble.storage.DISTRIBUTED_CONFIGURATION_FILENAME,
        ) as file_path:
            self.path_to_default_config = str(file_path)

        self.path_to_config = os.path.join(
            PyFunceble.storage.CONFIG_DIRECTORY,
            PyFunceble.storage.CONFIGURATION_FILENAME,
        )
        self.path_to_overwrite_config = os.path.join(
            PyFunceble.storage.CONFIG_DIRECTORY,
            PyFunceble.storage.CONFIGURATION_OVERWRITE_FILENAME,
        )

        # Explicit argument wins over the environment variable.
        if merge_upstream is not None:
            self.merge_upstream = merge_upstream
        elif EnvironmentVariableHelper("PYFUNCEBLE_AUTO_CONFIGURATION").exists():
            self.merge_upstream = True

    def __del__(self) -> None:
        self.destroy()

    # NOTE: decorator defined inside the class body on purpose; it is applied
    # to methods below and takes the undecorated function as its argument.
    def reload_config(func):  # pylint: disable=no-self-argument
        """
        Reload the configuration (if it was already loaded) after launching
        the decorated method.
        """

        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            result = func(self, *args, **kwargs)  # pylint: disable=not-callable

            if self.is_already_loaded():
                self.start()

            return result

        return wrapper

    @staticmethod
    def conditional_switch(config: dict) -> dict:
        """
        Given the configuration that we are going to load, switches some of
        the settings.

        :param config:
            The configuration we are going to load.
        """

        # Conditional autocontinue.
        # If we are under continuous integration, the autocontinue should be
        # activated.
        if bool(config["cli_testing"]["ci"]["active"]) and not bool(
            config["cli_testing"]["autocontinue"]
        ):
            config["cli_testing"]["autocontinue"] = True

        return config

    @staticmethod
    def is_already_loaded() -> bool:
        """
        Checks if the configuration was already loaded.
        """

        return bool(PyFunceble.storage.CONFIGURATION)

    @property
    def custom_config(self) -> dict:
        """
        Provides the current state of the :code:`_custom_config` attribute.
        """

        return self._custom_config

    @custom_config.setter
    @reload_config
    def custom_config(self, value: dict) -> None:
        """
        Sets the custom configuration to set after loading.

        Side Effect:
            Directly inject into the configuration variables if it was
            already loaded.

        :raise TypeError:
            When :code:`value` is not a :py:class:`dict`.
        """

        if not isinstance(value, dict):
            raise TypeError(f"<value> should be {dict}, {type(value)} given.")

        # Subsequent calls merge into (not replace) the previous custom data.
        if not self._custom_config:
            self._custom_config = value
        else:
            self._custom_config.update(value)

    def set_custom_config(self, value: dict) -> "ConfigLoader":
        """
        Sets the custom configuration to set after loading.

        Side Effect:
            Directly inject into the configuration variables if it was
            already loaded.
        """

        self.custom_config = value

        return self

    @property
    def merge_upstream(self) -> bool:
        """
        Provides the current state of the :code:`_merge_upstream` attribute.
        """

        return self._merge_upstream

    @merge_upstream.setter
    def merge_upstream(self, value: bool) -> None:
        """
        Updates the value of :code:`_merge_upstream` attribute.

        :raise TypeError:
            When :code:`value` is not a :py:class:`bool`.
        """

        if not isinstance(value, bool):
            raise TypeError(f"<value> should be {bool}, {type(value)} given.")

        self._merge_upstream = value

    def set_merge_upstream(self, value: bool) -> "ConfigLoader":
        """
        Updates the value of :code:`_merge_upstream` attribute.
        """

        self.merge_upstream = value

        return self

    def config_file_exist(
        self,
    ) -> bool:  # pragma: no cover ## Existence checker already tested.
        """
        Checks if the config file exists.
        """

        return FileHelper(self.path_to_config).exists()

    def default_config_file_exist(
        self,
    ) -> bool:  # pragma: no cover ## Existence checker already tested.
        """
        Checks if the default configuration file exists.
        """

        return self.file_helper.set_path(self.path_to_default_config).exists()

    def install_missing_infrastructure_files(
        self,
    ) -> "ConfigLoader":  # pragma: no cover ## Copy method already tested
        """
        Installs the missing files (when needed).

        .. note::
            Installed if missing:

                - The configuration file.
                - The directory structure file.
        """

        if not self.is_already_loaded():
            if not self.file_helper.set_path(self.path_to_config).exists():
                self.file_helper.set_path(self.path_to_default_config).copy(
                    self.path_to_config
                )

        return self

    @classmethod
    def download_dynamic_infrastructure_files(
        cls,
    ) -> "ConfigLoader":
        """
        Downloads all the dynamically (generated) infrastructure files.

        .. note::
            Downloaded if missing:

                - The IANA dump file.
                - The Public Suffix dump file.

        NOTE(review): annotated as returning a :code:`ConfigLoader` but no
        value is returned — confirm whether callers chain on this method.
        """

        ## pragma: no cover ## Underlying download methods already tested.

        if not cls.is_already_loaded():
            IANADownloader().start()
            PublicSuffixDownloader().start()
            UserAgentsDownloader().start()

    def get_config_file_content(self) -> dict:
        """
        Provides the content of the configuration file or the one already
        loaded.
        """

        def is_3_x_version(config: dict) -> bool:
            """
            Checks if the given configuration is an old (3.x) one.

            :param config:
                The config to work with.
            """

            # "days_between_inactive_db_clean" only existed in the 3.x layout.
            return config and "days_between_inactive_db_clean" in config

        if not self.is_already_loaded():
            self.install_missing_infrastructure_files()
            self.download_dynamic_infrastructure_files()

        try:
            config = self.dict_helper.from_yaml_file(self.path_to_config)
        except ConstructorError:
            # Unreadable/corrupted YAML: restore the shipped default and retry.
            self.file_helper.set_path(self.path_to_default_config).copy(
                self.path_to_config
            )
            config = self.dict_helper.from_yaml_file(self.path_to_config)

        if (
            not config or self.merge_upstream or is_3_x_version(config)
        ):  # pragma: no cover ## Testing the underlying comparison method is sufficient
            # Merge the local configuration into the upstream one and persist
            # the result.
            config = ConfigComparison(
                local_config=config,
                upstream_config=self.dict_helper.from_yaml_file(
                    self.path_to_default_config
                ),
            ).get_merged()

            self.dict_helper.set_subject(config).to_yaml_file(self.path_to_config)

        if self.file_helper.set_path(self.path_to_overwrite_config).exists():
            overwrite_data = self.dict_helper.from_yaml_file(
                self.path_to_overwrite_config
            )

            if overwrite_data:
                config = Merge(
                    self.dict_helper.from_yaml_file(self.path_to_overwrite_config)
                ).into(config)
        else:  # pragma: no cover ## Just make it visible to end-user.
            # Create an empty overwrite file so end-users discover the feature.
            self.file_helper.write("")

        return config

    def get_configured_value(self, entry: str) -> Any:
        """
        Provides the currently configured value.

        :param entry:
            An entry to check.

            multilevel should be separated with a point.

        :raise RuntimeError:
            When the configuration is not loaded yet.

        :raise ValueError:
            When the given :code:`entry` is not found.
        """

        if not self.is_already_loaded():
            raise RuntimeError("Configuration not loaded, yet.")

        if entry not in PyFunceble.storage.FLATTEN_CONFIGURATION:
            raise ValueError(f"<entry> ({entry!r}) not in loaded configuration.")

        return PyFunceble.storage.FLATTEN_CONFIGURATION[entry]

    def start(self) -> "ConfigLoader":
        """
        Starts the loading process.
        """

        config = self.get_config_file_content()

        if self.custom_config:
            config = Merge(self.custom_config).into(config)

        config = self.conditional_switch(config)

        # Publish the loaded configuration into the shared storage module.
        PyFunceble.storage.CONFIGURATION = Box(
            config,
        )
        PyFunceble.storage.FLATTEN_CONFIGURATION = DictHelper(
            PyFunceble.storage.CONFIGURATION
        ).flatten()
        PyFunceble.storage.HTTP_CODES = Box(
            config["http_codes"],
        )

        if "collection" in config:
            PyFunceble.storage.COLLECTION = Box(config["collection"])

        PyFunceble.storage.LINKS = Box(config["links"])

        return self

    def destroy(self) -> "ConfigLoader":
        """
        Destroys everything loaded.
        """

        try:
            PyFunceble.storage.CONFIGURATION = Box(
                {},
            )
            PyFunceble.storage.FLATTEN_CONFIGURATION = {}
            PyFunceble.storage.HTTP_CODES = Box({})
            PyFunceble.storage.COLLECTION = Box({})
            PyFunceble.storage.LINKS = Box({})
        except (AttributeError, TypeError):  # pragma: no cover ## Safety.
            pass  # This is not a mistake.

        self._custom_config = {}

        return self
def to_json(self) -> str:
    """
    Converts the current object to JSON.
    """

    dict_representation = self.to_dict()

    return DictHelper(dict_representation).to_json(own_class=ComplexJsonEncoder)
pyfunceble_webworker.storage.CONFIG_DIRECTORY, secrets.token_hex(8), ) DirectoryHelper(PyFunceble.storage.CONFIG_DIRECTORY).create() DirectoryHelper(pyfunceble_webworker.storage.CONFIG_DIRECTORY).create() file_helper = FileHelper() pyfunceble_config_loader = ConfigLoader() if file_helper.set_path( os.path.join( pyfunceble_webworker.storage.CONFIG_DIRECTORY, assets_defaults.OVERWRITE_CONFIG_FILE, )).exists(): local = DictHelper().from_yaml_file(file_helper.path) if local: pyfunceble_config_loader.custom_config = local else: pyfunceble_config_loader.custom_config = dict() else: file_helper.write("") pyfunceble_config_loader.custom_config = Merge( pyfunceble_defaults.PERSISTENT_CONFIG).into( pyfunceble_config_loader.custom_config) pyfunceble_config_loader.start() app = FastAPI( title=assets_defaults.PROJECT_NAME,
class ConfigComparison:
    """
    Provides an interface for comparing 2 configuration.
    """

    # "links" entries that must not survive a merge.
    DELETED_LINKS: List[str] = [
        "config",
        "dir_structure",
        "iana",
        "ipv4_reputation",
        "mariadb",
        "mysql",
        "psl",
        "repo",
        "requirements",
        "user_agents",
    ]

    # Top-level keys that must not survive a merge.
    DELETED_CORE: List[str] = [
        "dns_lookup_over_tcp",
        "generate_json",
        "header_printed",
        "iana_whois_server",
        "idna_conversion",
        "logs",
        "maximal_processes",
        "multiprocess_merging_mode",
        "multiprocess",
        "no_http_codes",
        "outputs",
        "shadow_file",
        "status",
        "store_whois_record",
        "unified",
    ]

    # Migration table: legacy flat key -> new dotted (flattened) key.
    # NOTE(review): "print_dots" and "quiet" both map to
    # "cli_testing.display_mode.dots" — the later one processed wins; confirm
    # that is the intended precedence.
    OLD_TO_NEW: dict = {
        "adblock": "cli_decoding.adblock",
        "aggressive": "cli_decoding.adblock_aggressive",
        "auto_continue": "cli_testing.autocontinue",
        "command": "cli_testing.ci.command",
        "command_before_end": "cli_testing.ci.end_command",
        "cooldown_time": "cli_testing.cooldown_time",
        "custom_ip": "cli_testing.hosts_ip",
        "days_between_inactive_db_clean": "cli_testing.days_between.db_clean",
        "days_between_db_retest": "cli_testing.days_between.db_retest",
        "db_type": "cli_testing.db_type",
        "debug": "debug.active",
        "dns_server": "dns.server",
        "filter": "cli_testing.file_filter",
        "generate_complements": "cli_testing.complements",
        "generate_hosts": "cli_testing.file_generation.hosts",
        "hierarchical_sorting": "cli_testing.sorting_mode.hierarchical",
        "inactive_database": "cli_testing.inactive_db",
        "less": "cli_testing.display_mode.less",
        "local": "cli_testing.local_network",
        "mining": "cli_testing.mining",
        "no_files": "cli_testing.file_generation.no_file",
        "plain_list_domain": "cli_testing.file_generation.plain",
        "print_dots": "cli_testing.display_mode.dots",
        "quiet": "cli_testing.display_mode.dots",
        "use_reputation_data": "lookup.reputation",
        "reputation": "lookup.reputation",
        "rpz": "cli_decoding.rpz",
        "show_execution_time": "cli_testing.display_mode.execution_time",
        "show_percentage": "cli_testing.display_mode.percentage",
        "simple": "cli_testing.display_mode.simple",
        "syntax": "cli_testing.testing_mode.syntax",
        "timeout": "lookup.timeout",
        "ci": "cli_testing.ci.active",
        "ci_autosave_commit": "cli_testing.ci.commit_message",
        "ci_autosave_final_commit": "cli_testing.ci.end_commit_message",
        "ci_autosave_minutes": "cli_testing.ci.max_exec_minutes",
        "ci_branch": "cli_testing.ci.branch",
        "ci_distribution_branch": "cli_testing.ci.distribution_branch",
        "whois_database": "cli_testing.whois_db",
        "wildcard": "cli_decoding.wildcard",
    }

    # Legacy keys whose boolean value must be negated when migrated.
    OLD_TO_NEW_NEGATE: dict = {
        "no_special": "lookup.special",
        "no_whois": "lookup.whois",
        "split": "cli_testing.file_generation.unified_results",
    }

    NEW_STATUS_CODES: dict = {
        "up": [102, 207, 208, 226, 429],
        "potentially_down": [451],
        "potentially_up": [
            308,
            403,
            418,
            421,
            422,
            423,
            424,
            426,
            428,
            431,
            506,
            507,
            508,
            510,
            511,
        ],
    }

    # NOTE(review): mutable class-level defaults (shared until the setters
    # below assign an instance attribute). "_upsteam_config" is a typo for
    # "_upstream_config" kept for compatibility with existing pickles/uses.
    _local_config: dict = {}
    _upsteam_config: dict = {}

    dict_helper: DictHelper = DictHelper()

    def __init__(
        self,
        *,
        local_config: Optional[dict] = None,
        upstream_config: Optional[dict] = None,
    ) -> None:
        if local_config:
            self.local_config = local_config

        if upstream_config:
            self.upstream_config = upstream_config

    @property
    def local_config(self) -> dict:
        """
        Provides the current state of the :code:`_local_config`.
        """

        return self._local_config

    @local_config.setter
    def local_config(self, value: dict) -> None:
        """
        Sets the local configuration to work with.

        :raise TypeError:
            When :code:`value` is not a :py:class:`dict`.
        """

        if not isinstance(value, dict):
            raise TypeError(f"<value> should be {dict}, {type(value)} given.")

        # Deep copy so later mutations never leak back to the caller.
        self._local_config = copy.deepcopy(value)

    def set_local_config(self, value: dict) -> "ConfigComparison":
        """
        Sets the local configuration to work with.
        """

        self.local_config = value

        return self

    @property
    def upstream_config(self) -> dict:
        """
        Provides the current state of the :code:`_upstream_config`.
        """

        return self._upsteam_config

    @upstream_config.setter
    def upstream_config(self, value: dict) -> None:
        """
        Sets the upstream configuration to work with.

        :raise TypeError:
            When :code:`value` is not a :py:class:`dict`
        """

        if not isinstance(value, dict):
            raise TypeError(f"<value> should be {dict}, {type(value)} given.")

        self._upsteam_config = copy.deepcopy(value)

    def set_upstream_config(self, value: dict) -> "ConfigComparison":
        """
        Sets the upstream configuration to work with.
        """

        self.upstream_config = value

        return self

    def is_local_identical(self) -> bool:
        """
        Checks if the local configuration is identical to the upstream one.
        """

        # pylint: disable=too-many-boolean-expressions
        # Any structural difference or leftover legacy marker means the
        # local configuration must be (re)merged.
        if (
            not self.dict_helper.set_subject(self.local_config).has_same_keys_as(
                self.upstream_config
            )
            or "user_agent" not in self.local_config
            or not isinstance(self.local_config["user_agent"], dict)
            or "active" in self.local_config["http_codes"]
            or "not_found_default" in self.local_config["http_codes"]
            or "self_managed" not in self.local_config["http_codes"]
            or "dns" not in self.local_config
            or "follow_server_order" not in self.local_config["dns"]
            or "trust_server" not in self.local_config["dns"]
            or "collection" not in self.local_config
            or "collection" not in self.local_config["lookup"]
        ):
            return False

        for index in self.local_config:
            if index in self.DELETED_CORE:
                return False

        for index in self.local_config["links"]:
            if index in self.DELETED_LINKS:
                return False

        # When the status codes are not self-managed, they must match the
        # upstream lists exactly (order-insensitive).
        if "self_managed" in self.local_config["http_codes"] and not bool(
            self.local_config["http_codes"]["self_managed"]
        ):
            for index, values in self.local_config["http_codes"]["list"].items():
                if set(self.upstream_config["http_codes"]["list"][index]) != set(
                    values
                ):
                    return False

        return True

    def get_merged(self) -> dict:
        """
        Provides the merged configuration.
        """

        # pylint: disable=too-many-branches

        if self.is_local_identical():
            return self.local_config

        if not self.local_config:
            return self.upstream_config

        # Work on deep copies; the migration below mutates heavily.
        original_local = copy.deepcopy(self.local_config)
        original_upstream = copy.deepcopy(self.upstream_config)

        flatten_original = self.dict_helper.set_subject(original_local).flatten()
        flatten_upstream = self.dict_helper.set_subject(original_upstream).flatten()

        # Migrate legacy flat keys to their new (dotted) locations.
        for key, value in self.OLD_TO_NEW.items():
            if key not in flatten_original:
                continue

            if value not in flatten_upstream:  # pragma: no cover ## Safety.
                raise RuntimeError(f"<value> ({value!r}) not found.")

            flatten_original[value] = original_local[key]

            del flatten_original[key]

        # Same migration, but the boolean meaning is inverted.
        for key, value in self.OLD_TO_NEW_NEGATE.items():
            if key not in flatten_original:
                continue

            if value not in flatten_upstream:  # pragma: no cover ## Safety.
                raise RuntimeError(f"<value> ({value!r}) not found.")

            flatten_original[value] = not original_local[key]

            del flatten_original[key]

        original_local = self.dict_helper.set_subject(flatten_original).unflatten()
        del flatten_original

        # Local values take precedence over upstream defaults.
        merged = Merge(original_local).into(original_upstream)

        if "dns_lookup_over_tcp" in merged and merged["dns_lookup_over_tcp"]:
            merged["dns"]["protocol"] = "TCP"

        for index in self.DELETED_CORE:
            if index in merged:
                del merged[index]

        for index in self.DELETED_LINKS:
            if index in merged["links"]:
                del merged["links"][index]

        # Not self-managed: force the standard status-code lists back in.
        if not bool(merged["http_codes"]["self_managed"]):
            for index, values in PyFunceble.storage.STD_HTTP_CODES.list.items():
                merged["http_codes"]["list"][index] = list(values)

        # The JSON db backend was replaced by CSV.
        if merged["cli_testing"]["db_type"] == "json":
            merged["cli_testing"]["db_type"] = "csv"

        if merged["cli_testing"]["cooldown_time"] is None:
            merged["cli_testing"]["cooldown_time"] = self.upstream_config[
                "cli_testing"
            ]["cooldown_time"]

        if "user_agent" not in self.local_config or not isinstance(
            self.local_config["user_agent"], dict
        ):
            merged["user_agent"] = self.upstream_config["user_agent"]

        if "active" in merged["http_codes"]:
            del merged["http_codes"]["active"]

        if "not_found_default" in merged["http_codes"]:
            del merged["http_codes"]["not_found_default"]

        return merged
URL = "https://user-agents.net/download" PLATFORMS = ["linux", "win10", "macosx"] REQ_DATA_BASE = { "browser": "chrome", "browser_bits": 64, "platform": "linux", "platform_bits": 64, "download": "json", } BROWSERS = ["chrome", "firefox", "safari", "ie", "edge", "opera"] try: HEADERS = { "User-Agent": DictHelper().from_json_file( OUTPUT_FILE, return_dict_on_error=False)["chrome"]["linux"] } except TypeError: HEADERS = { "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 " "(KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36" } def __request_latest_user_agent(data): """ Provides the latest user agent from https://user-agents.net/ :param dict data: The data to post
class ProductionPrep:
    """
    Provides an interface for the production file modification.

    The idea is that we always have 2 branches: the `dev` and the `master`
    branch. We want to fix all the URL to point to the right one, so this
    interface just provides everything needed for that job.

    Another important part is the cleanup of the production environment.
    What is meant is the cleanup of the `output/` directory and the
    construction of the dir_structure file.

    .. warning::
        This class assumes that you know what you are doing. Meaning that
        you should run this only if your are developing PyFunceble.
    """

    VERSION_FILE_PATH: str = os.path.join(
        PyFunceble.storage.CONFIG_DIRECTORY,
        PyFunceble.cli.storage.DISTRIBUTED_VERSION_FILENAME,
    )

    AVAILABLE_BRANCHES: List[str] = ["dev", "master"]

    regex_helper: RegexHelper = RegexHelper()
    file_helper: FileHelper = FileHelper()
    dict_helper: DictHelper = DictHelper()
    version_utility: VersionUtility = VersionUtility(PyFunceble.storage.PROJECT_VERSION)

    # A copy of the local version file (parsed YAML).
    version_file_content: Optional[dict] = None

    _branch: Optional[str] = None

    # The version declared before this run (from :code:`version_file_content`).
    previous_version: Optional[str] = None

    def __init__(self, branch: Optional[str] = None) -> None:
        self.version_file_content = self.dict_helper.from_yaml_file(
            self.VERSION_FILE_PATH
        )

        self.previous_version = copy.deepcopy(
            self.version_file_content["current_version"]
        )

        if branch is not None:
            self.branch = branch

    def ensure_branch_is_given(func):  # pylint: disable=no-self-argument
        """
        Ensures that the branch is given before running the decorated method.

        :raise TypeError:
            When the :code:`self.branch` is not set.
        """

        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            if not isinstance(self.branch, str):
                raise TypeError(
                    f"<self.branch> should be {str}, "
                    f"{type(self.branch)} given."
                )

            return func(self, *args, **kwargs)  # pylint: disable=not-callable

        return wrapper

    @property
    def branch(self) -> Optional[str]:
        """
        Provides the current state of the :code:`_branch` attribute.
        """

        return self._branch

    @branch.setter
    def branch(self, value: str) -> None:
        """
        Sets the branch to act with.

        :param value:
            The value to set.

        :raise TypeError:
            When the given :code:`value` is not a :py:class:`str`.
        :raise ValueError:
            When the given :code:`value` is empty.
        """

        if not isinstance(value, str):
            raise TypeError(f"<value> should be {str}, {type(value)} given.")

        if not value:
            raise ValueError("<value> should not be empty.")

        self._branch = value

    def set_branch(self, value: str) -> "ProductionPrep":
        """
        Sets the branch to act with.

        :param value:
            The value to set.
        """

        self.branch = value

        return self

    def should_be_deprecated(self, previous_version: str) -> bool:
        """
        Checks if we should deprecates the current version.
        """

        splitted = self.version_utility.get_splitted(previous_version)[0]
        local_splitted = self.version_utility.get_splitted(
            self.version_utility.local_version
        )[0]

        # Only the major and minor parts decide whether the previous
        # version gets deprecated.
        for index, version_part in enumerate(splitted[:2]):
            if int(version_part) < int(local_splitted[index]):
                return True

        return False

    @ensure_branch_is_given
    def update_urls(self, file: str) -> "ProductionPrep":
        """
        Updates the common URLS which are in the given file.

        :param file:
            The file to work with.

        :raise FileNotFoundError:
            When the given :code:`file` is not found.
        """

        if self.branch == "dev":
            regexes = [
                (r"PyFunceble\/%s\/" % "master", "PyFunceble/%s/" % "dev"),
                ("=%s" % "master", "=%s" % "dev"),
                (r"/{1,}en\/%s" % "latest", "/en/%s" % "dev"),
                (r"\/pyfunceble-dev.png", "/pyfunceble-%s.png" % "dev"),
                (r"\/project\/pyfunceble$", "/project/pyfunceble-%s" % "dev"),
                (
                    r"\/badge\/pyfunceble(/month|/week|)$",
                    "/badge/pyfunceble-%s\\1" % "dev",
                ),
                (r"\/blob\/%s\/" % "master", "/blob/%s/" % "dev"),
                (r"\/pypi\/v\/pyfunceble\.png$", "/pypi/v/pyfunceble-%s.png" % "dev"),
                (r"\/(logo|graphmls|gifs\/raw)\/%s\/" % "master", "/\\1/%s/" % "dev"),
                (r"\/(PyFunceble\/tree)\/%s" % "master", "/\\1/%s" % "dev"),
            ]
        elif self.branch == "master":
            regexes = [
                (r"PyFunceble\/%s\/" % "dev", "PyFunceble/%s/" % "master"),
                ("=%s" % "dev", "=%s" % "master"),
                (r"/{1,}en\/%s" % "dev", "/en/%s" % "latest"),
                (r"\/pyfunceble-dev.png", "/pyfunceble-dev.png"),
                (r"\/project\/pyfunceble-%s$" % "dev", "/project/pyfunceble"),
                (
                    r"\/badge\/pyfunceble-%s(/month|/week|)$" % "dev",
                    "/badge/pyfunceble\\1",
                ),
                (r"\/blob\/%s\/" % "dev", "/blob/%s/" % "master"),
                (
                    r"\/pypi\/v\/pyfunceble-%s\.png$" % "dev",
                    "/pypi/v/pyfunceble.png",
                ),
                (r"\/(logo|graphmls|gifs\/raw)\/%s\/" % "dev", "/\\1/%s/" % "master"),
                (r"\/(PyFunceble\/tree)\/%s" % "dev", "/\\1/%s" % "master"),
            ]
        else:
            # BUGFIX: this fallback was an empty dict ({}); use a list so
            # the type stays consistent with the other branches.
            regexes = []

        self.file_helper.set_path(file)

        PyFunceble.facility.Logger.info(
            "Started to update our URL into %r", self.file_helper.path
        )

        if not self.file_helper.exists():
            raise FileNotFoundError(self.file_helper.path)

        to_update = self.file_helper.read()

        for regex, replacement in regexes:
            to_update = self.regex_helper.set_regex(regex).replace_match(
                to_update, replacement, multiline=True
            )

        self.file_helper.write(to_update, overwrite=True)

        PyFunceble.facility.Logger.info(
            "Finished to update our URL into %r", self.file_helper.path
        )

        return self

    def update_docs_urls(self) -> "ProductionPrep":
        """
        Updates all URL in the documentation files.
        """

        to_ignore = ["they-use-d-it.rst"]

        self.update_urls(
            os.path.join(PyFunceble.storage.CONFIG_DIRECTORY, "README.rst")
        )

        for root, _, files in os.walk(
            os.path.join(PyFunceble.storage.CONFIG_DIRECTORY, "docs")
        ):
            for file in files:
                if not file.endswith(".rst"):
                    continue

                full_path = os.path.join(root, file)

                if any(x in full_path for x in to_ignore):
                    continue

                # Reuse the already-computed path instead of rebuilding it.
                self.update_urls(full_path)

        return self

    @staticmethod
    def update_code_format() -> None:
        """
        Updates the format of the source code using black.
        """

        # pylint: disable=import-outside-toplevel, import-error
        import black
        import isort

        def format_file(file: str, isortconfig: isort.settings.Config) -> None:
            """
            Formats the given file using black.

            :param file:
                The file to format.
            :param isortconfig:
                The configuration to apply while sorting the imports.
            """

            isort.api.sort_file(pathlib.Path(file), config=isortconfig)

            black.format_file_in_place(
                pathlib.Path(file),
                fast=False,
                mode=black.Mode(),
                write_back=black.WriteBack.YES,
            )

            PyFunceble.facility.Logger.info("Update format of %r", file)

        isort_config = isort.settings.Config(settings_file="setup.cfg")

        files = [
            os.path.join(PyFunceble.storage.CONFIG_DIRECTORY, "setup.py"),
        ]

        for file in files:
            format_file(file, isort_config)

        for root, _, files in os.walk(
            os.path.join(
                PyFunceble.storage.CONFIG_DIRECTORY, PyFunceble.storage.PROJECT_NAME
            )
        ):
            if "__pycache__" in root:
                continue

            for file in files:
                if not file.endswith(".py"):
                    continue

                format_file(os.path.join(root, file), isort_config)

        for root, _, files in os.walk(
            os.path.join(PyFunceble.storage.CONFIG_DIRECTORY, "tests")
        ):
            if "__pycache__" in root:
                continue

            for file in files:
                if not file.endswith(".py"):
                    continue

                format_file(os.path.join(root, file), isort_config)

    @staticmethod
    def update_documentation() -> None:
        """
        Updates the code documentation.

        :raise RuntimeError:
            When one of the wanted directory is not found.
        """

        PyFunceble.facility.Logger.info(
            "Started to update and generate the documentation.",
        )

        docs_dir_helper = DirectoryHelper("docs")
        source_code_dir_helper = DirectoryHelper("PyFunceble")

        if not docs_dir_helper.exists():
            raise RuntimeError(f"{docs_dir_helper.realpath!r} not found.")

        if not source_code_dir_helper.exists():
            raise RuntimeError(f"{source_code_dir_helper.realpath!r} not found.")

        header = "Code Documentation"
        source_code_destination = os.path.join(docs_dir_helper.realpath, "code")

        CommandHelper(
            f"sphinx-apidoc -d 5 -f -H {header!r} -o "
            f"{source_code_destination!r} {source_code_dir_helper.realpath}"
        ).execute(raise_on_error=True)

        docs_destination = os.path.join(docs_dir_helper.realpath, "_build", "html")

        CommandHelper(
            f"sphinx-build -a -Q {docs_dir_helper.realpath!r} {docs_destination!r}"
        ).execute(raise_on_error=False)

        PyFunceble.facility.Logger.info(
            "Finished to update and generate the documentation.",
        )

    def update_code_urls(self) -> "ProductionPrep":
        """
        Updates all URL in the source code.
        """

        to_ignore = [
            ".gitignore",
            ".keep",
        ]

        self.update_urls(os.path.join(PyFunceble.storage.CONFIG_DIRECTORY, "setup.py"))

        for root, _, files in os.walk(
            os.path.join(
                PyFunceble.storage.CONFIG_DIRECTORY, PyFunceble.storage.PROJECT_NAME
            )
        ):
            if "__pycache__" in root:
                continue

            for file in files:
                if file in to_ignore:
                    continue

                self.update_urls(os.path.join(root, file))

        for root, _, files in os.walk(
            os.path.join(PyFunceble.storage.CONFIG_DIRECTORY, "tests")
        ):
            if "__pycache__" in root:
                continue

            for file in files:
                if file in to_ignore:
                    continue

                self.update_urls(os.path.join(root, file))

        # BUGFIX: previously returned None, which broke the fluent chain
        # used by :code:`start`.
        return self

    @ensure_branch_is_given
    def update_setup_py(self) -> "ProductionPrep":
        """
        Updates content of :code:`setup.py`.

        :raise FileNotFoundError:
            When the :code:`setup.py` file does not exists.
        """

        PyFunceble.facility.Logger.info(
            "Started to update setup.py.",
        )

        if self.branch == "dev":
            regexes = [
                (r'name=".*"', 'name="PyFunceble-dev"'),
                (r'"Development\sStatus\s::.*"', '"Development Status :: 4 - Beta"'),
            ]
        elif self.branch == "master":
            regexes = [
                # BUGFIX: the production (master) package is "PyFunceble",
                # not "PyFunceble-dev".
                (r'name=".*"', 'name="PyFunceble"'),
                (
                    r'"Development\sStatus\s::.*"',
                    '"Development Status :: 5 - Production/Stable"',
                ),
            ]
        else:
            regexes = [
                (r'name=".*"', 'name="PyFunceble-dev"'),
                (
                    r'"Development\sStatus\s::.*"',
                    '"Development Status :: 3 - Alpha"',
                ),
            ]

        self.file_helper.set_path(
            os.path.join(PyFunceble.storage.CONFIG_DIRECTORY, "setup.py")
        )

        if not self.file_helper.exists():
            raise FileNotFoundError(self.file_helper.path)

        to_update = self.file_helper.read()

        for regex, replacement in regexes:
            to_update = self.regex_helper.set_regex(regex).replace_match(
                to_update, replacement, multiline=True
            )

        self.file_helper.write(to_update, overwrite=True)

        # BUGFIX: this closing log message previously said "Started".
        PyFunceble.facility.Logger.info(
            "Finished to update setup.py.",
        )

        return self

    def update_version_file(self) -> "ProductionPrep":
        """
        Updates the version file.
        """

        PyFunceble.facility.Logger.info(
            "Started to update version file.",
        )

        if self.should_be_deprecated(self.previous_version):
            to_append = ".".join(
                self.version_utility.get_splitted(self.version_utility.local_version)[0]
            )

            if to_append not in self.version_file_content["deprecated"]:
                self.version_file_content["deprecated"].append(to_append)

        self.version_file_content[
            "current_version"
        ] = PyFunceble.storage.PROJECT_VERSION

        self.dict_helper.set_subject(self.version_file_content).to_yaml_file(
            self.VERSION_FILE_PATH
        )

        PyFunceble.facility.Logger.info(
            "Finished to update version file.",
        )

        return self

    def update_dir_structure_file(self) -> "ProductionPrep":
        """
        Updates the directory structure.
        """

        DirectoryStructureBackup().start()

        return self

    def start(self) -> "ProductionPrep":
        """
        Starts the production process.
        """

        # BUGFIX: the previous chained form
        # (self.update_setup_py().update_code_urls()...) crashed with an
        # AttributeError because update_code_urls returned None and
        # update_code_format is a staticmethod returning None. Run the
        # steps explicitly instead.
        self.update_setup_py()
        self.update_code_urls()
        self.update_code_format()
        self.update_version_file()

        return self