def integrate(self, datamodels):
    """ Integrates a new RDF file with a collection of Data Models.

    Clears every previously stored dataset ID, resolves the Data Model
    parameter (explicit tuple or the 'all' default), serializes the
    resulting Catalogue and writes the whole new RDF to the filesystem.

    :param tuple datamodels: Data Models that will be added to the RDF file
    :return: None
    """
    logging.info(
        msg.EDP_INTEGRATION_START.format(datamodels=', '.join(datamodels)))
    try:
        # Drop any dataset IDs left over from a previous integration run.
        for stored in ConfigManager.get_integrated_datasets():
            ConfigManager.remove_dataset_id(stored)
        sections = EDP.check_datamodels_parameter(datamodels, False)
        rdf = Serializer.serialize_rdf_create(Catalogue(sections))
        Serializer.write_rdf(rdf)
        logging.info(msg.EDP_INTEGRATION_FINISHED_OK)
    except Exception as error:
        logging.error(error)
        logging.info(msg.EDP_INTEGRATION_FINISHED_KO)
def check_datamodels_parameter(parameter, integrated):
    """ Resolves the datamodels parameter into a list of sections.

    When the first tuple element equals the 'all' default option, the
    Data Model sections are looked up in the config file (either every
    configured one, or only those already integrated). Otherwise the
    tuple is returned as a list, exactly as the user provided it.

    :param tuple parameter: Tuple of strings containing the Data Model
        informed by the user
    :param bool integrated: If the Data Models searched are those
        already integrated or not
    :return: Collection of Data Models to work with
    :rtype: list[str]
    """
    if parameter[0] != const.DEFAULT_DATAMODEL_OPTION_COMMAND:
        return list(parameter)
    if integrated:
        return ConfigManager.get_integrated_datasets()
    return ConfigManager.get_datamodels()
def __init__(self, sections):
    """ Initializes Catalogue.

    Reads every catalogue-level field from the config file, validates
    the mandatory ones, generates the catalogue URI/ID and instantiates
    one Dataset per requested section.

    :param list[str] sections: Config file sections to integrate.
    """
    logging.debug(
        msg.CATALOGUE_INSTANTIATING_MODEL_START.format(
            datamodels=', '.join(sections)))
    self.sections = sections
    self.title = ConfigManager.get_value(const.CATALOGUE_SECTION,
                                         const.CATALOGUE_TITLE)
    Validators.is_informed(const.CATALOGUE_TITLE, self.title)
    self.description = ConfigManager.get_value(const.CATALOGUE_SECTION,
                                               const.CATALOGUE_DESCRIPTION)
    Validators.is_informed(const.CATALOGUE_DESCRIPTION, self.description)
    self.publisher_name = ConfigManager.get_value(
        const.CATALOGUE_SECTION, const.CATALOGUE_PUBLISHER_NAME)
    Validators.is_informed(const.CATALOGUE_PUBLISHER_NAME,
                           self.publisher_name)
    self.publisher_uri = ConfigManager.get_value(
        const.CATALOGUE_SECTION, const.CATALOGUE_PUBLISHER_URI)
    Validators.is_informed(const.CATALOGUE_PUBLISHER_URI,
                           self.publisher_uri)
    Validators.is_valid_url(const.CATALOGUE_PUBLISHER_URI,
                            self.publisher_uri)
    self.publisher_type = Helpers.transform_vocabulary(
        const.CATALOGUE_PUBLISHER_TYPE,
        ConfigManager.get_value(const.CATALOGUE_SECTION,
                                const.CATALOGUE_PUBLISHER_TYPE),
        const.PUBLISHER_TYPE_RELATION)
    self.publisher_homepage = ConfigManager.get_value(
        const.CATALOGUE_SECTION, const.CATALOGUE_PUBLISHER_HOMEPAGE)
    Validators.is_valid_url(const.CATALOGUE_PUBLISHER_HOMEPAGE,
                            self.publisher_homepage)
    self.homepage = ConfigManager.get_value(const.CATALOGUE_SECTION,
                                            const.CATALOGUE_HOMEPAGE)
    # Fixed: this check was labelled with the section constant
    # (CATALOGUE_SECTION) instead of the homepage field constant,
    # producing a misleading error message on an invalid URL.
    Validators.is_valid_url(const.CATALOGUE_HOMEPAGE, self.homepage)
    uri_host = ConfigManager.get_value(const.MAIN_SECTION, const.URI_HOST)
    uri_structure = ConfigManager.get_value(const.MAIN_SECTION,
                                            const.URI_STRUCTURE,
                                            const.URI_STRUCTURE_DEFAULT)
    self.uri, self.id = Helpers.generate_uri(uri_host, uri_structure,
                                             Model.CATALOGUE)
    self.issued = Helpers.get_issued_date(self.id)
    logging.debug(msg.CATALOGUE_INSTANTIATING_MODEL_FINISHED)
    self.datasets = self.create_datasets(sections)
def get_integrated_datamodels():
    """ Returns the Data Models (config file sections) currently
    included in the RDF/XML file.

    :return: Collection of Data Models integrated
    :rtype: list[str]
    """
    integrated = ConfigManager.get_integrated_datasets()
    return integrated
def get_host(key):
    """ Obtains the host value specified in the config file.

    The value is validated as a URL before being normalized.

    :param str key: Key name of wanted host
    :return: Well-formatted host wanted.
    :rtype: str
    """
    raw_host = ConfigManager.get_value(const.MAIN_SECTION, key)
    Validators.is_valid_url(key, raw_host)
    cleaned_host = APIBuilder.clean_host(raw_host)
    return cleaned_host
def delete(self, datamodels):
    """ Removes datasets of the specified Data Models from an existing
    RDF file.

    Resolves the Data Model parameter (explicit tuple or the 'all'
    default), removes each dataset from the current RDF and writes the
    result back to the filesystem. When the last Data Model in the RDF
    is removed, the whole file is deleted instead.

    :param tuple datamodels: Data Models that will be removed from the
        RDF file
    :return: None
    """
    # Hoisted out of the except handler: importing while already
    # handling an error risks masking the original failure.
    import os

    try:
        logging.info(
            msg.EDP_DELETE_START.format(datamodels=', '.join(datamodels)))
        rdf = None
        datamodels = EDP.check_datamodels_parameter(datamodels, True)
        for dataset in datamodels:
            rdf = Serializer.serialize_rdf_remove(
                dataset, ConfigManager.get_dataset_id(dataset), rdf)
            ConfigManager.remove_dataset_id(dataset)
        Serializer.write_rdf(rdf)
        logging.info(msg.EDP_DELETE_FINISHED_OK)
    except LastDatasetError as error:
        # Last dataset removed: forget every stored ID and delete the
        # RDF file itself instead of writing an empty one.
        logging.warning(error)
        for dataset in ConfigManager.get_integrated_datasets():
            ConfigManager.remove_dataset_id(dataset)
        os.remove(Helpers.get_rdf_path())
        logging.info(msg.EDP_DELETE_FINISHED_OK)
    except Exception as error:
        logging.error(error)
        logging.info(msg.EDP_DELETE_FINISHED_KO)
def __init__(self, file_path):
    """ Instantiate the EDP core class.

    Configures logging, loads the config file, validates the mandatory
    main-section values and pings the integration API status endpoint
    (a non-200 response is only logged as a warning, not fatal).

    :param str file_path: Path to the configuration file
    """
    try:
        config_logging()
        logging.debug(msg.EDP_INITIALIZING)
        logging.debug(msg.EDP_READING_CONFIG.format(path=file_path))
        ConfigManager.set_config_path(file_path)
        Validators.is_informed(
            const.URI_STRUCTURE,
            ConfigManager.get_value(const.MAIN_SECTION,
                                    const.URI_STRUCTURE))
        Validators.is_informed(
            const.URI_HOST,
            ConfigManager.get_value(const.MAIN_SECTION, const.URI_HOST))
        integration_api = ConfigManager.get_value(const.MAIN_SECTION,
                                                  const.INTEGRATION_API)
        Validators.is_informed(const.INTEGRATION_API, integration_api)
        Validators.is_valid_url(const.INTEGRATION_API, integration_api)
        # Fixed: this check was labelled with the section name
        # (MAIN_SECTION) instead of the INTEGRATION_ORION key,
        # producing a misleading error message when the value is
        # missing (every sibling call passes the key constant).
        Validators.is_informed(
            const.INTEGRATION_ORION,
            ConfigManager.get_value(const.MAIN_SECTION,
                                    const.INTEGRATION_ORION))
        integration_api = integration_api.strip('/')
        logging.debug(
            msg.EDP_CHECK_API_STATUS.format(host=integration_api))
        response = requests.get('{host}/{route}'.format(
            host=integration_api, route=const.API_URL_STATUS))
        if response.status_code != 200:
            logging.warning(
                msg.EDP_API_STATUS_DOWN.format(host=integration_api))
    except ValueError:
        # Logging could not be configured; fall back to plain console
        # output. click is imported lazily so normal runs skip it.
        import click
        click.echo(
            msg.EDP_ERROR_INSTANTIATING_LOGGER.format(
                date=datetime.now().strftime(const.SIMPLE_DATE_FORMAT),
                script=__name__))
        sys.exit()
    except Exception as error:
        logging.error(error)
        sys.exit()
def __init__(self, section):
    """ Initializes Dataset.

    Reads every dataset-level field of the given config section,
    validates the mandatory ones, reuses or generates the dataset
    URI/ID, persists the ID back to the config, and instantiates the
    dataset's resources.

    :param str section: Config file section the dataset belongs
    """
    logging.debug(
        msg.DATASET_INSTANTIATING_MODEL_START.format(datamodel=section))
    self.section = section
    # The section must be one of the Data Models declared in the config.
    Validators.is_expected_value(const.DATAMODEL_SECTION, section,
                                 ConfigManager.get_datamodels())
    self.service = ConfigManager.get_value(
        section, const.DATAMODEL_FIWARE_SERVICE)
    self.service_path = ConfigManager.get_value(
        section, const.DATAMODEL_FIWARE_SERVICE_PATH)
    self.type = ConfigManager.get_value(section, const.DATAMODEL_TYPE)
    Validators.is_informed(const.DATAMODEL_TYPE, self.type)
    self.title = ConfigManager.get_value(section, const.DATASET_TITLE)
    Validators.is_informed(const.DATASET_TITLE, self.title)
    self.description = ConfigManager.get_value(
        section, const.DATASET_DESCRIPTION)
    Validators.is_informed(const.DATASET_DESCRIPTION, self.description)
    self.contact_point = ConfigManager.get_value(
        section, const.DATASET_CONTACT_POINT)
    # Keywords are stored '%'-separated in the config value.
    self.keywords = ConfigManager.get_value(
        section, const.DATASET_KEYWORDS).split('%')
    # NOTE(review): validation only runs when more than one keyword is
    # present — a single (or empty) keyword value is never checked;
    # confirm this asymmetry is intended.
    if len(self.keywords) > 1:
        Validators.is_informed(const.DATASET_KEYWORDS, self.keywords)
    self.publisher_name = ConfigManager.get_value(
        section, const.DATASET_PUBLISHER_NAME)
    self.publisher_uri = ConfigManager.get_value(
        section, const.DATASET_PUBLISHER_URI)
    Validators.is_valid_url(const.DATASET_PUBLISHER_URI,
                            self.publisher_uri)
    self.publisher_type = Helpers.transform_vocabulary(
        const.DATASET_PUBLISHER_TYPE,
        ConfigManager.get_value(section, const.DATASET_PUBLISHER_TYPE),
        const.PUBLISHER_TYPE_RELATION)
    self.publisher_homepage = ConfigManager.get_value(
        section, const.DATASET_PUBLISHER_HOMEPAGE)
    Validators.is_valid_url(const.DATASET_PUBLISHER_HOMEPAGE,
                            self.publisher_homepage)
    # Themes are whitespace-separated in the config value.
    self.themes = Helpers.transform_themes(
        ConfigManager.get_value(section, const.DATASET_THEMES).split())
    self.access_rights = Helpers.transform_vocabulary(
        const.DATASET_ACCESS_RIGHTS,
        ConfigManager.get_value(section, const.DATASET_ACCESS_RIGHTS),
        const.DATASET_ACCESS_RIGHTS_RELATION)
    self.periodicity = Helpers.transform_vocabulary(
        const.DATASET_PERIODICITY,
        ConfigManager.get_value(section, const.DATASET_PERIODICITY),
        const.DATASET_FREQUENCY_RELATION)
    self.spatial = ConfigManager.get_value(section, const.DATASET_SPATIAL)
    if self.spatial:
        # Spatial is optional; when informed it must be a valid path,
        # from which the polygon is loaded (replacing the raw value).
        Validators.is_valid_path(const.DATASET_SPATIAL, self.spatial)
        self.spatial = Helpers.get_spatial_polygon(self.spatial)
    self.landing_page = ConfigManager.get_value(
        section, const.DATASET_LANDING_PAGE)
    self.allocations = ConfigManager.get_value(
        section, const.DATASET_ALLOCATION).split()
    # Reuse the dataset ID from a previous integration when available,
    # so the generated URI stays stable across runs.
    self.id = ConfigManager.get_dataset_id(section)
    uri_host = ConfigManager.get_value(const.MAIN_SECTION, const.URI_HOST)
    uri_structure = ConfigManager.get_value(const.MAIN_SECTION,
                                            const.URI_STRUCTURE,
                                            const.URI_STRUCTURE_DEFAULT)
    self.uri, self.id = Helpers.generate_uri(
        uri_host, uri_structure, Model.DATASET,
        self.id if self.id else None)
    logging.debug(msg.DATASET_SAVING_ID.format(datamodel=section,
                                               id=self.id))
    # Persist the (possibly new) ID so later runs can find and reuse it.
    ConfigManager.save_dataset_id(section, self.id)
    self.issued = Helpers.get_issued_date(self.id) if self.id else ''
    self.resources = self.create_resources()
    logging.debug(msg.DATASET_INSTANTIATING_MODEL_FINISHED)