def _load(self):
    """Download the SDMX-ML data file for this dataset and prime the row iterator.

    Side effects: creates ``self.xml_data`` and assigns the parsed row
    generator to ``self.rows``. Re-raises any HTTP error after logging it.
    """
    # TODO: DSD download, currently disabled:
    #   url = "xxx/%s" % self.dataset_code
    #   download = Downloader(url=url, filename="dataflow-%s.xml" % self.dataset_code)
    #   self.xml_dsd.process(download.get_filepath())
    source_url = "https://www.destatis.de/sddsplus/%s.xml" % self.dataset_code
    downloader = Downloader(url=source_url,
                            filename="data-%s.xml" % self.dataset_code)
    # dimension_keys=self.xml_dsd.dimension_keys is intentionally not passed
    # until the DSD step above is re-enabled.
    self.xml_data = XMLData(provider_name=self.provider_name,
                            dataset_code=self.dataset_code,
                            ns_tag_data=self.ns_tag_data)
    # TODO: inspect response / broaden exception handling once finalized
    try:
        filepath, _response = downloader.get_filepath_and_response()
    except requests.exceptions.HTTPError as err:
        logger.critical("AUTRE ERREUR HTTP : %s" % err.response.status_code)
        raise
    self.rows = self.xml_data.process(filepath)
class DESTATIS_Data(object):
    """Legacy iterator over DESTATIS series for one dataset.

    Downloads the dataset's SDMX-ML file on construction, then yields one
    series dict (``bson``) per ``__next__`` call after enriching it via
    :meth:`build_series`.

    NOTE(review): a newer ``SeriesIterator``-based variant of this class
    exists in this file; this one appears to be the superseded version —
    confirm which is wired into the provider before editing behavior.
    """

    def __init__(self, dataset=None, ns_tag_data=None):
        """
        :param Datasets dataset: Datasets instance
        :param ns_tag_data: XML namespace tag passed through to XMLData
        """
        self.dataset = dataset
        self.ns_tag_data = ns_tag_data
        # Shorthand references into the shared dataset object; updates to
        # these lists are visible to the dataset.
        self.attribute_list = self.dataset.attribute_list
        self.dimension_list = self.dataset.dimension_list
        self.provider_name = self.dataset.provider_name
        self.dataset_code = self.dataset.dataset_code
        #self.xml_dsd = XMLStructure_2_1(provider_name=self.provider_name,
        #                                dataset_code=self.dataset_code)
        self.rows = None
        #self.dsd_id = None
        # Eager download: constructing the object performs network I/O.
        self._load()

    def _load(self):
        """Download the SDMX-ML file and set ``self.rows`` to the parsed row generator."""
        url = "https://www.destatis.de/sddsplus/%s.xml" % self.dataset_code
        download = Downloader(url=url,
                              filename="data-%s.xml" % self.dataset_code,
                              #headers=SDMX_DATA_HEADERS
                              )
        self.xml_data = XMLData(provider_name=self.provider_name,
                                dataset_code=self.dataset_code,
                                ns_tag_data=self.ns_tag_data,
                                #dimension_keys=self.xml_dsd.dimension_keys
                                )
        #TODO: response and exception
        try:
            filepath, response = download.get_filepath_and_response()
        except requests.exceptions.HTTPError as err:
            # Log the status code, then propagate to the caller.
            logger.critical("AUTRE ERREUR HTTP : %s" % err.response.status_code)
            raise
        self.rows = self.xml_data.process(filepath)

    def __next__(self):
        """Return the next enriched series dict.

        NOTE(review): also raises StopIteration when the underlying
        generator yields a falsy value — presumably a sentinel for
        end-of-data; confirm against XMLData.process.
        """
        _series = next(self.rows)
        if not _series:
            raise StopIteration()
        return self.build_series(_series)

    def build_series(self, bson):
        """Stamp ``last_update`` and record the series' dimension codes.

        :param dict bson: raw series dict produced by XMLData
        :return: the same dict, mutated in place
        """
        bson["last_update"] = self.dataset.last_update
        # NOTE(review): the newer SeriesIterator variant calls
        # update_entry(key, item, item) here — confirm which argument
        # order (name, key, value) update_entry actually expects.
        for key, item in bson['dimensions'].items():
            self.dimension_list.update_entry(key, key, item)
        """
        FIXME:
        attributes = OrderedDict()
        if 'attributes' in bson and bson['attributes']:
            for key, item in bson['attributes'].items():
                attributes[key] = {key: item}
        pprint(attributes)
        self.attribute_list.set_dict(attributes)
        pprint(self.dataset.bson)
        pprint(bson)
        """
        return bson
class DESTATIS_Data(SeriesIterator):
    """Iterator over DESTATIS series for one dataset (SeriesIterator variant).

    Downloads the dataset's SDMX-ML file on construction; the inherited
    iteration protocol pulls raw series dicts from ``self.rows`` and passes
    them through :meth:`build_series`.
    """

    def __init__(self, dataset=None, ns_tag_data=None):
        """
        :param Datasets dataset: Datasets instance
        :param ns_tag_data: XML namespace tag passed through to XMLData
        """
        super().__init__()
        self.dataset = dataset
        self.ns_tag_data = ns_tag_data
        # Shared mutable code lists owned by the dataset object.
        self.attribute_list = self.dataset.attribute_list
        self.dimension_list = self.dataset.dimension_list
        self.provider_name = self.dataset.provider_name
        self.dataset_code = self.dataset.dataset_code
        self.xml_dsd = XMLStructure(provider_name=self.provider_name)
        self.rows = None
        # Eager download: constructing the object performs network I/O.
        self._load()

    def _load(self):
        """Download the SDMX-ML data file and set ``self.rows`` to the parsed row generator.

        Raises: re-raises ``requests.exceptions.HTTPError`` after logging.
        """
        # TODO: DSD download, currently disabled:
        #   url = "xxx/%s" % self.dataset_code
        #   download = Downloader(url=url, filename="dataflow-%s.xml" % self.dataset_code)
        #   self.xml_dsd.process(download.get_filepath())
        url = "https://www.destatis.de/sddsplus/%s.xml" % self.dataset_code
        download = Downloader(url=url,
                              filename="data-%s.xml" % self.dataset_code)
        # dimension_keys=self.xml_dsd.dimension_keys is not passed until the
        # DSD step above is re-enabled.
        self.xml_data = XMLData(provider_name=self.provider_name,
                                dataset_code=self.dataset_code,
                                ns_tag_data=self.ns_tag_data)
        #TODO: response and exception
        try:
            filepath, response = download.get_filepath_and_response()
        except requests.exceptions.HTTPError as err:
            logger.critical("AUTRE ERREUR HTTP : %s" % err.response.status_code)
            raise
        self.rows = self.xml_data.process(filepath)

    def build_series(self, bson):
        """Stamp ``last_update`` and register dimension/attribute codes.

        :param dict bson: raw series dict produced by XMLData
        :return: the same dict, mutated in place
        """
        bson["last_update"] = self.dataset.last_update
        for key, item in bson['dimensions'].items():
            self.dimension_list.update_entry(key, item, item)
        # FIX: 'attributes' may be missing or None in a series dict (the
        # legacy variant in this file guards with
        # "if 'attributes' in bson and bson['attributes']:"), so an
        # unguarded bson['attributes'] could raise KeyError/TypeError.
        for key, values in (bson.get('attributes') or {}).items():
            # De-duplicate before registering each attribute value once.
            for value in set(values):
                self.attribute_list.update_entry(key, value, value)
        return bson