Example No. 1
0
    def _get_data_by_dimension(self):
        """Yield ``(row, err)`` pairs for every series of the INSEE dataset.

        Splits the SDMX query along one dimension: for each value of the
        selected dimension, downloads the matching data file and yields the
        parsed rows.  A final ``(None, None)`` pair signals exhaustion to the
        caller.

        Raises:
            requests.HTTPError: on any HTTP error other than the provider's
                "no result" status.
        """
        dimension_keys, dimensions = get_dimensions_from_dsd(self.xml_dsd,
                                                             self.provider_name,
                                                             self.dataset_code)

        # IPC-2015-COICOP is special-cased: split on the dimension with the
        # most values ("max") instead of the default "avg" heuristic.
        choice = "avg"
        if self.dataset_code in ["IPC-2015-COICOP"]:
            choice = "max"

        position, _key, dimension_values = select_dimension(dimension_keys,
                                                            dimensions,
                                                            choice=choice)

        count_dimensions = len(dimension_keys)

        for dimension_value in dimension_values:
            # For each value of the dimension, build an SDMX URL key:
            # wildcard (".") at every position except the selected one.
            sdmx_key = [dimension_value if i == position else "."
                        for i in range(count_dimensions)]
            key = "".join(sdmx_key)

            url = "http://www.bdm.insee.fr/series/sdmx/data/%s/%s" % (self.dataset_code, key)
            filename = "data-%s-%s.xml" % (self.dataset_code, key.replace(".", "_"))
            download = Downloader(url=url,
                                  filename=filename,
                                  store_filepath=self.store_path)
            filepath, response = download.get_filepath_and_response()

            if filepath:
                # Register the temp file for cleanup by the fetcher.
                self.fetcher.for_delete.append(filepath)

            if response.status_code == HTTP_ERROR_NO_RESULT:
                # No data for this key: skip silently.
                continue
            elif response.status_code >= 400:
                # raise_for_status() raises HTTPError itself and returns None;
                # re-raising its return value ("raise None") would be a
                # TypeError if it ever returned normally.
                response.raise_for_status()

            for row, err in self.xml_data.process(filepath):
                yield row, err

        yield None, None
Example No. 2
0
    def _get_data_by_dimension(self):
        """Yield ``(row, err)`` pairs for every series of the IMF dataset.

        Splits the SDMX query along the dimension with the most values,
        downloads one data file per dimension value and yields the parsed
        rows.  A final ``(None, None)`` pair signals exhaustion to the caller.

        Raises:
            requests.HTTPError: on any 5xx response (4xx means "no data for
                this key" and is skipped).
        """
        dimension_keys, dimensions = get_dimensions_from_dsd(self.xml_dsd,
                                                             self.provider_name,
                                                             self.dataset_code)

        position, _key, dimension_values = select_dimension(dimension_keys, dimensions, choice="max")

        count_dimensions = len(dimension_keys)

        for dimension_value in dimension_values:
            # For each value of the dimension, build an SDMX URL key:
            # wildcard (".") at every position except the selected one.

            # Count of (row, err) pairs yielded for this key, used to warn
            # about a provider-side result-size limit.
            local_count = 0

            sdmx_key = [dimension_value if i == position else "."
                        for i in range(count_dimensions)]
            key = "".join(sdmx_key)

            url = "%s/%s" % (self._get_url_data(), key)
            filename = "data-%s-%s.xml" % (self.dataset_code, key.replace(".", "_"))
            download = Downloader(url=url,
                                  filename=filename,
                                  store_filepath=self.store_path,
                                  client=self.fetcher.requests_client)
            filepath, response = download.get_filepath_and_response()

            if filepath:
                # Register the temp file for cleanup by the fetcher.
                self.fetcher.for_delete.append(filepath)

            if 400 <= response.status_code < 500:
                # 4xx: treated as "no data for this key" — skip.
                continue
            elif response.status_code >= 500:
                # raise_for_status() raises HTTPError itself and returns None;
                # re-raising its return value ("raise None") would be a
                # TypeError if it ever returned normally.
                response.raise_for_status()

            for row, err in self.xml_data.process(filepath):
                yield row, err
                local_count += 1

            if local_count >= 2999:
                logger.warning("TODO: VRFY - series > 2999 for provider[IMF] - dataset[%s] - key[%s]" % (self.dataset_code, key))

        yield None, None
Example No. 3
0
 def _get_dimensions_from_dsd(self):
     """Return the dimensions parsed from this dataset's DSD document."""
     return get_dimensions_from_dsd(self.xml_dsd,
                                    self.provider_name,
                                    self.dataset_code)
Example No. 4
0
 def _get_dimensions_from_dsd(self):
     """Return the dimensions parsed from this dataset's DSD document."""
     return get_dimensions_from_dsd(
         self.xml_dsd, self.provider_name, self.dataset_code)
Example No. 5
0
    def _get_data_by_dimension(self):
        """Yield ``(row, err)`` pairs for every series of the ECB dataset.

        Splits the SDMX query along one dimension and downloads one data
        file per dimension value, using conditional requests
        (If-Modified-Since / Last-Modified stored in the dataset metadata)
        to skip unchanged data.  A final ``(None, None)`` pair signals
        exhaustion to the caller.

        Raises:
            requests.HTTPError: on any HTTP error other than the provider's
                "not modified" / "no result" statuses.
        """
        self.xml_data = XMLData(provider_name=self.provider_name,
                                dataset_code=self.dataset_code,
                                xml_dsd=self.xml_dsd,
                                frequencies_supported=FREQUENCIES_SUPPORTED)

        dimension_keys, dimensions = get_dimensions_from_dsd(self.xml_dsd,
                                                             self.provider_name,
                                                             self.dataset_code)

        position, _key, dimension_values = select_dimension(dimension_keys, dimensions)

        count_dimensions = len(dimension_keys)

        for dimension_value in dimension_values:
            # Build an SDMX URL key: wildcard (".") at every position except
            # the selected one.
            sdmx_key = [dimension_value if i == position else "."
                        for i in range(count_dimensions)]
            key = "".join(sdmx_key)

            url = "http://sdw-wsrest.ecb.int/service/data/%s/%s" % (self.dataset_code, key)
            # Copy the shared headers constant: assigning "If-Modified-Since"
            # onto SDMX_DATA_HEADERS directly would mutate the module-level
            # dict and leak a stale conditional header into every later
            # request (and later iterations of this loop).
            headers = dict(SDMX_DATA_HEADERS)

            last_modified = None
            if self.dataset.metadata and "Last-Modified" in self.dataset.metadata:
                headers["If-Modified-Since"] = self.dataset.metadata["Last-Modified"]
                last_modified = self.dataset.metadata["Last-Modified"]

            filename = "data-%s-%s.xml" % (self.dataset_code, key.replace(".", "_"))
            download = Downloader(url=url,
                                  filename=filename,
                                  store_filepath=self.store_path,
                                  headers=headers,
                                  client=self.fetcher.requests_client)
            filepath, response = download.get_filepath_and_response()

            if filepath:
                # Register the temp file for cleanup by the fetcher.
                self.fetcher.for_delete.append(filepath)

            if response.status_code == HTTP_ERROR_NOT_MODIFIED:
                msg = "Reject dataset updated for provider[%s] - dataset[%s] - update-date[%s]"
                logger.warning(msg % (self.provider_name, self.dataset_code, last_modified))
                continue

            elif response.status_code == HTTP_ERROR_NO_RESULT:
                # No data for this key: skip silently.
                continue

            elif response.status_code >= 400:
                # raise_for_status() raises HTTPError itself and returns None;
                # re-raising its return value ("raise None") would be a
                # TypeError if it ever returned normally.
                response.raise_for_status()

            # Remember the server's Last-Modified for the next conditional
            # request.
            if "Last-Modified" in response.headers:
                if not self.dataset.metadata:
                    self.dataset.metadata = {}
                self.dataset.metadata["Last-Modified"] = response.headers["Last-Modified"]

            for row, err in self.xml_data.process(filepath):
                yield row, err

        yield None, None