def download(self, filename, log=False):
    """Download the query result to *filename*.

    Tries each API server in turn and returns on the first successful
    download.  If every server fails, re-raises the last IOError.

    Args:
        filename: path the downloaded content is written to.
        log: optional logger; when truthy, each attempted URL is logged
            at debug level.

    Raises:
        IOError: the last failure, when no server could serve the file.
    """
    last_error = None
    for i in range(len(api_servers)):
        try:
            if log:
                log.debug(self.get_url(i))
            download.wget(self.get_url(i), filename)
            return
        except IOError as e:
            # Keep the failure around: in Python 3 the except target 'e'
            # is unbound after the clause, so the original 'raise e'
            # outside the loop raised NameError instead of the IOError.
            last_error = e
    raise last_error
def test_wget0(self, mock_open, mock_pb, mock_gr):
    """wget writes one chunk per item of iter_content and, with no
    Content-Length header, starts the progress bar with size 0."""
    mock_gr.return_value = mock.MagicMock()
    # range stands in for iter_content: called with chunk_size it yields
    # chunk_size items, one write() per item.
    mock_gr.return_value.iter_content = range
    mock_gr.return_value.headers = {}
    file_mock = mock.MagicMock()
    mock_open.return_value = mock.MagicMock()
    mock_open.return_value.__enter__.return_value = file_mock
    wget('foo', 'bar')
    # assertEquals is a deprecated alias removed in Python 3.12.
    self.assertEqual(file_mock.write.call_count, chunk_size)
    mock_pb.assert_called_with(0)
def test_wget0(self, mock_open, mock_pb, mock_gr):
    """With an empty headers dict, wget writes one chunk per item of
    iter_content and builds the progress bar for a total of 0 bytes."""
    # Fake the file object returned by the open() context manager.
    fake_file = mock.MagicMock()
    opened = mock.MagicMock()
    opened.__enter__.return_value = fake_file
    mock_open.return_value = opened
    # Fake the HTTP response: range stands in for iter_content, so a
    # call with chunk_size yields chunk_size chunks.
    response = mock.MagicMock()
    response.iter_content = range
    response.headers = {}
    mock_gr.return_value = response
    wget('foo', 'bar')
    self.assertEqual(fake_file.write.call_count, chunk_size)
    mock_pb.assert_called_once_with(total=0, unit='B', unit_scale=True,
                                    unit_divisor=chunk_size, leave=False)
def get_atom_file(self, url):
    """
    Given the url of a Cadastre ATOM service, tries to download the ZIP
    file for self.zip_code into self.path.

    Raises:
        ValueError: when the service response contains no ZIP link for
            self.zip_code.
    """
    # Raw string: '\w' in a plain literal is an invalid escape sequence
    # (DeprecationWarning today, a SyntaxError in future Python).
    s = re.search(r'INSPIRE/(\w+)/', url)
    log.debug(_("Searching the url for the '%s' layer of '%s'..."),
              s.group(1), self.zip_code)
    response = download.get_response(url)
    # Scan the ATOM response body for the ZIP link of this zip code.
    s = re.search('http.+/%s.+zip' % self.zip_code, response.text)
    if not s:
        raise ValueError(_("Zip code '%s' don't exists") % self.zip_code)
    url = s.group(0)
    filename = url.split('/')[-1]
    out_path = os.path.join(self.path, filename)
    log.info(_("Downloading '%s'"), out_path)
    download.wget(url, out_path)
def read(self, prov_code):
    """Download (when missing) and load the CDAU CSV layer for a province.

    Args:
        prov_code: province code; must be a key of ``andalucia``.

    Returns:
        The loaded layer with ``source_date`` set from its metadata file.

    Raises:
        ValueError: for an unknown province code.
        IOError: when the CSV can't be loaded as a layer.
    """
    # Membership on the dict itself is an O(1) lookup;
    # list(andalucia.keys()) built a throwaway list for an O(n) scan.
    if prov_code not in andalucia:
        raise ValueError(_("Province code '%s' not valid") % prov_code)
    csv_fn = csv_name.format(andalucia[prov_code])
    csv_path = os.path.join(self.path, csv_fn)
    url = cdau_url.format(csv_fn)
    if not os.path.exists(csv_path):
        log.info(_("Downloading '%s'"), csv_path)
        download.wget(url, csv_path)
    csv = layer.BaseLayer(csv_path, csv_fn, 'ogr')
    if not csv.isValid():
        raise IOError(_("Failed to load layer '%s'") % csv_path)
    csv.setCrs(QgsCoordinateReferenceSystem(cdau_crs))
    log.info(_("Read %d features in '%s'"), csv.featureCount(), csv_path)
    # Metadata sidecar (same name, .txt) provides the source date.
    self.get_metadata(csv_path.replace('.csv', '.txt'))
    csv.source_date = self.src_date
    return csv
def run():
    """Compare Cadastre municipality names against OSM relation names.

    For every province, walks the building ATOM feed, downloads each
    municipality's metadata ZIP to get its bounding box, queries Overpass
    for admin_level=8 relations inside it, fuzzy-matches the Cadastre
    name against the relation names, and prints/logs the result.

    Python 2 script (print statement, u'' literals).  Relies on
    module-level names: setup, download, etree, ns, baseurl, zipfile,
    overpass, hgwnames, trans, fh, sys, os.
    """
    for prov_code in setup.valid_provinces:
        url = setup.prov_url['BU'].format(code=prov_code)
        response = download.get_response(url)
        root = etree.fromstring(response.content)
        for entry in root.findall("atom:entry[atom:title]", namespaces=ns):
            title = entry.find('atom:title', ns).text
            # Title layout: 1-char prefix, 5-digit zip code, then the
            # municipality name once 'buildings' is stripped out.
            zip_code = title[1:6]
            mun = title.replace('buildings', '').strip()[6:]
            url = u"{0}{1}/{2}-{3}/A.ES.SDGC.BU.{2}.zip".format(
                baseurl, prov_code, zip_code, mun)
            download.wget(url, 'temp')
            zf = zipfile.ZipFile('temp')
            # The metadata XML inside the ZIP carries the bounding box.
            root = etree.parse(
                zf.open('A.ES.SDGC.BU.MD.{}.xml'.format(zip_code))).getroot()
            gml_bbox = root.find('.//gmd:EX_GeographicBoundingBox', ns)
            gml_bbox_l = gml_bbox.find('gmd:westBoundLongitude/gco:Decimal', ns)
            gml_bbox_r = gml_bbox.find('gmd:eastBoundLongitude/gco:Decimal', ns)
            gml_bbox_b = gml_bbox.find('gmd:southBoundLatitude/gco:Decimal', ns)
            gml_bbox_t = gml_bbox.find('gmd:northBoundLatitude/gco:Decimal', ns)
            # Overpass bbox order: south, west, north, east.
            bbox = ','.join([
                gml_bbox_b.text, gml_bbox_l.text, gml_bbox_t.text,
                gml_bbox_r.text
            ])
            query = overpass.Query(bbox, 'json', False, False)
            query.add('rel["admin_level"="8"]')
            response = download.get_response(query.get_url())
            # Wipe any progress output left on the current console line.
            sys.stdout.write(' ' * 70 + '\r')
            data = response.json()
            matching = hgwnames.dsmatch(mun, data['elements'],
                                        lambda e: e['tags']['name'])
            match = matching['tags']['name'] if matching else ''
            # Cadastre names are compared upper-cased; 'trans' is
            # presumably a translation table normalising characters --
            # confirm at its definition.
            ok = mun == match.upper().translate(trans)
            # Red for mismatches, default colour otherwise.
            color = {False: '\033[0;31m', True: '\033[0m'}[ok]
            print u'{}{}\t{}\t{}\t{}'.format(color, zip_code, mun, match, ok)
            fh.write(u'{}\t{}\t{}\t{}\n'.format(zip_code, mun, match, ok))
    # Reset terminal colour and drop the temporary download.
    print '\033[0m'
    if os.path.exists('temp'):
        os.remove('temp')
def run(): qgs = catatom2osm.QgsSingleton() for prov_code in setup.valid_provinces: url = setup.prov_url['BU'].format(code=prov_code) response = download.get_response(url) root = etree.fromstring(response.content) for entry in root.findall("atom:entry[atom:title]", namespaces=ns): title = entry.find('atom:title', ns).text zip_code = title[1:6] mun = title.replace('buildings', '').strip()[6:] url = u"{0}{1}/{2}-{3}/A.ES.SDGC.BU.{2}.zip".format(baseurl, prov_code, zip_code, mun) gml_fn = ".".join((setup.fn_prefix, 'BU', zip_code, 'building.gml')) download.wget(url, 'temp.zip') gml = layer.BaseLayer('/vsizip/temp.zip/'+gml_fn, 'temp', 'ogr') sys.stdout.write(' '*70+'\r') c = gml.featureCount() print zip_code, mun, c fh.write(u'{}\t{}\t{}\n'.format(zip_code, mun, c)) if os.path.exists('temp'): os.remove('temp')