def setUp(self):
    """Wait for the GeoNode instance to accept requests, then build the
    GeoServer REST catalog and the test client used by the tests.

    Polls ``/`` up to 10 times (~2 s total) so the suite does not start
    before the server is up; connection failures while polling are
    expected and ignored.
    """
    cl = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
    for _ in range(10):
        time.sleep(.2)
        try:
            cl.get_html('/', debug=False)
            break
        except Exception:
            # Server not ready yet — retry.  Was `except BaseException`,
            # which also swallowed KeyboardInterrupt/SystemExit.
            pass
    self.catalog = Catalog(
        GEOSERVER_URL + 'rest', GEOSERVER_USER, GEOSERVER_PASSWD)
    self.client = TestClient()
    # Paths registered here are removed by the matching tearDown.
    self._tempfiles = []
def setUp(self):
    """Prepare one test run: wait for GeoNode to come up, then create
    the HTTP client and GeoServer catalog and reset DB connection state.
    """
    self.wait_for_progress_cnt = 0
    # Poll the home page until the server answers (at most ~2 s).
    probe = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
    for _ in range(10):
        time.sleep(.2)
        try:
            probe.get_html('/', debug=False)
        except Exception:
            continue
        break
    # Fresh client for the actual test traffic.
    self.client = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
    self.catalog = Catalog(
        GEOSERVER_URL + 'rest',
        GEOSERVER_USER,
        GEOSERVER_PASSWD,
        retries=ogc_server_settings.MAX_RETRIES,
        backoff_factor=ogc_server_settings.BACKOFF_FACTOR
    )
    # Point Django at the integration database and make sure the
    # connection is open and non-atomic for the duration of the test.
    settings.DATABASES['default']['NAME'] = DB_NAME
    connections['default'].settings_dict['ATOMIC_REQUESTS'] = False
    connections['default'].connect()
    self._tempfiles = []
def setUp(self):
    """Wait for GeoNode to start, build the test client/catalog, and
    temporarily reconfigure settings so createlayer uses a PostGIS
    datastore (required by the layer-creation tests).
    """
    cl = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
    for _ in range(10):
        time.sleep(.2)
        try:
            cl.get_html('/', debug=False)
            break
        except Exception:
            # Server not ready yet — retry.  Was `except BaseException`,
            # which also swallowed KeyboardInterrupt/SystemExit.
            pass
    self.client = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
    self.catalog = Catalog(
        GEOSERVER_URL + 'rest', GEOSERVER_USER, GEOSERVER_PASSWD)
    self._tempfiles = []
    # createlayer must use postgis as a datastore:
    # derive a DATASTORE_URL from the default DB and register it as the
    # temporary 'datastore' database used by the OGC server settings.
    DB_HOST = settings.DATABASES['default']['HOST']
    DB_PORT = settings.DATABASES['default']['PORT']
    DB_NAME = settings.DATABASES['default']['NAME']
    DB_USER = settings.DATABASES['default']['USER']
    DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
    settings.DATASTORE_URL = 'postgis://{}:{}@{}:{}/{}'.format(
        DB_USER, DB_PASSWORD, DB_HOST, DB_PORT, DB_NAME)
    postgis_db = dj_database_url.parse(
        settings.DATASTORE_URL, conn_max_age=600)
    settings.DATABASES['datastore'] = postgis_db
    settings.OGC_SERVER['default']['DATASTORE'] = 'datastore'
def setUp(self):
    """Wait for GeoNode to start, build the test client and GeoServer
    catalog, and (re)open the Django connection against the
    integration database.
    """
    cl = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
    for _ in range(10):
        time.sleep(.2)
        try:
            cl.get_html('/', debug=False)
            break
        except Exception:
            # Server not ready yet — retry.  Was `except BaseException`,
            # which also swallowed KeyboardInterrupt/SystemExit.
            pass
    self.client = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
    self.catalog = Catalog(
        GEOSERVER_URL + 'rest', GEOSERVER_USER, GEOSERVER_PASSWD)
    settings.DATABASES['default']['NAME'] = DB_NAME
    connections['default'].settings_dict['ATOMIC_REQUESTS'] = False
    connections['default'].connect()
    self._tempfiles = []
class UploaderBase(GeoNodeBaseTestSupport):
    """Shared machinery for upload integration tests: drives the
    multi-step uploader UI against a live GeoNode/GeoServer and checks
    the resulting layer in Django, GeoServer REST and WMS capabilities.
    """

    type = 'layer'

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        # Remove the settings file the integration run drops in CWD.
        if os.path.exists('integration_settings.py'):
            os.unlink('integration_settings.py')

    def setUp(self):
        # Wait for the server to accept requests (up to ~2 s).
        self.wait_for_progress_cnt = 0
        cl = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        for _ in range(10):
            time.sleep(.2)
            try:
                cl.get_html('/', debug=False)
                break
            except Exception:
                pass
        self.client = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        self.catalog = Catalog(
            GEOSERVER_URL + 'rest',
            GEOSERVER_USER,
            GEOSERVER_PASSWD,
            retries=ogc_server_settings.MAX_RETRIES,
            backoff_factor=ogc_server_settings.BACKOFF_FACTOR
        )
        settings.DATABASES['default']['NAME'] = DB_NAME
        connections['default'].settings_dict['ATOMIC_REQUESTS'] = False
        connections['default'].connect()
        self._tempfiles = []

    def _post_teardown(self):
        # Disable Django's default post-test DB teardown; cleanup is
        # done explicitly in tearDown against the live database.
        pass

    def tearDown(self):
        self.wait_for_progress_cnt = 0
        connections.databases['default']['ATOMIC_REQUESTS'] = False
        for temp_file in self._tempfiles:
            os.unlink(temp_file)
        # Cleanup: wipe everything the uploads created; best effort.
        try:
            with transaction.atomic():
                Upload.objects.all().delete()
                Layer.objects.all().delete()
                Map.objects.all().delete()
                Document.objects.all().delete()
        except Exception as e:
            logger.error(e)
        if settings.OGC_SERVER['default'].get(
                "GEOFENCE_SECURITY_ENABLED", False):
            from geonode.security.utils import purge_geofence_all
            purge_geofence_all()

    def check_layer_geonode_page(self, path):
        """Check that the final layer page renders correctly after a
        layer is uploaded."""
        # The final url of the uploader process redirects to the
        # layer page in GeoNode.
        resp, _ = self.client.get_html(path)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('content-type' in resp.headers)

    def check_layer_geoserver_caps(self, type_name):
        """Check that a layer shows up in GeoServer's GetCapabilities
        document (via owslib)."""
        wms = get_wms(
            type_name=type_name,
            username=GEOSERVER_USER,
            password=GEOSERVER_PASSWD)
        ws, layer_name = type_name.split(':')
        self.assertTrue(layer_name in wms.contents,
                        '%s is not in %s' % (layer_name, wms.contents))

    def check_layer_geoserver_rest(self, layer_name):
        """Check that a layer shows up in the GeoServer REST API after
        the uploader is done (via gsconfig)."""
        layer = self.catalog.get_layer(layer_name)
        self.assertIsNotNone(layer)

    def check_and_pass_through_timestep(self, redirect_to):
        """Submit the 'time' step of the upload wizard (or accept a
        direct jump to 'srs') and return (response, parsed json)."""
        time_step = upload_step('time')
        srs_step = upload_step('srs')
        if srs_step in redirect_to:
            resp = self.client.make_request(redirect_to)
        else:
            self.assertTrue(time_step in redirect_to)
        resp = self.client.make_request(redirect_to)
        token = self.client.get_csrf_token(True)
        self.assertEqual(resp.status_code, 200)
        resp = self.client.make_request(
            redirect_to, {'csrfmiddlewaretoken': token}, ajax=True)
        return resp, resp.json()

    def complete_raster_upload(self, file_path, resp, data):
        """Raster variant of complete_upload."""
        return self.complete_upload(file_path, resp, data, is_raster=True)

    def check_save_step(self, resp, data):
        """Verify the initial save step."""
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(isinstance(data, dict))
        # make sure the upload returns a success True key
        self.assertTrue(data['success'], 'expected success but got %s' % data)
        self.assertTrue('redirect_to' in data)

    def complete_upload(self, file_path, resp, data, is_raster=False):
        """Method to check if a layer was correctly uploaded to GeoNode.

        arguments: file path, the django http response

        Checks to see if a layer is configured in Django,
        checks to see if a layer is configured in GeoServer
        (the REST API and the GetCapabilities document).
        """
        layer_name, ext = os.path.splitext(os.path.basename(file_path))
        if not isinstance(data, string_types):
            self.check_save_step(resp, data)
            layer_page = self.finish_upload(
                data['redirect_to'], layer_name, is_raster)
            self.check_layer_complete(layer_page, layer_name)

    def finish_upload(
            self, current_step, layer_name, is_raster=False, skip_srs=False):
        """Drive the remaining wizard steps and return the layer page
        URL (or the last step URL if no JSON 'url' is returned)."""
        if not is_raster and _ALLOW_TIME_STEP:
            resp, data = self.check_and_pass_through_timestep(current_step)
            self.assertEqual(resp.status_code, 200)
            if not isinstance(data, string_types):
                if data['success']:
                    self.assertTrue(
                        data['success'],
                        'expected success but got %s' % data)
                    self.assertTrue('redirect_to' in data)
                    current_step = data['redirect_to']
                    self.wait_for_progress(data.get('progress'))
        if not is_raster and not skip_srs:
            self.assertTrue(upload_step('srs') in current_step)
            # if all is good, the srs step will redirect to the final page
            final_step = current_step.replace('srs', 'final')
            resp = self.client.make_request(final_step)
        else:
            self.assertTrue(
                urlsplit(upload_step('final')).path in current_step,
                f"current_step: {current_step} - upload_step('final'): {upload_step('final')}"
            )
            resp = self.client.get(current_step)
        self.assertEqual(resp.status_code, 200)
        try:
            c = resp.json()
            url = unquote(c['url'])
            # and the final page should redirect to the layer page
            # @todo - make the check match completely (endswith at least)
            # currently working around potential 'orphaned' db tables
            self.assertTrue(
                layer_name in url,
                'expected %s in URL, got %s' % (layer_name, url))
            return url
        except Exception:
            return current_step

    def check_upload_model(self, original_name):
        """Verify an Upload row exists and warn if it never completed."""
        # we can only test this if we're using the same DB as the test
        # instance
        if not settings.OGC_SERVER['default']['DATASTORE']:
            return
        upload = None
        try:
            upload = Upload.objects.filter(
                name__icontains=str(original_name)).last()
            # Making sure the Upload object is present on the DB and
            # the import session is COMPLETE
            if upload and not upload.complete:
                logger.warning(
                    "Upload not complete for Layer %s" % original_name)
        except Upload.DoesNotExist:
            # NOTE(review): filter().last() never raises DoesNotExist,
            # so this branch is effectively unreachable; kept for safety.
            self.fail('expected to find Upload object for %s' % original_name)

    def check_layer_complete(self, layer_page, original_name):
        '''check everything to verify the layer is complete'''
        self.check_layer_geonode_page(layer_page)
        # @todo use the original_name
        # currently working around potential 'orphaned' db tables
        # this grabs the name from the url (it might contain a 0)
        type_name = os.path.basename(layer_page)
        layer_name = original_name
        try:
            layer_name = type_name.split(':')[1]
        except Exception:
            pass
        # work around acl caching on geoserver side of things
        caps_found = False
        for _ in range(10):
            time.sleep(.5)
            try:
                self.check_layer_geoserver_caps(type_name)
                self.check_layer_geoserver_rest(layer_name)
                caps_found = True
                # FIX: stop retrying once the checks pass; previously the
                # loop always ran all 10 iterations (5 s of sleeps).
                break
            except Exception:
                pass
        if not caps_found:
            logger.warning(
                "Could not recognize Layer %s on GeoServer WMS Capa"
                % original_name)
        self.check_upload_model(layer_name)

    def check_invalid_projection(self, layer_name, resp, data):
        """Make sure we got the correct response for a layer
        that can't be uploaded (bad projection)."""
        # FIX: was assertTrue(resp.status_code, 200), which used 200 as
        # the failure *message* and never checked the status code.
        self.assertEqual(resp.status_code, 200)
        if not isinstance(data, string_types):
            self.assertTrue(data['success'])
            srs_step = upload_step("srs")
            if "srs" in data['redirect_to']:
                self.assertTrue(srs_step in data['redirect_to'])
                resp, soup = self.client.get_html(data['redirect_to'])
                # grab an h2 and find the name there as part of a
                # message saying it's bad
                h2 = soup.find_all(['h2'])[0]
                # FIX: str.find() returns -1 (truthy) on a miss and 0
                # (falsy) at position 0, so assertTrue(find(...)) was
                # wrong in both directions.
                self.assertIn(layer_name, str(h2))

    def check_upload_complete(self, layer_name, resp, data):
        """Make sure we got the correct response for a completed
        upload."""
        # FIX: was assertTrue(resp.status_code, 200) — see above.
        self.assertEqual(resp.status_code, 200)
        if not isinstance(data, string_types):
            self.assertTrue(data['success'])
            final_step = upload_step("final")
            if "final" in data['redirect_to']:
                self.assertTrue(final_step in data['redirect_to'])

    def upload_folder_of_files(self, folder, final_check, session_ids=None):
        """Upload every main file (.tif/.shp/.zip/.asc) in *folder*,
        collecting upload-session ids into *session_ids* if given, and
        run *final_check(base, resp, data)* for each."""
        mains = ('.tif', '.shp', '.zip', '.asc')

        def is_main(_file):
            _, ext = os.path.splitext(_file)
            return (ext.lower() in mains)

        for main in filter(is_main, os.listdir(folder)):
            # get the abs path to the file
            _file = os.path.join(folder, main)
            base, _ = os.path.splitext(_file)
            resp, data = self.client.upload_file(_file)
            if session_ids is not None:
                if not isinstance(data, string_types) and data.get('url'):
                    session_id = re.search(
                        r'.*id=(\d+)', data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
            if not isinstance(data, string_types):
                self.wait_for_progress(data.get('progress'))
            final_check(base, resp, data)

    def upload_file(self, fname, final_check,
                    check_name=None, session_ids=None):
        """Upload a single file and run *final_check* on the result."""
        if not check_name:
            check_name, _ = os.path.splitext(fname)
        resp, data = self.client.upload_file(fname)
        if session_ids is not None:
            if not isinstance(data, string_types):
                if data.get('url'):
                    session_id = re.search(
                        r'.*id=(\d+)', data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
        if not isinstance(data, string_types):
            self.wait_for_progress(data.get('progress'))
        final_check(check_name, resp, data)

    def wait_for_progress(self, progress_url):
        """Poll *progress_url* while the state is RUNNING, recursing at
        most 100 times (wait_for_progress_cnt bounds the recursion)."""
        try:
            if progress_url:
                resp = self.client.get(progress_url)
                json_data = resp.json()
                # "COMPLETE" state means done
                if json_data and json_data.get('state', '') == 'RUNNING' and \
                        self.wait_for_progress_cnt < 100:
                    self.wait_for_progress_cnt += 1
                    self.wait_for_progress(progress_url)
                else:
                    self.wait_for_progress_cnt = 0
            else:
                self.wait_for_progress_cnt = 0
        except Exception:
            self.wait_for_progress_cnt = 0

    def temp_file(self, ext):
        """Create a temp file registered for tearDown cleanup; returns
        (os-level fd, absolute path).  Caller owns the fd."""
        fd, abspath = tempfile.mkstemp(ext)
        self._tempfiles.append(abspath)
        return fd, abspath

    def make_csv(self, fieldnames, *rows):
        """Write *rows* (dicts) to a temp CSV with a *fieldnames*
        header; returns the file path."""
        fd, abspath = self.temp_file('.csv')
        # FIX: close the fd from mkstemp — it was previously leaked
        # because the file is reopened by path below.
        os.close(fd)
        with open(abspath, 'w', newline='') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
            writer.writeheader()
            for r in rows:
                writer.writerow(r)
        return abspath
class UploaderBase(GeoNodeBaseTestSupport):
    """Legacy base class for upload integration tests: drives the
    uploader wizard against a live GeoNode/GeoServer and verifies the
    resulting layer via the GeoServer REST API and WMS capabilities.
    """

    settings_overrides = []

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        # Remove the settings file the integration run drops in CWD.
        if os.path.exists('integration_settings.py'):
            os.unlink('integration_settings.py')

    def setUp(self):
        # Wait for the server to accept requests (up to ~2 s).
        cl = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        for _ in range(10):
            time.sleep(.2)
            try:
                cl.get_html('/', debug=False)
                break
            except Exception:
                # FIX: was `except BaseException`, which also swallowed
                # KeyboardInterrupt/SystemExit.
                pass
        self.client = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        self.catalog = Catalog(
            GEOSERVER_URL + 'rest', GEOSERVER_USER, GEOSERVER_PASSWD)
        self._tempfiles = []
        # createlayer must use postgis as a datastore
        # set temporary settings to use a postgis datastore
        DB_HOST = settings.DATABASES['default']['HOST']
        DB_PORT = settings.DATABASES['default']['PORT']
        DB_NAME = settings.DATABASES['default']['NAME']
        DB_USER = settings.DATABASES['default']['USER']
        DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
        settings.DATASTORE_URL = 'postgis://{}:{}@{}:{}/{}'.format(
            DB_USER, DB_PASSWORD, DB_HOST, DB_PORT, DB_NAME)
        postgis_db = dj_database_url.parse(
            settings.DATASTORE_URL, conn_max_age=600)
        settings.DATABASES['datastore'] = postgis_db
        settings.OGC_SERVER['default']['DATASTORE'] = 'datastore'

    def tearDown(self):
        # FIX: was `map(os.unlink, self._tempfiles)` — on Python 3 map
        # is lazy, so the temp files were never actually deleted.
        for temp_file in self._tempfiles:
            os.unlink(temp_file)
        # move to original settings
        settings.OGC_SERVER['default']['DATASTORE'] = ''
        del settings.DATABASES['datastore']
        # Cleanup
        Layer.objects.all().delete()
        Map.objects.all().delete()
        Document.objects.all().delete()

    def check_layer_geonode_page(self, path):
        """Check that the final layer page renders correctly after a
        layer is uploaded."""
        # the final url of the uploader process redirects to the
        # layer page in GeoNode
        resp, _ = self.client.get_html(path)
        self.assertEqual(resp.code, 200)
        self.assertTrue('content-type' in resp.headers)

    def check_layer_geoserver_caps(self, type_name):
        """Check that a layer shows up in GeoServer's GetCapabilities
        document (via owslib)."""
        wms = get_wms(type_name=type_name, username=GEOSERVER_USER,
                      password=GEOSERVER_PASSWD)
        ws, layer_name = type_name.split(':')
        self.assertTrue(layer_name in wms.contents,
                        '%s is not in %s' % (layer_name, wms.contents))

    def check_layer_geoserver_rest(self, layer_name):
        """Check that a layer shows up in the GeoServer REST API after
        the uploader is done (via gsconfig)."""
        layer = self.catalog.get_layer(layer_name)
        # FIX: was assertIsNotNone(layer is not None) — the comparison
        # always yields a bool, so the assertion could never fail.
        self.assertIsNotNone(layer)

    def check_and_pass_through_timestep(self, redirect_to):
        """Submit the 'time' step of the upload wizard (or accept a
        direct jump to 'srs') and return (response, parsed json)."""
        time_step = upload_step('time')
        srs_step = upload_step('srs')
        if srs_step in redirect_to:
            resp = self.client.make_request(redirect_to)
        else:
            self.assertTrue(time_step in redirect_to)
        resp = self.client.make_request(redirect_to)
        token = self.client.get_csrf_token(True)
        self.assertEqual(resp.code, 200)
        resp = self.client.make_request(
            redirect_to, {'csrfmiddlewaretoken': token}, ajax=True)
        data = json.loads(resp.read())
        return resp, data

    def complete_raster_upload(self, file_path, resp, data):
        """Raster variant of complete_upload."""
        return self.complete_upload(file_path, resp, data, is_raster=True)

    def check_save_step(self, resp, data):
        """Verify the initial save step."""
        self.assertEqual(resp.code, 200)
        self.assertTrue(isinstance(data, dict))
        # make sure the upload returns a success True key
        self.assertTrue(data['success'], 'expected success but got %s' % data)
        self.assertTrue('redirect_to' in data)

    def complete_upload(self, file_path, resp, data, is_raster=False):
        """Method to check if a layer was correctly uploaded to GeoNode.

        arguments: file path, the django http response

        Checks to see if a layer is configured in Django,
        checks to see if a layer is configured in GeoServer
        (the REST API and the GetCapabilities document).
        """
        layer_name, ext = os.path.splitext(os.path.basename(file_path))
        if not isinstance(data, basestring):
            self.check_save_step(resp, data)
            layer_page = self.finish_upload(
                data['redirect_to'], layer_name, is_raster)
            self.check_layer_complete(layer_page, layer_name)

    def finish_upload(self, current_step, layer_name,
                      is_raster=False, skip_srs=False):
        """Drive the remaining wizard steps and return the layer page
        URL (or the last step URL if no JSON 'url' is returned)."""
        if not is_raster and _ALLOW_TIME_STEP:
            resp, data = self.check_and_pass_through_timestep(current_step)
            self.assertEqual(resp.code, 200)
            if not isinstance(data, basestring):
                if data['success']:
                    self.assertTrue(
                        data['success'],
                        'expected success but got %s' % data)
                    self.assertTrue('redirect_to' in data)
                    current_step = data['redirect_to']
                    self.wait_for_progress(data.get('progress'))
        if not is_raster and not skip_srs:
            self.assertTrue(upload_step('srs') in current_step)
            # if all is good, the srs step will redirect to the final page
            resp = self.client.get(current_step)
            content = json.loads(resp.read())
            if not content.get('url') and content.get(
                    'redirect_to', current_step) == upload_step('final'):
                resp = self.client.get(content.get('redirect_to'))
        else:
            self.assertTrue(upload_step('final') in current_step)
            resp = self.client.get(current_step)
        self.assertEqual(resp.code, 200)
        resp_js = resp.read()
        try:
            c = json.loads(resp_js)
            url = c['url']
            url = urllib.unquote(url)
            # and the final page should redirect to the layer page
            # @todo - make the check match completely (endswith at least)
            # currently working around potential 'orphaned' db tables
            self.assertTrue(
                layer_name in url,
                'expected %s in URL, got %s' % (layer_name, url))
            return url
        except Exception:
            return current_step

    def check_upload_model(self, original_name):
        """Verify an Upload row exists and has completed."""
        # we can only test this if we're using the same DB as the test
        # instance
        if not settings.OGC_SERVER['default']['DATASTORE']:
            return
        upload = None
        try:
            # AF: TODO Headhakes here... nose is not accessing to the
            # test db!!!
            uploads = Upload.objects.all()
            if uploads:
                upload = Upload.objects.filter(
                    name=str(original_name)).last()
        except Upload.DoesNotExist:
            self.fail('expected to find Upload object for %s' % original_name)
        # AF: TODO Headhakes here... nose is not accessing to the test db!!!
        if upload:
            self.assertTrue(upload.complete)

    def check_layer_complete(self, layer_page, original_name):
        '''check everything to verify the layer is complete'''
        self.check_layer_geonode_page(layer_page)
        # @todo use the original_name
        # currently working around potential 'orphaned' db tables
        # this grabs the name from the url (it might contain a 0)
        type_name = os.path.basename(layer_page)
        layer_name = original_name
        try:
            layer_name = type_name.split(':')[1]
        except Exception:
            pass
        # work around acl caching on geoserver side of things
        caps_found = False
        for _ in range(10):
            time.sleep(.5)
            try:
                self.check_layer_geoserver_caps(type_name)
                caps_found = True
                # FIX: stop retrying once the check passes; previously
                # the loop always ran all 10 iterations (5 s of sleeps).
                break
            except Exception:
                pass
        if caps_found:
            self.check_layer_geoserver_rest(layer_name)
            self.check_upload_model(layer_name)
        else:
            logger.warning(
                "Could not recognize Layer %s on GeoServer WMS"
                % original_name)

    def check_invalid_projection(self, layer_name, resp, data):
        """Make sure we got the correct response for a layer
        that can't be uploaded (bad projection)."""
        # FIX: was assertTrue(resp.code, 200), which used 200 as the
        # failure *message* and never checked the status code.
        self.assertEqual(resp.code, 200)
        if not isinstance(data, basestring):
            self.assertTrue(data['success'])
            self.assertTrue(upload_step("srs") in data['redirect_to'])
            resp, soup = self.client.get_html(data['redirect_to'])
            # grab an h2 and find the name there as part of a message
            # saying it's bad
            h2 = soup.find_all(['h2'])[0]
            # FIX: str.find() returns -1 (truthy) on a miss and 0
            # (falsy) at position 0, so assertTrue(find(...)) was
            # wrong in both directions.
            self.assertIn(layer_name, str(h2))

    def upload_folder_of_files(self, folder, final_check, session_ids=None):
        """Upload every main file (.tif/.shp/.zip) in *folder*,
        collecting upload-session ids into *session_ids* if given, and
        run *final_check(base, resp, data)* for each."""
        mains = ('.tif', '.shp', '.zip')

        def is_main(_file):
            _, ext = os.path.splitext(_file)
            return (ext.lower() in mains)

        main_files = filter(is_main, os.listdir(folder))
        for main in main_files:
            # get the abs path to the file
            _file = os.path.join(folder, main)
            base, _ = os.path.splitext(_file)
            resp, data = self.client.upload_file(_file)
            if session_ids is not None:
                if not isinstance(data, basestring) and data.get('url'):
                    session_id = re.search(
                        r'.*id=(\d+)', data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
            if not isinstance(data, basestring):
                self.wait_for_progress(data.get('progress'))
            final_check(base, resp, data)

    def upload_file(self, fname, final_check,
                    check_name=None, session_ids=None):
        """Upload a single file and run *final_check* on the result."""
        if not check_name:
            check_name, _ = os.path.splitext(fname)
        resp, data = self.client.upload_file(fname)
        if session_ids is not None:
            if not isinstance(data, basestring):
                if data.get('url'):
                    session_id = re.search(
                        r'.*id=(\d+)', data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
        if not isinstance(data, basestring):
            self.wait_for_progress(data.get('progress'))
        final_check(check_name, resp, data)

    def wait_for_progress(self, progress_url):
        """Poll *progress_url* every 0.1 s while the state is RUNNING.

        NOTE(review): recursion here is unbounded — a session stuck in
        RUNNING will eventually overflow the stack; the newer variant
        of this class caps it with a counter.
        """
        if progress_url:
            resp = self.client.get(progress_url)
            assert resp.getcode() == 200, 'Invalid progress status code'
            raw_data = resp.read()
            json_data = json.loads(raw_data)
            # "COMPLETE" state means done
            if json_data.get('state', '') == 'RUNNING':
                time.sleep(0.1)
                self.wait_for_progress(progress_url)

    def temp_file(self, ext):
        """Create a temp file registered for tearDown cleanup; returns
        (os-level fd, absolute path).  Caller owns the fd."""
        fd, abspath = tempfile.mkstemp(ext)
        self._tempfiles.append(abspath)
        return fd, abspath

    def make_csv(self, *rows):
        """Write *rows* (sequences) to a temp CSV; returns its path."""
        fd, abspath = self.temp_file('.csv')
        # FIX: use a context manager so the fd is closed even if a
        # writerow raises (it was previously leaked on error).
        with os.fdopen(fd, 'wb') as fp:
            out = csv.writer(fp)
            for r in rows:
                out.writerow(r)
        return abspath