def get_client(self, url: str, user: str = None, password: str = None, certificate: str = None,
               verify: Union[str, bool] = True) -> EventKitClient:
    tries = 3
    client = None
    while tries:
        try:
            client = EventKitClient(url.rstrip("/"), username=user, password=password,
                                    certificate=certificate, verify=verify)
            break
        except Exception as e:
            tries -= 1
            logger.info("Failed to login.")
            logger.info(e)
            logger.info("{} attempts remaining.".format(tries))
            sleep(1)
    if not client:
        raise Exception(
            f"Could not login to the url: {url} using username:{user} or certificate:{certificate}"
        )
    return client
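# Hedged usage sketch: get_client() above takes self, so it is assumed to live on some helper
# object ("helper" below is hypothetical); only parameters from the signature above are used.
#
#     client = helper.get_client(
#         "http://cloud.eventkit.test/",  # trailing slash is stripped before login
#         user="admin",                   # alternatively pass certificate=... and omit user/password
#         password="***",
#         verify=False,                   # disable SSL verification, e.g. for a local test instance
#     )
#     providers = client.get_providers()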
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'url',
        help='The EventKit instance base url (i.e. http://cloud.eventkit.test).')
    parser.add_argument('file', help='A geojson file to be used for location data.')
    parser.add_argument(
        '--name',
        default='name',
        help='The field to be used for the name of the location which will be the datapack name.')
    parser.add_argument(
        '--description',
        default='description',
        help='The field to be used for the description of the location which will be the datapack description.')
    parser.add_argument(
        '--project',
        default='project',
        help='The project name, will be the same for all datapacks (not based on file).')
    parser.add_argument(
        '-s',
        '--sources',
        nargs='+',
        default='',
        help='The slugs of sources to check, if not included all visible sources are checked.')
    parser.add_argument(
        '--levels',
        nargs='+',
        default='1 10',
        help='The levels to seed (i.e. 1 10 would seed from levels 1-10).')
    parser.add_argument('--limit', type=int, default=0, help='The max number of jobs to create.')
    parser.add_argument(
        '--start',
        type=int,
        default=0,
        help='The index (0-based) of the first geojson feature to use to create a datapack')
    parser.add_argument(
        '--verify',
        default='true',
        help='True to enable ssl verification, false to disable ssl verification')
    parser.add_argument(
        '--certificate',
        default='',
        help='The path to a certificate to use for authentication')
    args = parser.parse_args()

    user = password = None
    certificate = args.certificate
    if not certificate:
        user = os.getenv('EVENTKIT_USER')
        if not user:
            user = input("EventKit Username: ")
        # The password handling was masked in the original source; prompting via getpass is assumed.
        password = getpass.getpass("EventKit Password: ")

    # NOTE: the SSL-verify parsing, client construction, and the start of the provider_tasks
    # block were masked in the original source; they are reconstructed here to match the other
    # EventKit scripts in this collection.
    verify = args.verify.lower() != 'false'
    client = EventKitClient(args.url.rstrip('/'), username=user, password=password,
                            certificate=certificate, verify=verify)

    if args.sources:
        provider_tasks = []
        for provider in client.get_providers():
            if provider.get('slug') in args.sources:
                provider_tasks += [{
                    "provider": provider.get('slug'),
                    "formats": ["gpkg"],
                    "min_zoom": args.levels[0],
                    "max_zoom": args.levels[1]
                }]
    else:
        provider_tasks = [{
            "provider": provider.get('slug'),
            "formats": ["gpkg"],
            "min_zoom": 1,
            "max_zoom": 1
        } for provider in client.get_providers()]

    with open(args.file, 'rb') as geojson_file:
        geojson_data = json.load(geojson_file)

    count = args.limit or len(geojson_data['features'])  # number of jobs to submit
    index = args.start  # current feature
    # Stop when count gets to zero (user gets desired number of jobs)
    # or we run out of features to create jobs for.
    while count and (index < len(geojson_data['features'])):
        feature = geojson_data['features'][index]
        # Iterate index independently of the count because we might skip some jobs which would iterate the
        # features but not change the number of jobs submitted.
        index += 1
        name = feature['properties'].get(args.name)
        description = feature['properties'].get(args.description) or "Created using the seed_data script."
        project = feature['properties'].get(args.project) or "seed"
        if name in [run['job']['name'] for run in client.search_runs(search_term=name)]:
            print("Skipping {0} because data already exists in a DataPack with the same name.".format(name))
            continue
        if not name:
            print("Skipping: \n {0} \n"
                  "because a valid name wasn't provided or found.".format(feature))
            continue
        response = client.create_job(name=name,
                                     description=description,
                                     project=project,
                                     provider_tasks=provider_tasks,
                                     selection=feature)
        if response:
            print('Submitted job for {0}'.format(name))
            count -= 1
        else:
            print('Failed to submit job for {0}'.format(name))
            print(response)
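# For reference, a minimal GeoJSON input that the seed script above can consume
# (a sketch, not taken from the original repo: the property names follow the
# --name/--description/--project defaults, and the geometry is an arbitrary small polygon).
EXAMPLE_SEED_GEOJSON = {
    "type": "FeatureCollection",
    "features": [{
        "type": "Feature",
        "properties": {
            "name": "Example Area",                # becomes the DataPack name
            "description": "Seeded example area",  # optional; a default string is used if missing
            "project": "seed"                      # optional; defaults to "seed"
        },
        "geometry": {
            "type": "Polygon",
            "coordinates": [[[31.128165, 29.971509], [31.128521, 29.971509],
                             [31.128521, 29.971804], [31.128165, 29.971804],
                             [31.128165, 29.971509]]]
        }
    }]
}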
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'url',
        help='The EventKit instance base url (i.e. http://cloud.eventkit.test).')
    parser.add_argument(
        '-s',
        '--sources',
        nargs='+',
        default='',
        help='The slugs of sources to check, if not included all visible sources are checked.')
    parser.add_argument(
        '--verify',
        default='',
        help='True to enable ssl verification, false to disable ssl verification')
    parser.add_argument(
        '--certificate',
        default='',
        help='The path to a certificate to use for authentication')
    parser.add_argument(
        '--full',
        default='',
        help='If set, submit and then delete a small test DataPack instead of only running provider status checks.')
    args = parser.parse_args()

    user = password = None
    certificate = args.certificate
    if not certificate:
        user = os.getenv('EVENTKIT_USER')
        if not user:
            user = input("EventKit Username: ")
        # The password handling was masked in the original source; prompting via getpass is assumed.
        password = getpass.getpass("EventKit Password: ")

    # NOTE: the SSL-verify parsing and the --full flag handling were also masked; the lines
    # below are reconstructed from how those flags are declared and used later in this script.
    verify = args.verify.lower() != 'false'
    full_test = bool(args.full)

    client = None
    tries = 3
    while tries:
        print("Logging in...")
        try:
            client = EventKitClient(args.url.rstrip('/'),
                                    username=user,
                                    password=password,
                                    certificate=certificate,
                                    verify=verify)
            break
        except Exception as e:
            tries -= 1
            print("Failed to login.")
            print(e)
            print("{} attempts remaining.".format(tries))
            time.sleep(1)
    if not client:
        raise Exception("Could not login to the url: {} using username:{} or certificate:{}".format(
            args.url, user, certificate))

    providers = client.get_providers()
    if args.sources:
        print(f"Selecting {args.sources} from: ")
        print([provider.get('slug') for provider in providers])
        providers = [provider for provider in providers if provider.get('slug') in args.sources]

    if full_test:
        provider_tasks = []
        for provider in providers:
            if provider.get('display'):
                level = provider.get('level_from')
                # Check if level is 0 because the job api won't recognize it as a value.
                if not level:
                    level = 1
                provider_tasks += [{
                    "provider": provider.get('slug'),
                    "formats": ["gpkg"],
                    "min_zoom": level,
                    "max_zoom": level
                }]
        feature = {
            "type": "FeatureCollection",
            "features": [{
                "type": "Feature",
                "properties": {},
                "geometry": {
                    "type": "Polygon",
                    "coordinates": [[[31.128165, 29.971509], [31.128521, 29.971509],
                                     [31.128521, 29.971804], [31.128165, 29.971804],
                                     [31.128165, 29.971509]]]
                }
            }]
        }
        name = "System Check"
        description = "This is a small periodic check to ensure the application is healthy."
        project = "System"
        print("Submitting job with provider_tasks: {}".format(provider_tasks))
        response = client.create_job(name=name,
                                     description=description,
                                     project=project,
                                     provider_tasks=provider_tasks,
                                     selection=feature)
        print("Successfully submitted the job.")
        job_uid = response.get('uid')
        run_uid = client.get_runs({"job_uid": job_uid})[0].get('uid')
        print("Waiting for run {} to finish...".format(run_uid))
        client.wait_for_run(run_uid)
        print("Run {} successfully finished.".format(run_uid))
        print("Attempting to delete the run {}.".format(run_uid))
        run = client.get_runs({"job_uid": job_uid})
        attempts = 3
        while run and attempts:
            client.delete_run(run_uid)
            run = client.get_runs({"job_uid": job_uid})[0]
            if run['deleted']:
                break
            else:
                attempts -= 1
        if not run['deleted']:
            raise Exception("Failed to delete the run {}.".format(run_uid))
        print("Successfully deleted the run {}.".format(run_uid))
    else:
        print('Running status checks...')
        bad_providers = []
        for provider in providers:
            if not provider.get('visible'):
                continue
            if not client.check_provider(provider.get('slug')):
                bad_providers += [provider.get('name')]
        if bad_providers:
            raise Exception("The following providers failed status checks: {0}".format(bad_providers))
    print("System check completed successfully.")
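# The scripts above define main() but no entry-point guard survived in this excerpt; a
# conventional one (an assumption, not taken verbatim from the original) would be:
if __name__ == "__main__":
    main()

# Example invocation (hypothetical script name; flags follow the argparse setup above):
#   python system_check.py http://cloud.eventkit.test --sources osm --verify false --full true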
class TestClient(TestCase):

    def setUp(self):
        self.mock_requests = requests_mock.Mocker()
        self.mock_requests.start()
        self.addCleanup(self.mock_requests.stop)
        self.url = 'http://example.test'
        self.username = "******"
        self.pcode = "pcode"
        cookies = {'csrftoken': 'token'}
        self.mock_requests.get("{0}/api/login/".format(self.url), status_code=200)
        self.mock_requests.post("{0}/api/login/".format(self.url), status_code=200, cookies=cookies)
        self.mock_requests.get(self.url, status_code=200, cookies=cookies)
        self.mock_requests.get("{0}/create".format(self.url), status_code=200, cookies=cookies)
        self.mock_requests.get("{0}/api/runs".format(self.url), status_code=200, cookies=cookies)
        with self.settings(SESSION_COOKIE_DOMAIN=self.url):
            self.client = EventKitClient(self.url, self.username, self.pcode)

    def test_get_providers(self):
        expected_response = {"provider": "provider_name"}
        self.mock_requests.get(self.client.providers_url, text=json.dumps(expected_response), status_code=200)
        providers = self.client.get_providers()
        self.assertEqual(expected_response, providers)
        with self.assertRaises(Exception):
            self.mock_requests.get(self.client.providers_url, text=json.dumps(expected_response), status_code=400)
            self.client.get_providers()

    def test_get_runs(self):
        request_response = {"runs": "runs"}
        expected_response = ["runs"]
        self.mock_requests.register_uri('GET', "{0}/filter".format(self.client.runs_url),
                                        [{'text': json.dumps(request_response), 'status_code': 200},
                                         {'text': '', 'status_code': 404}])
        runs = self.client.get_runs()
        self.assertEqual(expected_response, runs)
        with self.assertRaises(Exception):
            self.mock_requests.get("{0}/filter".format(self.client.runs_url),
                                   text=json.dumps(expected_response), status_code=400)
            self.client.get_runs()

    def test_run_job(self):
        expected_response = {"runs": "runs"}
        self.mock_requests.post(self.client.jobs_url, text=json.dumps(expected_response), status_code=202)
        job_response = self.client.run_job(name='Name', description='Description', project='Project')
        self.assertEqual(expected_response, job_response)
        with self.assertRaises(Exception):
            self.mock_requests.post(self.client.jobs_url, text=json.dumps(expected_response), status_code=400)
            self.client.run_job(name='Name', description='Description', project='Project')
        with self.assertRaises(Exception):
            self.mock_requests.post(self.client.jobs_url, text=json.dumps(expected_response), status_code=202)
            self.client.run_job(name=None)

    def test_parse_duration(self):
        def with_timedelta(td):
            self.assertEqual(td.seconds, parse_duration(str(td)))

        # All possible inputs to timedelta - "9 days, 3:04:05.006007"
        with_timedelta(datetime.timedelta(weeks=1, days=2, hours=3, minutes=4,
                                          seconds=5, milliseconds=6, microseconds=7))
        with_timedelta(datetime.timedelta(days=1, hours=2, minutes=3))  # No plural - "1 day, 2:03:00"
        with_timedelta(datetime.timedelta(hours=2, minutes=3, seconds=4))  # Just hms "2:03:04"

    def test_parse_size_unit(self):
        self.assertEqual(parse_size_unit('B'), 1)
        self.assertEqual(parse_size_unit('KB'), 1e3)
        self.assertEqual(parse_size_unit('MB'), 1e6)
        self.assertEqual(parse_size_unit('GB'), 1e9)
        self.assertEqual(parse_size_unit('TB'), 1e12)

    def test_parse_byte_size(self):
        self.assertAlmostEqual(256000, parse_byte_size('256 MB', 'KB'))
        self.assertAlmostEqual(256, parse_byte_size('256 MB', 'MB'))
        self.assertAlmostEqual(.256, parse_byte_size('256000 KB', 'GB'))
        self.assertAlmostEqual(.000256, parse_byte_size('256000 KB', 'TB'), places=10)
class TestClient(TestCase):

    def setUp(self):
        self.mock_requests = requests_mock.Mocker()
        self.mock_requests.start()
        self.addCleanup(self.mock_requests.stop)
        self.url = "http://example.test"
        self.username = "******"
        self.pcode = "pcode"
        cookies = {"csrftoken": "token"}
        self.mock_requests.get("{0}/api/login/".format(self.url), status_code=200)
        self.mock_requests.post("{0}/api/login/".format(self.url), status_code=200, cookies=cookies)
        self.mock_requests.get(self.url, status_code=200, cookies=cookies)
        self.mock_requests.get("{0}/create".format(self.url), status_code=200, cookies=cookies)
        self.mock_requests.get("{0}/api/providers".format(self.url), status_code=200)
        self.mock_requests.get("{0}/api/runs".format(self.url), status_code=200, cookies=cookies)
        with self.settings(SESSION_COOKIE_DOMAIN=self.url):
            self.client = EventKitClient(self.url, self.username, self.pcode)

    def test_get_providers(self):
        expected_response = {"provider": "provider_name"}
        self.mock_requests.get(self.client.providers_url, text=json.dumps(expected_response), status_code=200)
        providers = self.client.get_providers()
        self.assertEqual(expected_response, providers)
        with self.assertRaises(Exception):
            self.mock_requests.get(self.client.providers_url, text=json.dumps(expected_response), status_code=400)
            self.client.get_providers()

    def test_get_runs(self):
        request_response = {"runs": "runs"}
        expected_response = ["runs"]
        self.mock_requests.register_uri(
            "GET",
            "{0}/filter".format(self.client.runs_url),
            [{"text": json.dumps(request_response), "status_code": 200},
             {"text": "", "status_code": 404}],
        )
        runs = self.client.search_runs()
        self.assertEqual(expected_response, runs)
        with self.assertRaises(Exception):
            self.mock_requests.get("{0}/filter".format(self.client.runs_url),
                                   text=json.dumps(expected_response), status_code=400)
            self.client.search_runs()

    def test_run_job(self):
        expected_response = {"runs": "runs"}
        self.mock_requests.post(self.client.jobs_url, text=json.dumps(expected_response), status_code=202)
        job_response = self.client.create_job(name="Name", description="Description", project="Project",
                                              selection=None, provider_tasks=None)
        self.assertEqual(expected_response, job_response)
        with self.assertRaises(Exception):
            self.mock_requests.post(self.client.jobs_url, text=json.dumps(expected_response), status_code=400)
            self.client.create_job(name="Name", description="Description", project="Project",
                                   selection=None, provider_tasks=None)

    def test_delete_run(self):
        example_run_uid = uuid.uuid4()
        expected_status = 204
        url = "{}/{}".format(self.client.runs_url.rstrip("/"), example_run_uid)
        self.mock_requests.delete(url, status_code=expected_status)
        self.client.delete_run(example_run_uid)
        with self.assertRaises(Exception):
            wrong_status = 500
            self.mock_requests.delete(url, status_code=wrong_status)
            self.client.delete_run(example_run_uid)

    def test_delete_job(self):
        example_job_uid = uuid.uuid4()
        expected_status = 204
        url = "{}/{}".format(self.client.jobs_url.rstrip("/"), example_job_uid)
        self.mock_requests.delete(url, status_code=expected_status)
        self.client.delete_job(example_job_uid)
        with self.assertRaises(Exception):
            wrong_status = 500
            self.mock_requests.delete(url, status_code=wrong_status)
            self.client.delete_job(example_job_uid)

    def test_cancel_export_provider_task(self):
        example_job_uid = uuid.uuid4()
        expected_status = 200
        url = "{}/{}".format(self.client.provider_tasks_url.rstrip("/"), example_job_uid)
        self.mock_requests.patch(url, status_code=expected_status)
        self.client.cancel_provider(example_job_uid)
        with self.assertRaises(Exception):
            wrong_status = 500
            self.mock_requests.patch(url, status_code=wrong_status)
            self.client.cancel_provider(example_job_uid)

    def test_wait_for_run(self):
        example_run_uid = uuid.uuid4()
        in_progress_response = [{
            "status": "PENDING",
            "provider_tasks": [{"tasks": [{"status": "IN_PROGRESS", "errors": ""}]}]
        }]
        finished_response = [{
            "status": "COMPLETED",
            "provider_tasks": [{"tasks": [{"status": "SUCCESS", "errors": ""}]}]
        }]
        expected_errors = ["EXAMPLE", "ERROR"]
        failure_response = [{
            "status": "INCOMPLETE",
            "provider_tasks": [{"tasks": [{"status": "FAILED", "errors": expected_errors}]}]
        }]
        url = "{}/{}".format(self.client.runs_url.rstrip("/"), example_run_uid)
        self.mock_requests.register_uri(
            "GET", url, [{"text": json.dumps(finished_response), "status_code": 200}])
        response = self.client.wait_for_run(run_uid=example_run_uid, run_timeout=2)
        self.assertEqual(response.get("status"), finished_response[0].get("status"))
        # Test failed to get status
        with self.assertRaises(Exception):
            self.mock_requests.register_uri(
                "GET", url, [{"text": json.dumps(failure_response), "status_code": 404}])
            self.client.wait_for_run(run_uid=example_run_uid, run_timeout=2)
        # Test failed run
        with self.assertRaises(Exception):
            self.mock_requests.register_uri(
                "GET", url, [{"text": json.dumps(failure_response), "status_code": 200}])
            self.client.wait_for_run(run_uid=example_run_uid, run_timeout=2)
        # Test timeout.
        with self.assertRaises(Exception):
            self.mock_requests.register_uri(
                "GET", url, [{"text": json.dumps(in_progress_response), "status_code": 200}])
            self.client.wait_for_run(run_uid=example_run_uid, run_timeout=1)

    def test_check_provider(self):
        example_slug = "test"
        url = "{}/{}/status".format(self.client.providers_url, example_slug)
        success_response = '{"status": "SUCCESS"}'
        fail_response = '{"status": "ERROR"}'
        self.mock_requests.get(url, text=success_response)
        response = self.client.check_provider(example_slug)
        self.assertTrue(response)
        self.mock_requests.get(url, text=fail_response)
        response = self.client.check_provider(example_slug)
        self.assertFalse(response)

    def test_parse_duration(self):
        def with_timedelta(td):
            self.assertEqual(td.seconds, parse_duration(str(td)))

        # All possible inputs to timedelta - "9 days, 3:04:05.006007"
        with_timedelta(datetime.timedelta(weeks=1, days=2, hours=3, minutes=4,
                                          seconds=5, milliseconds=6, microseconds=7))
        with_timedelta(datetime.timedelta(days=1, hours=2, minutes=3))  # No plural - "1 day, 2:03:00"
        with_timedelta(datetime.timedelta(hours=2, minutes=3, seconds=4))  # Just hms "2:03:04"

    def test_parse_size_unit(self):
        self.assertEqual(parse_size_unit("B"), 1)
        self.assertEqual(parse_size_unit("KB"), 1e3)
        self.assertEqual(parse_size_unit("MB"), 1e6)
        self.assertEqual(parse_size_unit("GB"), 1e9)
        self.assertEqual(parse_size_unit("TB"), 1e12)

    def test_parse_byte_size(self):
        self.assertAlmostEqual(256000, parse_byte_size("256 MB", "KB"))
        self.assertAlmostEqual(256, parse_byte_size("256 MB", "MB"))
        self.assertAlmostEqual(0.256, parse_byte_size("256000 KB", "GB"))
        self.assertAlmostEqual(0.000256, parse_byte_size("256000 KB", "TB"), places=10)
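# Worked example of the helpers exercised above (values taken from the assertions):
#   parse_size_unit("MB") -> 1e6 and parse_size_unit("KB") -> 1e3, so
#   parse_byte_size("256 MB", "KB") = 256 * 1e6 / 1e3 = 256000.0
# Likewise the duration tests compare against timedelta.seconds (the sub-day component), so
# parse_duration("1 day, 2:03:00") is expected to return 7380 here rather than a total that
# includes the day.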
class BenchmarkEventkit(object):

    def __init__(self, url, username, passwd, file_name, name_field, sources):
        self.url = url
        self.file = file_name
        self.sources = sources
        self.name = name_field
        self.username = username
        self.client = EventKitClient(self.url, username, passwd)

    def run_tests(self, batch_sizes):
        times = []
        start_time = time.time()
        times += [start_time]
        print("Starting tests:")
        for batch_size in batch_sizes.split(','):
            batch_size = int(batch_size)
            print("Running test for batch size: {0}".format(batch_size))
            batch_start_time = time.time()
            run_uid = self.run_iteration(batch_size)
            self.wait_for_run(run_uid)
            self.delete_runs()
            batch_finish_time = time.time()
            print("Test for batch size: {0} finished in {1} seconds".format(
                batch_size, batch_finish_time - batch_start_time))
            times += [batch_finish_time]
        print("Tests finished in {0} seconds.".format(batch_finish_time - start_time))
        return times

    def run_iteration(self, batch_size):
        if self.sources:
            provider_tasks = []
            for provider in self.client.get_providers():
                if provider.get('slug') in self.sources:
                    provider_tasks += [{"provider": provider.get('name'), "formats": ["gpkg"]}]
        else:
            provider_tasks = [{"provider": provider.get('name'), "formats": ["gpkg"]}
                              for provider in self.client.get_providers()]

        with open(self.file, 'r') as geojson_file:
            geojson_data = json.load(geojson_file)

        count = batch_size
        index = 0
        # Submit jobs until the batch is full or we run out of features.
        while count and (index < len(geojson_data['features'])):
            feature = geojson_data['features'][index]
            index += 1
            name = feature['properties'].get(self.name)
            description = "Created using the benchmark script."
            project = "benchmark"
            runs = [run for run in self.client.search_runs(search_term=name)]
            if name in [run['job']['name'] for run in runs] and self.username in [run['user'] for run in runs]:
                print("Skipping {0} because data already exists in a DataPack with the same name.".format(name))
                continue
            if not name:
                print("Skipping: \n {0} \n"
                      "because a valid name wasn't provided or found.".format(feature))
                continue
            response = self.client.create_job(name=name,
                                              description=description,
                                              project=project,
                                              provider_tasks=provider_tasks,
                                              selection=feature)
            if response:
                print('Submitted job for {0}'.format(name))
                count -= 1
                if not count:
                    return response['uid']
            else:
                print('Failed to submit job for {0}'.format(name))
                print(response)

    def delete_runs(self):
        response = self.client.search_runs()
        while response:
            for run in response:
                if run.get('user') == self.username:
                    job_url = run['job']['url']
                    if job_url:
                        self.client.client.delete(run['job']['url'],
                                                  headers={'X-CSRFToken': self.client.csrftoken})
            response = self.client.search_runs()

    def wait_for_run(self, job_uid, run_timeout=0):
        finished = False
        response = None
        first_check = time.time()
        while not finished:
            time.sleep(1)
            response = self.client.client.get(self.client.runs_url,
                                              params={"job_uid": job_uid},
                                              headers={'X-CSRFToken': self.client.csrftoken}).json()
            status = response[0].get('status')
            if status == "COMPLETED":
                finished = True
            last_check = time.time()
            if run_timeout and last_check - first_check > run_timeout:
                raise Exception('Run timeout ({}s) exceeded'.format(run_timeout))
        return response[0]
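# A minimal driver sketch for BenchmarkEventkit (an assumption -- no driver is included in this
# excerpt). It relies only on the constructor and run_tests() defined above; the environment
# variable names, geojson path, and provider slug are hypothetical placeholders.
import os


def run_benchmark():
    benchmark = BenchmarkEventkit(
        url=os.getenv("EVENTKIT_URL", "http://cloud.eventkit.test"),
        username=os.getenv("EVENTKIT_USER"),
        passwd=os.getenv("EVENTKIT_PASS"),
        file_name="locations.geojson",  # geojson whose features supply the job selections
        name_field="name",              # feature property used as the DataPack name
        sources=["osm"],                # provider slugs to benchmark; empty means all providers
    )
    # run_tests() takes a comma-separated string of batch sizes and returns the start timestamp
    # followed by the finish timestamp of each batch.
    times = benchmark.run_tests("1,5,10")
    print(times)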